]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cp/lambda.cc
Update copyright years.
[thirdparty/gcc.git] / gcc / cp / lambda.cc
1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
6 Copyright (C) 1998-2024 Free Software Foundation, Inc.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33 #include "target.h"
34 #include "decl.h"
35
36 /* Constructor for a lambda expression. */
37
38 tree
39 build_lambda_expr (void)
40 {
41 tree lambda = make_node (LAMBDA_EXPR);
42 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
43 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
44 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
45 LAMBDA_EXPR_REGEN_INFO (lambda) = NULL_TREE;
46 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
47 return lambda;
48 }
49
/* Create the closure object for a LAMBDA_EXPR.  Returns the
   aggregate-initialized closure object, LAMBDA_EXPR itself when still
   in a template (instantiation will redo this), or error_mark_node.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  location_t loc = LAMBDA_EXPR_LOCATION (lambda_expr);
  iloc_sentinel il (loc);

  /* Turn each entry of the capture list into a constructor element
     initializing the corresponding closure field.  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      /* A multi-expression initializer collapses to a comma expression.  */
      if (TREE_CODE (val) == TREE_LIST)
	val = build_x_compound_expr_from_list (val, ELK_INIT,
					       tf_warning_or_error);

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && !TYPE_REF_P (TREE_TYPE (field)))
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  protected_set_expr_location (expr, loc);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  return expr;
}
125
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  Returns
   error_mark_node if the tag could not be created.  The class is left
   open; the caller completes it after adding members.  */

tree
begin_lambda_type (tree lambda)
{
  /* Lambda names are nearly but not quite anonymous.  */
  tree name = make_anon_name ();
  IDENTIFIER_LAMBDA_P (name) = true;

  /* Create the new RECORD_TYPE for this lambda.  */
  tree type = xref_tag (/*tag_code=*/record_type, name);
  if (type == error_mark_node)
    return error_mark_node;

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx17)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}
161
162 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
163 closure type. */
164
165 tree
166 lambda_function (tree lambda)
167 {
168 tree type;
169 if (TREE_CODE (lambda) == LAMBDA_EXPR)
170 type = LAMBDA_EXPR_CLOSURE (lambda);
171 else
172 type = lambda;
173 gcc_assert (LAMBDA_TYPE_P (type));
174 /* Don't let debug_tree cause instantiation. */
175 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
176 && !COMPLETE_OR_OPEN_TYPE_P (type))
177 return NULL_TREE;
178 lambda = lookup_member (type, call_op_identifier,
179 /*protect=*/0, /*want_type=*/false,
180 tf_warning_or_error);
181 if (lambda)
182 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
183 return lambda;
184 }
185
186 /* True if EXPR is an expression whose type can be used directly in lambda
187 capture. Not to be used for 'auto'. */
188
189 static bool
190 type_deducible_expression_p (tree expr)
191 {
192 if (!type_dependent_expression_p (expr))
193 return true;
194 if (BRACE_ENCLOSED_INITIALIZER_P (expr)
195 || TREE_CODE (expr) == EXPR_PACK_EXPANSION)
196 return false;
197 tree t = non_reference (TREE_TYPE (expr));
198 return (t && TREE_CODE (t) != TYPE_PACK_EXPANSION
199 && !WILDCARD_TYPE_P (t) && !LAMBDA_TYPE_P (t)
200 && !array_of_unknown_bound_p (t)
201 && !type_uses_auto (t));
202 }
203
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  May return a DECLTYPE_TYPE placeholder when
   the type cannot be deduced yet.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
			   bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (is_this)
    /* Capturing 'this' keeps its pointer type as-is.  */
    type = TREE_TYPE (expr);
  else if (explicit_init_p)
    {
      /* Init capture: deduce as if declaring an 'auto' variable.  */
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
	/* Add the reference now, so deduction doesn't lose
	   outermost CV qualifiers of EXPR.  */
	type = build_reference_type (type);
      if (uses_parameter_packs (expr))
	/* Stick with 'auto' even if the type could be deduced.  */;
      else
	type = do_auto_deduction (type, expr, auto_node);
    }
  else if (!type_deducible_expression_p (expr))
    {
      /* Dependent expression: defer via a DECLTYPE_TYPE that records the
	 capture kind for later resolution.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else
    {
      STRIP_ANY_LOCATION_WRAPPER (expr);

      if (!by_reference_p && is_capture_proxy (expr))
	{
	  /* When capturing by-value another capture proxy from an enclosing
	     lambda, consider the type of the corresponding field instead,
	     as the proxy may be additionally const-qualifed if the enclosing
	     lambda is non-mutable (PR94376).  */
	  gcc_assert (TREE_CODE (DECL_VALUE_EXPR (expr)) == COMPONENT_REF);
	  expr = TREE_OPERAND (DECL_VALUE_EXPR (expr), 1);
	}

      type = non_reference (unlowered_expr_type (expr));

      /* Functions can only be captured by reference.  */
      if (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE)
	type = build_reference_type (type);
    }

  return type;
}
262
263 /* Returns true iff DECL is a lambda capture proxy variable created by
264 build_capture_proxy. */
265
266 bool
267 is_capture_proxy (tree decl)
268 {
269 /* Location wrappers should be stripped or otherwise handled by the
270 caller before using this predicate. */
271 gcc_checking_assert (!location_wrapper_p (decl));
272
273 return (VAR_P (decl)
274 && DECL_HAS_VALUE_EXPR_P (decl)
275 && !DECL_ANON_UNION_VAR_P (decl)
276 && !DECL_DECOMPOSITION_P (decl)
277 && !DECL_FNAME_P (decl)
278 && !(DECL_ARTIFICIAL (decl)
279 && DECL_LANG_SPECIFIC (decl)
280 && DECL_OMP_PRIVATIZED_MEMBER (decl))
281 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
282 }
283
284 /* Returns true iff DECL is a capture proxy for a normal capture
285 (i.e. without explicit initializer). */
286
287 bool
288 is_normal_capture_proxy (tree decl)
289 {
290 if (!is_capture_proxy (decl))
291 /* It's not a capture proxy. */
292 return false;
293
294 return (DECL_LANG_SPECIFIC (decl)
295 && DECL_CAPTURED_VARIABLE (decl));
296 }
297
298 /* Returns true iff DECL is a capture proxy for a normal capture
299 of a constant variable. */
300
301 bool
302 is_constant_capture_proxy (tree decl)
303 {
304 if (is_normal_capture_proxy (decl))
305 return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
306 return false;
307 }
308
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.
   Also registers VAR as the local specialization of the captured
   variable so that lookups during instantiation find the proxy.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
	{
	  /* A proxy must capture the original variable, never another
	     proxy, and must not shadow a specialization from the same
	     function.  */
	  gcc_assert (!is_normal_capture_proxy (cap));
	  tree old = retrieve_local_specialization (cap);
	  if (old)
	    gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
	}
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  /* Index 1 is the statement list of the outermost (extra) body block.  */
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
338
339 /* We've just finished processing a lambda; if the containing scope is also
340 a lambda, insert any capture proxies that were created while processing
341 the nested lambda. */
342
343 void
344 insert_pending_capture_proxies (void)
345 {
346 tree lam;
347 vec<tree, va_gc> *proxies;
348 unsigned i;
349
350 if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
351 return;
352
353 lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
354 proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
355 for (i = 0; i < vec_safe_length (proxies); ++i)
356 {
357 tree var = (*proxies)[i];
358 insert_capture_proxy (var);
359 }
360 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
361 LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
362 }
363
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* The field type isn't known yet (dependent); wrap the reference
	 in a DECLTYPE_TYPE to be resolved at instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  /* A capture of a parameter pack yields a pack-expansion proxy type.  */
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
390
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  INIT is the capture's initializer; for a normal capture it
   is used to find the originally captured variable.  Returns the new
   proxy VAR_DECL.  */

static tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  /* A "___" or "___.<N>" field is a name-independent capture '_'.  */
  if (IDENTIFIER_POINTER (DECL_NAME (member))[2] == '_'
      && IDENTIFIER_POINTER (DECL_NAME (member))[3] == '.')
    name = get_identifier ("_");
  else
    name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !INDIRECT_TYPE_P (type))
    {
      /* *this was captured by value; the proxy is still a const pointer
	 to the copied object.  */
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_aggregate_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_aggregate_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      object = convert (type, ptr);
    }

  complete_type (type);

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      /* For a normal capture, dig the captured variable out of INIT and
	 record it so is_normal_capture_proxy can find it.  */
      if (DECL_VLA_CAPTURE_P (member))
	{
	  init = CONSTRUCTOR_ELT (init, 0)->value;
	  init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
	  init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
	}
      else
	{
	  if (PACK_EXPANSION_P (init))
	    init = PACK_EXPANSION_PATTERN (init);
	}

      if (INDIRECT_REF_P (init))
	init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);

      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      /* Chase through enclosing lambdas' proxies to the real variable.  */
      while (is_normal_capture_proxy (init))
	init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  /* Insert now if the op() body is open; otherwise queue the proxy until
     insert_pending_capture_proxies runs.  */
  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
489
/* Cached identifiers for the two fields of a VLA capture struct.  */
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  ARRAY_TYPE is the VLA's array type; the
   struct holds a pointer to its element type plus the max index.  */

static tree
vla_capture_type (tree array_type)
{
  tree type = xref_tag (record_type, make_anon_name ());
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
514
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  EXPLICIT_INIT_P is true for a
   C++14 init capture.  NAME_INDEPENDENT_CNT, when non-null, counts
   prior name-independent ('_') captures so each gets a unique field
   name.  Returns the capture proxy, NULL_TREE if the proxy is
   deferred, or error_mark_node on error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p, unsigned *name_independent_cnt)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* A pack expansion capture works on its pattern; remember to
     re-expand the field and initializer at the end.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      /* Other variably-modified types aren't supported at all.  */
      sorry ("capture of variably-modified type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      return error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
					by_reference_p);
      if (type == error_mark_node)
	return error_mark_node;

      if (id == this_identifier && !by_reference_p)
	{
	  /* Capturing '*this' by value: the field holds the object,
	     not the pointer.  */
	  gcc_assert (INDIRECT_TYPE_P (type));
	  type = TREE_TYPE (type);
	  initializer = cp_build_fold_indirect_ref (initializer);
	}

      if (dependent_type_p (type))
	;
      else if (id != this_identifier && by_reference_p)
	{
	  if (!lvalue_p (initializer))
	    {
	      error ("cannot capture %qE by reference", initializer);
	      return error_mark_node;
	    }
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	  else if (!verify_type_context (input_location,
					 TCTX_CAPTURE_BY_COPY, type))
	    return error_mark_node;
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  if (id_equal (id, "_") && name_independent_cnt)
    {
      if (*name_independent_cnt == 0)
	name = get_identifier ("___");
      else
	{
	  /* For 2nd and later name-independent capture use
	     unique names.  */
	  char buf2[5 + (HOST_BITS_PER_INT + 2) / 3];
	  sprintf (buf2, "___.%u", *name_independent_cnt);
	  name = get_identifier (buf2);
	}
      name_independent_cnt[0]++;
    }
  else
    {
      buf = XALLOCAVEC (char, IDENTIFIER_LENGTH (id) + 3);
      buf[1] = buf[0] = '_';
      memcpy (buf + 2, IDENTIFIER_POINTER (id),
	      IDENTIFIER_LENGTH (id) + 1);
      name = get_identifier (buf);
    }

  if (variadic)
    {
      type = make_pack_expansion (type);
      if (explicit_init_p)
	/* With an explicit initializer 'type' is auto, which isn't really a
	   parameter pack in this context.  We will want as many fields as we
	   have elements in the expansion of the initializer, so use its packs
	   instead.  */
	{
	  PACK_EXPANSION_PARAMETER_PACKS (type)
	    = uses_parameter_packs (initializer);
	  PACK_EXPANSION_AUTO_P (type) = true;
	}
    }

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
	internal_error ("trying to capture %qD in instantiation of "
			"generic lambda", id);
      finish_member_declaration (member);
    }

  tree listmem = member;
  if (variadic)
    {
      /* Re-wrap the field and restore the original pack-expansion
	 initializer for the capture list.  */
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
  return NULL_TREE;
}
695
696 /* Register all the capture members on the list CAPTURES, which is the
697 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
698
699 void
700 register_capture_members (tree captures)
701 {
702 if (captures == NULL_TREE)
703 return;
704
705 register_capture_members (TREE_CHAIN (captures));
706
707 tree field = TREE_PURPOSE (captures);
708 if (PACK_EXPANSION_P (field))
709 field = PACK_EXPANSION_PATTERN (field);
710
711 finish_member_declaration (field);
712 }
713
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.
   LAMBDA_STACK runs outermost-first; each level captures from the one
   above it.  Temporarily retargets current_class_type at each level so
   add_capture adds the field to the right closure.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);
  tree var = NULL_TREE;
  tree saved_class_type = current_class_type;

  for (tree node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (this_capture_p
			  || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false, NULL);
      /* The next (inner) lambda captures this level's proxy.  */
      initializer = convert_from_reference (var);

      /* Warn about deprecated implicit capture of this via [=].  */
      if (cxx_dialect >= cxx20
	  && this_capture_p
	  && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY)
	{
	  if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
			  "implicit capture of %qE via %<[=]%> is deprecated "
			  "in C++20", this_identifier))
	    inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
		    "%<*this%> capture");
	}
    }

  current_class_type = saved_class_type;

  return var;
}
761
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is nonzero.  If ADD_CAPTURE_P is negative,
   try to capture but don't complain if we can't.  Returns the capture,
   NULL_TREE if there is none and we shouldn't complain, or
   error_mark_node after diagnosing a missing capture.  */

tree
lambda_expr_this_capture (tree lambda, int add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture)
    {
      /* LAMBDA_STACK collects the enclosing lambdas that need to
	 capture 'this'; INIT is the source of 'this' once found.  */
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;
      bool saw_complete = false;

      /* If we are in a lambda function, we can move out until we hit:
	   1. a non-lambda function or NSDMI,
	   2. a lambda function capturing 'this', or
	   3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  if (add_capture_p
	      && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* tlambda won't let us capture 'this'.  */
	    break;

	  if (add_capture_p)
	    lambda_stack = tree_cons (NULL_TREE,
				      tlambda,
				      lambda_stack);

	  tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
	  if (COMPLETE_TYPE_P (closure))
	    /* We're instantiating a generic lambda op(), the containing
	       scope may be gone.  */
	    saw_complete = true;

	  tree containing_function
	    = decl_function_context (TYPE_NAME (closure));

	  tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
	  if (ex && TREE_CODE (ex) == FIELD_DECL)
	    {
	      /* Lambda in an NSDMI.  We don't have a function to look up
		 'this' in, but we can find (or rebuild) the fake one from
		 inject_this_parameter.  */
	      if (!containing_function && !saw_complete)
		/* If we're parsing a lambda in a non-local class,
		   we can find the fake 'this' in scope_chain.  */
		init = scope_chain->x_current_class_ptr;
	      else
		/* Otherwise it's either gone or buried in
		   function_context_stack, so make another.  */
		init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
					TYPE_UNQUALIFIED);
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }
	}

      if (init)
	{
	  if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      /* Only complain when ADD_CAPTURE_P was a hard request (1).  */
      if (add_capture_p == 1)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
	 ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
895
896 /* Return the innermost LAMBDA_EXPR we're currently in, if any. */
897
898 tree
899 current_lambda_expr (void)
900 {
901 tree type = current_class_type;
902 while (type && !LAMBDA_TYPE_P (type))
903 type = decl_type_context (TYPE_NAME (type));
904 if (type)
905 return CLASSTYPE_LAMBDA_EXPR (type);
906 else
907 return NULL_TREE;
908 }
909
910 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
911 object. NULL otherwise.. */
912
913 static tree
914 resolvable_dummy_lambda (tree object)
915 {
916 if (!is_dummy_object (object))
917 return NULL_TREE;
918
919 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
920 gcc_assert (!TYPE_PTR_P (type));
921
922 if (type != current_class_type
923 && current_class_type
924 && LAMBDA_TYPE_P (current_class_type)
925 && lambda_function (current_class_type)
926 && DERIVED_FROM_P (type, nonlambda_method_basetype()))
927 return CLASSTYPE_LAMBDA_EXPR (current_class_type);
928
929 return NULL_TREE;
930 }
931
932 /* We don't want to capture 'this' until we know we need it, i.e. after
933 overload resolution has chosen a non-static member function. At that
934 point we call this function to turn a dummy object into a use of the
935 'this' capture. */
936
937 tree
938 maybe_resolve_dummy (tree object, bool add_capture_p)
939 {
940 if (tree lam = resolvable_dummy_lambda (object))
941 if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
942 if (cap != error_mark_node)
943 object = build_fold_indirect_ref (cap);
944
945 return object;
946 }
947
/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
	/* We've not yet captured, so look at the function set of
	   interest.  */
	if (BASELINK_P (fns))
	  fns = BASELINK_FUNCTIONS (fns);
	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
	if (id_expr)
	  fns = TREE_OPERAND (fns, 0);

	/* Scan the overload set; any non-static member function (or
	   member template) means 'this' may be needed.  */
	for (lkp_iterator iter (fns); iter; ++iter)
	  if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
	       || TREE_CODE (*iter) == TEMPLATE_DECL)
	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
	    {
	      /* Found a non-static member.  Capture this.  */
	      lambda_expr_this_capture (lam, /*maybe*/-1);
	      break;
	    }
      }
}
980
981 /* Returns the innermost non-lambda function. */
982
983 tree
984 current_nonlambda_function (void)
985 {
986 tree fn = current_function_decl;
987 while (fn && LAMBDA_FUNCTION_P (fn))
988 fn = decl_function_context (fn);
989 return fn;
990 }
991
/* Returns the method basetype of the innermost non-lambda function, including
   a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  if (!current_class_ref)
    return NULL_TREE;

  tree type = current_class_type;
  if (!type || !LAMBDA_TYPE_P (type))
    return type;

  /* Walk outward through enclosing closures.  */
  while (true)
    {
      tree lam = CLASSTYPE_LAMBDA_EXPR (type);
      tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
      if (ex && TREE_CODE (ex) == FIELD_DECL)
	/* Lambda in an NSDMI.  */
	return DECL_CONTEXT (ex);

      tree fn = TYPE_CONTEXT (type);
      if (!fn || TREE_CODE (fn) != FUNCTION_DECL
	  || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
	/* No enclosing non-lambda method.  */
	return NULL_TREE;
      if (!LAMBDA_FUNCTION_P (fn))
	/* Found an enclosing non-lambda method.  */
	return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
      type = DECL_CONTEXT (fn);
    }
}
1024
1025 /* Like current_scope, but looking through lambdas. */
1026
1027 tree
1028 current_nonlambda_scope (void)
1029 {
1030 tree scope = current_scope ();
1031 for (;;)
1032 {
1033 if (TREE_CODE (scope) == FUNCTION_DECL
1034 && LAMBDA_FUNCTION_P (scope))
1035 {
1036 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
1037 continue;
1038 }
1039 else if (LAMBDA_TYPE_P (scope))
1040 {
1041 scope = CP_TYPE_CONTEXT (scope);
1042 continue;
1043 }
1044 break;
1045 }
1046 return scope;
1047 }
1048
1049 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
1050 indicated FN and NARGS, but do not initialize the return type or any of the
1051 argument slots. */
1052
1053 static tree
1054 prepare_op_call (tree fn, int nargs)
1055 {
1056 tree t;
1057
1058 t = build_vl_exp (CALL_EXPR, nargs + 3);
1059 CALL_EXPR_FN (t) = fn;
1060 CALL_EXPR_STATIC_CHAIN (t) = NULL;
1061
1062 return t;
1063 }
1064
1065 /* Return true iff CALLOP is the op() for a generic lambda. */
1066
1067 bool
1068 generic_lambda_fn_p (tree callop)
1069 {
1070 return (LAMBDA_FUNCTION_P (callop)
1071 && DECL_TEMPLATE_INFO (callop)
1072 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
1073 }
1074
1075 /* If the closure TYPE has a static op(), also add a conversion to function
1076 pointer. */
1077
void
maybe_add_lambda_conv_op (tree type)
{
  /* Whether we are currently inside a function (affects how we save and
     restore function context around the bodies built below).  */
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  /* Only capture-less lambdas get the conversion to function pointer.  */
  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && undeduced_auto_decl (callop))
    {
      /* If the op() wasn't deduced due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-generic non-capturing lambdas only have a conversion function to
     pointer to function when the trailing requires-clause's constraints are
     satisfied.  */
  if (!generic_lambda_p && !constraints_satisfied_p (callop))
    return;

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  /* THISARG is a null pointer of the closure type, used as the object
     argument when thunking to a non-static op(); it stays NULL_TREE for
     a static op() (no object argument needed).  */
  tree thisarg = NULL_TREE;
  if (TREE_CODE (optype) == METHOD_TYPE)
    thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a decltype
         return expression for a deduced return call op to allow for simple
         implementation of the conversion operator.  */

      tree objfn;
      int nargs = list_length (DECL_ARGUMENTS (callop));
      if (thisarg)
        {
          tree instance = cp_build_fold_indirect_ref (thisarg);
          objfn = lookup_template_function (DECL_NAME (callop),
                                            DECL_TI_ARGS (callop));
          objfn = build_min (COMPONENT_REF, NULL_TREE,
                             instance, objfn, NULL_TREE);
          /* The implicit object argument is not among the call args.  */
          --nargs;
          call = prepare_op_call (objfn, nargs);
        }
      else
        objfn = callop;

      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else if (thisarg)
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = FUNCTION_FIRST_USER_PARM (callop);
    tree tgt = NULL;

    /* When we don't need to build any call at all, skip copying.  */
    if (!thisarg && !decltype_call)
      src = NULL_TREE;
    while (src)
      {
        tree new_node = copy_node (src);
        /* We set DECL_CONTEXT of NEW_NODE to the statfn below.
           Notice this is creating a recursive type!  */

        /* Clear TREE_ADDRESSABLE on thunk arguments.  */
        TREE_ADDRESSABLE (new_node) = 0;

        /* Chain NEW_NODE onto the copied parameter list.  */
        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            tree a = tgt;
            if (thisarg)
              {
                if (DECL_PACK_P (tgt))
                  {
                    a = make_pack_expansion (a);
                    PACK_EXPANSION_LOCAL_P (a) = true;
                  }
                CALL_EXPR_ARG (call, ix) = a;
              }

            if (decltype_call)
              {
                /* Avoid capturing variables in this context.  */
                ++cp_unevaluated_operand;
                CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
                --cp_unevaluated_operand;
              }

            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }

  /* For a deduced return type, compute it now from DECLTYPE_CALL; for
     the non-template case, build the actual call to op().  */
  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
        }
    }
  else if (thisarg)
    {
      /* Don't warn on deprecated or unavailable lambda declarations, unless
         the lambda is actually called.  */
      auto du = make_temp_override (deprecated_state,
                                    UNAVAILABLE_DEPRECATED_SUPPRESS);
      call = build_call_a (callop, direct_argvec->length (),
                           direct_argvec->address ());
    }

  if (thisarg)
    {
      CALL_FROM_THUNK_P (call) = 1;
      SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
    }

  /* STATTYPE is the type of the static thunk "_FUN": same user parameters
     and (possibly deduced) return type as op(), with its attributes and
     noexcept-ness carried over.  */
  tree stattype
    = build_function_type (fn_result, FUNCTION_FIRST_USER_PARMTYPE (callop));
  stattype = (cp_build_type_attribute_variant
              (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  /* DR 1722: The conversion function should be noexcept.  */
  fntype = build_exception_variant (fntype, noexcept_true_spec);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  SET_DECL_LANGUAGE (convfn, lang_cplusplus);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  /* The conversion op is constexpr/consteval whenever op() is.  */
  DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
  if (DECL_IMMEDIATE_FUNCTION_P (callop))
    SET_DECL_IMMEDIATE_FUNCTION_P (fn);
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  if (thisarg == NULL_TREE)
    {
      /* For static lambda, just return operator().  */
      if (nested)
        push_function_context ();
      else
        /* Still increment function_depth so that we don't GC in the
           middle of an expression.  */
        ++function_depth;

      /* Generate the body of the conversion op.  */

      start_preparsed_function (convfn, NULL_TREE,
                                SF_PRE_PARSED | SF_INCLASS_INLINE);
      tree body = begin_function_body ();
      tree compound_stmt = begin_compound_stmt (0);

      /* decl_needed_p needs to see that it's used.  */
      TREE_USED (callop) = 1;
      finish_return_stmt (decay_conversion (callop, tf_warning_or_error));

      finish_compound_stmt (compound_stmt);
      finish_function_body (body);

      fn = finish_function (/*inline_p=*/true);
      if (!generic_lambda_p)
        expand_or_defer_fn (fn);

      if (nested)
        pop_function_context ();
      else
        --function_depth;
      /* No thunk is needed for a static op(); we are done.  */
      return;
    }

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
  SET_DECL_LANGUAGE (statfn, lang_cplusplus);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
  if (DECL_IMMEDIATE_FUNCTION_P (callop))
    SET_DECL_IMMEDIATE_FUNCTION_P (fn);
  /* Hand the copied parameter list over to the thunk.  */
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
1422
1423 /* True if FN is the static function "_FUN" that gets returned from the lambda
1424 conversion operator. */
1425
1426 bool
1427 lambda_static_thunk_p (tree fn)
1428 {
1429 return (fn && TREE_CODE (fn) == FUNCTION_DECL
1430 && DECL_ARTIFICIAL (fn)
1431 && DECL_STATIC_FUNCTION_P (fn)
1432 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1433 }
1434
1435 bool
1436 call_from_lambda_thunk_p (tree call)
1437 {
1438 return (CALL_FROM_THUNK_P (call)
1439 && lambda_static_thunk_p (current_function_decl));
1440 }
1441
1442 /* Returns true iff VAL is a lambda-related declaration which should
1443 be ignored by unqualified lookup. */
1444
1445 bool
1446 is_lambda_ignored_entity (tree val)
1447 {
1448 /* Look past normal, non-VLA capture proxies. */
1449 if (is_normal_capture_proxy (val)
1450 && !variably_modified_type_p (TREE_TYPE (val), NULL_TREE))
1451 return true;
1452
1453 /* Always ignore lambda fields, their names are only for debugging. */
1454 if (TREE_CODE (val) == FIELD_DECL
1455 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1456 return true;
1457
1458 /* None of the lookups that use qualify_lookup want the op() from the
1459 lambda; they want the one from the enclosing class. */
1460 if (tree fns = maybe_get_fns (val))
1461 if (LAMBDA_FUNCTION_P (OVL_FIRST (fns)))
1462 return true;
1463
1464 return false;
1465 }
1466
1467 /* Lambdas that appear in variable initializer or default argument
1468 scope get that in their mangling, so we need to record it. Also,
1469 multiple lambdas in the same scope may need a mangling
1470 discriminator. In ABI <= 17, there is a single per-scope sequence
1471 number. In ABI >= 18, there are per-scope per-signature sequence
1472 numbers. */
struct GTY(()) lambda_sig_count
{
  tree fn; // The lambda fn whose sig this is.
  unsigned count; // Running discriminator for lambdas with this signature.
};
struct GTY(()) lambda_discriminator
{
  tree scope; // The mangling scope (function, var, field...), or NULL_TREE.
  unsigned nesting; // Inside a function, VAR_DECLs get the function
                    // as scope. This counts that nesting.
  unsigned count;  // The per-scope counter.
  vec<lambda_sig_count, va_gc> *discriminators; // Per-signature counters
};
// The current scope.
static GTY(()) lambda_discriminator lambda_scope;
// Stack of previous scopes.
static GTY(()) vec<lambda_discriminator, va_gc> *lambda_scope_stack;
1490
1491 // Push DECL as lambda extra scope, also new discriminator counters.
1492
1493 void
1494 start_lambda_scope (tree decl)
1495 {
1496 gcc_checking_assert (decl);
1497 if (current_function_decl && VAR_P (decl))
1498 // If we're inside a function, we ignore variable scope. Don't push.
1499 lambda_scope.nesting++;
1500 else
1501 {
1502 vec_safe_push (lambda_scope_stack, lambda_scope);
1503 lambda_scope.scope = decl;
1504 lambda_scope.nesting = 0;
1505 lambda_scope.count = 0;
1506 lambda_scope.discriminators = nullptr;
1507 }
1508 }
1509
1510 // Pop from the current lambda extra scope.
1511
1512 void
1513 finish_lambda_scope (void)
1514 {
1515 if (!lambda_scope.nesting--)
1516 {
1517 lambda_scope = lambda_scope_stack->last ();
1518 lambda_scope_stack->pop ();
1519 }
1520 }
1521
1522 // Record the current lambda scope into LAMBDA
1523
1524 void
1525 record_lambda_scope (tree lambda)
1526 {
1527 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope.scope;
1528 if (lambda_scope.scope)
1529 {
1530 tree closure = LAMBDA_EXPR_CLOSURE (lambda);
1531 gcc_checking_assert (closure);
1532 maybe_key_decl (lambda_scope.scope, TYPE_NAME (closure));
1533 }
1534 }
1535
1536 // Compare lambda template heads TMPL_A and TMPL_B, used for both
1537 // templated lambdas, and template template parameters of said lambda.
1538
static bool
compare_lambda_template_head (tree tmpl_a, tree tmpl_b)
{
  // We only need one level of template parms
  tree inner_a = INNERMOST_TEMPLATE_PARMS (DECL_TEMPLATE_PARMS (tmpl_a));
  tree inner_b = INNERMOST_TEMPLATE_PARMS (DECL_TEMPLATE_PARMS (tmpl_b));

  // We only compare explicit template parms, ignoring trailing
  // synthetic ones.
  int len_a = TREE_VEC_LENGTH (inner_a);
  int len_b = TREE_VEC_LENGTH (inner_b);

  // Walk the longer of the two lists; a missing or synthetic
  // (DECL_VIRTUAL_P) parm is represented as NULL_TREE below.
  for (int ix = 0, len = MAX (len_a, len_b); ix != len; ix++)
    {
      tree parm_a = NULL_TREE;
      if (ix < len_a)
        {
          parm_a = TREE_VEC_ELT (inner_a, ix);
          if (parm_a == error_mark_node)
            return false;
          parm_a = TREE_VALUE (parm_a);
          if (parm_a == error_mark_node)
            return false;
          // DECL_VIRTUAL_P marks a synthetic parm; treat as absent.
          if (DECL_VIRTUAL_P (parm_a))
            parm_a = NULL_TREE;
        }

      tree parm_b = NULL_TREE;
      if (ix < len_b)
        {
          parm_b = TREE_VEC_ELT (inner_b, ix);
          if (parm_b == error_mark_node)
            return false;
          parm_b = TREE_VALUE (parm_b);
          if (parm_b == error_mark_node)
            return false;
          if (DECL_VIRTUAL_P (parm_b))
            parm_b = NULL_TREE;
        }

      if (!parm_a && !parm_b)
        // we're done
        break;

      // One ran out of explicit parms before the other: not equal.
      if (!(parm_a && parm_b))
        return false;

      if (TREE_CODE (parm_a) != TREE_CODE (parm_b))
        return false;

      if (TREE_CODE (parm_a) == PARM_DECL)
        {
          // Non-type template parms: same pack-ness and same type.
          if (TEMPLATE_PARM_PARAMETER_PACK (DECL_INITIAL (parm_a))
              != TEMPLATE_PARM_PARAMETER_PACK (DECL_INITIAL (parm_b)))
            return false;

          if (!same_type_p (TREE_TYPE (parm_a), TREE_TYPE (parm_b)))
            return false;
        }
      else
        {
          // Type or template template parms: same pack-ness, and for
          // template template parms, recursively equal heads.
          if (TEMPLATE_TYPE_PARAMETER_PACK (TREE_TYPE (parm_a))
              != TEMPLATE_TYPE_PARAMETER_PACK (TREE_TYPE (parm_b)))
            return false;

          if (TREE_CODE (parm_a) != TEMPLATE_DECL)
            gcc_checking_assert (TREE_CODE (parm_a) == TYPE_DECL);
          else if (!compare_lambda_template_head (parm_a, parm_b))
            return false;
        }
    }

  return true;
}
1613
1614 // Compare lambda signatures FN_A and FN_B, they may be TEMPLATE_DECLs too.
1615
static bool
compare_lambda_sig (tree fn_a, tree fn_b)
{
  // Both templated: the template heads must match, then compare the
  // underlying function decls.
  if (TREE_CODE (fn_a) == TEMPLATE_DECL
      && TREE_CODE (fn_b) == TEMPLATE_DECL)
    {
      if (!compare_lambda_template_head (fn_a, fn_b))
        return false;
      fn_a = DECL_TEMPLATE_RESULT (fn_a);
      fn_b = DECL_TEMPLATE_RESULT (fn_b);
    }
  else if (TREE_CODE (fn_a) == TEMPLATE_DECL
           || TREE_CODE (fn_b) == TEMPLATE_DECL)
    // Templated vs non-templated never match.
    return false;

  if (fn_a == error_mark_node
      || fn_b == error_mark_node)
    return false;

  // Compare parameter type lists pairwise, skipping the implicit
  // object parameter (hence the TREE_CHAIN).
  for (tree args_a = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn_a))),
         args_b = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn_b)));
       args_a || args_b;
       args_a = TREE_CHAIN (args_a), args_b = TREE_CHAIN (args_b))
    {
      // Different lengths: not equal.
      if (!args_a || !args_b)
        return false;
      // This check also deals with differing variadicness
      if (!same_type_p (TREE_VALUE (args_a), TREE_VALUE (args_b)))
        return false;
    }

  return true;
}
1649
1650 // Record the per-scope discriminator of LAMBDA. If the extra scope
1651 // is empty, we must use the empty scope counter, which might not be
1652 // the live one.
1653
1654 void
1655 record_lambda_scope_discriminator (tree lambda)
1656 {
1657 auto *slot = (vec_safe_is_empty (lambda_scope_stack)
1658 || LAMBDA_EXPR_EXTRA_SCOPE (lambda)
1659 ? &lambda_scope : lambda_scope_stack->begin ());
1660 LAMBDA_EXPR_SCOPE_ONLY_DISCRIMINATOR (lambda) = slot->count++;
1661 }
1662
1663 // Record the per-scope per-signature discriminator of LAMBDA. If the
1664 // extra scope is empty, we must use the empty scope counter, which
1665 // might not be the live one.
1666
void
record_lambda_scope_sig_discriminator (tree lambda, tree fn)
{
  // Pick the counter set for LAMBDA's scope: the live one, or the
  // empty-scope one at the bottom of the stack (see comment above).
  auto *slot = (vec_safe_is_empty (lambda_scope_stack)
                || LAMBDA_EXPR_EXTRA_SCOPE (lambda)
                ? &lambda_scope : lambda_scope_stack->begin ());
  gcc_checking_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == slot->scope);

  // A linear search, we're not expecting this to be a big list, and
  // this avoids needing a signature hash function.
  lambda_sig_count *sig;
  if (unsigned ix = vec_safe_length (slot->discriminators))
    for (sig = slot->discriminators->begin (); ix--; sig++)
      if (compare_lambda_sig (fn, sig->fn))
        goto found;
  // No existing entry matched FN's signature; start a new counter.
  {
    lambda_sig_count init = {fn, 0};
    sig = vec_safe_push (slot->discriminators, init);
  }
 found:
  LAMBDA_EXPR_SCOPE_SIG_DISCRIMINATOR (lambda) = sig->count++;
}
1689
1690 tree
1691 start_lambda_function (tree fco, tree lambda_expr)
1692 {
1693 /* Let the front end know that we are going to be defining this
1694 function. */
1695 start_preparsed_function (fco,
1696 NULL_TREE,
1697 SF_PRE_PARSED | SF_INCLASS_INLINE);
1698
1699 tree body = begin_function_body ();
1700
1701 /* Push the proxies for any explicit captures. */
1702 for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1703 cap = TREE_CHAIN (cap))
1704 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1705
1706 return body;
1707 }
1708
1709 /* Subroutine of prune_lambda_captures: CAP is a node in
1710 LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we
1711 might optimize away the capture, or NULL_TREE if there is no such
1712 variable. */
1713
1714 static tree
1715 var_to_maybe_prune (tree cap)
1716 {
1717 if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1718 /* Don't prune explicit captures. */
1719 return NULL_TREE;
1720
1721 tree mem = TREE_PURPOSE (cap);
1722 if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1723 /* Packs and init-captures aren't captures of constant vars. */
1724 return NULL_TREE;
1725
1726 tree init = TREE_VALUE (cap);
1727 if (is_normal_capture_proxy (init))
1728 init = DECL_CAPTURED_VARIABLE (init);
1729 if (decl_constant_var_p (init))
1730 return init;
1731
1732 return NULL_TREE;
1733 }
1734
1735 /* walk_tree helper for prune_lambda_captures: Remember which capture proxies
1736 for constant variables are actually used in the lambda body.
1737
1738 There will always be a DECL_EXPR for the capture proxy; remember it when we
1739 see it, but replace it with any other use. */
1740
static tree
mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
{
  /* DATA maps each captured constant variable to the interesting use we
     have seen so far (or its DECL_EXPR if that is all we've seen).  */
  hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;

  tree var = NULL_TREE;
  if (TREE_CODE (*t) == DECL_EXPR)
    {
      /* The declaration of a constant capture proxy itself.  */
      tree decl = DECL_EXPR_DECL (*t);
      if (is_constant_capture_proxy (decl))
        {
          var = DECL_CAPTURED_VARIABLE (decl);
          *walk_subtrees = 0;
        }
    }
  else if (!location_wrapper_p (*t) /* is_capture_proxy dislikes them.  */
           && is_constant_capture_proxy (*t))
    /* An actual use of a constant capture proxy.  */
    var = DECL_CAPTURED_VARIABLE (*t);

  if (var)
    {
      /* Record this occurrence; a real use (VAR_P) replaces a
         previously-recorded DECL_EXPR, so after the walk the map holds
         a DECL_EXPR only if the proxy was never otherwise used.  */
      tree *&slot = const_vars.get_or_insert (var);
      if (!slot || VAR_P (*t))
        slot = t;
    }

  return NULL_TREE;
}
1769
1770 /* We're at the end of processing a lambda; go back and remove any captures of
1771 constant variables for which we've folded away all uses. */
1772
static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.  */
    return;
  /* Don't bother pruning in a template, we'll prune at instantiation time.  */
  if (dependent_type_p (TREE_TYPE (lam)))
    return;

  /* Map each captured constant variable to its surviving use (or the
     DECL_EXPR of its proxy if all real uses were folded away).  */
  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  /* Walk the capture list and the closure's field list in parallel,
     splicing out captures whose only remaining occurrence is the
     proxy's DECL_EXPR.  */
  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
        {
          tree **use = const_vars.get (var);
          if (use && TREE_CODE (**use) == DECL_EXPR)
            {
              /* All uses of this capture were folded away, leaving only the
                 proxy declaration.  */

              /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
              *capp = TREE_CHAIN (cap);

              /* And out of TYPE_FIELDS.  */
              tree field = TREE_PURPOSE (cap);
              while (*fieldp != field)
                fieldp = &DECL_CHAIN (*fieldp);
              *fieldp = DECL_CHAIN (*fieldp);

              /* And remove the capture proxy declaration.  */
              **use = void_node;
              continue;
            }
        }

      capp = &TREE_CHAIN (cap);
    }
}
1821
// Finish the lambda function BODY begun by start_lambda_function:
// close the body, prune any unneeded captures of constant variables,
// and finish (and possibly expand) the call operator.

void
finish_lambda_function (tree body)
{
  finish_function_body (body);

  /* Remove captures of constant variables whose uses were all folded.  */
  prune_lambda_captures (body);

  /* Finish the function and generate code for it if necessary.  */
  tree fn = finish_function (/*inline_p=*/true);

  /* Only expand if the call op is not a template.  */
  if (!DECL_TEMPLATE_INFO (fn))
    expand_or_defer_fn (fn);
}
1840
1841 #include "gt-cp-lambda.h"