/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2024 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cp-tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "toplev.h"
#include "gimplify.h"
#include "target.h"
#include "decl.h"

/* Constructor for a lambda expression.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_REGEN_INFO (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  return lambda;
}

/* Create the closure object for a LAMBDA_EXPR.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  location_t loc = LAMBDA_EXPR_LOCATION (lambda_expr);
  iloc_sentinel il (loc);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      if (TREE_CODE (val) == TREE_LIST)
        val = build_x_compound_expr_from_list (val, ELK_INIT,
                                               tf_warning_or_error);

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && !TYPE_REF_P (TREE_TYPE (field)))
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization.  */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  protected_set_expr_location (expr, loc);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  return expr;
}

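/* Editorial illustration (not part of the original source): for a
   user-level lambda such as

     int i = 0;
     double d = 1.5;
     auto f = [i, d] { return i + d; };

   the closure object built above is initialized roughly as if the closure
   were an aggregate whose members are direct-initialized from the captured
   entities, e.g. { i, d }; the TARGET_EXPR_DIRECT_INIT_P handling is what
   turns the usual copy-initialization into direct-initialization.  */
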
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  */

tree
begin_lambda_type (tree lambda)
{
  /* Lambda names are nearly but not quite anonymous.  */
  tree name = make_anon_name ();
  IDENTIFIER_LAMBDA_P (name) = true;

  /* Create the new RECORD_TYPE for this lambda.  */
  tree type = xref_tag (/*tag_code=*/record_type, name);
  if (type == error_mark_node)
    return error_mark_node;

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx17)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}

/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = get_class_binding_direct (type, call_op_identifier);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}

/* True if EXPR is an expression whose type can be used directly in lambda
   capture.  Not to be used for 'auto'.  */

static bool
type_deducible_expression_p (tree expr)
{
  if (!type_dependent_expression_p (expr))
    return true;
  if (BRACE_ENCLOSED_INITIALIZER_P (expr)
      || TREE_CODE (expr) == EXPR_PACK_EXPANSION)
    return false;
  tree t = non_reference (TREE_TYPE (expr));
  return (t && TREE_CODE (t) != TYPE_PACK_EXPANSION
          && !WILDCARD_TYPE_P (t) && !LAMBDA_TYPE_P (t)
          && !array_of_unknown_bound_p (t)
          && !type_uses_auto (t));
}

/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
                           bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (is_this)
    type = TREE_TYPE (expr);
  else if (explicit_init_p)
    {
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
        /* Add the reference now, so deduction doesn't lose
           outermost CV qualifiers of EXPR.  */
        type = build_reference_type (type);
      if (uses_parameter_packs (expr))
        /* Stick with 'auto' even if the type could be deduced.  */;
      else
        type = do_auto_deduction (type, expr, auto_node);
    }
  else if (!type_deducible_expression_p (expr))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else
    {
      STRIP_ANY_LOCATION_WRAPPER (expr);

      if (!by_reference_p && is_capture_proxy (expr))
        {
          /* When capturing by-value another capture proxy from an enclosing
             lambda, consider the type of the corresponding field instead,
             as the proxy may be additionally const-qualified if the enclosing
             lambda is non-mutable (PR94376).  */
          gcc_assert (TREE_CODE (DECL_VALUE_EXPR (expr)) == COMPONENT_REF);
          expr = TREE_OPERAND (DECL_VALUE_EXPR (expr), 1);
        }

      type = non_reference (unlowered_expr_type (expr));

      if (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE)
        type = build_reference_type (type);
    }

  return type;
}

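/* Editorial illustration (not part of the original source) of the field
   types chosen above, assuming non-dependent operands:

     int i = 0;
     [i] { };          // field type: int
     [&i] { };         // field type: int& (reference added here)
     [j = i + 1] { };  // init-capture: 'auto' deduced to int
     [&r = i] { };     // init-capture by reference: deduced to int&  */
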
/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy.  */

bool
is_capture_proxy (tree decl)
{
  /* Location wrappers should be stripped or otherwise handled by the
     caller before using this predicate.  */
  gcc_checking_assert (!location_wrapper_p (decl));

  return (VAR_P (decl)
          && DECL_HAS_VALUE_EXPR_P (decl)
          && !DECL_ANON_UNION_VAR_P (decl)
          && !DECL_DECOMPOSITION_P (decl)
          && !DECL_FNAME_P (decl)
          && !(DECL_ARTIFICIAL (decl)
               && DECL_LANG_SPECIFIC (decl)
               && DECL_OMP_PRIVATIZED_MEMBER (decl))
          && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  return (DECL_LANG_SPECIFIC (decl)
          && DECL_CAPTURED_VARIABLE (decl));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   of a constant variable.  */

bool
is_constant_capture_proxy (tree decl)
{
  if (is_normal_capture_proxy (decl))
    return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
  return false;
}

/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
        {
          gcc_assert (!is_normal_capture_proxy (cap));
          tree old = retrieve_local_specialization (cap);
          if (old)
            gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
        }
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}

/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}

/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}

/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  */

static tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  object = DECL_ARGUMENTS (fn);
  /* The proxy variable forwards to the capture field.  */
  if (INDIRECT_TYPE_P (TREE_TYPE (object)))
    object = build_fold_indirect_ref (object);
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  if (IDENTIFIER_POINTER (DECL_NAME (member))[2] == '_'
      && IDENTIFIER_POINTER (DECL_NAME (member))[3] == '.')
    name = get_identifier ("_");
  else
    name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !INDIRECT_TYPE_P (type))
    {
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_aggregate_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_aggregate_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                                     build_index_type (max));
      type = build_reference_type (type);
      object = convert (type, ptr);
    }

  complete_type (type);

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      if (DECL_VLA_CAPTURE_P (member))
        {
          init = CONSTRUCTOR_ELT (init, 0)->value;
          init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
          init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
        }
      else
        {
          if (PACK_EXPANSION_P (init))
            init = PACK_EXPANSION_PATTERN (init);
        }

      if (INDIRECT_REF_P (init))
        init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);

      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      while (is_normal_capture_proxy (init))
        init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}

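/* Editorial illustration (not part of the original source): for

     int i = 0;
     auto f = [i] { return i; };

   the closure gets a field named "__i" (see add_capture), and inside the
   operator() body the function above creates an artificial VAR_DECL named
   "i" whose DECL_VALUE_EXPR is the COMPONENT_REF this->__i, so that uses
   of "i" in the body resolve to the capture member.  */
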
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type)
{
  tree type = xref_tag (record_type, make_anon_name ());
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}

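/* Editorial illustration (not part of the original source): as a GNU
   extension (following the N3639 design), an array of runtime bound can be
   captured, but only by reference:

     void f (int n)
     {
       int vla[n];
       [&vla] { vla[0] = 1; } ();   // captured via the {ptr, max} struct
       // [vla] { } ();             // rejected: no capture by copy
     }  */
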
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p, unsigned *name_independent_cnt)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
           && variably_modified_type_p (type, NULL_TREE))
    {
      sorry ("capture of variably-modified type %qT that is not an N3639 array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      return error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
                                        by_reference_p);
      if (type == error_mark_node)
        return error_mark_node;

      if (id == this_identifier && !by_reference_p)
        {
          gcc_assert (INDIRECT_TYPE_P (type));
          type = TREE_TYPE (type);
          initializer = cp_build_fold_indirect_ref (initializer);
        }

      if (dependent_type_p (type))
        ;
      else if (id != this_identifier && by_reference_p)
        {
          if (!lvalue_p (initializer))
            {
              error ("cannot capture %qE by reference", initializer);
              return error_mark_node;
            }
        }
      else
        {
          /* Capture by copy requires a complete type.  */
          type = complete_type (type);
          if (!COMPLETE_TYPE_P (type))
            {
              error ("capture by copy of incomplete type %qT", type);
              cxx_incomplete_type_inform (type);
              return error_mark_node;
            }
          else if (!verify_type_context (input_location,
                                         TCTX_CAPTURE_BY_COPY, type))
            return error_mark_node;
        }

      if (cxx_dialect < cxx20)
        {
          auto_diagnostic_group d;
          tree stripped_init = tree_strip_any_location_wrapper (initializer);
          if (DECL_DECOMPOSITION_P (stripped_init)
              && pedwarn (input_location, OPT_Wc__20_extensions,
                          "captured structured bindings are a C++20 extension"))
            inform (DECL_SOURCE_LOCATION (stripped_init), "declared here");
        }
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  if (id_equal (id, "_") && name_independent_cnt)
    {
      if (*name_independent_cnt == 0)
        name = get_identifier ("___");
      else
        {
          /* For 2nd and later name-independent capture use
             unique names.  */
          char buf2[5 + (HOST_BITS_PER_INT + 2) / 3];
          sprintf (buf2, "___.%u", *name_independent_cnt);
          name = get_identifier (buf2);
        }
      name_independent_cnt[0]++;
    }
  else
    {
      buf = XALLOCAVEC (char, IDENTIFIER_LENGTH (id) + 3);
      buf[1] = buf[0] = '_';
      memcpy (buf + 2, IDENTIFIER_POINTER (id),
              IDENTIFIER_LENGTH (id) + 1);
      name = get_identifier (buf);
    }

  if (variadic)
    {
      type = make_pack_expansion (type);
      if (explicit_init_p)
        /* With an explicit initializer 'type' is auto, which isn't really a
           parameter pack in this context.  We will want as many fields as we
           have elements in the expansion of the initializer, so use its packs
           instead.  */
        {
          PACK_EXPANSION_PARAMETER_PACKS (type)
            = uses_parameter_packs (initializer);
          PACK_EXPANSION_AUTO_P (type) = true;
        }
    }

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
        internal_error ("trying to capture %qD in instantiation of "
                        "generic lambda", id);
      finish_member_declaration (member);
    }

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
  return NULL_TREE;
}

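/* Editorial illustration (not part of the original source): capturing a
   structured binding, which add_capture pedwarns about before C++20
   (PR85889):

     auto [a, b] = std::pair{1, 2};
     auto f = [a] { return a; };  // OK in C++20; a -Wc++20-extensions
                                  // pedwarn in earlier dialects

   The capture field itself is named "__a", so user-level name lookup never
   finds the field directly.  */
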
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  finish_member_declaration (field);
}

/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);
  tree var = NULL_TREE;
  tree saved_class_type = current_class_type;

  for (tree node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (this_capture_p
                          || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false, NULL);
      initializer = convert_from_reference (var);

      /* Warn about deprecated implicit capture of this via [=].  */
      if (cxx_dialect >= cxx20
          && this_capture_p
          && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY)
        {
          if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
                          "implicit capture of %qE via %<[=]%> is deprecated "
                          "in C++20", this_identifier))
            inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
                    "%<*this%> capture");
        }
    }

  current_class_type = saved_class_type;

  return var;
}

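/* Editorial illustration (not part of the original source): with nested
   lambdas and capture-defaults, a use in the innermost body may have to be
   captured by every enclosing lambda on the stack, e.g.

     int i = 0;
     auto outer = [=] {
       auto inner = [=] { return i; };  // 'i' is captured by 'inner'
       return inner ();                 // ... and therefore by 'outer' too
     };  */
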
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is nonzero.  If ADD_CAPTURE_P is negative,
   try to capture but don't complain if we can't.  */

tree
lambda_expr_this_capture (tree lambda, int add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture)
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;
      bool saw_complete = false;

      /* If we are in a lambda function, we can move out until we hit:
         1. a non-lambda function or NSDMI,
         2. a lambda function capturing 'this', or
         3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
        {
          if (add_capture_p
              && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* tlambda won't let us capture 'this'.  */
            break;

          if (add_capture_p)
            lambda_stack = tree_cons (NULL_TREE,
                                      tlambda,
                                      lambda_stack);

          tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
          if (COMPLETE_TYPE_P (closure))
            /* We're instantiating a generic lambda op(), the containing
               scope may be gone.  */
            saw_complete = true;

          tree containing_function
            = decl_function_context (TYPE_NAME (closure));

          tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
          if (ex && TREE_CODE (ex) == FIELD_DECL)
            {
              /* Lambda in an NSDMI.  We don't have a function to look up
                 'this' in, but we can find (or rebuild) the fake one from
                 inject_this_parameter.  */
              if (!containing_function && !saw_complete)
                /* If we're parsing a lambda in a non-local class,
                   we can find the fake 'this' in scope_chain.  */
                init = scope_chain->x_current_class_ptr;
              else
                /* Otherwise it's either gone or buried in
                   function_context_stack, so make another.  */
                init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
                                        TYPE_UNQUALIFIED);
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture.  */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function.
                 There is no this pointer in xobj member functions.  */
              if (DECL_IOBJ_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'.  */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'.  */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }
        }

      if (init)
        {
          if (add_capture_p)
            this_capture = add_default_capture (lambda_stack,
                                                /*id=*/this_identifier,
                                                init);
          else
            this_capture = init;
        }
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p == 1)
        {
          error ("%<this%> was not captured for this lambda function");
          result = error_mark_node;
        }
      else
        result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
         ensures that the transformed expression is an rvalue. ]  */
      result = rvalue (result);
    }

  return result;
}

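/* Editorial illustration (not part of the original source): 'this' can be
   captured implicitly through a capture-default, e.g.

     struct S {
       int m;
       // Captures 'this'; with [=] that implicit capture is deprecated in
       // C++20, hence the warning issued in add_default_capture.
       int f () { return [=] { return m; } (); }
       int g () { return [this] { return m; } (); }
     };  */
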
/* Return the innermost LAMBDA_EXPR we're currently in, if any.  */

tree
current_lambda_expr (void)
{
  tree type = current_class_type;
  while (type && !LAMBDA_TYPE_P (type))
    type = decl_type_context (TYPE_NAME (type));
  if (type)
    return CLASSTYPE_LAMBDA_EXPR (type);
  else
    return NULL_TREE;
}

/* Return the current LAMBDA_EXPR, if this is a resolvable dummy
   object.  NULL otherwise.  */

static tree
resolvable_dummy_lambda (tree object)
{
  if (!is_dummy_object (object))
    return NULL_TREE;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, nonlambda_method_basetype ()))
    return CLASSTYPE_LAMBDA_EXPR (current_class_type);

  return NULL_TREE;
}

/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
      if (cap != error_mark_node)
        object = build_fold_indirect_ref (cap);

  return object;
}

/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
        /* We've not yet captured, so look at the function set of
           interest.  */
        if (BASELINK_P (fns))
          fns = BASELINK_FUNCTIONS (fns);
        bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
        if (id_expr)
          fns = TREE_OPERAND (fns, 0);

        for (lkp_iterator iter (fns); iter; ++iter)
          if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
               || TREE_CODE (*iter) == TEMPLATE_DECL)
              && DECL_IOBJ_MEMBER_FUNCTION_P (*iter))
            {
              /* Found a non-static member.  Capture this.  */
              lambda_expr_this_capture (lam, /*maybe*/-1);
              break;
            }
      }
}

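/* Editorial illustration (not part of the original source): in a generic
   lambda the callee isn't known until instantiation, but 'this' must be
   captured while parsing, e.g.

     struct S {
       int f (int);
       auto g () { return [=] (auto x) { return f (x); }; }
     };

   Overload resolution for 'f (x)' is deferred, yet the lambda still
   captures 'this' now because the overload set contains a non-static
   member function.  */
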
/* Returns the innermost non-lambda function.  */

tree
current_nonlambda_function (void)
{
  tree fn = current_function_decl;
  while (fn && LAMBDA_FUNCTION_P (fn))
    fn = decl_function_context (fn);
  return fn;
}

/* Returns the method basetype of the innermost non-lambda function, including
   a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  if (!current_class_ref)
    return NULL_TREE;

  tree type = current_class_type;
  if (!type || !LAMBDA_TYPE_P (type))
    return type;

  while (true)
    {
      tree lam = CLASSTYPE_LAMBDA_EXPR (type);
      tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
      if (ex && TREE_CODE (ex) == FIELD_DECL)
        /* Lambda in an NSDMI.  */
        return DECL_CONTEXT (ex);

      tree fn = TYPE_CONTEXT (type);
      if (!fn || TREE_CODE (fn) != FUNCTION_DECL
          || !DECL_IOBJ_MEMBER_FUNCTION_P (fn))
        /* No enclosing non-lambda method.  */
        return NULL_TREE;
      if (!LAMBDA_FUNCTION_P (fn))
        /* Found an enclosing non-lambda method.  */
        return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
      type = DECL_CONTEXT (fn);
    }
}

/* Like current_scope, but looking through lambdas.  */

tree
current_nonlambda_scope (void)
{
  tree scope = current_scope ();
  for (;;)
    {
      if (TREE_CODE (scope) == FUNCTION_DECL
          && LAMBDA_FUNCTION_P (scope))
        {
          scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
          continue;
        }
      else if (LAMBDA_TYPE_P (scope))
        {
          scope = CP_TYPE_CONTEXT (scope);
          continue;
        }
      break;
    }
  return scope;
}

/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots.  */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

/* Return true iff CALLOP is the op() for a generic lambda.  */

bool
generic_lambda_fn_p (tree callop)
{
  return (LAMBDA_FUNCTION_P (callop)
          && DECL_TEMPLATE_INFO (callop)
          && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
}

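/* Editorial illustration (not part of the original source) of what the
   following function provides: a capture-less lambda converts to a plain
   function pointer, e.g.

     int (*fp) (int) = [] (int i) { return i + 1; };

   For a non-static op() this is implemented below with a static "_FUN"
   thunk that the conversion operator returns; for a generic lambda the
   conversion operator is itself a template.  */
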
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && undeduced_auto_decl (callop))
    {
      /* If the op() wasn't deduced due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-generic non-capturing lambdas only have a conversion function to
     pointer to function when the trailing requires-clause's constraints are
     satisfied.  */
  if (!generic_lambda_p && !constraints_satisfied_p (callop))
    return;

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  tree thisarg = NULL_TREE;
  if (TREE_CODE (optype) == METHOD_TYPE)
    thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a decltype
         return expression for a deduced return call op to allow for simple
         implementation of the conversion operator.  */

      tree objfn;
      int nargs = list_length (DECL_ARGUMENTS (callop));
      if (thisarg)
        {
          tree instance = cp_build_fold_indirect_ref (thisarg);
          objfn = lookup_template_function (DECL_NAME (callop),
                                            DECL_TI_ARGS (callop));
          objfn = build_min (COMPONENT_REF, NULL_TREE,
                             instance, objfn, NULL_TREE);
          --nargs;
          call = prepare_op_call (objfn, nargs);
        }
      else
        objfn = callop;

      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else if (thisarg)
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = FUNCTION_FIRST_USER_PARM (callop);
    tree tgt = NULL;

    if (!thisarg && !decltype_call)
      src = NULL_TREE;
    while (src)
      {
        tree new_node = copy_node (src);
        /* We set DECL_CONTEXT of NEW_NODE to the statfn below.
           Notice this is creating a recursive type!  */

        /* Clear TREE_ADDRESSABLE on thunk arguments.  */
        TREE_ADDRESSABLE (new_node) = 0;

        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            tree a = tgt;
            if (thisarg)
              {
                if (DECL_PACK_P (tgt))
                  {
                    a = make_pack_expansion (a);
                    PACK_EXPANSION_LOCAL_P (a) = true;
                  }
                CALL_EXPR_ARG (call, ix) = a;
              }

            if (decltype_call)
              {
                /* Avoid capturing variables in this context.  */
                ++cp_unevaluated_operand;
                CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
                --cp_unevaluated_operand;
              }

            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
        }
    }
  else if (thisarg)
    {
      /* Don't warn on deprecated or unavailable lambda declarations, unless
         the lambda is actually called.  */
      auto du = make_temp_override (deprecated_state,
                                    UNAVAILABLE_DEPRECATED_SUPPRESS);
      call = build_call_a (callop, direct_argvec->length (),
                           direct_argvec->address ());
    }

  if (thisarg)
    {
      CALL_FROM_THUNK_P (call) = 1;
      SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
    }

  tree stattype
    = build_function_type (fn_result, FUNCTION_FIRST_USER_PARMTYPE (callop));
  stattype = (cp_build_type_attribute_variant
              (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  /* DR 1722: The conversion function should be noexcept.  */
  fntype = build_exception_variant (fntype, noexcept_true_spec);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  SET_DECL_LANGUAGE (convfn, lang_cplusplus);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
  if (DECL_IMMEDIATE_FUNCTION_P (callop))
    SET_DECL_IMMEDIATE_FUNCTION_P (fn);
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  if (thisarg == NULL_TREE)
    {
      /* For static lambda, just return operator().  */
      if (nested)
        push_function_context ();
      else
        /* Still increment function_depth so that we don't GC in the
           middle of an expression.  */
        ++function_depth;

      /* Generate the body of the conversion op.  */

      start_preparsed_function (convfn, NULL_TREE,
                                SF_PRE_PARSED | SF_INCLASS_INLINE);
      tree body = begin_function_body ();
      tree compound_stmt = begin_compound_stmt (0);

      /* decl_needed_p needs to see that it's used.  */
      TREE_USED (callop) = 1;
      finish_return_stmt (decay_conversion (callop, tf_warning_or_error));

      finish_compound_stmt (compound_stmt);
      finish_function_body (body);

      fn = finish_function (/*inline_p=*/true);
      if (!generic_lambda_p)
        expand_or_defer_fn (fn);

      if (nested)
        pop_function_context ();
      else
        --function_depth;
      return;
    }

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
  SET_DECL_LANGUAGE (statfn, lang_cplusplus);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
  if (DECL_IMMEDIATE_FUNCTION_P (callop))
    SET_DECL_IMMEDIATE_FUNCTION_P (fn);
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}

/* True if FN is the static function "_FUN" that gets returned from the lambda
   conversion operator.  */

bool
lambda_static_thunk_p (tree fn)
{
  return (fn && TREE_CODE (fn) == FUNCTION_DECL
          && DECL_ARTIFICIAL (fn)
          && DECL_STATIC_FUNCTION_P (fn)
          && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
}

bool
call_from_lambda_thunk_p (tree call)
{
  return (CALL_FROM_THUNK_P (call)
          && lambda_static_thunk_p (current_function_decl));
}

/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup.  */

bool
is_lambda_ignored_entity (tree val)
{
  /* Look past normal, non-VLA capture proxies.  */
  if (is_normal_capture_proxy (val)
      && !variably_modified_type_p (TREE_TYPE (val), NULL_TREE))
    return true;

  /* Always ignore lambda fields, their names are only for debugging.  */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class.  */
  if (tree fns = maybe_get_fns (val))
    if (LAMBDA_FUNCTION_P (OVL_FIRST (fns)))
      return true;

  return false;
}

/* Lambdas that appear in variable initializer or default argument
   scope get that in their mangling, so we need to record it.  Also,
   multiple lambdas in the same scope may need a mangling
   discriminator.  In ABI <= 17, there is a single per-scope sequence
   number.  In ABI >= 18, there are per-scope per-signature sequence
   numbers.  */
struct GTY(()) lambda_sig_count
{
  tree fn; // The lambda fn whose sig this is.
  unsigned count;
};
struct GTY(()) lambda_discriminator
{
  tree scope;
  unsigned nesting; // Inside a function, VAR_DECLs get the function
                    // as scope.  This counts that nesting.
  unsigned count;   // The per-scope counter.
  vec<lambda_sig_count, va_gc> *discriminators; // Per-signature counters
};
// The current scope.
static GTY(()) lambda_discriminator lambda_scope;
// Stack of previous scopes.
static GTY(()) vec<lambda_discriminator, va_gc> *lambda_scope_stack;

// Push DECL as lambda extra scope, also new discriminator counters.

void
start_lambda_scope (tree decl)
{
  gcc_checking_assert (decl);
  if (current_function_decl && VAR_P (decl))
    // If we're inside a function, we ignore variable scope.  Don't push.
    lambda_scope.nesting++;
  else
    {
      vec_safe_push (lambda_scope_stack, lambda_scope);
      lambda_scope.scope = decl;
      lambda_scope.nesting = 0;
      lambda_scope.count = 0;
      lambda_scope.discriminators = nullptr;
    }
}

// Pop from the current lambda extra scope.

void
finish_lambda_scope (void)
{
  if (!lambda_scope.nesting--)
    {
      lambda_scope = lambda_scope_stack->last ();
      lambda_scope_stack->pop ();
    }
}

// Record the current lambda scope into LAMBDA.

void
record_lambda_scope (tree lambda)
{
  LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope.scope;
  if (lambda_scope.scope)
    {
      tree closure = LAMBDA_EXPR_CLOSURE (lambda);
      gcc_checking_assert (closure);
      maybe_key_decl (lambda_scope.scope, TYPE_NAME (closure));
    }
}

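/* Editorial illustration (not part of the original source): two lambdas in
   the initializer of the same variable share an extra scope, so they need a
   discriminator to keep their closure types (and mangled names) distinct,
   e.g. (with the obvious standard headers)

     inline auto fns = std::pair{[] { return 1; }, [] { return 2; }};

   Both lambdas record 'fns' as their extra scope and are told apart by the
   per-scope counters maintained below.  */
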
// Compare lambda template heads TMPL_A and TMPL_B, used for both
// templated lambdas, and template template parameters of said lambda.

static bool
compare_lambda_template_head (tree tmpl_a, tree tmpl_b)
{
  // We only need one level of template parms.
  tree inner_a = INNERMOST_TEMPLATE_PARMS (DECL_TEMPLATE_PARMS (tmpl_a));
  tree inner_b = INNERMOST_TEMPLATE_PARMS (DECL_TEMPLATE_PARMS (tmpl_b));

  // We only compare explicit template parms, ignoring trailing
  // synthetic ones.
  int len_a = TREE_VEC_LENGTH (inner_a);
  int len_b = TREE_VEC_LENGTH (inner_b);

  for (int ix = 0, len = MAX (len_a, len_b); ix != len; ix++)
    {
      tree parm_a = NULL_TREE;
      if (ix < len_a)
        {
          parm_a = TREE_VEC_ELT (inner_a, ix);
          if (parm_a == error_mark_node)
            return false;
          parm_a = TREE_VALUE (parm_a);
          if (parm_a == error_mark_node)
            return false;
          if (DECL_VIRTUAL_P (parm_a))
            parm_a = NULL_TREE;
        }

      tree parm_b = NULL_TREE;
      if (ix < len_b)
        {
          parm_b = TREE_VEC_ELT (inner_b, ix);
          if (parm_b == error_mark_node)
            return false;
          parm_b = TREE_VALUE (parm_b);
          if (parm_b == error_mark_node)
            return false;
          if (DECL_VIRTUAL_P (parm_b))
            parm_b = NULL_TREE;
        }

      if (!parm_a && !parm_b)
        // We're done.
        break;

      if (!(parm_a && parm_b))
        return false;

      if (TREE_CODE (parm_a) != TREE_CODE (parm_b))
        return false;

      if (TREE_CODE (parm_a) == PARM_DECL)
        {
          if (TEMPLATE_PARM_PARAMETER_PACK (DECL_INITIAL (parm_a))
              != TEMPLATE_PARM_PARAMETER_PACK (DECL_INITIAL (parm_b)))
            return false;

          if (!same_type_p (TREE_TYPE (parm_a), TREE_TYPE (parm_b)))
            return false;
        }
      else
        {
          if (TEMPLATE_TYPE_PARAMETER_PACK (TREE_TYPE (parm_a))
              != TEMPLATE_TYPE_PARAMETER_PACK (TREE_TYPE (parm_b)))
            return false;

          if (TREE_CODE (parm_a) != TEMPLATE_DECL)
            gcc_checking_assert (TREE_CODE (parm_a) == TYPE_DECL);
          else if (!compare_lambda_template_head (parm_a, parm_b))
            return false;
        }
    }

  return true;
}

// Compare lambda signatures FN_A and FN_B, they may be TEMPLATE_DECLs too.

static bool
compare_lambda_sig (tree fn_a, tree fn_b)
{
  if (TREE_CODE (fn_a) == TEMPLATE_DECL
      && TREE_CODE (fn_b) == TEMPLATE_DECL)
    {
      if (!compare_lambda_template_head (fn_a, fn_b))
        return false;
      fn_a = DECL_TEMPLATE_RESULT (fn_a);
      fn_b = DECL_TEMPLATE_RESULT (fn_b);
    }
  else if (TREE_CODE (fn_a) == TEMPLATE_DECL
           || TREE_CODE (fn_b) == TEMPLATE_DECL)
    return false;

  if (fn_a == error_mark_node
      || fn_b == error_mark_node)
    return false;

  for (tree args_a = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn_a))),
         args_b = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn_b)));
       args_a || args_b;
       args_a = TREE_CHAIN (args_a), args_b = TREE_CHAIN (args_b))
    {
      if (!args_a || !args_b)
        return false;
      // This check also deals with differing variadicness.
      if (!same_type_p (TREE_VALUE (args_a), TREE_VALUE (args_b)))
        return false;
    }

  return true;
}

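/* Editorial illustration (not part of the original source): under the
   ABI >= 18 scheme, same-scope lambdas only share a counter when their
   signatures compare equal above, e.g. in

     inline auto t = std::tuple{[] (int) {}, [] (long) {}, [] (int) {}};

   the two '(int)' lambdas get per-signature discriminators 0 and 1, while
   the '(long)' lambda starts its own sequence.  */
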
// Record the per-scope discriminator of LAMBDA.  If the extra scope
// is empty, we must use the empty scope counter, which might not be
// the live one.

void
record_lambda_scope_discriminator (tree lambda)
{
  auto *slot = (vec_safe_is_empty (lambda_scope_stack)
                || LAMBDA_EXPR_EXTRA_SCOPE (lambda)
                ? &lambda_scope : lambda_scope_stack->begin ());
  LAMBDA_EXPR_SCOPE_ONLY_DISCRIMINATOR (lambda) = slot->count++;
}

// Record the per-scope per-signature discriminator of LAMBDA.  If the
// extra scope is empty, we must use the empty scope counter, which
// might not be the live one.

void
record_lambda_scope_sig_discriminator (tree lambda, tree fn)
{
  auto *slot = (vec_safe_is_empty (lambda_scope_stack)
                || LAMBDA_EXPR_EXTRA_SCOPE (lambda)
                ? &lambda_scope : lambda_scope_stack->begin ());
  gcc_checking_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == slot->scope);

  // A linear search, we're not expecting this to be a big list, and
  // this avoids needing a signature hash function.
  lambda_sig_count *sig;
  if (unsigned ix = vec_safe_length (slot->discriminators))
    for (sig = slot->discriminators->begin (); ix--; sig++)
      if (compare_lambda_sig (fn, sig->fn))
        goto found;
  {
    lambda_sig_count init = {fn, 0};
    sig = vec_safe_push (slot->discriminators, init);
  }
 found:
  LAMBDA_EXPR_SCOPE_SIG_DISCRIMINATOR (lambda) = sig->count++;
}

tree
start_lambda_function (tree fco, tree lambda_expr)
{
  /* Let the front end know that we are going to be defining this
     function.  */
  start_preparsed_function (fco,
                            NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);

  tree body = begin_function_body ();

  /* Push the proxies for any explicit captures.  */
  for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
       cap = TREE_CHAIN (cap))
    build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));

  return body;
}

/* Subroutine of prune_lambda_captures: CAP is a node in
   LAMBDA_EXPR_CAPTURE_LIST.  Return the variable it captures for which we
   might optimize away the capture, or NULL_TREE if there is no such
   variable.  */

static tree
var_to_maybe_prune (tree cap)
{
  if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
    /* Don't prune explicit captures.  */
    return NULL_TREE;

  tree mem = TREE_PURPOSE (cap);
  if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
    /* Packs and init-captures aren't captures of constant vars.  */
    return NULL_TREE;

  tree init = TREE_VALUE (cap);
  if (is_normal_capture_proxy (init))
    init = DECL_CAPTURED_VARIABLE (init);
  if (decl_constant_var_p (init))
    return init;

  return NULL_TREE;
}

/* walk_tree helper for prune_lambda_captures: Remember which capture proxies
   for constant variables are actually used in the lambda body.

   There will always be a DECL_EXPR for the capture proxy; remember it when we
   see it, but replace it with any other use.  */

static tree
mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
{
  hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;

  tree var = NULL_TREE;
  if (TREE_CODE (*t) == DECL_EXPR)
    {
      tree decl = DECL_EXPR_DECL (*t);
      if (is_constant_capture_proxy (decl))
        {
          var = DECL_CAPTURED_VARIABLE (decl);
          *walk_subtrees = 0;
        }
    }
  else if (!location_wrapper_p (*t) /* is_capture_proxy dislikes them.  */
           && is_constant_capture_proxy (*t))
    var = DECL_CAPTURED_VARIABLE (*t);

  if (var)
    {
      tree *&slot = const_vars.get_or_insert (var);
      if (!slot || VAR_P (*t))
        slot = t;
    }

  return NULL_TREE;
}

/* We're at the end of processing a lambda; go back and remove any captures of
   constant variables for which we've folded away all uses.  */

static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.  */
    return;
  /* Don't bother pruning in a template, we'll prune at instantiation time.  */
  if (dependent_type_p (TREE_TYPE (lam)))
    return;

  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
        {
          tree **use = const_vars.get (var);
          if (use && TREE_CODE (**use) == DECL_EXPR)
            {
              /* All uses of this capture were folded away, leaving only the
                 proxy declaration.  */

              /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
              *capp = TREE_CHAIN (cap);

              /* And out of TYPE_FIELDS.  */
              tree field = TREE_PURPOSE (cap);
              while (*fieldp != field)
                fieldp = &DECL_CHAIN (*fieldp);
              *fieldp = DECL_CHAIN (*fieldp);

              /* And remove the capture proxy declaration.  */
              **use = void_node;
              continue;
            }
        }

      capp = &TREE_CHAIN (cap);
    }
}

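/* Editorial illustration (not part of the original source): a default
   capture of a constant variable whose every use folds to a constant can be
   dropped from the closure, e.g.

     const int n = 42;
     auto f = [=] { return n; };  // every use of 'n' folds to 42, so the
                                  // implicit capture field can be removed

   Explicit captures are never pruned, as var_to_maybe_prune notes.  */
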
/* We've finished processing the body of a lambda: finish up BODY, prune any
   unneeded captures of constant variables, and emit the call operator if it
   isn't a template.  */

void
finish_lambda_function (tree body)
{
  finish_function_body (body);

  prune_lambda_captures (body);

  /* Finish the function and generate code for it if necessary.  */
  tree fn = finish_function (/*inline_p=*/true);

  /* Only expand if the call op is not a template.  */
  if (!DECL_TEMPLATE_INFO (fn))
    expand_or_defer_fn (fn);
}

#include "gt-cp-lambda.h"