/* gcc/cp/lambda.c — mirrored from git.ipfire.org (thirdparty/gcc.git, blame view).
   Related fix: PR c++/86969 — ICE with constexpr if and recursive generic
   lambdas.  */
5d9fd871 1/* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
fbd26352 6 Copyright (C) 1998-2019 Free Software Foundation, Inc.
5d9fd871 7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
19
20You should have received a copy of the GNU General Public License
21along with GCC; see the file COPYING3. If not see
22<http://www.gnu.org/licenses/>. */
23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
4cba6f60 27#include "cp-tree.h"
28#include "stringpool.h"
5d9fd871 29#include "cgraph.h"
30#include "tree-iterator.h"
5d9fd871 31#include "toplev.h"
72f9352a 32#include "gimplify.h"
5d9fd871 33
34/* Constructor for a lambda expression. */
35
36tree
37build_lambda_expr (void)
38{
39 tree lambda = make_node (LAMBDA_EXPR);
40 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
41 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
42 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
43 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
5d9fd871 44 LAMBDA_EXPR_MUTABLE_P (lambda) = false;
45 return lambda;
46}
47
/* Create the closure object for a LAMBDA_EXPR.  Returns LAMBDA_EXPR
   unchanged inside a template (or if it is erroneous); otherwise returns
   the aggregate-initialization expression for the closure, or
   error_mark_node if any capture was erroneous.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  /* Build one constructor element per capture, keyed by field name.  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      /* A TREE_LIST initializer (e.g. from an init-capture with several
         expressions) is folded into a single compound expression.  */
      if (TREE_CODE (val) == TREE_LIST)
        val = build_x_compound_expr_from_list (val, ELK_INIT,
                                               tf_warning_or_error);

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && !TYPE_REF_P (TREE_TYPE (field)))
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization.  */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}
124
125/* Return an initialized RECORD_TYPE for LAMBDA.
126 LAMBDA must have its explicit captures already. */
127
128tree
129begin_lambda_type (tree lambda)
130{
131 tree type;
132
133 {
134 /* Unique name. This is just like an unnamed class, but we cannot use
4f86cbb0 135 make_anon_name because of certain checks against TYPE_UNNAMED_P. */
5d9fd871 136 tree name;
137 name = make_lambda_name ();
138
139 /* Create the new RECORD_TYPE for this lambda. */
140 type = xref_tag (/*tag_code=*/record_type,
141 name,
142 /*scope=*/ts_lambda,
143 /*template_header_p=*/false);
240cc9cf 144 if (type == error_mark_node)
145 return error_mark_node;
5d9fd871 146 }
147
148 /* Designate it as a struct so that we can use aggregate initialization. */
149 CLASSTYPE_DECLARED_CLASS (type) = false;
150
151 /* Cross-reference the expression and the type. */
152 LAMBDA_EXPR_CLOSURE (lambda) = type;
153 CLASSTYPE_LAMBDA_EXPR (type) = lambda;
154
33603066 155 /* In C++17, assume the closure is literal; we'll clear the flag later if
156 necessary. */
40e2decb 157 if (cxx_dialect >= cxx17)
33603066 158 CLASSTYPE_LITERAL_P (type) = true;
159
5d9fd871 160 /* Clear base types. */
161 xref_basetypes (type, /*bases=*/NULL_TREE);
162
163 /* Start the class. */
164 type = begin_class_definition (type);
5d9fd871 165
166 return type;
167}
168
169/* Returns the type to use for the return type of the operator() of a
170 closure class. */
171
172tree
173lambda_return_type (tree expr)
174{
175 if (expr == NULL_TREE)
176 return void_type_node;
177 if (type_unknown_p (expr)
178 || BRACE_ENCLOSED_INITIALIZER_P (expr))
179 {
180 cxx_incomplete_type_error (expr, TREE_TYPE (expr));
86771497 181 return error_mark_node;
5d9fd871 182 }
183 gcc_checking_assert (!type_dependent_expression_p (expr));
184 return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
185}
186
187/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
188 closure type. */
189
190tree
191lambda_function (tree lambda)
192{
193 tree type;
194 if (TREE_CODE (lambda) == LAMBDA_EXPR)
195 type = LAMBDA_EXPR_CLOSURE (lambda);
196 else
197 type = lambda;
198 gcc_assert (LAMBDA_TYPE_P (type));
199 /* Don't let debug_tree cause instantiation. */
200 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
201 && !COMPLETE_OR_OPEN_TYPE_P (type))
202 return NULL_TREE;
ef8f6502 203 lambda = lookup_member (type, call_op_identifier,
5d9fd871 204 /*protect=*/0, /*want_type=*/false,
205 tf_warning_or_error);
206 if (lambda)
814b90ef 207 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
5d9fd871 208 return lambda;
209}
210
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
                           bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && type_dependent_expression_p (expr))
    {
      /* The type can't be computed until instantiation: wrap the capture
         expression in a DECLTYPE_TYPE carrying the capture flags.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      /* Init capture: the field type is deduced from the initializer as
         if by 'auto' (or 'auto &' for a by-reference capture).  */
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
        /* Add the reference now, so deduction doesn't lose
           outermost CV qualifiers of EXPR.  */
        type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else
    {
      /* Plain capture (or 'this'): take the expression's type with any
         reference stripped.  */
      type = non_reference (unlowered_expr_type (expr));

      /* By-reference captures (and captures of functions) become
         reference fields; 'this' is never a reference.  */
      if (!is_this
          && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
        type = build_reference_type (type);
    }

  return type;
}
254
255/* Returns true iff DECL is a lambda capture proxy variable created by
256 build_capture_proxy. */
257
258bool
259is_capture_proxy (tree decl)
260{
261 return (VAR_P (decl)
262 && DECL_HAS_VALUE_EXPR_P (decl)
263 && !DECL_ANON_UNION_VAR_P (decl)
c2f14a91 264 && !DECL_DECOMPOSITION_P (decl)
c3a961ad 265 && !DECL_FNAME_P (decl)
9b0e9786 266 && !(DECL_ARTIFICIAL (decl)
267 && DECL_LANG_SPECIFIC (decl)
268 && DECL_OMP_PRIVATIZED_MEMBER (decl))
5d9fd871 269 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
270}
271
272/* Returns true iff DECL is a capture proxy for a normal capture
273 (i.e. without explicit initializer). */
274
275bool
276is_normal_capture_proxy (tree decl)
277{
278 if (!is_capture_proxy (decl))
279 /* It's not a capture proxy. */
280 return false;
281
a7ea8f96 282 return (DECL_LANG_SPECIFIC (decl)
283 && DECL_CAPTURED_VARIABLE (decl));
5d9fd871 284}
285
80fdc40f 286/* Returns true iff DECL is a capture proxy for a normal capture
287 of a constant variable. */
288
289bool
290is_constant_capture_proxy (tree decl)
291{
292 if (is_normal_capture_proxy (decl))
293 return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
294 return false;
295}
296
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      /* Record the proxy as the local specialization of the captured
         variable, so uses of the variable in the body find the proxy.  */
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
        {
          /* Sanity: the captured entity is the original variable (not
             itself a proxy), and any pre-existing specialization must
             belong to a different (enclosing) function.  */
          gcc_assert (!is_normal_capture_proxy (cap));
          tree old = retrieve_local_specialization (cap);
          if (old)
            gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
        }
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.
     NOTE(review): index 1 of stmt_list_stack appears to be that outermost
     body block's list, matching the pushdecl above — confirm.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
326
327/* We've just finished processing a lambda; if the containing scope is also
328 a lambda, insert any capture proxies that were created while processing
329 the nested lambda. */
330
331void
332insert_pending_capture_proxies (void)
333{
334 tree lam;
335 vec<tree, va_gc> *proxies;
336 unsigned i;
337
338 if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
339 return;
340
341 lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
342 proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
343 for (i = 0; i < vec_safe_length (proxies); ++i)
344 {
345 tree var = (*proxies)[i];
346 insert_capture_proxy (var);
347 }
348 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
349 LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
350}
351
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* The field's type isn't known yet (e.g. in a generic lambda);
         defer via a DECLTYPE_TYPE wrapping the member reference.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  /* A captured parameter pack yields a pack-expansion proxy type.  */
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
378
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  INIT is the capture's initializer; for a normal capture it
   is used to record the captured variable on the proxy.  */

static tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  DECL_ARGUMENTS (fn)
     is the closure 'this' parameter of the call operator.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !INDIRECT_TYPE_P (type))
    {
      /* '*this' was captured by value: the 'this' proxy is a const
         pointer to the captured copy.  */
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                                     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  complete_type (type);

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      /* For a normal capture, record the captured entity so that e.g.
         is_normal_capture_proxy can recover it.  Peel the initializer
         back to the underlying VAR_DECL/PARM_DECL.  */
      if (DECL_VLA_CAPTURE_P (member))
        {
          init = CONSTRUCTOR_ELT (init, 0)->value;
          init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
          init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
        }
      else
        {
          if (PACK_EXPANSION_P (init))
            init = PACK_EXPANSION_PATTERN (init);
        }

      if (INDIRECT_REF_P (init))
        init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);

      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      /* An enclosing lambda's proxy: chase down to the real variable.  */
      while (is_normal_capture_proxy (init))
        init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    /* Not inside the operator() yet; defer until it is opened
       (see insert_pending_capture_proxies).  */
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
474
/* Cached identifiers for the fields of the VLA capture struct.  */
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type, tree lambda)
{
  tree closure = LAMBDA_EXPR_CLOSURE (lambda);
  tree type = make_class_type (RECORD_TYPE);
  cp_binding_level *slev = current_binding_level;
  if (closure)
    {
      /* If we're already inside the lambda body, force the capture type out
         into the enclosing context, so we don't crash trying to instantiate
         the capture field in tsubst_lambda_expr.  We won't have a TAG_DEFN
         from finish_struct in the enclosing context, which we work around in
         tsubst_lambda_expr.  */
      TYPE_CONTEXT (type) = TYPE_CONTEXT (closure);
      /* Walk out to the first binding level past the closure's own
         levels, and push the tag there.  */
      cp_binding_level *b = current_binding_level;
      for (;; b = b->level_chain)
        if (b->this_entity == closure)
          {
            while (b->this_entity == closure)
              b = b->level_chain;
            break;
          }
      current_binding_level = b;
    }
  type = pushtag (make_anon_name (), type, ts_current);
  /* Restore the binding level saved above.  */
  current_binding_level = slev;
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  /* The struct holds a pointer to the element type ("ptr") and the
     maximum index ("max", of sizetype).  */
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
521
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  EXPLICIT_INIT_P is true for a
   C++14 init-capture.  Returns the capture proxy, NULL_TREE when the
   proxy is built later (explicit captures, before the body is open),
   or error_mark_node on error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* For a pack-expansion capture, work on the pattern and remember to
     re-wrap the field type and list entry below.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type, lambda);
    }
  else if (!dependent_type_p (type)
           && variably_modified_type_p (type, NULL_TREE))
    {
      /* Variably-modified types other than VLAs aren't supported.  */
      sorry ("capture of variably-modified type %qT that is not an N3639 array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      return error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
                                        by_reference_p);
      if (type == error_mark_node)
        return error_mark_node;

      if (id == this_identifier && !by_reference_p)
        {
          /* Capture of '*this' by value: the field holds the object
             itself, not a pointer to it.  */
          gcc_assert (INDIRECT_TYPE_P (type));
          type = TREE_TYPE (type);
          initializer = cp_build_fold_indirect_ref (initializer);
        }

      if (dependent_type_p (type))
        ;  /* Checks below are deferred to instantiation.  */
      else if (id != this_identifier && by_reference_p)
        {
          /* Capture by reference needs an lvalue.  */
          if (!lvalue_p (initializer))
            {
              error ("cannot capture %qE by reference", initializer);
              return error_mark_node;
            }
        }
      else
        {
          /* Capture by copy requires a complete type.  */
          type = complete_type (type);
          if (!COMPLETE_TYPE_P (type))
            {
              error ("capture by copy of incomplete type %qT", type);
              cxx_incomplete_type_inform (type);
              return error_mark_node;
            }
        }
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
          IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
        {
          pedwarn (input_location, 0,
                   "already captured %qD in lambda expression", id);
          return NULL_TREE;
        }
      IDENTIFIER_MARKED (name) = true;
    }

  /* Re-wrap the pack expansion stripped at the top.  */
  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
        internal_error ("trying to capture %qD in instantiation of "
                        "generic lambda", id);
      finish_member_declaration (member);
    }

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
  return NULL_TREE;
}
683
684/* Register all the capture members on the list CAPTURES, which is the
685 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
686
687void
688register_capture_members (tree captures)
689{
690 if (captures == NULL_TREE)
691 return;
692
693 register_capture_members (TREE_CHAIN (captures));
6dcf5c5f 694
695 tree field = TREE_PURPOSE (captures);
696 if (PACK_EXPANSION_P (field))
697 field = PACK_EXPANSION_PATTERN (field);
698
5d9fd871 699 /* We set this in add_capture to avoid duplicates. */
6dcf5c5f 700 IDENTIFIER_MARKED (DECL_NAME (field)) = false;
701 finish_member_declaration (field);
5d9fd871 702}
703
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);
  tree var = NULL_TREE;
  tree saved_class_type = current_class_type;

  /* Capture in each lambda on the stack in turn; each level's proxy
     becomes the next level's initializer.  */
  for (tree node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (this_capture_p
                          || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);

      /* Warn about deprecated implicit capture of this via [=].  */
      if (cxx_dialect >= cxx2a
          && this_capture_p
          && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY
          && !in_system_header_at (LAMBDA_EXPR_LOCATION (lambda)))
        {
          if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
                          "implicit capture of %qE via %<[=]%> is deprecated "
                          "in C++20", this_identifier))
            inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
                    "%<*this%> capture");
        }
    }

  current_class_type = saved_class_type;

  return var;
}
752
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is nonzero.  If ADD_CAPTURE_P is negative,
   try to capture but don't complain if we can't.  */

tree
lambda_expr_this_capture (tree lambda, int add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture)
    {
      /* LAMBDA_STACK accumulates the enclosing lambdas that all need a
         'this' capture; INIT is the 'this' expression found at the outer
         end of the walk.  */
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
           1. a non-lambda function or NSDMI,
           2. a lambda function capturing 'this', or
           3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
        {
          if (add_capture_p
              && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* tlambda won't let us capture 'this'.  */
            break;

          if (add_capture_p)
            lambda_stack = tree_cons (NULL_TREE,
                                      tlambda,
                                      lambda_stack);

          tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
          tree containing_function
            = decl_function_context (TYPE_NAME (closure));

          tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
          if (ex && TREE_CODE (ex) == FIELD_DECL)
            {
              /* Lambda in an NSDMI.  We don't have a function to look up
                 'this' in, but we can find (or rebuild) the fake one from
                 inject_this_parameter.  */
              if (!containing_function && !COMPLETE_TYPE_P (closure))
                /* If we're parsing a lambda in a non-local class,
                   we can find the fake 'this' in scope_chain.  */
                init = scope_chain->x_current_class_ptr;
              else
                /* Otherwise it's either gone or buried in
                   function_context_stack, so make another.  */
                init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
                                        TYPE_UNQUALIFIED);
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture.  */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function.  */
              if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'.  */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          /* Step out to the next enclosing lambda.  */
          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'.  */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }
        }

      if (init)
        {
          if (add_capture_p)
            this_capture = add_default_capture (lambda_stack,
                                                /*id=*/this_identifier,
                                                init);
          else
            this_capture = init;
        }
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      /* ADD_CAPTURE_P == 1 means complain on failure; negative means
         best-effort (see function comment).  */
      if (add_capture_p == 1)
        {
          error ("%<this%> was not captured for this lambda function");
          result = error_mark_node;
        }
      else
        result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
         ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
880
80fdc40f 881/* Return the innermost LAMBDA_EXPR we're currently in, if any. */
882
883tree
884current_lambda_expr (void)
885{
886 tree type = current_class_type;
887 while (type && !LAMBDA_TYPE_P (type))
888 type = decl_type_context (TYPE_NAME (type));
889 if (type)
890 return CLASSTYPE_LAMBDA_EXPR (type);
891 else
892 return NULL_TREE;
893}
894
e395357f 895/* Return the current LAMBDA_EXPR, if this is a resolvable dummy
896 object. NULL otherwise.. */
5d9fd871 897
e395357f 898static tree
899resolvable_dummy_lambda (tree object)
5d9fd871 900{
901 if (!is_dummy_object (object))
e395357f 902 return NULL_TREE;
5d9fd871 903
904 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
905 gcc_assert (!TYPE_PTR_P (type));
906
907 if (type != current_class_type
908 && current_class_type
909 && LAMBDA_TYPE_P (current_class_type)
855ff3cc 910 && lambda_function (current_class_type)
e269786f 911 && DERIVED_FROM_P (type, nonlambda_method_basetype()))
e395357f 912 return CLASSTYPE_LAMBDA_EXPR (current_class_type);
913
914 return NULL_TREE;
915}
916
917/* We don't want to capture 'this' until we know we need it, i.e. after
918 overload resolution has chosen a non-static member function. At that
919 point we call this function to turn a dummy object into a use of the
920 'this' capture. */
921
922tree
923maybe_resolve_dummy (tree object, bool add_capture_p)
924{
925 if (tree lam = resolvable_dummy_lambda (object))
926 if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
927 if (cap != error_mark_node)
0744a0c1 928 object = build_fold_indirect_ref (cap);
5d9fd871 929
930 return object;
931}
932
/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
        /* We've not yet captured, so look at the function set of
           interest.  */
        if (BASELINK_P (fns))
          fns = BASELINK_FUNCTIONS (fns);
        bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
        if (id_expr)
          fns = TREE_OPERAND (fns, 0);

        for (lkp_iterator iter (fns); iter; ++iter)
          /* For a template-id, only TEMPLATE_DECLs count; otherwise skip
             USING_DECLs, whose target we can't inspect here.  */
          if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
               || TREE_CODE (*iter) == TEMPLATE_DECL)
              && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
            {
              /* Found a non-static member.  Capture this.  */
              lambda_expr_this_capture (lam, /*maybe*/-1);
              break;
            }
      }
}
965
ed7bf2d1 966/* Returns the innermost non-lambda function. */
967
968tree
969current_nonlambda_function (void)
970{
971 tree fn = current_function_decl;
972 while (fn && LAMBDA_FUNCTION_P (fn))
973 fn = decl_function_context (fn);
974 return fn;
975}
976
/* Returns the method basetype of the innermost non-lambda function, including
   a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  /* No object ('this') in scope at all.  */
  if (!current_class_ref)
    return NULL_TREE;

  tree type = current_class_type;
  if (!type || !LAMBDA_TYPE_P (type))
    /* Not inside a lambda: the current class itself is the answer.  */
    return type;

  /* We are inside a lambda closure; walk outward through enclosing
     lambdas until we find a non-lambda context.  */
  while (true)
    {
      tree lam = CLASSTYPE_LAMBDA_EXPR (type);
      tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
      if (ex && TREE_CODE (ex) == FIELD_DECL)
	/* Lambda in an NSDMI.  */
	return DECL_CONTEXT (ex);

      tree fn = TYPE_CONTEXT (type);
      if (!fn || TREE_CODE (fn) != FUNCTION_DECL
	  || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
	/* No enclosing non-lambda method.  */
	return NULL_TREE;
      if (!LAMBDA_FUNCTION_P (fn))
	/* Found an enclosing non-lambda method.  */
	return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
      /* FN is itself a lambda call operator; keep walking outward.  */
      type = DECL_CONTEXT (fn);
    }
}
1009
d05ba3ef 1010/* Like current_scope, but looking through lambdas. */
1011
1012tree
1013current_nonlambda_scope (void)
1014{
1015 tree scope = current_scope ();
1016 for (;;)
1017 {
1018 if (TREE_CODE (scope) == FUNCTION_DECL
1019 && LAMBDA_FUNCTION_P (scope))
1020 {
1021 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
1022 continue;
1023 }
1024 else if (LAMBDA_TYPE_P (scope))
1025 {
1026 scope = CP_TYPE_CONTEXT (scope);
1027 continue;
1028 }
1029 break;
1030 }
1031 return scope;
1032}
1033
814b90ef 1034/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
1035 indicated FN and NARGS, but do not initialize the return type or any of the
1036 argument slots. */
1037
1038static tree
1039prepare_op_call (tree fn, int nargs)
1040{
1041 tree t;
1042
1043 t = build_vl_exp (CALL_EXPR, nargs + 3);
1044 CALL_EXPR_FN (t) = fn;
1045 CALL_EXPR_STATIC_CHAIN (t) = NULL;
1046
1047 return t;
1048}
1049
3311d302 1050/* Return true iff CALLOP is the op() for a generic lambda. */
1051
1052bool
1053generic_lambda_fn_p (tree callop)
1054{
1055 return (LAMBDA_FUNCTION_P (callop)
1056 && DECL_TEMPLATE_INFO (callop)
1057 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
1058}
1059
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  Captureless lambdas only: the conversion is synthesized as a
   conversion operator returning a pointer to a static thunk "_FUN" that
   forwards to op() with a null object argument.  */

void
maybe_add_lambda_conv_op (tree type)
{
  /* Whether we are currently inside a function body (affects function
     context push/pop below).  */
  bool nested = (cfun != NULL);
  /* Whether the closure type itself was defined inside a function.  */
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  /* Only captureless lambdas convert to function pointer.  */
  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  /* A null 'this' of the right pointer type; the thunk deliberately calls
     op() with a null object since there are no captures to read.  */
  tree thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = cp_build_fold_indirect_ref (thisarg);
      tree objfn = lookup_template_function (DECL_NAME (callop),
					     DECL_TI_ARGS (callop));
      objfn = build_min (COMPONENT_REF, NULL_TREE,
			 instance, objfn, NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    /* Skip the 'this' parameter of op().  */
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
	tree new_node = copy_node (src);

	/* Clear TREE_ADDRESSABLE on thunk arguments.  */
	TREE_ADDRESSABLE (new_node) = 0;

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    tree a = tgt;
	    if (DECL_PACK_P (tgt))
	      {
		a = make_pack_expansion (a);
		PACK_EXPANSION_LOCAL_P (a) = true;
	      }
	    CALL_EXPR_ARG (call, ix) = a;

	    if (decltype_call)
	      {
		/* Avoid capturing variables in this context.  */
		++cp_unevaluated_operand;
		CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
		--cp_unevaluated_operand;
	      }

	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  /* Deduce the return type from the decltype of the forwarded
	     call expression.  */
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  /* STATTYPE is the type of the static thunk: op()'s signature minus the
     object parameter, preserving attributes and (if enabled) noexcept.  */
  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
	      (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  SET_DECL_LANGUAGE (convfn, lang_cplusplus);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
  SET_DECL_LANGUAGE (statfn, lang_cplusplus);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
1339
33603066 1340/* True if FN is the static function "_FUN" that gets returned from the lambda
1341 conversion operator. */
1342
1343bool
1344lambda_static_thunk_p (tree fn)
1345{
1346 return (fn && TREE_CODE (fn) == FUNCTION_DECL
1347 && DECL_ARTIFICIAL (fn)
1348 && DECL_STATIC_FUNCTION_P (fn)
1349 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1350}
1351
5d9fd871 1352/* Returns true iff VAL is a lambda-related declaration which should
1353 be ignored by unqualified lookup. */
1354
1355bool
1356is_lambda_ignored_entity (tree val)
1357{
6f20c785 1358 /* Look past normal capture proxies. */
1359 if (is_normal_capture_proxy (val))
5d9fd871 1360 return true;
1361
1362 /* Always ignore lambda fields, their names are only for debugging. */
1363 if (TREE_CODE (val) == FIELD_DECL
1364 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1365 return true;
1366
1367 /* None of the lookups that use qualify_lookup want the op() from the
1368 lambda; they want the one from the enclosing class. */
1369 if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1370 return true;
1371
1372 return false;
1373}
5407f1e9 1374
/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
/* The scope currently being used for lambda numbering.  */
static GTY(()) tree lambda_scope;
/* Discriminator counter for lambdas within LAMBDA_SCOPE.  */
static GTY(()) int lambda_count;
/* A saved (scope, count) pair, one stack entry per nested lambda scope.  */
struct GTY(()) tree_int
{
  tree t;
  int i;
};
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
1386
1387void
1388start_lambda_scope (tree decl)
1389{
1390 tree_int ti;
1391 gcc_assert (decl);
1392 /* Once we're inside a function, we ignore variable scope and just push
1393 the function again so that popping works properly. */
1394 if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
1395 decl = current_function_decl;
1396 ti.t = lambda_scope;
1397 ti.i = lambda_count;
1398 vec_safe_push (lambda_scope_stack, ti);
1399 if (lambda_scope != decl)
1400 {
1401 /* Don't reset the count if we're still in the same function. */
1402 lambda_scope = decl;
1403 lambda_count = 0;
1404 }
1405}
1406
1407void
1408record_lambda_scope (tree lambda)
1409{
1410 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
1411 LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
1412}
1413
/* This lambda is an instantiation of a lambda in a template default argument
   that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either.  But we do
   need to use and increment the global count to avoid collisions.  */

void
record_null_lambda_scope (tree lambda)
{
  if (vec_safe_is_empty (lambda_scope_stack))
    record_lambda_scope (lambda);
  else
    {
      /* Use the outermost stack entry's scope and counter.  */
      tree_int *p = lambda_scope_stack->begin();
      LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
      LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
    }
  /* In both branches the recorded extra scope is expected to be null here
     (i.e. lambda_scope / the bottom stack entry is NULL_TREE in this
     situation) — the assert enforces that invariant.  */
  gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
}
1431
52e76545 1432void
1433finish_lambda_scope (void)
1434{
1435 tree_int *p = &lambda_scope_stack->last ();
1436 if (lambda_scope != p->t)
1437 {
1438 lambda_scope = p->t;
1439 lambda_count = p->i;
1440 }
1441 lambda_scope_stack->pop ();
1442}
1443
1444tree
1445start_lambda_function (tree fco, tree lambda_expr)
1446{
1447 /* Let the front end know that we are going to be defining this
1448 function. */
1449 start_preparsed_function (fco,
1450 NULL_TREE,
1451 SF_PRE_PARSED | SF_INCLASS_INLINE);
1452
1453 tree body = begin_function_body ();
1454
1455 /* Push the proxies for any explicit captures. */
1456 for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1457 cap = TREE_CHAIN (cap))
6f20c785 1458 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
52e76545 1459
1460 return body;
1461}
1462
80fdc40f 1463/* Subroutine of prune_lambda_captures: CAP is a node in
1464 LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we
1465 might optimize away the capture, or NULL_TREE if there is no such
1466 variable. */
1467
1468static tree
1469var_to_maybe_prune (tree cap)
1470{
1471 if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1472 /* Don't prune explicit captures. */
1473 return NULL_TREE;
1474
1475 tree mem = TREE_PURPOSE (cap);
1476 if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1477 /* Packs and init-captures aren't captures of constant vars. */
1478 return NULL_TREE;
1479
1480 tree init = TREE_VALUE (cap);
1481 if (is_normal_capture_proxy (init))
1482 init = DECL_CAPTURED_VARIABLE (init);
1483 if (decl_constant_var_p (init))
1484 return init;
1485
1486 return NULL_TREE;
1487}
1488
/* walk_tree helper for prune_lambda_captures: Remember which capture proxies
   for constant variables are actually used in the lambda body.

   There will always be a DECL_EXPR for the capture proxy; remember it when we
   see it, but replace it with any other use.  */

static tree
mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
{
  /* DATA maps each captured constant variable to a pointer to the most
     interesting use seen so far.  */
  hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;

  tree var = NULL_TREE;
  if (TREE_CODE (*t) == DECL_EXPR)
    {
      tree decl = DECL_EXPR_DECL (*t);
      if (is_constant_capture_proxy (decl))
	{
	  var = DECL_CAPTURED_VARIABLE (decl);
	  /* Don't walk into the proxy declaration itself.  */
	  *walk_subtrees = 0;
	}
    }
  else if (is_constant_capture_proxy (*t))
    /* A use of the proxy in the body.  */
    var = DECL_CAPTURED_VARIABLE (*t);

  if (var)
    {
      /* Fill an empty slot, and let an actual use (a VAR_DECL reference)
	 override a previously recorded DECL_EXPR; so at the end the slot
	 holds a DECL_EXPR only if the declaration was the sole mention.  */
      tree *&slot = const_vars.get_or_insert (var);
      if (!slot || VAR_P (*t))
	slot = t;
    }

  return NULL_TREE;
}
1522
/* We're at the end of processing a lambda; go back and remove any captures of
   constant variables for which we've folded away all uses.  */

static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.  */
    return;

  /* Map from captured constant variable to its surviving use (see
     mark_const_cap_r for the slot semantics).  */
  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  /* Walk the capture list and the closure's field list in parallel,
     splicing out entries whose only remaining use is the proxy's own
     DECL_EXPR.  */
  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
	{
	  tree **use = const_vars.get (var);
	  if (use && TREE_CODE (**use) == DECL_EXPR)
	    {
	      /* All uses of this capture were folded away, leaving only the
		 proxy declaration.  */

	      /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
	      *capp = TREE_CHAIN (cap);

	      /* And out of TYPE_FIELDS.  */
	      tree field = TREE_PURPOSE (cap);
	      while (*fieldp != field)
		fieldp = &DECL_CHAIN (*fieldp);
	      *fieldp = DECL_CHAIN (*fieldp);

	      /* And remove the capture proxy declaration.  */
	      **use = void_node;
	      continue;
	    }
	}

      capp = &TREE_CHAIN (cap);
    }
}
1571
52e76545 1572void
1573finish_lambda_function (tree body)
1574{
1575 finish_function_body (body);
1576
80fdc40f 1577 prune_lambda_captures (body);
1578
52e76545 1579 /* Finish the function and generate code for it if necessary. */
4775c814 1580 tree fn = finish_function (/*inline_p=*/true);
52e76545 1581
1582 /* Only expand if the call op is not a template. */
1583 if (!DECL_TEMPLATE_INFO (fn))
1584 expand_or_defer_fn (fn);
1585}
1586
5407f1e9 1587#include "gt-cp-lambda.h"