/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2019 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cp-tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "toplev.h"
#include "gimplify.h"

/* Constructor for a lambda expression.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}

/* Create the closure object for a LAMBDA_EXPR.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      if (TREE_CODE (val) == TREE_LIST)
        val = build_x_compound_expr_from_list (val, ELK_INIT,
                                               tf_warning_or_error);

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && !TYPE_REF_P (TREE_TYPE (field)))
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization.  */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}

/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_UNNAMED_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx17)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}

/* Returns the type to use for the return type of the operator() of a
   closure class.  */

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return error_mark_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}

/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, call_op_identifier,
                          /*protect=*/0, /*want_type=*/false,
                          tf_warning_or_error);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}

/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

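/* For instance, for a non-dependent capture of a variable of type `int',
   a by-copy capture yields the field type `int' and a by-reference capture
   yields `int&'; an init-capture instead goes through `auto' deduction.  */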
tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
                           bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && type_dependent_expression_p (expr))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
        /* Add the reference now, so deduction doesn't lose
           outermost CV qualifiers of EXPR.  */
        type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else
    {
      type = non_reference (unlowered_expr_type (expr));

      if (!is_this
          && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
        type = build_reference_type (type);
    }

  return type;
}

/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy.  */

bool
is_capture_proxy (tree decl)
{
  return (VAR_P (decl)
          && DECL_HAS_VALUE_EXPR_P (decl)
          && !DECL_ANON_UNION_VAR_P (decl)
          && !DECL_DECOMPOSITION_P (decl)
          && !DECL_FNAME_P (decl)
          && !(DECL_ARTIFICIAL (decl)
               && DECL_LANG_SPECIFIC (decl)
               && DECL_OMP_PRIVATIZED_MEMBER (decl))
          && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  return (DECL_LANG_SPECIFIC (decl)
          && DECL_CAPTURED_VARIABLE (decl));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   of a constant variable.  */

bool
is_constant_capture_proxy (tree decl)
{
  if (is_normal_capture_proxy (decl))
    return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
  return false;
}

/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
        {
          gcc_assert (!is_normal_capture_proxy (cap));
          tree old = retrieve_local_specialization (cap);
          if (old)
            gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
        }
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}

/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}

/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}

/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  */

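/* The proxy is a VAR_DECL bearing the capture's source name whose
   DECL_VALUE_EXPR is the closure field access, so a use of `x' in the
   lambda body resolves to the field `__x' of the closure object.  */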
static tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !INDIRECT_TYPE_P (type))
    {
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                                     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  complete_type (type);

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      if (DECL_VLA_CAPTURE_P (member))
        {
          init = CONSTRUCTOR_ELT (init, 0)->value;
          init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
          init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
        }
      else
        {
          if (PACK_EXPANSION_P (init))
            init = PACK_EXPANSION_PATTERN (init);
        }

      if (INDIRECT_REF_P (init))
        init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);

      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      while (is_normal_capture_proxy (init))
        init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}

static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

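/* The record built below has two fields, so it is essentially
     struct { <element-type> *ptr; size_t max; };
   from which build_capture_proxy reconstructs the VLA type.  */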
static tree
vla_capture_type (tree array_type)
{
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}

/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
           && variably_modified_type_p (type, NULL_TREE))
    {
      sorry ("capture of variably-modified type %qT that is not an N3639 array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      return error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
                                        by_reference_p);
      if (type == error_mark_node)
        return error_mark_node;

      if (id == this_identifier && !by_reference_p)
        {
          gcc_assert (INDIRECT_TYPE_P (type));
          type = TREE_TYPE (type);
          initializer = cp_build_fold_indirect_ref (initializer);
        }

      if (dependent_type_p (type))
        ;
      else if (id != this_identifier && by_reference_p)
        {
          if (!lvalue_p (initializer))
            {
              error ("cannot capture %qE by reference", initializer);
              return error_mark_node;
            }
        }
      else
        {
          /* Capture by copy requires a complete type.  */
          type = complete_type (type);
          if (!COMPLETE_TYPE_P (type))
            {
              error ("capture by copy of incomplete type %qT", type);
              cxx_incomplete_type_inform (type);
              return error_mark_node;
            }
        }
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
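  /* For example, a capture named `x' is stored in a field named `__x'.  */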
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
          IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
        {
          pedwarn (input_location, 0,
                   "already captured %qD in lambda expression", id);
          return NULL_TREE;
        }
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
        internal_error ("trying to capture %qD in instantiation of "
                        "generic lambda", id);
      finish_member_declaration (member);
    }

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
  return NULL_TREE;
}

/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}

/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);
  tree var = NULL_TREE;
  tree saved_class_type = current_class_type;

  for (tree node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (this_capture_p
                          || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);

      /* Warn about deprecated implicit capture of this via [=].  */
      if (cxx_dialect >= cxx2a
          && this_capture_p
          && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY
          && !in_system_header_at (LAMBDA_EXPR_LOCATION (lambda)))
        {
          if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
                          "implicit capture of %qE via %<[=]%> is deprecated "
                          "in C++20", this_identifier))
            inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
                    "%<*this%> capture");
        }
    }

  current_class_type = saved_class_type;

  return var;
}

/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is nonzero.  If ADD_CAPTURE_P is negative,
   try to capture but don't complain if we can't.  */

tree
lambda_expr_this_capture (tree lambda, int add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture)
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
           1. a non-lambda function or NSDMI,
           2. a lambda function capturing 'this', or
           3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
        {
          if (add_capture_p
              && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* tlambda won't let us capture 'this'.  */
            break;

          if (add_capture_p)
            lambda_stack = tree_cons (NULL_TREE,
                                      tlambda,
                                      lambda_stack);

          tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
          tree containing_function
            = decl_function_context (TYPE_NAME (closure));

          tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
          if (ex && TREE_CODE (ex) == FIELD_DECL)
            {
              /* Lambda in an NSDMI.  We don't have a function to look up
                 'this' in, but we can find (or rebuild) the fake one from
                 inject_this_parameter.  */
              if (!containing_function && !COMPLETE_TYPE_P (closure))
                /* If we're parsing a lambda in a non-local class,
                   we can find the fake 'this' in scope_chain.  */
                init = scope_chain->x_current_class_ptr;
              else
                /* Otherwise it's either gone or buried in
                   function_context_stack, so make another.  */
                init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
                                        TYPE_UNQUALIFIED);
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture.  */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function.  */
              if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'.  */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'.  */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }
        }

      if (init)
        {
          if (add_capture_p)
            this_capture = add_default_capture (lambda_stack,
                                                /*id=*/this_identifier,
                                                init);
          else
            this_capture = init;
        }
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p == 1)
        {
          error ("%<this%> was not captured for this lambda function");
          result = error_mark_node;
        }
      else
        result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
         ensures that the transformed expression is an rvalue. ]  */
      result = rvalue (result);
    }

  return result;
}

/* Return the innermost LAMBDA_EXPR we're currently in, if any.  */

tree
current_lambda_expr (void)
{
  tree type = current_class_type;
  while (type && !LAMBDA_TYPE_P (type))
    type = decl_type_context (TYPE_NAME (type));
  if (type)
    return CLASSTYPE_LAMBDA_EXPR (type);
  else
    return NULL_TREE;
}

/* Return the current LAMBDA_EXPR, if this is a resolvable dummy
   object.  NULL otherwise.  */

static tree
resolvable_dummy_lambda (tree object)
{
  if (!is_dummy_object (object))
    return NULL_TREE;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, nonlambda_method_basetype ()))
    return CLASSTYPE_LAMBDA_EXPR (current_class_type);

  return NULL_TREE;
}

/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
      if (cap != error_mark_node)
        object = build_fold_indirect_ref (cap);

  return object;
}

/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
        /* We've not yet captured, so look at the function set of
           interest.  */
        if (BASELINK_P (fns))
          fns = BASELINK_FUNCTIONS (fns);
        bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
        if (id_expr)
          fns = TREE_OPERAND (fns, 0);

        for (lkp_iterator iter (fns); iter; ++iter)
          if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
               || TREE_CODE (*iter) == TEMPLATE_DECL)
              && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
            {
              /* Found a non-static member.  Capture this.  */
              lambda_expr_this_capture (lam, /*maybe*/-1);
              break;
            }
      }
}

/* Returns the innermost non-lambda function.  */

tree
current_nonlambda_function (void)
{
  tree fn = current_function_decl;
  while (fn && LAMBDA_FUNCTION_P (fn))
    fn = decl_function_context (fn);
  return fn;
}

/* Returns the method basetype of the innermost non-lambda function, including
   a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  if (!current_class_ref)
    return NULL_TREE;

  tree type = current_class_type;
  if (!type || !LAMBDA_TYPE_P (type))
    return type;

  while (true)
    {
      tree lam = CLASSTYPE_LAMBDA_EXPR (type);
      tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
      if (ex && TREE_CODE (ex) == FIELD_DECL)
        /* Lambda in an NSDMI.  */
        return DECL_CONTEXT (ex);

      tree fn = TYPE_CONTEXT (type);
      if (!fn || TREE_CODE (fn) != FUNCTION_DECL
          || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
        /* No enclosing non-lambda method.  */
        return NULL_TREE;
      if (!LAMBDA_FUNCTION_P (fn))
        /* Found an enclosing non-lambda method.  */
        return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
      type = DECL_CONTEXT (fn);
    }
}

/* Like current_scope, but looking through lambdas.  */

tree
current_nonlambda_scope (void)
{
  tree scope = current_scope ();
  for (;;)
    {
      if (TREE_CODE (scope) == FUNCTION_DECL
          && LAMBDA_FUNCTION_P (scope))
        {
          scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
          continue;
        }
      else if (LAMBDA_TYPE_P (scope))
        {
          scope = CP_TYPE_CONTEXT (scope);
          continue;
        }
      break;
    }
  return scope;
}

/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots.  */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

/* Return true iff CALLOP is the op() for a generic lambda.  */

bool
generic_lambda_fn_p (tree callop)
{
  return (LAMBDA_FUNCTION_P (callop)
          && DECL_TEMPLATE_INFO (callop)
          && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
}

/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */

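/* For example, a capture-less lambda such as [] (int i) { return i; }
   gets a conversion operator to `int (*) (int)' whose result is the
   static thunk _FUN built below.  */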
void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  tree thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a decltype
         return expression for a deduced return call op to allow for simple
         implementation of the conversion operator.  */

      tree instance = cp_build_fold_indirect_ref (thisarg);
      tree objfn = lookup_template_function (DECL_NAME (callop),
                                             DECL_TI_ARGS (callop));
      objfn = build_min (COMPONENT_REF, NULL_TREE,
                         instance, objfn, NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
        tree new_node = copy_node (src);

        /* Clear TREE_ADDRESSABLE on thunk arguments.  */
        TREE_ADDRESSABLE (new_node) = 0;

        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            tree a = tgt;
            if (DECL_PACK_P (tgt))
              {
                a = make_pack_expansion (a);
                PACK_EXPANSION_LOCAL_P (a) = true;
              }
            CALL_EXPR_ARG (call, ix) = a;

            if (decltype_call)
              {
                /* Avoid capturing variables in this context.  */
                ++cp_unevaluated_operand;
                CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
                --cp_unevaluated_operand;
              }

            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
        }
    }
  else
    call = build_call_a (callop,
                         direct_argvec->length (),
                         direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
              (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  SET_DECL_LANGUAGE (convfn, lang_cplusplus);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
  SET_DECL_LANGUAGE (statfn, lang_cplusplus);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
        (cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}

/* True if FN is the static function "_FUN" that gets returned from the lambda
   conversion operator.  */

bool
lambda_static_thunk_p (tree fn)
{
  return (fn && TREE_CODE (fn) == FUNCTION_DECL
          && DECL_ARTIFICIAL (fn)
          && DECL_STATIC_FUNCTION_P (fn)
          && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
}

/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup.  */

bool
is_lambda_ignored_entity (tree val)
{
  /* Look past normal capture proxies.  */
  if (is_normal_capture_proxy (val))
    return true;

  /* Always ignore lambda fields, their names are only for debugging.  */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class.  */
  if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
    return true;

  return false;
}

/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
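/* record_lambda_scope gives each lambda in the same scope a successive
   discriminator (0, 1, 2, ...), which keeps the mangled names of, say,
   two lambdas in one variable's initializer distinct.  */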
static GTY(()) tree lambda_scope;
static GTY(()) int lambda_count;
struct GTY(()) tree_int
{
  tree t;
  int i;
};
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;

void
start_lambda_scope (tree decl)
{
  tree_int ti;
  gcc_assert (decl);
  /* Once we're inside a function, we ignore variable scope and just push
     the function again so that popping works properly.  */
  if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
    decl = current_function_decl;
  ti.t = lambda_scope;
  ti.i = lambda_count;
  vec_safe_push (lambda_scope_stack, ti);
  if (lambda_scope != decl)
    {
      /* Don't reset the count if we're still in the same function.  */
      lambda_scope = decl;
      lambda_count = 0;
    }
}

void
record_lambda_scope (tree lambda)
{
  LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
  LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
}

/* This lambda is an instantiation of a lambda in a template default argument
   that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either.  But we do
   need to use and increment the global count to avoid collisions.  */

void
record_null_lambda_scope (tree lambda)
{
  if (vec_safe_is_empty (lambda_scope_stack))
    record_lambda_scope (lambda);
  else
    {
      tree_int *p = lambda_scope_stack->begin();
      LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
      LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
    }
  gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
}

void
finish_lambda_scope (void)
{
  tree_int *p = &lambda_scope_stack->last ();
  if (lambda_scope != p->t)
    {
      lambda_scope = p->t;
      lambda_count = p->i;
    }
  lambda_scope_stack->pop ();
}

tree
start_lambda_function (tree fco, tree lambda_expr)
{
  /* Let the front end know that we are going to be defining this
     function.  */
  start_preparsed_function (fco,
                            NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);

  tree body = begin_function_body ();

  /* Push the proxies for any explicit captures.  */
  for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
       cap = TREE_CHAIN (cap))
    build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));

  return body;
}

/* Subroutine of prune_lambda_captures: CAP is a node in
   LAMBDA_EXPR_CAPTURE_LIST.  Return the variable it captures for which we
   might optimize away the capture, or NULL_TREE if there is no such
   variable.  */

static tree
var_to_maybe_prune (tree cap)
{
  if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
    /* Don't prune explicit captures.  */
    return NULL_TREE;

  tree mem = TREE_PURPOSE (cap);
  if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
    /* Packs and init-captures aren't captures of constant vars.  */
    return NULL_TREE;

  tree init = TREE_VALUE (cap);
  if (is_normal_capture_proxy (init))
    init = DECL_CAPTURED_VARIABLE (init);
  if (decl_constant_var_p (init))
    return init;

  return NULL_TREE;
}

/* walk_tree helper for prune_lambda_captures: Remember which capture proxies
   for constant variables are actually used in the lambda body.

   There will always be a DECL_EXPR for the capture proxy; remember it when we
   see it, but replace it with any other use.  */

static tree
mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
{
  hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;

  tree var = NULL_TREE;
  if (TREE_CODE (*t) == DECL_EXPR)
    {
      tree decl = DECL_EXPR_DECL (*t);
      if (is_constant_capture_proxy (decl))
        {
          var = DECL_CAPTURED_VARIABLE (decl);
          *walk_subtrees = 0;
        }
    }
  else if (is_constant_capture_proxy (*t))
    var = DECL_CAPTURED_VARIABLE (*t);

  if (var)
    {
      tree *&slot = const_vars.get_or_insert (var);
      if (!slot || VAR_P (*t))
        slot = t;
    }

  return NULL_TREE;
}

/* We're at the end of processing a lambda; go back and remove any captures of
   constant variables for which we've folded away all uses.  */

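/* For example, in
     const int x = 42;
     auto f = [=] { return x; };
   every use of `x' folds to the constant 42, so the implicit capture of
   `x' (and its closure field) can be removed.  */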
static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.  */
    return;

  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
        {
          tree **use = const_vars.get (var);
          if (use && TREE_CODE (**use) == DECL_EXPR)
            {
              /* All uses of this capture were folded away, leaving only the
                 proxy declaration.  */

              /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
              *capp = TREE_CHAIN (cap);

              /* And out of TYPE_FIELDS.  */
              tree field = TREE_PURPOSE (cap);
              while (*fieldp != field)
                fieldp = &DECL_CHAIN (*fieldp);
              *fieldp = DECL_CHAIN (*fieldp);

              /* And remove the capture proxy declaration.  */
              **use = void_node;
              continue;
            }
        }

      capp = &TREE_CHAIN (cap);
    }
}

void
finish_lambda_function (tree body)
{
  finish_function_body (body);

  prune_lambda_captures (body);

  /* Finish the function and generate code for it if necessary.  */
  tree fn = finish_function (/*inline_p=*/true);

  /* Only expand if the call op is not a template.  */
  if (!DECL_TEMPLATE_INFO (fn))
    expand_or_defer_fn (fn);
}

#include "gt-cp-lambda.h"