]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cp/lambda.c
Update copyright years.
[thirdparty/gcc.git] / gcc / cp / lambda.c
CommitLineData
5d9fd871 1/* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
aad93da1 6 Copyright (C) 1998-2017 Free Software Foundation, Inc.
5d9fd871 7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
19
20You should have received a copy of the GNU General Public License
21along with GCC; see the file COPYING3. If not see
22<http://www.gnu.org/licenses/>. */
23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
4cba6f60 27#include "cp-tree.h"
28#include "stringpool.h"
5d9fd871 29#include "cgraph.h"
30#include "tree-iterator.h"
5d9fd871 31#include "toplev.h"
72f9352a 32#include "gimplify.h"
605a4556 33#include "cp-cilkplus.h"
5d9fd871 34
35/* Constructor for a lambda expression. */
36
37tree
38build_lambda_expr (void)
39{
40 tree lambda = make_node (LAMBDA_EXPR);
41 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
42 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
43 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
44 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
45 LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE;
46 LAMBDA_EXPR_MUTABLE_P (lambda) = false;
47 return lambda;
48}
49
/* Create the closure object for a LAMBDA_EXPR.  Builds an aggregate
   initializer from the capture list and materializes it as a compound
   literal of the closure type.  Returns LAMBDA_EXPR unchanged inside a
   template, or error_mark_node if any capture was erroneous.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  /* Walk the capture list, turning each captured entity's initializer
     into a constructor element for the closure object.  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      /* A TREE_LIST initializer (e.g. from an init-capture with several
	 arguments) is collapsed into a single expression.  */
      if (TREE_CODE (val) == TREE_LIST)
	val = build_x_compound_expr_from_list (val, ELK_INIT,
					       tf_warning_or_error);

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  /* Restore the location we saved on entry.  */
  input_location = saved_loc;
  return expr;
}
126
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  Returns
   error_mark_node if the tag could not be created.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_UNNAMED_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
		     name,
		     /*scope=*/ts_lambda,
		     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx1z)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}
170
171/* Returns the type to use for the return type of the operator() of a
172 closure class. */
173
174tree
175lambda_return_type (tree expr)
176{
177 if (expr == NULL_TREE)
178 return void_type_node;
179 if (type_unknown_p (expr)
180 || BRACE_ENCLOSED_INITIALIZER_P (expr))
181 {
182 cxx_incomplete_type_error (expr, TREE_TYPE (expr));
86771497 183 return error_mark_node;
5d9fd871 184 }
185 gcc_checking_assert (!type_dependent_expression_p (expr));
186 return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
187}
188
189/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
190 closure type. */
191
192tree
193lambda_function (tree lambda)
194{
195 tree type;
196 if (TREE_CODE (lambda) == LAMBDA_EXPR)
197 type = LAMBDA_EXPR_CLOSURE (lambda);
198 else
199 type = lambda;
200 gcc_assert (LAMBDA_TYPE_P (type));
201 /* Don't let debug_tree cause instantiation. */
202 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
203 && !COMPLETE_OR_OPEN_TYPE_P (type))
204 return NULL_TREE;
205 lambda = lookup_member (type, ansi_opname (CALL_EXPR),
206 /*protect=*/0, /*want_type=*/false,
207 tf_warning_or_error);
208 if (lambda)
814b90ef 209 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
5d9fd871 210 return lambda;
211}
212
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P is true for an init-capture.
   The caller should add REFERENCE_TYPE for capture by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));
  if (!is_this && type_dependent_expression_p (expr))
    {
      /* The type isn't known yet; wrap the expression in a
	 DECLTYPE_TYPE so it is computed at instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      /* An init-capture deduces its type from the initializer, like
	 'auto x = init'.  */
      type = make_auto ();
      type = do_auto_deduction (type, expr, type);
    }
  else
    /* Plain capture (or 'this'): use the expression's type, stripped
       of any reference.  */
    type = non_reference (unlowered_expr_type (expr));
  return type;
}
239
240/* Returns true iff DECL is a lambda capture proxy variable created by
241 build_capture_proxy. */
242
243bool
244is_capture_proxy (tree decl)
245{
246 return (VAR_P (decl)
247 && DECL_HAS_VALUE_EXPR_P (decl)
248 && !DECL_ANON_UNION_VAR_P (decl)
249 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
250}
251
252/* Returns true iff DECL is a capture proxy for a normal capture
253 (i.e. without explicit initializer). */
254
255bool
256is_normal_capture_proxy (tree decl)
257{
258 if (!is_capture_proxy (decl))
259 /* It's not a capture proxy. */
260 return false;
261
b9c9af4c 262 if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
263 /* VLA capture. */
264 return true;
265
5d9fd871 266 /* It is a capture proxy, is it a normal capture? */
267 tree val = DECL_VALUE_EXPR (decl);
268 if (val == error_mark_node)
269 return true;
270
271 gcc_assert (TREE_CODE (val) == COMPONENT_REF);
272 val = TREE_OPERAND (val, 1);
273 return DECL_NORMAL_CAPTURE_P (val);
274}
275
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  cp_binding_level *b;
  tree stmt_list;

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  Walk outward until the next level up
     is the function-parameter scope; B is then the outermost body
     block.  */
  b = current_binding_level;
  for (;;)
    {
      cp_binding_level *n = b->level_chain;
      if (n->kind == sk_function_parms)
	break;
      b = n;
    }
  pushdecl_with_scope (var, b, false);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.
     Index 1 of stmt_list_stack is that outer block's list.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
303
/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  No-op unless the current function is a lambda
   op().  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  /* Fetch the enclosing lambda's queue of deferred proxies and insert
     each one into the now-open function body.  */
  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  /* The queue has been drained; release it.  */
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}
328
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  /* Look through an implicit dereference of a reference capture.  */
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* Type not yet known (e.g. still dependent): defer via a
	 DECLTYPE_TYPE computed at instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  /* A captured parameter pack yields a pack-expansion proxy type.  */
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
355
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  Returns the proxy VAR_DECL.  */

tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field, accessed through
     the op()'s 'this' parameter.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  /* A capture of '*this' (by copy) is stored as an object; the proxy
     for 'this' must still be a const pointer, so take its address.  */
  if (name == this_identifier && !POINTER_TYPE_P (type))
    {
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  /* The proxy is an artificial variable whose DECL_VALUE_EXPR forwards
     to the field access built above.  */
  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  /* Insert now if the op() is open; otherwise queue the proxy for
     insert_pending_capture_proxies.  */
  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
424
/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length, whose element type is taken from
   ARRAY_TYPE.  The struct has two fields: 'ptr' (element pointer) and
   'max' (maximum index, of type sizetype).  */

static tree
vla_capture_type (tree array_type)
{
  /* The field identifiers are interned once and reused across calls.  */
  static tree ptr_id, max_id;
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
447
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  EXPLICIT_INIT_P marks an
   init-capture.  Returns the capture proxy if the closure has been
   started, NULL_TREE if the proxy is deferred (or on a duplicate
   capture), or error_mark_node on error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* For a pack capture, work on the pattern and remember to re-expand
     below.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      /* Variable-size types other than N3639 runtime-bound arrays
	 cannot be captured at all.  */
      error ("capture of variable-size type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p);
      if (type == error_mark_node)
	return error_mark_node;
      if (id == this_identifier && !by_reference_p)
	{
	  /* Capturing '*this' by copy: store the object, not the
	     pointer.  */
	  gcc_assert (POINTER_TYPE_P (type));
	  type = TREE_TYPE (type);
	  initializer = cp_build_indirect_ref (initializer, RO_NULL,
					       tf_warning_or_error);
	}
      if (id != this_identifier && by_reference_p)
	{
	  type = build_reference_type (type);
	  if (!dependent_type_p (type) && !lvalue_p (initializer))
	    error ("cannot capture %qE by reference", initializer);
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!dependent_type_p (type) && !COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
	{
	  pedwarn (input_location, 0,
		   "already captured %qD in lambda expression", id);
	  return NULL_TREE;
	}
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    finish_member_declaration (member);

  /* Record the capture on the capture list, re-wrapping a pack capture
     as an expansion and restoring its original initializer.  */
  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}
597
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  /* Recurse on the tail first: add_capture builds the list by
     prepending, so this declares the members in their original
     order.  */
  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}
617
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  /* Capture into each lambda from the outside in; each level's capture
     proxy becomes the initializer for the next inner level.  */
  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      /* add_capture needs current_class_type to be this closure.  */
      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (this_capture_p
			  || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  /* Restore the class context saved on entry.  */
  current_class_type = saved_class_type;

  return var;
}
657
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  Returns error_mark_node (with
   a diagnostic) if 'this' is needed but not captured, or NULL_TREE
   when not capturing and no capture exists.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
	  || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      /* LAMBDA_STACK accumulates the nested lambdas that need the
	 capture; INIT is the expression 'this' ultimately comes from.  */
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
	 1. a non-lambda function or NSDMI,
	 2. a lambda function capturing 'this', or
	 3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  lambda_stack = tree_cons (NULL_TREE,
				    tlambda,
				    lambda_stack);

	  if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
	      && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
	    {
	      /* In an NSDMI, we don't have a function to look up the decl in,
		 but the fake 'this' pointer that we're using for parsing is
		 in scope_chain.  */
	      init = scope_chain->x_current_class_ptr;
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
	  tree containing_function = decl_function_context (closure_decl);

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }

	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* An outer lambda won't let us capture 'this'.  */
	    break;
	}

      if (init)
	{
	  if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
	 ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
775
/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  ADD_CAPTURE_P is forwarded to
   lambda_expr_this_capture.  Returns OBJECT unchanged when it is not a
   dummy or no capture applies.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (!is_dummy_object (object))
    return object;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  /* Only rewrite the dummy when we're inside a lambda whose enclosing
     (non-lambda) class derives appropriately.  */
  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    {
      /* In a lambda, need to go through 'this' capture.  */
      tree lam = CLASSTYPE_LAMBDA_EXPR (current_class_type);
      tree cap = lambda_expr_this_capture (lam, add_capture_p);
      if (cap && cap != error_mark_node)
	object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
				       RO_NULL, tf_warning_or_error);
    }

  return object;
}
806
ed7bf2d1 807/* Returns the innermost non-lambda function. */
808
809tree
810current_nonlambda_function (void)
811{
812 tree fn = current_function_decl;
813 while (fn && LAMBDA_FUNCTION_P (fn))
814 fn = decl_function_context (fn);
815 return fn;
816}
817
/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  if (!current_class_ref)
    return NULL_TREE;

  /* Outside a lambda, the current class type is the answer.  */
  type = current_class_type;
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}
843
d05ba3ef 844/* Like current_scope, but looking through lambdas. */
845
846tree
847current_nonlambda_scope (void)
848{
849 tree scope = current_scope ();
850 for (;;)
851 {
852 if (TREE_CODE (scope) == FUNCTION_DECL
853 && LAMBDA_FUNCTION_P (scope))
854 {
855 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
856 continue;
857 }
858 else if (LAMBDA_TYPE_P (scope))
859 {
860 scope = CP_TYPE_CONTEXT (scope);
861 continue;
862 }
863 break;
864 }
865 return scope;
866}
867
814b90ef 868/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
869 indicated FN and NARGS, but do not initialize the return type or any of the
870 argument slots. */
871
872static tree
873prepare_op_call (tree fn, int nargs)
874{
875 tree t;
876
877 t = build_vl_exp (CALL_EXPR, nargs + 3);
878 CALL_EXPR_FN (t) = fn;
879 CALL_EXPR_STATIC_CHAIN (t) = NULL;
880
881 return t;
882}
883
3311d302 884/* Return true iff CALLOP is the op() for a generic lambda. */
885
886bool
887generic_lambda_fn_p (tree callop)
888{
889 return (LAMBDA_FUNCTION_P (callop)
890 && DECL_TEMPLATE_INFO (callop)
891 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
892}
893
5d9fd871 894/* If the closure TYPE has a static op(), also add a conversion to function
895 pointer. */
896
897void
898maybe_add_lambda_conv_op (tree type)
899{
cbb83bc5 900 bool nested = (cfun != NULL);
64d8d39e 901 bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
5d9fd871 902 tree callop = lambda_function (type);
f16153b7 903 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
5d9fd871 904
f16153b7 905 if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
906 || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
5d9fd871 907 return;
908
909 if (processing_template_decl)
910 return;
911
3311d302 912 bool const generic_lambda_p = generic_lambda_fn_p (callop);
814b90ef 913
4c0924ef 914 if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
5d9fd871 915 {
916 /* If the op() wasn't instantiated due to errors, give up. */
917 gcc_assert (errorcount || sorrycount);
918 return;
919 }
920
814b90ef 921 /* Non-template conversion operators are defined directly with build_call_a
922 and using DIRECT_ARGVEC for arguments (including 'this'). Templates are
923 deferred and the CALL is built in-place. In the case of a deduced return
924 call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
925 the return type is also built in-place. The arguments of DECLTYPE_CALL in
926 the return expression may differ in flags from those in the body CALL. In
927 particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
928 the body CALL, but not in DECLTYPE_CALL. */
929
930 vec<tree, va_gc> *direct_argvec = 0;
931 tree decltype_call = 0, call = 0;
6d02e6b2 932 tree optype = TREE_TYPE (callop);
933 tree fn_result = TREE_TYPE (optype);
814b90ef 934
3a3fc4a7 935 tree thisarg = build_nop (TREE_TYPE (DECL_ARGUMENTS (callop)),
936 null_pointer_node);
814b90ef 937 if (generic_lambda_p)
938 {
939 /* Prepare the dependent member call for the static member function
940 '_FUN' and, potentially, prepare another call to be used in a decltype
941 return expression for a deduced return call op to allow for simple
942 implementation of the conversion operator. */
943
3a3fc4a7 944 tree instance = cp_build_indirect_ref (thisarg, RO_NULL,
945 tf_warning_or_error);
814b90ef 946 tree objfn = build_min (COMPONENT_REF, NULL_TREE,
947 instance, DECL_NAME (callop), NULL_TREE);
948 int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;
949
950 call = prepare_op_call (objfn, nargs);
951 if (type_uses_auto (fn_result))
952 decltype_call = prepare_op_call (objfn, nargs);
953 }
954 else
955 {
956 direct_argvec = make_tree_vector ();
3a3fc4a7 957 direct_argvec->quick_push (thisarg);
814b90ef 958 }
959
960 /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
961 declare the static member function "_FUN" below. For each arg append to
962 DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
963 call args (for the template case). If a parameter pack is found, expand
964 it, flagging it as PACK_EXPANSION_LOCAL_P for the body call. */
965
966 tree fn_args = NULL_TREE;
967 {
968 int ix = 0;
969 tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
a575a67c 970 tree tgt = NULL;
814b90ef 971
972 while (src)
973 {
974 tree new_node = copy_node (src);
975
976 if (!fn_args)
977 fn_args = tgt = new_node;
978 else
979 {
980 TREE_CHAIN (tgt) = new_node;
981 tgt = new_node;
982 }
983
984 mark_exp_read (tgt);
985
986 if (generic_lambda_p)
987 {
72f9352a 988 ++processing_template_decl;
989 tree a = forward_parm (tgt);
990 --processing_template_decl;
991
992 CALL_EXPR_ARG (call, ix) = a;
993 if (decltype_call)
994 CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);
995
996 if (PACK_EXPANSION_P (a))
997 /* Set this after unsharing so it's not in decltype_call. */
998 PACK_EXPANSION_LOCAL_P (a) = true;
999
814b90ef 1000 ++ix;
1001 }
1002 else
1003 vec_safe_push (direct_argvec, tgt);
1004
1005 src = TREE_CHAIN (src);
1006 }
1007 }
1008
1009
1010 if (generic_lambda_p)
1011 {
1012 if (decltype_call)
1013 {
1014 ++processing_template_decl;
1015 fn_result = finish_decltype_type
1016 (decltype_call, /*id_expression_or_member_access_p=*/false,
1017 tf_warning_or_error);
1018 --processing_template_decl;
1019 }
1020 }
1021 else
1022 call = build_call_a (callop,
1023 direct_argvec->length (),
1024 direct_argvec->address ());
1025
1026 CALL_FROM_THUNK_P (call) = 1;
33603066 1027 SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
814b90ef 1028
1029 tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
6d02e6b2 1030 stattype = (cp_build_type_attribute_variant
1031 (stattype, TYPE_ATTRIBUTES (optype)));
2e9e9363 1032 if (flag_noexcept_type
1033 && TYPE_NOTHROW_P (TREE_TYPE (callop)))
1034 stattype = build_exception_variant (stattype, noexcept_true_spec);
5d9fd871 1035
1036 /* First build up the conversion op. */
1037
814b90ef 1038 tree rettype = build_pointer_type (stattype);
1039 tree name = mangle_conv_op_name_for_type (rettype);
1040 tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
1041 tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
1042 tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
1043 tree fn = convfn;
5d9fd871 1044 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
5d4b30ea 1045 SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
5d9fd871 1046 SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
1047 grokclassfn (type, fn, NO_SPECIAL);
1048 set_linkage_according_to_type (type, fn);
1049 rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
1050 DECL_IN_AGGR_P (fn) = 1;
1051 DECL_ARTIFICIAL (fn) = 1;
1052 DECL_NOT_REALLY_EXTERN (fn) = 1;
1053 DECL_DECLARED_INLINE_P (fn) = 1;
1054 DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
64d8d39e 1055 if (nested_def)
5d9fd871 1056 DECL_INTERFACE_KNOWN (fn) = 1;
1057
814b90ef 1058 if (generic_lambda_p)
1059 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1060
5d9fd871 1061 add_method (type, fn, NULL_TREE);
1062
1063 /* Generic thunk code fails for varargs; we'll complain in mark_used if
1064 the conversion op is used. */
1065 if (varargs_function_p (callop))
1066 {
ea17a80d 1067 DECL_DELETED_FN (fn) = 1;
5d9fd871 1068 return;
1069 }
1070
1071 /* Now build up the thunk to be returned. */
1072
1073 name = get_identifier ("_FUN");
814b90ef 1074 tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
1075 fn = statfn;
5d9fd871 1076 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
5d9fd871 1077 grokclassfn (type, fn, NO_SPECIAL);
1078 set_linkage_according_to_type (type, fn);
1079 rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
1080 DECL_IN_AGGR_P (fn) = 1;
1081 DECL_ARTIFICIAL (fn) = 1;
1082 DECL_NOT_REALLY_EXTERN (fn) = 1;
1083 DECL_DECLARED_INLINE_P (fn) = 1;
1084 DECL_STATIC_FUNCTION_P (fn) = 1;
814b90ef 1085 DECL_ARGUMENTS (fn) = fn_args;
1086 for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
5d9fd871 1087 {
1088 /* Avoid duplicate -Wshadow warnings. */
1089 DECL_NAME (arg) = NULL_TREE;
1090 DECL_CONTEXT (arg) = fn;
1091 }
64d8d39e 1092 if (nested_def)
5d9fd871 1093 DECL_INTERFACE_KNOWN (fn) = 1;
1094
814b90ef 1095 if (generic_lambda_p)
1096 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1097
59ec98b2 1098 if (flag_sanitize & SANITIZE_NULL)
1099 {
1100 /* Don't UBsan this function; we're deliberately calling op() with a null
1101 object argument. */
1102 tree attrs = build_tree_list (get_identifier ("no_sanitize_undefined"),
1103 NULL_TREE);
1104 cplus_decl_attributes (&fn, attrs, 0);
1105 }
7caa8236 1106
5d9fd871 1107 add_method (type, fn, NULL_TREE);
1108
1109 if (nested)
1110 push_function_context ();
1111 else
1112 /* Still increment function_depth so that we don't GC in the
1113 middle of an expression. */
1114 ++function_depth;
1115
1116 /* Generate the body of the thunk. */
1117
1118 start_preparsed_function (statfn, NULL_TREE,
1119 SF_PRE_PARSED | SF_INCLASS_INLINE);
1120 if (DECL_ONE_ONLY (statfn))
1121 {
1122 /* Put the thunk in the same comdat group as the call op. */
415d1b9a 1123 cgraph_node::get_create (statfn)->add_to_same_comdat_group
1124 (cgraph_node::get_create (callop));
5d9fd871 1125 }
814b90ef 1126 tree body = begin_function_body ();
1127 tree compound_stmt = begin_compound_stmt (0);
1128 if (!generic_lambda_p)
9f10a108 1129 {
814b90ef 1130 set_flags_from_callee (call);
1131 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1132 call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
9f10a108 1133 }
5d9fd871 1134 call = convert_from_reference (call);
1135 finish_return_stmt (call);
1136
1137 finish_compound_stmt (compound_stmt);
1138 finish_function_body (body);
1139
814b90ef 1140 fn = finish_function (/*inline*/2);
1141 if (!generic_lambda_p)
1142 expand_or_defer_fn (fn);
5d9fd871 1143
1144 /* Generate the body of the conversion op. */
1145
1146 start_preparsed_function (convfn, NULL_TREE,
1147 SF_PRE_PARSED | SF_INCLASS_INLINE);
1148 body = begin_function_body ();
1149 compound_stmt = begin_compound_stmt (0);
1150
1151 /* decl_needed_p needs to see that it's used. */
1152 TREE_USED (statfn) = 1;
1153 finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1154
1155 finish_compound_stmt (compound_stmt);
1156 finish_function_body (body);
1157
814b90ef 1158 fn = finish_function (/*inline*/2);
1159 if (!generic_lambda_p)
1160 expand_or_defer_fn (fn);
5d9fd871 1161
1162 if (nested)
1163 pop_function_context ();
1164 else
1165 --function_depth;
1166}
1167
33603066 1168/* True if FN is the static function "_FUN" that gets returned from the lambda
1169 conversion operator. */
1170
1171bool
1172lambda_static_thunk_p (tree fn)
1173{
1174 return (fn && TREE_CODE (fn) == FUNCTION_DECL
1175 && DECL_ARTIFICIAL (fn)
1176 && DECL_STATIC_FUNCTION_P (fn)
1177 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1178}
1179
5d9fd871 1180/* Returns true iff VAL is a lambda-related declaration which should
1181 be ignored by unqualified lookup. */
1182
1183bool
1184is_lambda_ignored_entity (tree val)
1185{
1186 /* In unevaluated context, look past normal capture proxies. */
1187 if (cp_unevaluated_operand && is_normal_capture_proxy (val))
1188 return true;
1189
1190 /* Always ignore lambda fields, their names are only for debugging. */
1191 if (TREE_CODE (val) == FIELD_DECL
1192 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1193 return true;
1194
1195 /* None of the lookups that use qualify_lookup want the op() from the
1196 lambda; they want the one from the enclosing class. */
1197 if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1198 return true;
1199
1200 return false;
1201}