/* NOTE(review): gitweb navigation header from the scrape removed; this is
   gcc/cp/lambda.c from the GCC source tree.  */
/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2017 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
23
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cp-tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "toplev.h"
#include "gimplify.h"
#include "cp-cilkplus.h"
34
/* Constructor for a lambda expression.  Returns a fresh LAMBDA_EXPR node
   with every field reset to its neutral state: no default capture mode,
   empty capture list, no 'this' capture, no pending proxies, no trailing
   return type, and not mutable.  The parser fills the fields in later.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}
49
/* Create the closure object for a LAMBDA_EXPR, i.e. the runtime object that
   aggregate-initializes one member per capture.  Returns the initialized
   object expression, LAMBDA_EXPR itself when still in a template, or
   error_mark_node on error.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  /* In a template, closure-object construction is deferred until
     instantiation; just hand back the LAMBDA_EXPR.  */
  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  /* Walk the capture list, turning each (field, initializer) pair into a
     CONSTRUCTOR element.  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      /* A multi-element initializer (e.g. [x = {a, b}] parsed as a list)
	 collapses to a compound expression.  */
      if (TREE_CODE (val) == TREE_LIST)
	val = build_x_compound_expr_from_list (val, ELK_INIT,
					       tf_warning_or_error);

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}
126
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  The returned type is
   opened with begin_class_definition; error_mark_node on failure.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_UNNAMED_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
		     name,
		     /*scope=*/ts_lambda,
		     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx1z)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}
170
/* Returns the type to use for the return type of the operator() of a
   closure class, deduced from return-expression EXPR.  A null EXPR means
   a void return; unresolved overloads and braced-init-lists cannot be
   deduced from and yield error_mark_node after a diagnostic.  */

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return error_mark_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  /* Deduction decays arrays/functions to pointers and drops cv-quals.  */
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}
188
189 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
190 closure type. */
191
192 tree
193 lambda_function (tree lambda)
194 {
195 tree type;
196 if (TREE_CODE (lambda) == LAMBDA_EXPR)
197 type = LAMBDA_EXPR_CLOSURE (lambda);
198 else
199 type = lambda;
200 gcc_assert (LAMBDA_TYPE_P (type));
201 /* Don't let debug_tree cause instantiation. */
202 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
203 && !COMPLETE_OR_OPEN_TYPE_P (type))
204 return NULL_TREE;
205 lambda = lookup_member (type, cp_operator_id (CALL_EXPR),
206 /*protect=*/0, /*want_type=*/false,
207 tf_warning_or_error);
208 if (lambda)
209 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
210 return lambda;
211 }
212
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
			   bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && type_dependent_expression_p (expr))
    {
      /* Dependent capture: defer the computation by wrapping EXPR in a
	 DECLTYPE_TYPE that records the capture flags for later.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      /* Init capture: the field type is deduced as if by 'auto'.  */
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
	/* Add the reference now, so deduction doesn't lose
	   outermost CV qualifiers of EXPR.  */
	type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else
    {
      /* Plain capture: the non-reference type of EXPR, with a reference
	 added for by-reference captures of anything but 'this'.  */
      type = non_reference (unlowered_expr_type (expr));

      if (!is_this && by_reference_p)
	type = build_reference_type (type);
    }

  return type;
}
255
256 /* Returns true iff DECL is a lambda capture proxy variable created by
257 build_capture_proxy. */
258
259 bool
260 is_capture_proxy (tree decl)
261 {
262 return (VAR_P (decl)
263 && DECL_HAS_VALUE_EXPR_P (decl)
264 && !DECL_ANON_UNION_VAR_P (decl)
265 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
266 }
267
/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  /* The value-expr of a proxy is a COMPONENT_REF of the capture field;
     the field itself records whether the capture was "normal".  */
  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}
291
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.
     Index 1 of stmt_list_stack is that outermost block's list —
     NOTE(review): relies on the stack layout set up when the op() body
     was started.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
308
/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  No-op outside a lambda op().  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  /* Drain the pending-proxy vector of the enclosing lambda, then release
     and clear it so the captures aren't inserted twice.  */
  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}
333
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* Type not yet known (dependent): defer via a DECLTYPE_TYPE that
	 wraps the COMPONENT_REF itself.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  /* A captured pack yields a pack-expansion proxy type.  */
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
360
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  The proxy's DECL_VALUE_EXPR forwards to the closure field.  */

tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  DECL_ARGUMENTS of
     the op() is its 'this' parameter.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !POINTER_TYPE_P (type))
    {
      /* 'this' was captured by copy (the object, not the pointer); the
	 proxy must still look like a const pointer to it.  */
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (name == this_identifier)
    {
      /* The 'this' capture now resolves through the proxy.  */
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  /* If we're not yet inside the op() body (explicit capture parsed in the
     introducer of an enclosing lambda), queue the proxy for later.  */
  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
429
/* Cached identifiers for the two fields of the VLA capture record,
   GC-rooted so they survive collection.  */
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  ARRAY_TYPE supplies the element type.  */

static tree
vla_capture_type (tree array_type)
{
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  /* Lazily create the field identifiers on first use.  */
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
454
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  Returns the capture proxy if the
   closure is already open, NULL_TREE otherwise, or error_mark_node.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* For a pack capture, analyze the pattern and re-wrap below.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      /* Other variable-size types (beyond N3639 runtime-bound arrays)
	 cannot be captured at all.  */
      error ("capture of variable-size type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
					by_reference_p);
      if (type == error_mark_node)
	return error_mark_node;

      /* Capturing '*this' by copy stores the object, not the pointer.  */
      if (id == this_identifier && !by_reference_p)
	{
	  gcc_assert (POINTER_TYPE_P (type));
	  type = TREE_TYPE (type);
	  initializer = cp_build_indirect_ref (initializer, RO_NULL,
					       tf_warning_or_error);
	}

      if (dependent_type_p (type))
	;
      else if (id != this_identifier && by_reference_p)
	{
	  if (!lvalue_p (initializer))
	    error ("cannot capture %qE by reference", initializer);
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
	{
	  pedwarn (input_location, 0,
		   "already captured %qD in lambda expression", id);
	  return NULL_TREE;
	}
      /* Unmarked again in register_capture_members.  */
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    finish_member_declaration (member);

  /* Record (field, initializer) on the capture list, restoring the pack
     expansion wrappers stripped above.  */
  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}
608
609 /* Register all the capture members on the list CAPTURES, which is the
610 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
611
612 void
613 register_capture_members (tree captures)
614 {
615 if (captures == NULL_TREE)
616 return;
617
618 register_capture_members (TREE_CHAIN (captures));
619
620 tree field = TREE_PURPOSE (captures);
621 if (PACK_EXPANSION_P (field))
622 field = PACK_EXPANSION_PATTERN (field);
623
624 /* We set this in add_capture to avoid duplicates. */
625 IDENTIFIER_MARKED (DECL_NAME (field)) = false;
626 finish_member_declaration (field);
627 }
628
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  /* Walk outermost-to-innermost, adding the capture to each lambda and
     feeding each capture's proxy as the initializer of the next.  */
  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      /* add_capture needs current_class_type to be this closure.  */
      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (this_capture_p
			  || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}
668
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  Returns error_mark_node when
   'this' cannot be captured but is required, NULL_TREE when it is
   merely unavailable.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
	  || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
	   1. a non-lambda function or NSDMI,
	   2. a lambda function capturing 'this', or
	   3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  /* Accumulate the chain of lambdas that must each capture
	     'this' for the innermost one to use it.  */
	  lambda_stack = tree_cons (NULL_TREE,
				    tlambda,
				    lambda_stack);

	  if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
	      && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
	    {
	      /* In an NSDMI, we don't have a function to look up the decl in,
		 but the fake 'this' pointer that we're using for parsing is
		 in scope_chain.  */
	      init = scope_chain->x_current_class_ptr;
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
	  tree containing_function = decl_function_context (closure_decl);

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }

	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* An outer lambda won't let us capture 'this'.  */
	    break;
	}

      if (init)
	{
	  if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
	 ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
786
/* Return the current LAMBDA_EXPR, if OBJECT is a resolvable dummy
   object whose 'this' could come from the current lambda's capture.
   NULL_TREE otherwise.  */

static tree
resolvable_dummy_lambda (tree object)
{
  if (!is_dummy_object (object))
    return NULL_TREE;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  /* Resolvable only when we're inside a lambda op() and the dummy's type
     is a base of (or equal to) the enclosing non-lambda class.  */
  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    return CLASSTYPE_LAMBDA_EXPR (current_class_type);

  return NULL_TREE;
}
808
/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  Returns OBJECT unchanged when no resolution applies.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
      if (cap != error_mark_node)
	object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
				       RO_NULL, tf_warning_or_error);

  return object;
}
825
/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
	/* We've not yet captured, so look at the function set of
	   interest.  */
	if (BASELINK_P (fns))
	  fns = BASELINK_FUNCTIONS (fns);
	/* For f<args>(...) consider only the underlying templates.  */
	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
	if (id_expr)
	  fns = TREE_OPERAND (fns, 0);

	for (lkp_iterator iter (fns); iter; ++iter)
	  if ((!id_expr || TREE_CODE (*iter) == TEMPLATE_DECL)
	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
	    {
	      /* Found a non-static member.  Capture this.  */
	      lambda_expr_this_capture (lam, true);
	      break;
	    }
      }
}
857
858 /* Returns the innermost non-lambda function. */
859
860 tree
861 current_nonlambda_function (void)
862 {
863 tree fn = current_function_decl;
864 while (fn && LAMBDA_FUNCTION_P (fn))
865 fn = decl_function_context (fn);
866 return fn;
867 }
868
/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  /* Only a non-static member function has a method basetype.  */
  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}
894
895 /* Like current_scope, but looking through lambdas. */
896
897 tree
898 current_nonlambda_scope (void)
899 {
900 tree scope = current_scope ();
901 for (;;)
902 {
903 if (TREE_CODE (scope) == FUNCTION_DECL
904 && LAMBDA_FUNCTION_P (scope))
905 {
906 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
907 continue;
908 }
909 else if (LAMBDA_TYPE_P (scope))
910 {
911 scope = CP_TYPE_CONTEXT (scope);
912 continue;
913 }
914 break;
915 }
916 return scope;
917 }
918
919 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
920 indicated FN and NARGS, but do not initialize the return type or any of the
921 argument slots. */
922
923 static tree
924 prepare_op_call (tree fn, int nargs)
925 {
926 tree t;
927
928 t = build_vl_exp (CALL_EXPR, nargs + 3);
929 CALL_EXPR_FN (t) = fn;
930 CALL_EXPR_STATIC_CHAIN (t) = NULL;
931
932 return t;
933 }
934
935 /* Return true iff CALLOP is the op() for a generic lambda. */
936
937 bool
938 generic_lambda_fn_p (tree callop)
939 {
940 return (LAMBDA_FUNCTION_P (callop)
941 && DECL_TEMPLATE_INFO (callop)
942 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
943 }
944
945 /* If the closure TYPE has a static op(), also add a conversion to function
946 pointer. */
947
948 void
949 maybe_add_lambda_conv_op (tree type)
950 {
951 bool nested = (cfun != NULL);
952 bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
953 tree callop = lambda_function (type);
954 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
955
956 if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
957 || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
958 return;
959
960 if (processing_template_decl)
961 return;
962
963 bool const generic_lambda_p = generic_lambda_fn_p (callop);
964
965 if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
966 {
967 /* If the op() wasn't instantiated due to errors, give up. */
968 gcc_assert (errorcount || sorrycount);
969 return;
970 }
971
972 /* Non-template conversion operators are defined directly with build_call_a
973 and using DIRECT_ARGVEC for arguments (including 'this'). Templates are
974 deferred and the CALL is built in-place. In the case of a deduced return
975 call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
976 the return type is also built in-place. The arguments of DECLTYPE_CALL in
977 the return expression may differ in flags from those in the body CALL. In
978 particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
979 the body CALL, but not in DECLTYPE_CALL. */
980
981 vec<tree, va_gc> *direct_argvec = 0;
982 tree decltype_call = 0, call = 0;
983 tree optype = TREE_TYPE (callop);
984 tree fn_result = TREE_TYPE (optype);
985
986 tree thisarg = build_nop (TREE_TYPE (DECL_ARGUMENTS (callop)),
987 null_pointer_node);
988 if (generic_lambda_p)
989 {
990 ++processing_template_decl;
991
992 /* Prepare the dependent member call for the static member function
993 '_FUN' and, potentially, prepare another call to be used in a decltype
994 return expression for a deduced return call op to allow for simple
995 implementation of the conversion operator. */
996
997 tree instance = cp_build_indirect_ref (thisarg, RO_NULL,
998 tf_warning_or_error);
999 tree objfn = build_min (COMPONENT_REF, NULL_TREE,
1000 instance, DECL_NAME (callop), NULL_TREE);
1001 int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;
1002
1003 call = prepare_op_call (objfn, nargs);
1004 if (type_uses_auto (fn_result))
1005 decltype_call = prepare_op_call (objfn, nargs);
1006 }
1007 else
1008 {
1009 direct_argvec = make_tree_vector ();
1010 direct_argvec->quick_push (thisarg);
1011 }
1012
1013 /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
1014 declare the static member function "_FUN" below. For each arg append to
1015 DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
1016 call args (for the template case). If a parameter pack is found, expand
1017 it, flagging it as PACK_EXPANSION_LOCAL_P for the body call. */
1018
1019 tree fn_args = NULL_TREE;
1020 {
1021 int ix = 0;
1022 tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
1023 tree tgt = NULL;
1024
1025 while (src)
1026 {
1027 tree new_node = copy_node (src);
1028
1029 if (!fn_args)
1030 fn_args = tgt = new_node;
1031 else
1032 {
1033 TREE_CHAIN (tgt) = new_node;
1034 tgt = new_node;
1035 }
1036
1037 mark_exp_read (tgt);
1038
1039 if (generic_lambda_p)
1040 {
1041 tree a = forward_parm (tgt);
1042
1043 CALL_EXPR_ARG (call, ix) = a;
1044 if (decltype_call)
1045 CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);
1046
1047 if (PACK_EXPANSION_P (a))
1048 /* Set this after unsharing so it's not in decltype_call. */
1049 PACK_EXPANSION_LOCAL_P (a) = true;
1050
1051 ++ix;
1052 }
1053 else
1054 vec_safe_push (direct_argvec, tgt);
1055
1056 src = TREE_CHAIN (src);
1057 }
1058 }
1059
1060
1061 if (generic_lambda_p)
1062 {
1063 if (decltype_call)
1064 {
1065 fn_result = finish_decltype_type
1066 (decltype_call, /*id_expression_or_member_access_p=*/false,
1067 tf_warning_or_error);
1068 }
1069 }
1070 else
1071 call = build_call_a (callop,
1072 direct_argvec->length (),
1073 direct_argvec->address ());
1074
1075 CALL_FROM_THUNK_P (call) = 1;
1076 SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
1077
1078 tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
1079 stattype = (cp_build_type_attribute_variant
1080 (stattype, TYPE_ATTRIBUTES (optype)));
1081 if (flag_noexcept_type
1082 && TYPE_NOTHROW_P (TREE_TYPE (callop)))
1083 stattype = build_exception_variant (stattype, noexcept_true_spec);
1084
1085 if (generic_lambda_p)
1086 --processing_template_decl;
1087
1088 /* First build up the conversion op. */
1089
1090 tree rettype = build_pointer_type (stattype);
1091 tree name = make_conv_op_name (rettype);
1092 tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
1093 tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
1094 tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
1095 tree fn = convfn;
1096 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1097 SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
1098 SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
1099 grokclassfn (type, fn, NO_SPECIAL);
1100 set_linkage_according_to_type (type, fn);
1101 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1102 DECL_IN_AGGR_P (fn) = 1;
1103 DECL_ARTIFICIAL (fn) = 1;
1104 DECL_NOT_REALLY_EXTERN (fn) = 1;
1105 DECL_DECLARED_INLINE_P (fn) = 1;
1106 DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);
1107
1108 if (nested_def)
1109 DECL_INTERFACE_KNOWN (fn) = 1;
1110
1111 if (generic_lambda_p)
1112 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1113
1114 add_method (type, fn, false);
1115
1116 /* Generic thunk code fails for varargs; we'll complain in mark_used if
1117 the conversion op is used. */
1118 if (varargs_function_p (callop))
1119 {
1120 DECL_DELETED_FN (fn) = 1;
1121 return;
1122 }
1123
1124 /* Now build up the thunk to be returned. */
1125
1126 name = get_identifier ("_FUN");
1127 tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
1128 fn = statfn;
1129 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1130 grokclassfn (type, fn, NO_SPECIAL);
1131 set_linkage_according_to_type (type, fn);
1132 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1133 DECL_IN_AGGR_P (fn) = 1;
1134 DECL_ARTIFICIAL (fn) = 1;
1135 DECL_NOT_REALLY_EXTERN (fn) = 1;
1136 DECL_DECLARED_INLINE_P (fn) = 1;
1137 DECL_STATIC_FUNCTION_P (fn) = 1;
1138 DECL_ARGUMENTS (fn) = fn_args;
1139 for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
1140 {
1141 /* Avoid duplicate -Wshadow warnings. */
1142 DECL_NAME (arg) = NULL_TREE;
1143 DECL_CONTEXT (arg) = fn;
1144 }
1145 if (nested_def)
1146 DECL_INTERFACE_KNOWN (fn) = 1;
1147
1148 if (generic_lambda_p)
1149 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1150
1151 if (flag_sanitize & SANITIZE_NULL)
1152 {
1153 /* Don't UBsan this function; we're deliberately calling op() with a null
1154 object argument. */
1155 add_no_sanitize_value (fn, SANITIZE_UNDEFINED);
1156 }
1157
1158 add_method (type, fn, false);
1159
1160 if (nested)
1161 push_function_context ();
1162 else
1163 /* Still increment function_depth so that we don't GC in the
1164 middle of an expression. */
1165 ++function_depth;
1166
1167 /* Generate the body of the thunk. */
1168
1169 start_preparsed_function (statfn, NULL_TREE,
1170 SF_PRE_PARSED | SF_INCLASS_INLINE);
1171 if (DECL_ONE_ONLY (statfn))
1172 {
1173 /* Put the thunk in the same comdat group as the call op. */
1174 cgraph_node::get_create (statfn)->add_to_same_comdat_group
1175 (cgraph_node::get_create (callop));
1176 }
1177 tree body = begin_function_body ();
1178 tree compound_stmt = begin_compound_stmt (0);
1179 if (!generic_lambda_p)
1180 {
1181 set_flags_from_callee (call);
1182 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1183 call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
1184 }
1185 call = convert_from_reference (call);
1186 finish_return_stmt (call);
1187
1188 finish_compound_stmt (compound_stmt);
1189 finish_function_body (body);
1190
1191 fn = finish_function (/*inline*/2);
1192 if (!generic_lambda_p)
1193 expand_or_defer_fn (fn);
1194
1195 /* Generate the body of the conversion op. */
1196
1197 start_preparsed_function (convfn, NULL_TREE,
1198 SF_PRE_PARSED | SF_INCLASS_INLINE);
1199 body = begin_function_body ();
1200 compound_stmt = begin_compound_stmt (0);
1201
1202 /* decl_needed_p needs to see that it's used. */
1203 TREE_USED (statfn) = 1;
1204 finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1205
1206 finish_compound_stmt (compound_stmt);
1207 finish_function_body (body);
1208
1209 fn = finish_function (/*inline*/2);
1210 if (!generic_lambda_p)
1211 expand_or_defer_fn (fn);
1212
1213 if (nested)
1214 pop_function_context ();
1215 else
1216 --function_depth;
1217 }
1218
1219 /* True if FN is the static function "_FUN" that gets returned from the lambda
1220 conversion operator. */
1221
1222 bool
1223 lambda_static_thunk_p (tree fn)
1224 {
1225 return (fn && TREE_CODE (fn) == FUNCTION_DECL
1226 && DECL_ARTIFICIAL (fn)
1227 && DECL_STATIC_FUNCTION_P (fn)
1228 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1229 }
1230
1231 /* Returns true iff VAL is a lambda-related declaration which should
1232 be ignored by unqualified lookup. */
1233
1234 bool
1235 is_lambda_ignored_entity (tree val)
1236 {
1237 /* In unevaluated context, look past normal capture proxies. */
1238 if (cp_unevaluated_operand && is_normal_capture_proxy (val))
1239 return true;
1240
1241 /* Always ignore lambda fields, their names are only for debugging. */
1242 if (TREE_CODE (val) == FIELD_DECL
1243 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1244 return true;
1245
1246 /* None of the lookups that use qualify_lookup want the op() from the
1247 lambda; they want the one from the enclosing class. */
1248 if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1249 return true;
1250
1251 return false;
1252 }
1253
1254 #include "gt-cp-lambda.h"