/* Perform the semantic phase of lambda parsing, i.e., the process of
   building tree structure, checking semantic consistency, and
   building RTL.  These routines are used both during actual parsing
   and during the instantiation of template functions.

   Copyright (C) 1998-2013 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "cgraph.h"
#include "tree-iterator.h"
#include "cp-tree.h"
#include "toplev.h"
#include "vec.h"

/* Constructor for a lambda expression.  */

tree
build_lambda_expr (void)
{
  tree lambda = make_node (LAMBDA_EXPR);
  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
  LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
  LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE;
  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
  return lambda;
}

/* Create the closure object for a LAMBDA_EXPR.  */
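/* As an illustrative sketch (the real work is done on trees, not source),
   a lambda such as

     int i = 1;  int j = 2;
     auto f = [i, &j] { return i + j; };

   reaches this function with a capture list pairing the field for 'i'
   with the expression 'i' and the reference field for 'j' with 'j'; the
   loop below turns that list into an aggregate initializer, marking the
   by-copy element for direct-initialization, and the result is wrapped
   in a compound literal of the closure type.  */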

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization.  */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}

/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_ANONYMOUS_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}

/* Returns the type to use for the return type of the operator() of a
   closure class.  */
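/* For instance, for '[]{ return 42; }' the deduced return type is 'int'
   (after decay and stripping of top-level cv-qualifiers), and a lambda
   whose body has no value-returning return statement gets 'void'.  */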

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return void_type_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}

/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type.  */

tree
lambda_function (tree lambda)
{
  tree type;
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, ansi_opname (CALL_EXPR),
                          /*protect=*/0, /*want_type=*/false,
                          tf_warning_or_error);
  if (lambda)
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}

/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.
   The caller should add REFERENCE_TYPE for capture by reference.  */
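/* For example, a by-copy capture of an 'int' variable yields an 'int'
   field here (the caller adds the REFERENCE_TYPE for by-reference
   captures), while an init-capture such as '[x = 1.5]' goes through auto
   deduction and yields 'double'.  Type-dependent operands and parameter
   packs are deferred via a DECLTYPE_TYPE placeholder.  */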

tree
lambda_capture_field_type (tree expr, bool explicit_init_p)
{
  tree type;
  if (explicit_init_p)
    {
      type = make_auto ();
      type = do_auto_deduction (type, expr, type);
    }
  else
    type = non_reference (unlowered_expr_type (expr));
  if (!type || WILDCARD_TYPE_P (type) || type_uses_auto (type)
      || DECL_PACK_P (expr))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  return type;
}

/* Returns true iff DECL is a lambda capture proxy variable created by
   build_capture_proxy.  */

bool
is_capture_proxy (tree decl)
{
  return (VAR_P (decl)
          && DECL_HAS_VALUE_EXPR_P (decl)
          && !DECL_ANON_UNION_VAR_P (decl)
          && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
}

/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}

/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  cp_binding_level *b;
  tree stmt_list;

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  b = current_binding_level;
  for (;;)
    {
      cp_binding_level *n = b->level_chain;
      if (n->kind == sk_function_parms)
        break;
      b = n;
    }
  pushdecl_with_scope (var, b, false);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}

/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}

/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */
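/* So, for instance, a by-value capture of an 'int' in a non-mutable lambda
   gives a proxy of type 'const int' (the const comes in through the const
   'this' of the call operator), while a by-reference capture keeps its
   REFERENCE_TYPE; dependent fields get a DECLTYPE_TYPE placeholder
   instead.  */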

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}

/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  */
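/* Informally, for the capture of 'x' in '[x] { return x; }' this creates
   an artificial VAR_DECL named 'x' whose DECL_VALUE_EXPR is the
   COMPONENT_REF '(*this).__x' inside the call operator, so that uses of
   'x' in the lambda body resolve to the capture field.  */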

tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  if (DECL_NORMAL_CAPTURE_P (member))
    name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
  else
    name = DECL_NAME (member);

  type = lambda_proxy_type (object);

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                               build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */
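/* The returned record is roughly equivalent to

     struct { element_type *ptr; size_t max; };

   (a sketch only; the fields are built directly as trees below), where
   'ptr' points at the first element and 'max' is the maximum index;
   build_capture_proxy later reconstitutes a reference-to-VLA type from
   these two fields.  */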

static tree
vla_capture_type (tree array_type)
{
  static tree ptr_id, max_id;
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}

/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  */
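/* For example, '[&v]' produces a reference-typed field named '__v' and
   marks it as a normal capture, while an init-capture '[n = 2]' keeps the
   user-visible name 'n' and deduces its type from the initializer;
   capturing the same name twice in one introducer is diagnosed via
   IDENTIFIER_MARKED below.  */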

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST)
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = lambda_capture_field_type (initializer, explicit_init_p);
  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (variably_modified_type_p (type, NULL_TREE))
    {
      error ("capture of variable-size type %qT that is not a C++1y array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else if (by_reference_p)
    {
      type = build_reference_type (type);
      if (!real_lvalue_p (initializer))
        error ("cannot capture %qE by reference", initializer);
    }
  else
    /* Capture by copy requires a complete type.  */
    type = complete_type (type);

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
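  /* For example, a normal capture of 'foo' is stored in a field named
     '__foo'; build_capture_proxy strips the two underscores again when it
     creates the proxy variable.  */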
  if (!explicit_init_p)
    {
      buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
      buf[1] = buf[0] = '_';
      memcpy (buf + 2, IDENTIFIER_POINTER (id),
              IDENTIFIER_LENGTH (id) + 1);
      name = get_identifier (buf);
    }
  else
    /* But captures with explicit initializers are named.  */
    name = id;

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
        {
          pedwarn (input_location, 0,
                   "already captured %qD in lambda expression", id);
          return NULL_TREE;
        }
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    finish_member_declaration (member);

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}

/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}

/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */
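/* For instance, in

     int i;
     [&] { return [&] { return i; } (); };

   the implicit use of 'i' in the inner lambda is satisfied by adding a
   by-reference capture of 'i' to each lambda on LAMBDA_STACK in turn, so
   both closures end up capturing it; the capture created for the
   innermost lambda is what gets returned.  */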

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (!this_capture_p
                          && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}

/* Return the capture pertaining to a use of 'this' in LAMBDA, in the form of an
   INDIRECT_REF, possibly adding it through default capturing.  */
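/* For example, in

     struct A { int f (); int g () { return [=] { return f (); } (); } };

   the call to 'f' inside the lambda needs 'this', so the default capture
   mode is used to add a capture of 'this' on the fly, and the use is then
   routed through that capture.  */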

tree
lambda_expr_this_capture (tree lambda)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so just return the
     nearest 'this'.  */
  if (cp_unevaluated_operand)
    {
      /* In an NSDMI the fake 'this' pointer that we're using for
         parsing is in scope_chain.  */
      if (LAMBDA_EXPR_EXTRA_SCOPE (lambda)
          && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (lambda)) == FIELD_DECL)
        return scope_chain->x_current_class_ptr;
      return lookup_name (this_identifier);
    }

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE)
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
         1. a non-lambda function or NSDMI,
         2. a lambda function capturing 'this', or
         3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
        {
          lambda_stack = tree_cons (NULL_TREE,
                                    tlambda,
                                    lambda_stack);

          if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
              && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
            {
              /* In an NSDMI, we don't have a function to look up the decl in,
                 but the fake 'this' pointer that we're using for parsing is
                 in scope_chain.  */
              init = scope_chain->x_current_class_ptr;
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
          tree containing_function = decl_function_context (closure_decl);

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture.  */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function.  */
              if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'.  */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'.  */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }

          if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* An outer lambda won't let us capture 'this'.  */
            break;
        }

      if (init)
        this_capture = add_default_capture (lambda_stack,
                                            /*id=*/this_identifier,
                                            init);
    }

  if (!this_capture)
    {
      error ("%<this%> was not captured for this lambda function");
      result = error_mark_node;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
         ensures that the transformed expression is an rvalue. ]  */
      result = rvalue (result);
    }

  return result;
}

/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  */
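/* For example, for an unqualified call to a member function 'f' inside a
   member-function lambda, overload resolution is first run against a dummy
   object; once a non-static member function has been chosen, the dummy is
   replaced here by an indirection through the captured 'this'.  */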

tree
maybe_resolve_dummy (tree object)
{
  if (!is_dummy_object (object))
    return object;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    {
      /* In a lambda, need to go through 'this' capture.  */
      tree lam = CLASSTYPE_LAMBDA_EXPR (current_class_type);
      tree cap = lambda_expr_this_capture (lam);
      object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
                                     RO_NULL, tf_warning_or_error);
    }

  return object;
}

/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}

/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
   indicated FN and NARGS, but do not initialize the return type or any of the
   argument slots.  */

static tree
prepare_op_call (tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */
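/* This is what makes a capture-less lambda convertible to an ordinary
   function pointer, as in

     int (*fp) (int) = [] (int i) { return i + 1; };

   The conversion operator built below returns the address of a static
   member thunk "_FUN" that simply forwards to the call operator.  */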

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (current_function_decl != NULL_TREE);
  tree callop = lambda_function (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p
    = (DECL_TEMPLATE_INFO (callop)
       && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree fn_result = TREE_TYPE (TREE_TYPE (callop));

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a decltype
         return expression for a deduced return call op to allow for simple
         implementation of the conversion operator.  */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
                              instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
                                         TREE_TYPE (DECL_ARGUMENTS (callop)),
                                         null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
        tree new_node = copy_node (src);

        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            if (DECL_PACK_P (tgt))
              {
                tree a = make_pack_expansion (tgt);
                if (decltype_call)
                  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
                PACK_EXPANSION_LOCAL_P (a) = true;
                CALL_EXPR_ARG (call, ix) = a;
              }
            else
              {
                tree a = convert_from_reference (tgt);
                CALL_EXPR_ARG (call, ix) = a;
                if (decltype_call)
                  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
              }
            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }


  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          ++processing_template_decl;
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
          --processing_template_decl;
        }
    }
  else
    call = build_call_a (callop,
                         direct_argvec->length (),
                         direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
  if (nested)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      symtab_add_to_same_comdat_group
        (cgraph_get_create_node (statfn),
         cgraph_get_create_node (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}

/* Returns true iff VAL is a lambda-related declaration which should
   be ignored by unqualified lookup.  */

bool
is_lambda_ignored_entity (tree val)
{
  /* In unevaluated context, look past normal capture proxies.  */
  if (cp_unevaluated_operand && is_normal_capture_proxy (val))
    return true;

  /* Always ignore lambda fields, their names are only for debugging.  */
  if (TREE_CODE (val) == FIELD_DECL
      && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
    return true;

  /* None of the lookups that use qualify_lookup want the op() from the
     lambda; they want the one from the enclosing class.  */
  if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
    return true;

  return false;
}