]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cp/lambda.c
2015-06-04 Andrew MacLeod <amacleod@redhat.com>
[thirdparty/gcc.git] / gcc / cp / lambda.c
CommitLineData
5d9fd871 1/* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
d353bf18 6 Copyright (C) 1998-2015 Free Software Foundation, Inc.
5d9fd871 7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
19
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
b20a8bb4 27#include "hash-set.h"
b20a8bb4 28#include "vec.h"
b20a8bb4 29#include "input.h"
30#include "alias.h"
31#include "symtab.h"
32#include "options.h"
b20a8bb4 33#include "inchash.h"
5d9fd871 34#include "tree.h"
9ed99284 35#include "stringpool.h"
1140c305 36#include "hash-map.h"
37#include "is-a.h"
38#include "plugin-api.h"
1140c305 39#include "tm.h"
40#include "hard-reg-set.h"
41#include "input.h"
42#include "function.h"
43#include "ipa-ref.h"
5d9fd871 44#include "cgraph.h"
45#include "tree-iterator.h"
46#include "cp-tree.h"
47#include "toplev.h"
5d9fd871 48
49/* Constructor for a lambda expression. */
50
51tree
52build_lambda_expr (void)
53{
54 tree lambda = make_node (LAMBDA_EXPR);
55 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
56 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
57 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
58 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
59 LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE;
60 LAMBDA_EXPR_MUTABLE_P (lambda) = false;
61 return lambda;
62}
63
/* Create the closure object for a LAMBDA_EXPR.  Builds an aggregate
   initializer from the capture list and direct-initializes an object of
   the closure type with it.  Returns the initialization expression, the
   LAMBDA_EXPR itself inside a template, or error_mark_node on error.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  /* In a template we just return the LAMBDA_EXPR; the real object is
     built at instantiation time.  */
  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  /* Walk the capture list, turning each captured entity into a
     constructor element keyed by the capture field's name.  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      /* Mark the captured entity as used so ODR-use bookkeeping sees it.  */
      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  /* Restore the location saved above, on both success and error paths.  */
  input_location = saved_loc;
  return expr;
}
136
137/* Return an initialized RECORD_TYPE for LAMBDA.
138 LAMBDA must have its explicit captures already. */
139
140tree
141begin_lambda_type (tree lambda)
142{
143 tree type;
144
145 {
146 /* Unique name. This is just like an unnamed class, but we cannot use
147 make_anon_name because of certain checks against TYPE_ANONYMOUS_P. */
148 tree name;
149 name = make_lambda_name ();
150
151 /* Create the new RECORD_TYPE for this lambda. */
152 type = xref_tag (/*tag_code=*/record_type,
153 name,
154 /*scope=*/ts_lambda,
155 /*template_header_p=*/false);
240cc9cf 156 if (type == error_mark_node)
157 return error_mark_node;
5d9fd871 158 }
159
160 /* Designate it as a struct so that we can use aggregate initialization. */
161 CLASSTYPE_DECLARED_CLASS (type) = false;
162
163 /* Cross-reference the expression and the type. */
164 LAMBDA_EXPR_CLOSURE (lambda) = type;
165 CLASSTYPE_LAMBDA_EXPR (type) = lambda;
166
167 /* Clear base types. */
168 xref_basetypes (type, /*bases=*/NULL_TREE);
169
170 /* Start the class. */
171 type = begin_class_definition (type);
5d9fd871 172
173 return type;
174}
175
176/* Returns the type to use for the return type of the operator() of a
177 closure class. */
178
179tree
180lambda_return_type (tree expr)
181{
182 if (expr == NULL_TREE)
183 return void_type_node;
184 if (type_unknown_p (expr)
185 || BRACE_ENCLOSED_INITIALIZER_P (expr))
186 {
187 cxx_incomplete_type_error (expr, TREE_TYPE (expr));
188 return void_type_node;
189 }
190 gcc_checking_assert (!type_dependent_expression_p (expr));
191 return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
192}
193
194/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
195 closure type. */
196
197tree
198lambda_function (tree lambda)
199{
200 tree type;
201 if (TREE_CODE (lambda) == LAMBDA_EXPR)
202 type = LAMBDA_EXPR_CLOSURE (lambda);
203 else
204 type = lambda;
205 gcc_assert (LAMBDA_TYPE_P (type));
206 /* Don't let debug_tree cause instantiation. */
207 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
208 && !COMPLETE_OR_OPEN_TYPE_P (type))
209 return NULL_TREE;
210 lambda = lookup_member (type, ansi_opname (CALL_EXPR),
211 /*protect=*/0, /*want_type=*/false,
212 tf_warning_or_error);
213 if (lambda)
814b90ef 214 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
5d9fd871 215 return lambda;
216}
217
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P is true for an init-capture,
   whose type is deduced from the initializer as if by 'auto'.
   The caller should add REFERENCE_TYPE for capture by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p)
{
  tree type;
  if (explicit_init_p)
    {
      /* Init-capture: deduce the field type from the initializer the
	 same way an auto variable's type would be deduced.  */
      type = make_auto ();
      type = do_auto_deduction (type, expr, type);
    }
  else
    type = non_reference (unlowered_expr_type (expr));
  if (type_dependent_expression_p (expr)
      && !is_this_parameter (tree_strip_nop_conversions (expr)))
    {
      /* For a dependent capture (other than 'this'), the type can't be
	 computed yet; record the expression in a DECLTYPE_TYPE so it is
	 recomputed at instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  return type;
}
244
245/* Returns true iff DECL is a lambda capture proxy variable created by
246 build_capture_proxy. */
247
248bool
249is_capture_proxy (tree decl)
250{
251 return (VAR_P (decl)
252 && DECL_HAS_VALUE_EXPR_P (decl)
253 && !DECL_ANON_UNION_VAR_P (decl)
254 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
255}
256
/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  VLAs can only be captured "normally", so a
       variably-modified proxy type implies a normal capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  /* The value expression is a COMPONENT_REF into the closure; operand 1
     is the capture FIELD_DECL, which carries the normal-capture flag.  */
  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}
280
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  cp_binding_level *b;
  tree stmt_list;

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  b = current_binding_level;
  /* Walk out to the binding level just inside the function-parameter
     scope, i.e. the artificial outermost body block.  NOTE(review):
     assumes a sk_function_parms level is always reachable on the
     level_chain from here — confirm against callers.  */
  for (;;)
    {
      cp_binding_level *n = b->level_chain;
      if (n->kind == sk_function_parms)
	break;
      b = n;
    }
  pushdecl_with_scope (var, b, false);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.
     Index 1 of stmt_list_stack is the list for that outer body block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
308
309/* We've just finished processing a lambda; if the containing scope is also
310 a lambda, insert any capture proxies that were created while processing
311 the nested lambda. */
312
313void
314insert_pending_capture_proxies (void)
315{
316 tree lam;
317 vec<tree, va_gc> *proxies;
318 unsigned i;
319
320 if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
321 return;
322
323 lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
324 proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
325 for (i = 0; i < vec_safe_length (proxies); ++i)
326 {
327 tree var = (*proxies)[i];
328 insert_capture_proxy (var);
329 }
330 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
331 LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
332}
333
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  /* Look through the implicit dereference of a reference capture.  */
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* The field type is still dependent; defer the proxy's type to
	 instantiation time via a DECLTYPE_TYPE wrapping the ref.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  /* A pack capture yields a pack-expansion proxy type.  */
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
360
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  Returns the proxy VAR_DECL.  */

tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  Build
     (*this).member using op()'s 'this' parameter.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  The capture
	 field is the struct built by vla_capture_type: {ptr, max}.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  /* The proxy is an artificial variable whose DECL_VALUE_EXPR forwards
     every use to the closure member access built above.  */
  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (name == this_identifier)
    {
      /* The 'this' capture's proxy replaces the member in the
	 LAMBDA_EXPR so later lookups of 'this' find the proxy.  */
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  /* If op() is already open, insert the proxy now; otherwise queue it
     for insert_pending_capture_proxies.  */
  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
422
/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  ARRAY_TYPE is the VLA's array type; the
   returned RECORD_TYPE has fields "ptr" (pointer to the element type)
   and "max" (the maximum index, a sizetype).  */

static tree
vla_capture_type (tree array_type)
{
  /* Cache the field identifiers across calls.  */
  static tree ptr_id, max_id;
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
445
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ORIG_INIT is a pack expansion, a pack capture is
   created.  EXPLICIT_INIT_P is true for an init-capture.  Returns the
   capture proxy if the closure class has already been started,
   NULL_TREE otherwise (also on a duplicate capture), or
   error_mark_node on error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* For a pack capture, work on the pattern and remember to re-expand.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  /* An init-capture with a parenthesized initializer list arrives as a
     TREE_LIST; fold it into a single expression.  */
  if (TREE_CODE (initializer) == TREE_LIST)
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      /* Variably-modified types other than N3639 runtime-bound arrays
	 cannot be captured at all.  */
      error ("capture of variable-size type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      /* The ordinary case: compute the field type from the initializer.  */
      type = lambda_capture_field_type (initializer, explicit_init_p);
      if (by_reference_p)
	{
	  type = build_reference_type (type);
	  if (!dependent_type_p (type) && !real_lvalue_p (initializer))
	    error ("cannot capture %qE by reference", initializer);
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!dependent_type_p (type) && !COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
	{
	  pedwarn (input_location, 0,
		   "already captured %qD in lambda expression", id);
	  return NULL_TREE;
	}
      /* register_capture_members clears this flag again.  */
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    finish_member_declaration (member);

  /* For a pack capture, the capture list records the expansion of the
     member and the original (unexpanded) initializer.  */
  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}
583
584/* Register all the capture members on the list CAPTURES, which is the
585 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
586
587void
588register_capture_members (tree captures)
589{
590 if (captures == NULL_TREE)
591 return;
592
593 register_capture_members (TREE_CHAIN (captures));
6dcf5c5f 594
595 tree field = TREE_PURPOSE (captures);
596 if (PACK_EXPANSION_P (field))
597 field = PACK_EXPANSION_PATTERN (field);
598
5d9fd871 599 /* We set this in add_capture to avoid duplicates. */
6dcf5c5f 600 IDENTIFIER_MARKED (DECL_NAME (field)) = false;
601 finish_member_declaration (field);
5d9fd871 602}
603
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  /* add_capture reads current_class_type; save it so we can set it to
     each lambda's closure in turn and restore it afterwards.  */
  tree saved_class_type = current_class_type;

  tree node;

  /* Walk from the outermost lambda inward, capturing ID in each; the
     capture in one lambda becomes the initializer for the next.  */
  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (!this_capture_p
			  && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}
643
f1ec53b6 644/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
645 form of an INDIRECT_REF, possibly adding it through default
7d07c4a7 646 capturing, if ADD_CAPTURE_P is true. */
5d9fd871 647
648tree
f1ec53b6 649lambda_expr_this_capture (tree lambda, bool add_capture_p)
5d9fd871 650{
651 tree result;
652
653 tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);
654
7d07c4a7 655 /* In unevaluated context this isn't an odr-use, so don't capture. */
5d9fd871 656 if (cp_unevaluated_operand)
7d07c4a7 657 add_capture_p = false;
5d9fd871 658
659 /* Try to default capture 'this' if we can. */
660 if (!this_capture
f1ec53b6 661 && (!add_capture_p
662 || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
5d9fd871 663 {
664 tree lambda_stack = NULL_TREE;
665 tree init = NULL_TREE;
666
667 /* If we are in a lambda function, we can move out until we hit:
668 1. a non-lambda function or NSDMI,
669 2. a lambda function capturing 'this', or
670 3. a non-default capturing lambda function. */
671 for (tree tlambda = lambda; ;)
672 {
673 lambda_stack = tree_cons (NULL_TREE,
674 tlambda,
675 lambda_stack);
676
677 if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
678 && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
679 {
680 /* In an NSDMI, we don't have a function to look up the decl in,
681 but the fake 'this' pointer that we're using for parsing is
682 in scope_chain. */
683 init = scope_chain->x_current_class_ptr;
684 gcc_checking_assert
685 (init && (TREE_TYPE (TREE_TYPE (init))
686 == current_nonlambda_class_type ()));
687 break;
688 }
689
690 tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
691 tree containing_function = decl_function_context (closure_decl);
692
693 if (containing_function == NULL_TREE)
694 /* We ran out of scopes; there's no 'this' to capture. */
695 break;
696
697 if (!LAMBDA_FUNCTION_P (containing_function))
698 {
699 /* We found a non-lambda function. */
700 if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
701 /* First parameter is 'this'. */
702 init = DECL_ARGUMENTS (containing_function);
703 break;
704 }
705
706 tlambda
707 = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));
708
709 if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
710 {
711 /* An outer lambda has already captured 'this'. */
712 init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
713 break;
714 }
715
716 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
717 /* An outer lambda won't let us capture 'this'. */
718 break;
719 }
720
721 if (init)
f1ec53b6 722 {
723 if (add_capture_p)
724 this_capture = add_default_capture (lambda_stack,
725 /*id=*/this_identifier,
726 init);
727 else
728 this_capture = init;
729 }
5d9fd871 730 }
731
7d07c4a7 732 if (cp_unevaluated_operand)
733 result = this_capture;
734 else if (!this_capture)
5d9fd871 735 {
2e227398 736 if (add_capture_p)
7d07c4a7 737 {
738 error ("%<this%> was not captured for this lambda function");
739 result = error_mark_node;
740 }
741 else
742 result = NULL_TREE;
5d9fd871 743 }
744 else
745 {
746 /* To make sure that current_class_ref is for the lambda. */
747 gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
748 == LAMBDA_EXPR_CLOSURE (lambda));
749
750 result = this_capture;
751
752 /* If 'this' is captured, each use of 'this' is transformed into an
753 access to the corresponding unnamed data member of the closure
754 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
755 ensures that the transformed expression is an rvalue. ] */
756 result = rvalue (result);
757 }
758
759 return result;
760}
761
/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  OBJECT is returned unchanged unless it is a dummy
   object inside a lambda whose 'this' capture applies.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (!is_dummy_object (object))
    return object;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  /* Only rewrite the dummy when we're inside a lambda's op() and the
     dummy's type is (a base of) the enclosing non-lambda class.  */
  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    {
      /* In a lambda, need to go through 'this' capture.  */
      tree lam = CLASSTYPE_LAMBDA_EXPR (current_class_type);
      tree cap = lambda_expr_this_capture (lam, add_capture_p);
      if (cap && cap != error_mark_node)
	object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
				       RO_NULL, tf_warning_or_error);
    }

  return object;
}
792
ed7bf2d1 793/* Returns the innermost non-lambda function. */
794
795tree
796current_nonlambda_function (void)
797{
798 tree fn = current_function_decl;
799 while (fn && LAMBDA_FUNCTION_P (fn))
800 fn = decl_function_context (fn);
801 return fn;
802}
803
5d9fd871 804/* Returns the method basetype of the innermost non-lambda function, or
805 NULL_TREE if none. */
806
807tree
808nonlambda_method_basetype (void)
809{
810 tree fn, type;
811 if (!current_class_ref)
812 return NULL_TREE;
813
814 type = current_class_type;
815 if (!LAMBDA_TYPE_P (type))
816 return type;
817
818 /* Find the nearest enclosing non-lambda function. */
819 fn = TYPE_NAME (type);
820 do
821 fn = decl_function_context (fn);
822 while (fn && LAMBDA_FUNCTION_P (fn));
823
824 if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
825 return NULL_TREE;
826
827 return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
828}
829
814b90ef 830/* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
831 indicated FN and NARGS, but do not initialize the return type or any of the
832 argument slots. */
833
834static tree
835prepare_op_call (tree fn, int nargs)
836{
837 tree t;
838
839 t = build_vl_exp (CALL_EXPR, nargs + 3);
840 CALL_EXPR_FN (t) = fn;
841 CALL_EXPR_STATIC_CHAIN (t) = NULL;
842
843 return t;
844}
845
5d9fd871 846/* If the closure TYPE has a static op(), also add a conversion to function
847 pointer. */
848
849void
850maybe_add_lambda_conv_op (tree type)
851{
cbb83bc5 852 bool nested = (cfun != NULL);
64d8d39e 853 bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
5d9fd871 854 tree callop = lambda_function (type);
5d9fd871 855
856 if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
857 return;
858
859 if (processing_template_decl)
860 return;
861
814b90ef 862 bool const generic_lambda_p
863 = (DECL_TEMPLATE_INFO (callop)
864 && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);
865
4c0924ef 866 if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
5d9fd871 867 {
868 /* If the op() wasn't instantiated due to errors, give up. */
869 gcc_assert (errorcount || sorrycount);
870 return;
871 }
872
814b90ef 873 /* Non-template conversion operators are defined directly with build_call_a
874 and using DIRECT_ARGVEC for arguments (including 'this'). Templates are
875 deferred and the CALL is built in-place. In the case of a deduced return
876 call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
877 the return type is also built in-place. The arguments of DECLTYPE_CALL in
878 the return expression may differ in flags from those in the body CALL. In
879 particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
880 the body CALL, but not in DECLTYPE_CALL. */
881
882 vec<tree, va_gc> *direct_argvec = 0;
883 tree decltype_call = 0, call = 0;
884 tree fn_result = TREE_TYPE (TREE_TYPE (callop));
885
886 if (generic_lambda_p)
887 {
888 /* Prepare the dependent member call for the static member function
889 '_FUN' and, potentially, prepare another call to be used in a decltype
890 return expression for a deduced return call op to allow for simple
891 implementation of the conversion operator. */
892
893 tree instance = build_nop (type, null_pointer_node);
894 tree objfn = build_min (COMPONENT_REF, NULL_TREE,
895 instance, DECL_NAME (callop), NULL_TREE);
896 int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;
897
898 call = prepare_op_call (objfn, nargs);
899 if (type_uses_auto (fn_result))
900 decltype_call = prepare_op_call (objfn, nargs);
901 }
902 else
903 {
904 direct_argvec = make_tree_vector ();
905 direct_argvec->quick_push (build1 (NOP_EXPR,
906 TREE_TYPE (DECL_ARGUMENTS (callop)),
907 null_pointer_node));
908 }
909
910 /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
911 declare the static member function "_FUN" below. For each arg append to
912 DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
913 call args (for the template case). If a parameter pack is found, expand
914 it, flagging it as PACK_EXPANSION_LOCAL_P for the body call. */
915
916 tree fn_args = NULL_TREE;
917 {
918 int ix = 0;
919 tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
920 tree tgt;
921
922 while (src)
923 {
924 tree new_node = copy_node (src);
925
926 if (!fn_args)
927 fn_args = tgt = new_node;
928 else
929 {
930 TREE_CHAIN (tgt) = new_node;
931 tgt = new_node;
932 }
933
934 mark_exp_read (tgt);
935
936 if (generic_lambda_p)
937 {
938 if (DECL_PACK_P (tgt))
939 {
940 tree a = make_pack_expansion (tgt);
941 if (decltype_call)
942 CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
943 PACK_EXPANSION_LOCAL_P (a) = true;
944 CALL_EXPR_ARG (call, ix) = a;
945 }
946 else
947 {
948 tree a = convert_from_reference (tgt);
949 CALL_EXPR_ARG (call, ix) = a;
950 if (decltype_call)
951 CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
952 }
953 ++ix;
954 }
955 else
956 vec_safe_push (direct_argvec, tgt);
957
958 src = TREE_CHAIN (src);
959 }
960 }
961
962
963 if (generic_lambda_p)
964 {
965 if (decltype_call)
966 {
967 ++processing_template_decl;
968 fn_result = finish_decltype_type
969 (decltype_call, /*id_expression_or_member_access_p=*/false,
970 tf_warning_or_error);
971 --processing_template_decl;
972 }
973 }
974 else
975 call = build_call_a (callop,
976 direct_argvec->length (),
977 direct_argvec->address ());
978
979 CALL_FROM_THUNK_P (call) = 1;
980
981 tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
5d9fd871 982
983 /* First build up the conversion op. */
984
814b90ef 985 tree rettype = build_pointer_type (stattype);
986 tree name = mangle_conv_op_name_for_type (rettype);
987 tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
988 tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
989 tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
990 tree fn = convfn;
5d9fd871 991 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
992
993 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
994 && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
995 DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
996
997 SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
998 grokclassfn (type, fn, NO_SPECIAL);
999 set_linkage_according_to_type (type, fn);
1000 rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
1001 DECL_IN_AGGR_P (fn) = 1;
1002 DECL_ARTIFICIAL (fn) = 1;
1003 DECL_NOT_REALLY_EXTERN (fn) = 1;
1004 DECL_DECLARED_INLINE_P (fn) = 1;
1005 DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
64d8d39e 1006 if (nested_def)
5d9fd871 1007 DECL_INTERFACE_KNOWN (fn) = 1;
1008
814b90ef 1009 if (generic_lambda_p)
1010 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1011
5d9fd871 1012 add_method (type, fn, NULL_TREE);
1013
1014 /* Generic thunk code fails for varargs; we'll complain in mark_used if
1015 the conversion op is used. */
1016 if (varargs_function_p (callop))
1017 {
ea17a80d 1018 DECL_DELETED_FN (fn) = 1;
5d9fd871 1019 return;
1020 }
1021
1022 /* Now build up the thunk to be returned. */
1023
1024 name = get_identifier ("_FUN");
814b90ef 1025 tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
1026 fn = statfn;
5d9fd871 1027 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1028 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
1029 && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
1030 DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
1031 grokclassfn (type, fn, NO_SPECIAL);
1032 set_linkage_according_to_type (type, fn);
1033 rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
1034 DECL_IN_AGGR_P (fn) = 1;
1035 DECL_ARTIFICIAL (fn) = 1;
1036 DECL_NOT_REALLY_EXTERN (fn) = 1;
1037 DECL_DECLARED_INLINE_P (fn) = 1;
1038 DECL_STATIC_FUNCTION_P (fn) = 1;
814b90ef 1039 DECL_ARGUMENTS (fn) = fn_args;
1040 for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
5d9fd871 1041 {
1042 /* Avoid duplicate -Wshadow warnings. */
1043 DECL_NAME (arg) = NULL_TREE;
1044 DECL_CONTEXT (arg) = fn;
1045 }
64d8d39e 1046 if (nested_def)
5d9fd871 1047 DECL_INTERFACE_KNOWN (fn) = 1;
1048
814b90ef 1049 if (generic_lambda_p)
1050 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1051
5d9fd871 1052 add_method (type, fn, NULL_TREE);
1053
1054 if (nested)
1055 push_function_context ();
1056 else
1057 /* Still increment function_depth so that we don't GC in the
1058 middle of an expression. */
1059 ++function_depth;
1060
1061 /* Generate the body of the thunk. */
1062
1063 start_preparsed_function (statfn, NULL_TREE,
1064 SF_PRE_PARSED | SF_INCLASS_INLINE);
1065 if (DECL_ONE_ONLY (statfn))
1066 {
1067 /* Put the thunk in the same comdat group as the call op. */
415d1b9a 1068 cgraph_node::get_create (statfn)->add_to_same_comdat_group
1069 (cgraph_node::get_create (callop));
5d9fd871 1070 }
814b90ef 1071 tree body = begin_function_body ();
1072 tree compound_stmt = begin_compound_stmt (0);
1073 if (!generic_lambda_p)
9f10a108 1074 {
814b90ef 1075 set_flags_from_callee (call);
1076 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1077 call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
9f10a108 1078 }
5d9fd871 1079 call = convert_from_reference (call);
1080 finish_return_stmt (call);
1081
1082 finish_compound_stmt (compound_stmt);
1083 finish_function_body (body);
1084
814b90ef 1085 fn = finish_function (/*inline*/2);
1086 if (!generic_lambda_p)
1087 expand_or_defer_fn (fn);
5d9fd871 1088
1089 /* Generate the body of the conversion op. */
1090
1091 start_preparsed_function (convfn, NULL_TREE,
1092 SF_PRE_PARSED | SF_INCLASS_INLINE);
1093 body = begin_function_body ();
1094 compound_stmt = begin_compound_stmt (0);
1095
1096 /* decl_needed_p needs to see that it's used. */
1097 TREE_USED (statfn) = 1;
1098 finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1099
1100 finish_compound_stmt (compound_stmt);
1101 finish_function_body (body);
1102
814b90ef 1103 fn = finish_function (/*inline*/2);
1104 if (!generic_lambda_p)
1105 expand_or_defer_fn (fn);
5d9fd871 1106
1107 if (nested)
1108 pop_function_context ();
1109 else
1110 --function_depth;
1111}
1112
1113/* Returns true iff VAL is a lambda-related declaration which should
1114 be ignored by unqualified lookup. */
1115
1116bool
1117is_lambda_ignored_entity (tree val)
1118{
1119 /* In unevaluated context, look past normal capture proxies. */
1120 if (cp_unevaluated_operand && is_normal_capture_proxy (val))
1121 return true;
1122
1123 /* Always ignore lambda fields, their names are only for debugging. */
1124 if (TREE_CODE (val) == FIELD_DECL
1125 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1126 return true;
1127
1128 /* None of the lookups that use qualify_lookup want the op() from the
1129 lambda; they want the one from the enclosing class. */
1130 if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1131 return true;
1132
1133 return false;
1134}