]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cp/lambda.c
Remove Cilk Plus support.
[thirdparty/gcc.git] / gcc / cp / lambda.c
1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
6 Copyright (C) 1998-2017 Free Software Foundation, Inc.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3, or (at your option)
13 any later version.
14
15 GCC is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33
34 /* Constructor for a lambda expression. */
35
36 tree
37 build_lambda_expr (void)
38 {
39 tree lambda = make_node (LAMBDA_EXPR);
40 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
41 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
42 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
43 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
44 LAMBDA_EXPR_MUTABLE_P (lambda) = false;
45 return lambda;
46 }
47
/* Create the closure object for a LAMBDA_EXPR.  Returns the resulting
   compound-literal expression, error_mark_node on error, or LAMBDA_EXPR
   itself unchanged while processing a template.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  /* Walk the capture list; each node pairs the capture FIELD_DECL
     (TREE_PURPOSE) with its initializer (TREE_VALUE).  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      /* A TREE_LIST initializer (from a parenthesized init-capture) is
	 collapsed into a single expression.  */
      if (TREE_CODE (val) == TREE_LIST)
	val = build_x_compound_expr_from_list (val, ELK_INIT,
					       tf_warning_or_error);

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  /* Restore the caller's diagnostic location on all exit paths.  */
  input_location = saved_loc;
  return expr;
}
124
125 /* Return an initialized RECORD_TYPE for LAMBDA.
126 LAMBDA must have its explicit captures already. */
127
128 tree
129 begin_lambda_type (tree lambda)
130 {
131 tree type;
132
133 {
134 /* Unique name. This is just like an unnamed class, but we cannot use
135 make_anon_name because of certain checks against TYPE_UNNAMED_P. */
136 tree name;
137 name = make_lambda_name ();
138
139 /* Create the new RECORD_TYPE for this lambda. */
140 type = xref_tag (/*tag_code=*/record_type,
141 name,
142 /*scope=*/ts_lambda,
143 /*template_header_p=*/false);
144 if (type == error_mark_node)
145 return error_mark_node;
146 }
147
148 /* Designate it as a struct so that we can use aggregate initialization. */
149 CLASSTYPE_DECLARED_CLASS (type) = false;
150
151 /* Cross-reference the expression and the type. */
152 LAMBDA_EXPR_CLOSURE (lambda) = type;
153 CLASSTYPE_LAMBDA_EXPR (type) = lambda;
154
155 /* In C++17, assume the closure is literal; we'll clear the flag later if
156 necessary. */
157 if (cxx_dialect >= cxx17)
158 CLASSTYPE_LITERAL_P (type) = true;
159
160 /* Clear base types. */
161 xref_basetypes (type, /*bases=*/NULL_TREE);
162
163 /* Start the class. */
164 type = begin_class_definition (type);
165
166 return type;
167 }
168
169 /* Returns the type to use for the return type of the operator() of a
170 closure class. */
171
172 tree
173 lambda_return_type (tree expr)
174 {
175 if (expr == NULL_TREE)
176 return void_type_node;
177 if (type_unknown_p (expr)
178 || BRACE_ENCLOSED_INITIALIZER_P (expr))
179 {
180 cxx_incomplete_type_error (expr, TREE_TYPE (expr));
181 return error_mark_node;
182 }
183 gcc_checking_assert (!type_dependent_expression_p (expr));
184 return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
185 }
186
187 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
188 closure type. */
189
190 tree
191 lambda_function (tree lambda)
192 {
193 tree type;
194 if (TREE_CODE (lambda) == LAMBDA_EXPR)
195 type = LAMBDA_EXPR_CLOSURE (lambda);
196 else
197 type = lambda;
198 gcc_assert (LAMBDA_TYPE_P (type));
199 /* Don't let debug_tree cause instantiation. */
200 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
201 && !COMPLETE_OR_OPEN_TYPE_P (type))
202 return NULL_TREE;
203 lambda = lookup_member (type, call_op_identifier,
204 /*protect=*/0, /*want_type=*/false,
205 tf_warning_or_error);
206 if (lambda)
207 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
208 return lambda;
209 }
210
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
			   bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && type_dependent_expression_p (expr))
    {
      /* Dependent case: wrap the expression in a DECLTYPE_TYPE so the
	 capture's type is recomputed at instantiation time; the three
	 DECLTYPE_FOR_* flags record how to redo the computation.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      /* Init capture: deduce the field type as if declaring
	 'auto x = init' (or 'auto &x = init' for by-reference).  */
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
	/* Add the reference now, so deduction doesn't lose
	   outermost CV qualifiers of EXPR.  */
	type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else
    {
      /* Plain capture (or 'this'): use the expression's type directly.  */
      type = non_reference (unlowered_expr_type (expr));

      /* Functions can't be captured by value, so a function capture is
	 forced to be a reference, as is any by-reference capture.  */
      if (!is_this
	  && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
	type = build_reference_type (type);
    }

  return type;
}
254
255 /* Returns true iff DECL is a lambda capture proxy variable created by
256 build_capture_proxy. */
257
258 bool
259 is_capture_proxy (tree decl)
260 {
261 return (VAR_P (decl)
262 && DECL_HAS_VALUE_EXPR_P (decl)
263 && !DECL_ANON_UNION_VAR_P (decl)
264 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
265 }
266
/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  /* The value expression is either a COMPONENT_REF into the closure,
     or the address of one (for a reference capture); unwrap down to
     the capture FIELD_DECL and check its normal-capture flag.  */
  if (TREE_CODE (val) == ADDR_EXPR)
    val = TREE_OPERAND (val, 0);
  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}
292
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      /* Record the proxy as the local specialization of the captured
	 variable, so uses of the original variable inside the lambda
	 body map to the proxy during template substitution.  */
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
	{
	  /* The captured variable itself must not be a proxy, and any
	     previously registered specialization must belong to a
	     different (enclosing) function.  */
	  gcc_assert (!is_normal_capture_proxy (cap));
	  tree old = retrieve_local_specialization (cap);
	  if (old)
	    gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
	}
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.
     Index 1 of the statement-list stack is that outermost body block's
     list.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
322
323 /* We've just finished processing a lambda; if the containing scope is also
324 a lambda, insert any capture proxies that were created while processing
325 the nested lambda. */
326
327 void
328 insert_pending_capture_proxies (void)
329 {
330 tree lam;
331 vec<tree, va_gc> *proxies;
332 unsigned i;
333
334 if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
335 return;
336
337 lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
338 proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
339 for (i = 0; i < vec_safe_length (proxies); ++i)
340 {
341 tree var = (*proxies)[i];
342 insert_capture_proxy (var);
343 }
344 release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
345 LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
346 }
347
348 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
349 return the type we want the proxy to have: the type of the field itself,
350 with added const-qualification if the lambda isn't mutable and the
351 capture is by value. */
352
353 tree
354 lambda_proxy_type (tree ref)
355 {
356 tree type;
357 if (ref == error_mark_node)
358 return error_mark_node;
359 if (REFERENCE_REF_P (ref))
360 ref = TREE_OPERAND (ref, 0);
361 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
362 type = TREE_TYPE (ref);
363 if (!type || WILDCARD_TYPE_P (non_reference (type)))
364 {
365 type = cxx_make_type (DECLTYPE_TYPE);
366 DECLTYPE_TYPE_EXPR (type) = ref;
367 DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
368 SET_TYPE_STRUCTURAL_EQUALITY (type);
369 }
370 if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
371 type = make_pack_expansion (type);
372 return type;
373 }
374
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  INIT is the capture's initializer expression, used to
   recover the originally captured variable for normal captures.
   Returns the proxy VAR_DECL.  */

tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  The op()'s first
     argument is the 'this' pointer for the closure object.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !POINTER_TYPE_P (type))
    {
      /* *this was captured by value; the proxy for 'this' must still
	 behave like a (const) pointer, so take the member's address.  */
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  /* Build the proxy itself: an artificial variable whose VALUE_EXPR
     forwards every use to the closure member.  */
  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      /* For a normal capture, dig the originally captured VAR_DECL or
	 PARM_DECL out of the initializer and remember it on the proxy.  */
      if (DECL_VLA_CAPTURE_P (member))
	{
	  init = CONSTRUCTOR_ELT (init, 0)->value;
	  init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
	  init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
	}
      else
	{
	  if (PACK_EXPANSION_P (init))
	    init = PACK_EXPANSION_PATTERN (init);
	  if (TREE_CODE (init) == INDIRECT_REF)
	    init = TREE_OPERAND (init, 0);
	  STRIP_NOPS (init);
	}
      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      /* A nested lambda may have captured an outer lambda's proxy;
	 chase through to the original variable.  */
      while (is_normal_capture_proxy (init))
	init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      /* 'this' uses resolve through the LAMBDA_EXPR, so point it at the
	 proxy instead of the field from now on.  */
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    /* The op() isn't the current function yet; queue the proxy for
       insert_pending_capture_proxies.  */
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
466
/* Lazily created identifiers for the "ptr" and "max" fields of the
   record built by vla_capture_type; GTY-rooted so the GC keeps them.  */
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;
469
/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  ARRAY_TYPE is the (VLA) array type being
   captured; the struct holds a pointer to its element type plus a
   maximum index of type size_t.  */

static tree
vla_capture_type (tree array_type)
{
  /* Build an anonymous record { element_type *ptr; size_t max; }.  */
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      /* First use: intern the field names once and cache them.  */
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
491
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  ORIG_INIT is the initializer as
   written, possibly a pack expansion.  EXPLICIT_INIT_P is true for a
   C++14 init-capture.  Returns the capture proxy if the closure has
   been started, NULL_TREE otherwise, or error_mark_node on error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* For a pack capture, work on the pattern and remember to re-expand
     the field and initializer at the end.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      /* A variably-modified type that isn't an N3639 runtime-bound
	 array cannot be captured at all.  */
      error ("capture of variable-size type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
					by_reference_p);
      if (type == error_mark_node)
	return error_mark_node;

      if (id == this_identifier && !by_reference_p)
	{
	  /* Capturing '*this' by value: store the object, not the
	     pointer.  */
	  gcc_assert (POINTER_TYPE_P (type));
	  type = TREE_TYPE (type);
	  initializer = cp_build_fold_indirect_ref (initializer);
	}

      if (dependent_type_p (type))
	;  /* Defer checks until instantiation.  */
      else if (id != this_identifier && by_reference_p)
	{
	  if (!lvalue_p (initializer))
	    {
	      error ("cannot capture %qE by reference", initializer);
	      return error_mark_node;
	    }
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
	{
	  pedwarn (input_location, 0,
		   "already captured %qD in lambda expression", id);
	  return NULL_TREE;
	}
      /* Cleared again by register_capture_members.  */
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
	internal_error ("trying to capture %qD in instantiation of "
			"generic lambda", id);
      finish_member_declaration (member);
    }

  /* Record the capture on the LAMBDA_EXPR's list; a pack capture is
     listed in its expanded form with the original initializer.  */
  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}
652
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  /* Recurse first: the capture list is built in reverse (tree_cons in
     add_capture prepends), so this declares members in source order.  */
  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}
672
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.
   LAMBDA_STACK is ordered outermost-first; each iteration captures in
   one lambda and uses that capture as the initializer for the next,
   inner one.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  /* add_capture keys off current_class_type, so temporarily switch it
     to each closure in turn; restored below.  */
  tree saved_class_type = current_class_type;

  tree node;

  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      /* 'this' is always effectively captured by reference here; other
	 ids follow the lambda's default capture mode.  */
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (this_capture_p
			  || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      /* The inner lambda captures the outer lambda's capture proxy.  */
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}
712
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  Returns error_mark_node if
   'this' is needed but cannot be captured, NULL_TREE if it was not
   captured and ADD_CAPTURE_P is false.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
	  || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      /* LAMBDA_STACK collects the chain of enclosing lambdas (innermost
	 last) that all need to capture 'this'; INIT is the expression
	 that provides 'this' at the outermost level.  */
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
	   1. a non-lambda function or NSDMI,
	   2. a lambda function capturing 'this', or
	   3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  lambda_stack = tree_cons (NULL_TREE,
				    tlambda,
				    lambda_stack);

	  if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
	      && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
	    {
	      /* In an NSDMI, we don't have a function to look up the decl in,
		 but the fake 'this' pointer that we're using for parsing is
		 in scope_chain.  */
	      init = scope_chain->x_current_class_ptr;
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
	  tree containing_function = decl_function_context (closure_decl);

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }

	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* An outer lambda won't let us capture 'this'.  */
	    break;
	}

      if (init)
	{
	  if (add_capture_p)
	    /* Thread the capture through every lambda on the stack.  */
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
	 ensures that the transformed expression is an rvalue. ]  */
      result = rvalue (result);
    }

  return result;
}
830
831 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
832 object. NULL otherwise.. */
833
834 static tree
835 resolvable_dummy_lambda (tree object)
836 {
837 if (!is_dummy_object (object))
838 return NULL_TREE;
839
840 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
841 gcc_assert (!TYPE_PTR_P (type));
842
843 if (type != current_class_type
844 && current_class_type
845 && LAMBDA_TYPE_P (current_class_type)
846 && lambda_function (current_class_type)
847 && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
848 return CLASSTYPE_LAMBDA_EXPR (current_class_type);
849
850 return NULL_TREE;
851 }
852
853 /* We don't want to capture 'this' until we know we need it, i.e. after
854 overload resolution has chosen a non-static member function. At that
855 point we call this function to turn a dummy object into a use of the
856 'this' capture. */
857
858 tree
859 maybe_resolve_dummy (tree object, bool add_capture_p)
860 {
861 if (tree lam = resolvable_dummy_lambda (object))
862 if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
863 if (cap != error_mark_node)
864 object = build_fold_indirect_ref (cap);
865
866 return object;
867 }
868
/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
	/* We've not yet captured, so look at the function set of
	   interest.  */
	if (BASELINK_P (fns))
	  fns = BASELINK_FUNCTIONS (fns);
	/* For a template-id, strip down to the underlying overload set
	   and only consider TEMPLATE_DECLs in the loop below.  */
	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
	if (id_expr)
	  fns = TREE_OPERAND (fns, 0);

	for (lkp_iterator iter (fns); iter; ++iter)
	  if ((!id_expr || TREE_CODE (*iter) == TEMPLATE_DECL)
	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
	    {
	      /* Found a non-static member.  Capture this.  */
	      lambda_expr_this_capture (lam, true);
	      break;
	    }
      }
}
900
901 /* Returns the innermost non-lambda function. */
902
903 tree
904 current_nonlambda_function (void)
905 {
906 tree fn = current_function_decl;
907 while (fn && LAMBDA_FUNCTION_P (fn))
908 fn = decl_function_context (fn);
909 return fn;
910 }
911
912 /* Returns the method basetype of the innermost non-lambda function, or
913 NULL_TREE if none. */
914
915 tree
916 nonlambda_method_basetype (void)
917 {
918 tree fn, type;
919 if (!current_class_ref)
920 return NULL_TREE;
921
922 type = current_class_type;
923 if (!LAMBDA_TYPE_P (type))
924 return type;
925
926 /* Find the nearest enclosing non-lambda function. */
927 fn = TYPE_NAME (type);
928 do
929 fn = decl_function_context (fn);
930 while (fn && LAMBDA_FUNCTION_P (fn));
931
932 if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
933 return NULL_TREE;
934
935 return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
936 }
937
938 /* Like current_scope, but looking through lambdas. */
939
940 tree
941 current_nonlambda_scope (void)
942 {
943 tree scope = current_scope ();
944 for (;;)
945 {
946 if (TREE_CODE (scope) == FUNCTION_DECL
947 && LAMBDA_FUNCTION_P (scope))
948 {
949 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
950 continue;
951 }
952 else if (LAMBDA_TYPE_P (scope))
953 {
954 scope = CP_TYPE_CONTEXT (scope);
955 continue;
956 }
957 break;
958 }
959 return scope;
960 }
961
962 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
963 indicated FN and NARGS, but do not initialize the return type or any of the
964 argument slots. */
965
966 static tree
967 prepare_op_call (tree fn, int nargs)
968 {
969 tree t;
970
971 t = build_vl_exp (CALL_EXPR, nargs + 3);
972 CALL_EXPR_FN (t) = fn;
973 CALL_EXPR_STATIC_CHAIN (t) = NULL;
974
975 return t;
976 }
977
978 /* Return true iff CALLOP is the op() for a generic lambda. */
979
980 bool
981 generic_lambda_fn_p (tree callop)
982 {
983 return (LAMBDA_FUNCTION_P (callop)
984 && DECL_TEMPLATE_INFO (callop)
985 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
986 }
987
988 /* If the closure TYPE has a static op(), also add a conversion to function
989 pointer. */
990
void
maybe_add_lambda_conv_op (tree type)
{
  /* Whether some function is currently open (we were called from within a
     function body); drives push_function_context below.  */
  bool nested = (cfun != NULL);
  /* Whether the closure type itself was defined inside a function.  */
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  /* Only capture-less lambdas get the conversion to function pointer;
     anything with captures (explicit or default) is skipped.  */
  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  /* Nothing to build while still inside a template.  */
  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  /* A null object pointer of op()'s 'this' type; the thunk deliberately
     calls op() with a null object argument (see the UBsan note below).  */
  tree thisarg = build_nop (TREE_TYPE (DECL_ARGUMENTS (callop)),
			    null_pointer_node);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = cp_build_fold_indirect_ref (thisarg);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      /* Count op()'s parameters, excluding 'this'.  */
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    /* Skip 'this'; copy the remaining PARM_DECLs one by one.  */
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
	tree new_node = copy_node (src);

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	/* The parameter is only forwarded, not really "read"; suppress
	   unused/set-but-not-used style diagnostics.  */
	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    /* Avoid capturing variables in this context.  */
	    ++cp_unevaluated_operand;
	    tree a = forward_parm (tgt);
	    --cp_unevaluated_operand;

	    CALL_EXPR_ARG (call, ix) = a;
	    if (decltype_call)
	      CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);

	    if (PACK_EXPANSION_P (a))
	      /* Set this after unsharing so it's not in decltype_call.  */
	      PACK_EXPANSION_LOCAL_P (a) = true;

	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      /* For a deduced return type, compute it from the decltype call.  */
      if (decltype_call)
	{
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  /* _FUN's type: op()'s return type and parameters, minus 'this'.  */
  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
	      (stattype, TYPE_ATTRIBUTES (optype)));
  /* Propagate a non-throwing op() into the thunk's type when noexcept is
     part of the type system.  */
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  /* The conversion operator is a const member function of the closure.  */
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  /* Mark it as a compiler-generated inline member defined in-class.  */
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  /* Register the conversion operator with the closure type.  */
  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  /* Register _FUN with the closure type as well.  */
  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  /* The thunk body is a single 'return op(args...);'.  */
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  /* The conversion op body is a single 'return &_FUN;' (via decay).  */
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
1259
1260 /* True if FN is the static function "_FUN" that gets returned from the lambda
1261 conversion operator. */
1262
1263 bool
1264 lambda_static_thunk_p (tree fn)
1265 {
1266 return (fn && TREE_CODE (fn) == FUNCTION_DECL
1267 && DECL_ARTIFICIAL (fn)
1268 && DECL_STATIC_FUNCTION_P (fn)
1269 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1270 }
1271
1272 /* Returns true iff VAL is a lambda-related declaration which should
1273 be ignored by unqualified lookup. */
1274
1275 bool
1276 is_lambda_ignored_entity (tree val)
1277 {
1278 /* Look past normal capture proxies. */
1279 if (is_normal_capture_proxy (val))
1280 return true;
1281
1282 /* Always ignore lambda fields, their names are only for debugging. */
1283 if (TREE_CODE (val) == FIELD_DECL
1284 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1285 return true;
1286
1287 /* None of the lookups that use qualify_lookup want the op() from the
1288 lambda; they want the one from the enclosing class. */
1289 if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1290 return true;
1291
1292 return false;
1293 }
1294
/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
/* The declaration enclosing the lambdas currently being parsed.  */
static GTY(()) tree lambda_scope;
/* Number of lambdas seen so far within LAMBDA_SCOPE (the discriminator).  */
static GTY(()) int lambda_count;
/* A saved (scope, count) pair for LAMBDA_SCOPE_STACK below.  */
struct GTY(()) tree_int
{
  tree t;
  int i;
};
/* Stack of saved (lambda_scope, lambda_count) pairs, maintained by
   start_lambda_scope and finish_lambda_scope.  */
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
1306
1307 void
1308 start_lambda_scope (tree decl)
1309 {
1310 tree_int ti;
1311 gcc_assert (decl);
1312 /* Once we're inside a function, we ignore variable scope and just push
1313 the function again so that popping works properly. */
1314 if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
1315 decl = current_function_decl;
1316 ti.t = lambda_scope;
1317 ti.i = lambda_count;
1318 vec_safe_push (lambda_scope_stack, ti);
1319 if (lambda_scope != decl)
1320 {
1321 /* Don't reset the count if we're still in the same function. */
1322 lambda_scope = decl;
1323 lambda_count = 0;
1324 }
1325 }
1326
/* Stamp LAMBDA with the current scope and discriminator, which feed into
   the mangling of its closure type (see the comment on lambda_scope).  */

void
record_lambda_scope (tree lambda)
{
  LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
  /* Consecutive lambdas in the same scope get increasing discriminators.  */
  LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
}
1333
1334 void
1335 finish_lambda_scope (void)
1336 {
1337 tree_int *p = &lambda_scope_stack->last ();
1338 if (lambda_scope != p->t)
1339 {
1340 lambda_scope = p->t;
1341 lambda_count = p->i;
1342 }
1343 lambda_scope_stack->pop ();
1344 }
1345
1346 tree
1347 start_lambda_function (tree fco, tree lambda_expr)
1348 {
1349 /* Let the front end know that we are going to be defining this
1350 function. */
1351 start_preparsed_function (fco,
1352 NULL_TREE,
1353 SF_PRE_PARSED | SF_INCLASS_INLINE);
1354
1355 tree body = begin_function_body ();
1356
1357 /* Push the proxies for any explicit captures. */
1358 for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1359 cap = TREE_CHAIN (cap))
1360 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1361
1362 return body;
1363 }
1364
1365 void
1366 finish_lambda_function (tree body)
1367 {
1368 finish_function_body (body);
1369
1370 /* Finish the function and generate code for it if necessary. */
1371 tree fn = finish_function (/*inline_p=*/true);
1372
1373 /* Only expand if the call op is not a template. */
1374 if (!DECL_TEMPLATE_INFO (fn))
1375 expand_or_defer_fn (fn);
1376 }
1377
1378 #include "gt-cp-lambda.h"