]>
Commit | Line | Data |
---|---|---|
a960e808 AB |
1 | /* Perform the semantic phase of lambda parsing, i.e., the process of |
2 | building tree structure, checking semantic consistency, and | |
3 | building RTL. These routines are used both during actual parsing | |
4 | and during the instantiation of template functions. | |
5 | ||
a5544970 | 6 | Copyright (C) 1998-2019 Free Software Foundation, Inc. |
a960e808 AB |
7 | |
8 | This file is part of GCC. | |
9 | ||
10 | GCC is free software; you can redistribute it and/or modify it | |
11 | under the terms of the GNU General Public License as published by | |
12 | the Free Software Foundation; either version 3, or (at your option) | |
13 | any later version. | |
14 | ||
15 | GCC is distributed in the hope that it will be useful, but | |
16 | WITHOUT ANY WARRANTY; without even the implied warranty of | |
17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
18 | General Public License for more details. | |
19 | ||
20 | You should have received a copy of the GNU General Public License | |
21 | along with GCC; see the file COPYING3. If not see | |
22 | <http://www.gnu.org/licenses/>. */ | |
23 | ||
24 | #include "config.h" | |
25 | #include "system.h" | |
26 | #include "coretypes.h" | |
2adfab87 AM |
27 | #include "cp-tree.h" |
28 | #include "stringpool.h" | |
a960e808 AB |
29 | #include "cgraph.h" |
30 | #include "tree-iterator.h" | |
a960e808 | 31 | #include "toplev.h" |
bd28a34f | 32 | #include "gimplify.h" |
a960e808 AB |
33 | |
34 | /* Constructor for a lambda expression. */ | |
35 | ||
36 | tree | |
37 | build_lambda_expr (void) | |
38 | { | |
39 | tree lambda = make_node (LAMBDA_EXPR); | |
40 | LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE; | |
41 | LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE; | |
42 | LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE; | |
43 | LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL; | |
a960e808 AB |
44 | LAMBDA_EXPR_MUTABLE_P (lambda) = false; |
45 | return lambda; | |
46 | } | |
47 | ||
/* Create the closure object for a LAMBDA_EXPR: an aggregate-style
   constructor call whose elements initialize the capture fields.
   Returns LAMBDA_EXPR unchanged inside a template (instantiation will
   redo the work) and error_mark_node on failure.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  /* Walk the capture list; each node pairs the capture FIELD_DECL
     (TREE_PURPOSE) with its initializer (TREE_VALUE).  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      if (TREE_CODE (val) == TREE_LIST)
	val = build_x_compound_expr_from_list (val, ELK_INIT,
					       tf_warning_or_error);

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && !TYPE_REF_P (TREE_TYPE (field)))
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  /* Restore the location we clobbered above before returning.  */
  input_location = saved_loc;
  return expr;
}
124 | ||
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  Returns
   error_mark_node if the closure type could not be created.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_UNNAMED_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx17)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}
168 | ||
169 | /* Returns the type to use for the return type of the operator() of a | |
170 | closure class. */ | |
171 | ||
172 | tree | |
173 | lambda_return_type (tree expr) | |
174 | { | |
175 | if (expr == NULL_TREE) | |
176 | return void_type_node; | |
177 | if (type_unknown_p (expr) | |
178 | || BRACE_ENCLOSED_INITIALIZER_P (expr)) | |
179 | { | |
180 | cxx_incomplete_type_error (expr, TREE_TYPE (expr)); | |
7624ca36 | 181 | return error_mark_node; |
a960e808 AB |
182 | } |
183 | gcc_checking_assert (!type_dependent_expression_p (expr)); | |
184 | return cv_unqualified (type_decays_to (unlowered_expr_type (expr))); | |
185 | } | |
186 | ||
/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type, or NULL_TREE if it isn't available.  */

tree
lambda_function (tree lambda)
{
  tree type;
  /* Accept either the LAMBDA_EXPR or its closure type.  */
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, call_op_identifier,
			  /*protect=*/0, /*want_type=*/false,
			  tf_warning_or_error);
  if (lambda)
    /* For a generic lambda the call operator is a template; strip down
       to the underlying FUNCTION_DECL either way.  */
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}
210 | ||
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
			   bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && type_dependent_expression_p (expr))
    {
      /* Inside a template: defer the computation by wrapping EXPR in a
	 DECLTYPE_TYPE to be resolved at instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      /* Init capture: deduce the field type like 'auto x = expr'.  */
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
	/* Add the reference now, so deduction doesn't lose
	   outermost CV qualifiers of EXPR.  */
	type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else
    {
      /* Plain capture: use the (non-reference) type of the expression.  */
      type = non_reference (unlowered_expr_type (expr));

      /* Functions aren't objects, so they are captured by reference even
	 in a by-copy capture.  */
      if (!is_this
	  && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
	type = build_reference_type (type);
    }

  return type;
}
254 | ||
255 | /* Returns true iff DECL is a lambda capture proxy variable created by | |
256 | build_capture_proxy. */ | |
257 | ||
258 | bool | |
259 | is_capture_proxy (tree decl) | |
260 | { | |
261 | return (VAR_P (decl) | |
262 | && DECL_HAS_VALUE_EXPR_P (decl) | |
263 | && !DECL_ANON_UNION_VAR_P (decl) | |
6f58bc58 | 264 | && !DECL_DECOMPOSITION_P (decl) |
ddd0d18c | 265 | && !DECL_FNAME_P (decl) |
a960e808 AB |
266 | && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl))); |
267 | } | |
268 | ||
/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  Inspect the
     DECL_VALUE_EXPR to find the underlying capture field.  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  /* A by-value 'this' capture is wrapped in an ADDR_EXPR; look through
     it to reach the COMPONENT_REF.  */
  if (TREE_CODE (val) == ADDR_EXPR)
    val = TREE_OPERAND (val, 0);
  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}
294 | ||
1577f10a JM |
295 | /* Returns true iff DECL is a capture proxy for a normal capture |
296 | of a constant variable. */ | |
297 | ||
298 | bool | |
299 | is_constant_capture_proxy (tree decl) | |
300 | { | |
301 | if (is_normal_capture_proxy (decl)) | |
302 | return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl)); | |
303 | return false; | |
304 | } | |
305 | ||
a960e808 AB |
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      /* Record the proxy as the local specialization of the captured
	 variable so template instantiation maps uses correctly.  */
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
	{
	  gcc_assert (!is_normal_capture_proxy (cap));
	  tree old = retrieve_local_specialization (cap);
	  if (old)
	    gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
	}
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  /* Index 1 is the statement list of the outermost (extra body) block.  */
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
335 | ||
/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  /* Nothing to do unless we're inside a lambda call operator.  */
  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  /* The pending vector has been drained; release and clear it.  */
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}
360 | ||
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  Returns error_mark_node if REF is erroneous.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* Dependent field type: defer with a DECLTYPE_TYPE resolved at
	 instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    /* The field is a pack capture; so is the proxy.  */
    type = make_pack_expansion (type);
  return type;
}
387 | ||
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  INIT is the capture's initializer expression.  */

static tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !INDIRECT_TYPE_P (type))
    {
      /* *this was captured by value; the proxy for 'this' is the
	 address of the captured object, as a const pointer.  */
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  complete_type (type);

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      /* For a normal capture, record the captured variable itself so
	 DECL_CAPTURED_VARIABLE works; peel the initializer back to the
	 underlying VAR_DECL/PARM_DECL.  */
      if (DECL_VLA_CAPTURE_P (member))
	{
	  init = CONSTRUCTOR_ELT (init, 0)->value;
	  init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
	  init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
	}
      else
	{
	  if (PACK_EXPANSION_P (init))
	    init = PACK_EXPANSION_PATTERN (init);
	}

      if (INDIRECT_REF_P (init))
	init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);

      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      /* An intervening lambda's proxy may itself be the initializer;
	 chase through to the original variable.  */
      while (is_normal_capture_proxy (init))
	init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  /* If we're not inside the operator() yet, queue the proxy so
     insert_pending_capture_proxies can add it later.  */
  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
483 | ||
ff502317 BE |
/* Cached identifiers for the two fields of a VLA capture record;
   GTY(()) roots them for the garbage collector.  */
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type)
{
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  /* Lazily create the field identifiers on first use.  */
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
508 | ||
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  Returns the capture proxy, or
   NULL_TREE when the proxy must be built later (explicit captures,
   see cp_parser_lambda_body), or error_mark_node on error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* A pack-expansion initializer means a pack capture; work on the
     pattern and remember to re-expand below.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      sorry ("capture of variably-modified type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      return error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
					by_reference_p);
      if (type == error_mark_node)
	return error_mark_node;

      if (id == this_identifier && !by_reference_p)
	{
	  /* Capturing '*this' by value: the field holds the object, and
	     the initializer is the dereferenced 'this'.  */
	  gcc_assert (INDIRECT_TYPE_P (type));
	  type = TREE_TYPE (type);
	  initializer = cp_build_fold_indirect_ref (initializer);
	}

      if (dependent_type_p (type))
	;
      else if (id != this_identifier && by_reference_p)
	{
	  if (!lvalue_p (initializer))
	    {
	      error ("cannot capture %qE by reference", initializer);
	      return error_mark_node;
	    }
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
	{
	  pedwarn (input_location, 0,
		   "already captured %qD in lambda expression", id);
	  return NULL_TREE;
	}
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
	internal_error ("trying to capture %qD in instantiation of "
			"generic lambda", id);
      finish_member_declaration (member);
    }

  /* Record the capture on the list; for a pack capture, record the
     expansion forms of both the member and the initializer.  */
  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
  return NULL_TREE;
}
670 | ||
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  /* The capture list was built by consing onto the front (see
     add_capture), so recurse on the tail first to declare the members
     in their original order.  */
  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}
690 | ||
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);
  tree var = NULL_TREE;
  tree saved_class_type = current_class_type;

  /* Capture into each lambda on the stack, outermost first; each
     iteration's proxy becomes the next (inner) lambda's initializer.  */
  for (tree node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (this_capture_p
			  || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);

      /* Warn about deprecated implicit capture of this via [=].  */
      if (cxx_dialect >= cxx2a
	  && this_capture_p
	  && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY
	  && !in_system_header_at (LAMBDA_EXPR_LOCATION (lambda)))
	{
	  if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
			  "implicit capture of %qE via %<[=]%> is deprecated "
			  "in C++20", this_identifier))
	    inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
		    "%<*this%> capture");
	}
    }

  /* Restore the class context we clobbered in the loop.  */
  current_class_type = saved_class_type;

  return var;
}
739 | ||
0b360a07 MV |
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  Returns error_mark_node if
   'this' must be but cannot be captured, NULL_TREE if it simply isn't
   available.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture)
    {
      /* LAMBDA_STACK collects the lambdas we'd have to capture through;
	 INIT ends up as the expression denoting the outer 'this'.  */
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
	 1. a non-lambda function or NSDMI,
	 2. a lambda function capturing 'this', or
	 3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  if (add_capture_p
	      && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* tlambda won't let us capture 'this'.  */
	    break;

	  if (add_capture_p)
	    lambda_stack = tree_cons (NULL_TREE,
				      tlambda,
				      lambda_stack);

	  tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
	  tree containing_function
	    = decl_function_context (TYPE_NAME (closure));

	  tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
	  if (ex && TREE_CODE (ex) == FIELD_DECL)
	    {
	      /* Lambda in an NSDMI.  We don't have a function to look up
		 'this' in, but we can find (or rebuild) the fake one from
		 inject_this_parameter.  */
	      if (!containing_function && !COMPLETE_TYPE_P (closure))
		/* If we're parsing a lambda in a non-local class,
		   we can find the fake 'this' in scope_chain.  */
		init = scope_chain->x_current_class_ptr;
	      else
		/* Otherwise it's either gone or buried in
		   function_context_stack, so make another.  */
		init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
					TYPE_UNQUALIFIED);
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }
	}

      if (init)
	{
	  if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
	 ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
866 | ||
1577f10a JM |
867 | /* Return the innermost LAMBDA_EXPR we're currently in, if any. */ |
868 | ||
869 | tree | |
870 | current_lambda_expr (void) | |
871 | { | |
872 | tree type = current_class_type; | |
873 | while (type && !LAMBDA_TYPE_P (type)) | |
874 | type = decl_type_context (TYPE_NAME (type)); | |
875 | if (type) | |
876 | return CLASSTYPE_LAMBDA_EXPR (type); | |
877 | else | |
878 | return NULL_TREE; | |
879 | } | |
880 | ||
8ddfdbc2 NS |
881 | /* Return the current LAMBDA_EXPR, if this is a resolvable dummy |
882 | object. NULL otherwise.. */ | |
a960e808 | 883 | |
8ddfdbc2 NS |
884 | static tree |
885 | resolvable_dummy_lambda (tree object) | |
a960e808 AB |
886 | { |
887 | if (!is_dummy_object (object)) | |
8ddfdbc2 | 888 | return NULL_TREE; |
a960e808 AB |
889 | |
890 | tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object)); | |
891 | gcc_assert (!TYPE_PTR_P (type)); | |
892 | ||
893 | if (type != current_class_type | |
894 | && current_class_type | |
895 | && LAMBDA_TYPE_P (current_class_type) | |
e7b67047 | 896 | && lambda_function (current_class_type) |
4cda703e | 897 | && DERIVED_FROM_P (type, nonlambda_method_basetype())) |
8ddfdbc2 NS |
898 | return CLASSTYPE_LAMBDA_EXPR (current_class_type); |
899 | ||
900 | return NULL_TREE; | |
901 | } | |
902 | ||
903 | /* We don't want to capture 'this' until we know we need it, i.e. after | |
904 | overload resolution has chosen a non-static member function. At that | |
905 | point we call this function to turn a dummy object into a use of the | |
906 | 'this' capture. */ | |
907 | ||
908 | tree | |
909 | maybe_resolve_dummy (tree object, bool add_capture_p) | |
910 | { | |
911 | if (tree lam = resolvable_dummy_lambda (object)) | |
912 | if (tree cap = lambda_expr_this_capture (lam, add_capture_p)) | |
913 | if (cap != error_mark_node) | |
04757a2a | 914 | object = build_fold_indirect_ref (cap); |
a960e808 AB |
915 | |
916 | return object; | |
917 | } | |
918 | ||
8ddfdbc2 NS |
/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
	/* We've not yet captured, so look at the function set of
	   interest.  */
	if (BASELINK_P (fns))
	  fns = BASELINK_FUNCTIONS (fns);
	/* Look through an explicit template-id; in that case only
	   template functions in the set are candidates below.  */
	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
	if (id_expr)
	  fns = TREE_OPERAND (fns, 0);

	/* Scan the overload set; capture 'this' as soon as any
	   (matching) non-static member function is found.  */
	for (lkp_iterator iter (fns); iter; ++iter)
	  if ((!id_expr || TREE_CODE (*iter) == TEMPLATE_DECL)
	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
	    {
	      /* Found a non-static member.  Capture this.  */
	      lambda_expr_this_capture (lam, true);
	      break;
	    }
      }
}
950 | ||
2bf492a1 JM |
951 | /* Returns the innermost non-lambda function. */ |
952 | ||
953 | tree | |
954 | current_nonlambda_function (void) | |
955 | { | |
956 | tree fn = current_function_decl; | |
957 | while (fn && LAMBDA_FUNCTION_P (fn)) | |
958 | fn = decl_function_context (fn); | |
959 | return fn; | |
960 | } | |
961 | ||
4cda703e JM |
/* Returns the method basetype of the innermost non-lambda function, including
   a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  /* No object in scope at all.  */
  if (!current_class_ref)
    return NULL_TREE;

  tree type = current_class_type;
  if (!type || !LAMBDA_TYPE_P (type))
    /* Not inside a lambda: the current class (possibly null) is it.  */
    return type;

  /* Inside a lambda: walk out through enclosing closure types until we
     find the NSDMI class or a non-lambda member function.  */
  while (true)
    {
      tree lam = CLASSTYPE_LAMBDA_EXPR (type);
      tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
      if (ex && TREE_CODE (ex) == FIELD_DECL)
	/* Lambda in an NSDMI.  */
	return DECL_CONTEXT (ex);

      tree fn = TYPE_CONTEXT (type);
      if (!fn || TREE_CODE (fn) != FUNCTION_DECL
	  || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
	/* No enclosing non-lambda method.  */
	return NULL_TREE;
      if (!LAMBDA_FUNCTION_P (fn))
	/* Found an enclosing non-lambda method.  */
	return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
      /* FN is itself a lambda op(); continue with its closure type.  */
      type = DECL_CONTEXT (fn);
    }
}
994 | ||
18c4fa8e JM |
995 | /* Like current_scope, but looking through lambdas. */ |
996 | ||
997 | tree | |
998 | current_nonlambda_scope (void) | |
999 | { | |
1000 | tree scope = current_scope (); | |
1001 | for (;;) | |
1002 | { | |
1003 | if (TREE_CODE (scope) == FUNCTION_DECL | |
1004 | && LAMBDA_FUNCTION_P (scope)) | |
1005 | { | |
1006 | scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope)); | |
1007 | continue; | |
1008 | } | |
1009 | else if (LAMBDA_TYPE_P (scope)) | |
1010 | { | |
1011 | scope = CP_TYPE_CONTEXT (scope); | |
1012 | continue; | |
1013 | } | |
1014 | break; | |
1015 | } | |
1016 | return scope; | |
1017 | } | |
1018 | ||
6a8b219b AB |
1019 | /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with |
1020 | indicated FN and NARGS, but do not initialize the return type or any of the | |
1021 | argument slots. */ | |
1022 | ||
1023 | static tree | |
1024 | prepare_op_call (tree fn, int nargs) | |
1025 | { | |
1026 | tree t; | |
1027 | ||
1028 | t = build_vl_exp (CALL_EXPR, nargs + 3); | |
1029 | CALL_EXPR_FN (t) = fn; | |
1030 | CALL_EXPR_STATIC_CHAIN (t) = NULL; | |
1031 | ||
1032 | return t; | |
1033 | } | |
1034 | ||
72013ec5 JM |
1035 | /* Return true iff CALLOP is the op() for a generic lambda. */ |
1036 | ||
1037 | bool | |
1038 | generic_lambda_fn_p (tree callop) | |
1039 | { | |
1040 | return (LAMBDA_FUNCTION_P (callop) | |
1041 | && DECL_TEMPLATE_INFO (callop) | |
1042 | && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop))); | |
1043 | } | |
1044 | ||
a960e808 AB |
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  Builds two members: the conversion operator itself, and the
   static thunk "_FUN" whose address the conversion operator returns.  */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  /* Only capture-less lambdas convert to function pointer.  */
  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  /* A null 'this' argument: the thunk calls op() on a null object, which
     is fine since there are no captures to access (see the UBsan
     suppression further down).  */
  tree thisarg = build_nop (TREE_TYPE (DECL_ARGUMENTS (callop)),
			    null_pointer_node);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = cp_build_fold_indirect_ref (thisarg);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
	/* Copy this parameter and append it to the FN_ARGS chain.  */
	tree new_node = copy_node (src);

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    /* Avoid capturing variables in this context.  */
	    ++cp_unevaluated_operand;
	    tree a = forward_parm (tgt);
	    --cp_unevaluated_operand;

	    CALL_EXPR_ARG (call, ix) = a;
	    if (decltype_call)
	      CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);

	    if (PACK_EXPANSION_P (a))
	      /* Set this after unsharing so it's not in decltype_call.  */
	      PACK_EXPANSION_LOCAL_P (a) = true;

	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  /* Deduce the return type from the decltype of the call.  */
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  /* The type of _FUN: same signature as op() minus 'this', keeping the
     op()'s attributes and (if -fnoexcept-type) its noexcept.  */
  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
	      (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  SET_DECL_LANGUAGE (convfn, lang_cplusplus);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
  SET_DECL_LANGUAGE (statfn, lang_cplusplus);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
1317 | ||
98e5a19a JM |
1318 | /* True if FN is the static function "_FUN" that gets returned from the lambda |
1319 | conversion operator. */ | |
1320 | ||
1321 | bool | |
1322 | lambda_static_thunk_p (tree fn) | |
1323 | { | |
1324 | return (fn && TREE_CODE (fn) == FUNCTION_DECL | |
1325 | && DECL_ARTIFICIAL (fn) | |
1326 | && DECL_STATIC_FUNCTION_P (fn) | |
1327 | && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn))); | |
1328 | } | |
1329 | ||
a960e808 AB |
1330 | /* Returns true iff VAL is a lambda-related declaration which should |
1331 | be ignored by unqualified lookup. */ | |
1332 | ||
1333 | bool | |
1334 | is_lambda_ignored_entity (tree val) | |
1335 | { | |
5c263e84 JM |
1336 | /* Look past normal capture proxies. */ |
1337 | if (is_normal_capture_proxy (val)) | |
a960e808 AB |
1338 | return true; |
1339 | ||
1340 | /* Always ignore lambda fields, their names are only for debugging. */ | |
1341 | if (TREE_CODE (val) == FIELD_DECL | |
1342 | && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val))) | |
1343 | return true; | |
1344 | ||
1345 | /* None of the lookups that use qualify_lookup want the op() from the | |
1346 | lambda; they want the one from the enclosing class. */ | |
1347 | if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val)) | |
1348 | return true; | |
1349 | ||
1350 | return false; | |
1351 | } | |
ff502317 | 1352 | |
f44a8dd5 JM |
/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
/* The scope currently being recorded into lambdas, and the next
   discriminator to hand out within that scope.  */
static GTY(()) tree lambda_scope;
static GTY(()) int lambda_count;
/* A saved (scope, count) pair, the element of lambda_scope_stack.  */
struct GTY(()) tree_int
{
  tree t;
  int i;
};
/* Stack of saved (lambda_scope, lambda_count) pairs, pushed by
   start_lambda_scope and popped by finish_lambda_scope.  */
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
1364 | ||
1365 | void | |
1366 | start_lambda_scope (tree decl) | |
1367 | { | |
1368 | tree_int ti; | |
1369 | gcc_assert (decl); | |
1370 | /* Once we're inside a function, we ignore variable scope and just push | |
1371 | the function again so that popping works properly. */ | |
1372 | if (current_function_decl && TREE_CODE (decl) == VAR_DECL) | |
1373 | decl = current_function_decl; | |
1374 | ti.t = lambda_scope; | |
1375 | ti.i = lambda_count; | |
1376 | vec_safe_push (lambda_scope_stack, ti); | |
1377 | if (lambda_scope != decl) | |
1378 | { | |
1379 | /* Don't reset the count if we're still in the same function. */ | |
1380 | lambda_scope = decl; | |
1381 | lambda_count = 0; | |
1382 | } | |
1383 | } | |
1384 | ||
1385 | void | |
1386 | record_lambda_scope (tree lambda) | |
1387 | { | |
1388 | LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope; | |
1389 | LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++; | |
1390 | } | |
1391 | ||
582f844c JM |
/* This lambda is an instantiation of a lambda in a template default argument
   that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either.  But we do
   need to use and increment the global count to avoid collisions.  */

void
record_null_lambda_scope (tree lambda)
{
  if (vec_safe_is_empty (lambda_scope_stack))
    record_lambda_scope (lambda);
  else
    {
      /* Use the outermost saved scope/count pair; for a lambda from a
	 template default argument that saved scope is null.  */
      tree_int *p = lambda_scope_stack->begin();
      LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
      LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
    }
  /* Either path must have left the extra scope null.  */
  gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
}
1409 | ||
f44a8dd5 JM |
1410 | void |
1411 | finish_lambda_scope (void) | |
1412 | { | |
1413 | tree_int *p = &lambda_scope_stack->last (); | |
1414 | if (lambda_scope != p->t) | |
1415 | { | |
1416 | lambda_scope = p->t; | |
1417 | lambda_count = p->i; | |
1418 | } | |
1419 | lambda_scope_stack->pop (); | |
1420 | } | |
1421 | ||
1422 | tree | |
1423 | start_lambda_function (tree fco, tree lambda_expr) | |
1424 | { | |
1425 | /* Let the front end know that we are going to be defining this | |
1426 | function. */ | |
1427 | start_preparsed_function (fco, | |
1428 | NULL_TREE, | |
1429 | SF_PRE_PARSED | SF_INCLASS_INLINE); | |
1430 | ||
1431 | tree body = begin_function_body (); | |
1432 | ||
1433 | /* Push the proxies for any explicit captures. */ | |
1434 | for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap; | |
1435 | cap = TREE_CHAIN (cap)) | |
5c263e84 | 1436 | build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap)); |
f44a8dd5 JM |
1437 | |
1438 | return body; | |
1439 | } | |
1440 | ||
1577f10a JM |
1441 | /* Subroutine of prune_lambda_captures: CAP is a node in |
1442 | LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we | |
1443 | might optimize away the capture, or NULL_TREE if there is no such | |
1444 | variable. */ | |
1445 | ||
1446 | static tree | |
1447 | var_to_maybe_prune (tree cap) | |
1448 | { | |
1449 | if (LAMBDA_CAPTURE_EXPLICIT_P (cap)) | |
1450 | /* Don't prune explicit captures. */ | |
1451 | return NULL_TREE; | |
1452 | ||
1453 | tree mem = TREE_PURPOSE (cap); | |
1454 | if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem)) | |
1455 | /* Packs and init-captures aren't captures of constant vars. */ | |
1456 | return NULL_TREE; | |
1457 | ||
1458 | tree init = TREE_VALUE (cap); | |
1459 | if (is_normal_capture_proxy (init)) | |
1460 | init = DECL_CAPTURED_VARIABLE (init); | |
1461 | if (decl_constant_var_p (init)) | |
1462 | return init; | |
1463 | ||
1464 | return NULL_TREE; | |
1465 | } | |
1466 | ||
/* walk_tree helper for prune_lambda_captures: Remember which capture proxies
   for constant variables are actually used in the lambda body.

   There will always be a DECL_EXPR for the capture proxy; remember it when we
   see it, but replace it with any other use.  */

static tree
mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
{
  /* DATA maps each captured constant variable to the location of one
     remembered reference to it.  */
  hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;

  tree var = NULL_TREE;
  if (TREE_CODE (*t) == DECL_EXPR)
    {
      tree decl = DECL_EXPR_DECL (*t);
      if (is_constant_capture_proxy (decl))
	var = DECL_CAPTURED_VARIABLE (decl);
      /* Don't walk into the DECL_EXPR's operands.  */
      *walk_subtrees = 0;
    }
  else if (is_constant_capture_proxy (*t))
    var = DECL_CAPTURED_VARIABLE (*t);

  if (var)
    {
      /* A real use (a VAR_DECL reference) overrides a remembered
	 DECL_EXPR; the DECL_EXPR only fills an empty slot.  */
      tree *&slot = const_vars.get_or_insert (var);
      if (!slot || VAR_P (*t))
	slot = t;
    }

  return NULL_TREE;
}
1498 | ||
/* We're at the end of processing a lambda; go back and remove any captures of
   constant variables for which we've folded away all uses.  */

static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.  */
    return;

  /* Map from captured constant variable to its single remembered
     reference (a use, or the proxy's DECL_EXPR if no use survived),
     filled in by mark_const_cap_r.  */
  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  /* Walk the capture list, splicing out (from the capture list, the
     closure's TYPE_FIELDS, and the body) each capture whose only
     remaining reference is its proxy declaration.  */
  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
	{
	  tree **use = const_vars.get (var);
	  if (use && TREE_CODE (**use) == DECL_EXPR)
	    {
	      /* All uses of this capture were folded away, leaving only the
		 proxy declaration.  */

	      /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
	      *capp = TREE_CHAIN (cap);

	      /* And out of TYPE_FIELDS.  */
	      tree field = TREE_PURPOSE (cap);
	      while (*fieldp != field)
		fieldp = &DECL_CHAIN (*fieldp);
	      *fieldp = DECL_CHAIN (*fieldp);

	      /* And remove the capture proxy declaration.  */
	      **use = void_node;
	      continue;
	    }
	}

      capp = &TREE_CHAIN (cap);
    }
}
1547 | ||
f44a8dd5 JM |
/* Finish the lambda call operator whose BODY was returned by
   start_lambda_function: close the body, prune unused constant
   captures, and finish/expand the function.  */

void
finish_lambda_function (tree body)
{
  finish_function_body (body);

  /* Remove captures of constant variables whose uses were all folded.  */
  prune_lambda_captures (body);

  /* Finish the function and generate code for it if necessary.  */
  tree fn = finish_function (/*inline_p=*/true);

  /* Only expand if the call op is not a template.  */
  if (!DECL_TEMPLATE_INFO (fn))
    expand_or_defer_fn (fn);
}
1562 | ||
ff502317 | 1563 | #include "gt-cp-lambda.h" |