]>
Commit | Line | Data |
---|---|---|
5d9fd871 | 1 | /* Perform the semantic phase of lambda parsing, i.e., the process of |
2 | building tree structure, checking semantic consistency, and | |
3 | building RTL. These routines are used both during actual parsing | |
4 | and during the instantiation of template functions. | |
5 | ||
fbd26352 | 6 | Copyright (C) 1998-2019 Free Software Foundation, Inc. |
5d9fd871 | 7 | |
8 | This file is part of GCC. | |
9 | ||
10 | GCC is free software; you can redistribute it and/or modify it | |
11 | under the terms of the GNU General Public License as published by | |
12 | the Free Software Foundation; either version 3, or (at your option) | |
13 | any later version. | |
14 | ||
15 | GCC is distributed in the hope that it will be useful, but | |
16 | WITHOUT ANY WARRANTY; without even the implied warranty of | |
17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
18 | General Public License for more details. | |
19 | ||
20 | You should have received a copy of the GNU General Public License | |
21 | along with GCC; see the file COPYING3. If not see | |
22 | <http://www.gnu.org/licenses/>. */ | |
23 | ||
24 | #include "config.h" | |
25 | #include "system.h" | |
26 | #include "coretypes.h" | |
4cba6f60 | 27 | #include "cp-tree.h" |
28 | #include "stringpool.h" | |
5d9fd871 | 29 | #include "cgraph.h" |
30 | #include "tree-iterator.h" | |
5d9fd871 | 31 | #include "toplev.h" |
72f9352a | 32 | #include "gimplify.h" |
5d9fd871 | 33 | |
34 | /* Constructor for a lambda expression. */ | |
35 | ||
36 | tree | |
37 | build_lambda_expr (void) | |
38 | { | |
39 | tree lambda = make_node (LAMBDA_EXPR); | |
40 | LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE; | |
41 | LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE; | |
42 | LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE; | |
43 | LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL; | |
5d9fd871 | 44 | LAMBDA_EXPR_MUTABLE_P (lambda) = false; |
45 | return lambda; | |
46 | } | |
47 | ||
/* Create the closure object for a LAMBDA_EXPR.  Returns LAMBDA_EXPR
   itself unchanged when it is dependent or erroneous, otherwise the
   compound-literal expression constructing the closure, or
   error_mark_node on failure.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  /* In a template the closure object is built at instantiation time.  */
  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  /* Turn each capture into an element of the aggregate initializer.  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      if (TREE_CODE (val) == TREE_LIST)
	val = build_x_compound_expr_from_list (val, ELK_INIT,
					       tf_warning_or_error);

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && !TYPE_REF_P (TREE_TYPE (field)))
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  input_location = saved_loc;
  return expr;
}
124 | ||
125 | /* Return an initialized RECORD_TYPE for LAMBDA. | |
126 | LAMBDA must have its explicit captures already. */ | |
127 | ||
128 | tree | |
129 | begin_lambda_type (tree lambda) | |
130 | { | |
131 | tree type; | |
132 | ||
133 | { | |
134 | /* Unique name. This is just like an unnamed class, but we cannot use | |
4f86cbb0 | 135 | make_anon_name because of certain checks against TYPE_UNNAMED_P. */ |
5d9fd871 | 136 | tree name; |
137 | name = make_lambda_name (); | |
138 | ||
139 | /* Create the new RECORD_TYPE for this lambda. */ | |
140 | type = xref_tag (/*tag_code=*/record_type, | |
141 | name, | |
142 | /*scope=*/ts_lambda, | |
143 | /*template_header_p=*/false); | |
240cc9cf | 144 | if (type == error_mark_node) |
145 | return error_mark_node; | |
5d9fd871 | 146 | } |
147 | ||
148 | /* Designate it as a struct so that we can use aggregate initialization. */ | |
149 | CLASSTYPE_DECLARED_CLASS (type) = false; | |
150 | ||
151 | /* Cross-reference the expression and the type. */ | |
152 | LAMBDA_EXPR_CLOSURE (lambda) = type; | |
153 | CLASSTYPE_LAMBDA_EXPR (type) = lambda; | |
154 | ||
33603066 | 155 | /* In C++17, assume the closure is literal; we'll clear the flag later if |
156 | necessary. */ | |
40e2decb | 157 | if (cxx_dialect >= cxx17) |
33603066 | 158 | CLASSTYPE_LITERAL_P (type) = true; |
159 | ||
5d9fd871 | 160 | /* Clear base types. */ |
161 | xref_basetypes (type, /*bases=*/NULL_TREE); | |
162 | ||
163 | /* Start the class. */ | |
164 | type = begin_class_definition (type); | |
5d9fd871 | 165 | |
166 | return type; | |
167 | } | |
168 | ||
169 | /* Returns the type to use for the return type of the operator() of a | |
170 | closure class. */ | |
171 | ||
172 | tree | |
173 | lambda_return_type (tree expr) | |
174 | { | |
175 | if (expr == NULL_TREE) | |
176 | return void_type_node; | |
177 | if (type_unknown_p (expr) | |
178 | || BRACE_ENCLOSED_INITIALIZER_P (expr)) | |
179 | { | |
180 | cxx_incomplete_type_error (expr, TREE_TYPE (expr)); | |
86771497 | 181 | return error_mark_node; |
5d9fd871 | 182 | } |
183 | gcc_checking_assert (!type_dependent_expression_p (expr)); | |
184 | return cv_unqualified (type_decays_to (unlowered_expr_type (expr))); | |
185 | } | |
186 | ||
187 | /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the | |
188 | closure type. */ | |
189 | ||
190 | tree | |
191 | lambda_function (tree lambda) | |
192 | { | |
193 | tree type; | |
194 | if (TREE_CODE (lambda) == LAMBDA_EXPR) | |
195 | type = LAMBDA_EXPR_CLOSURE (lambda); | |
196 | else | |
197 | type = lambda; | |
198 | gcc_assert (LAMBDA_TYPE_P (type)); | |
199 | /* Don't let debug_tree cause instantiation. */ | |
200 | if (CLASSTYPE_TEMPLATE_INSTANTIATION (type) | |
201 | && !COMPLETE_OR_OPEN_TYPE_P (type)) | |
202 | return NULL_TREE; | |
ef8f6502 | 203 | lambda = lookup_member (type, call_op_identifier, |
5d9fd871 | 204 | /*protect=*/0, /*want_type=*/false, |
205 | tf_warning_or_error); | |
206 | if (lambda) | |
814b90ef | 207 | lambda = STRIP_TEMPLATE (get_first_fn (lambda)); |
5d9fd871 | 208 | return lambda; |
209 | } | |
210 | ||
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
			   bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && type_dependent_expression_p (expr))
    {
      /* Dependent capture: defer the computation by recording the
	 capture expression in a DECLTYPE_TYPE placeholder, to be
	 resolved at instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      /* Init capture: deduce the field type as if declared
	 "auto x = init" (or "auto &x = init" for by-reference).  */
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
	/* Add the reference now, so deduction doesn't lose
	   outermost CV qualifiers of EXPR.  */
	type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else
    {
      /* Plain capture: use the expression's type directly.  */
      type = non_reference (unlowered_expr_type (expr));

      /* Function types are always captured by reference; 'this' is
	 never given reference type here.  */
      if (!is_this
	  && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
	type = build_reference_type (type);
    }

  return type;
}
254 | ||
255 | /* Returns true iff DECL is a lambda capture proxy variable created by | |
256 | build_capture_proxy. */ | |
257 | ||
258 | bool | |
259 | is_capture_proxy (tree decl) | |
260 | { | |
261 | return (VAR_P (decl) | |
262 | && DECL_HAS_VALUE_EXPR_P (decl) | |
263 | && !DECL_ANON_UNION_VAR_P (decl) | |
c2f14a91 | 264 | && !DECL_DECOMPOSITION_P (decl) |
c3a961ad | 265 | && !DECL_FNAME_P (decl) |
9b0e9786 | 266 | && !(DECL_ARTIFICIAL (decl) |
267 | && DECL_LANG_SPECIFIC (decl) | |
268 | && DECL_OMP_PRIVATIZED_MEMBER (decl)) | |
5d9fd871 | 269 | && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl))); |
270 | } | |
271 | ||
272 | /* Returns true iff DECL is a capture proxy for a normal capture | |
273 | (i.e. without explicit initializer). */ | |
274 | ||
275 | bool | |
276 | is_normal_capture_proxy (tree decl) | |
277 | { | |
278 | if (!is_capture_proxy (decl)) | |
279 | /* It's not a capture proxy. */ | |
280 | return false; | |
281 | ||
a7ea8f96 | 282 | return (DECL_LANG_SPECIFIC (decl) |
283 | && DECL_CAPTURED_VARIABLE (decl)); | |
5d9fd871 | 284 | } |
285 | ||
80fdc40f | 286 | /* Returns true iff DECL is a capture proxy for a normal capture |
287 | of a constant variable. */ | |
288 | ||
289 | bool | |
290 | is_constant_capture_proxy (tree decl) | |
291 | { | |
292 | if (is_normal_capture_proxy (decl)) | |
293 | return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl)); | |
294 | return false; | |
295 | } | |
296 | ||
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      /* Register the proxy as the local specialization of the captured
	 variable, so uses of that variable inside the lambda body
	 resolve to the proxy.  */
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
	{
	  /* The captured variable must not itself be a proxy, and any
	     previous specialization must belong to a different
	     (enclosing) function.  */
	  gcc_assert (!is_normal_capture_proxy (cap));
	  tree old = retrieve_local_specialization (cap);
	  if (old)
	    gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
	}
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  /* NOTE(review): index 1 is assumed to be the statement list of the
     extra outermost block pushed for the lambda body -- depends on
     stmt_list_stack layout; confirm against begin_function_body.  */
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
326 | ||
327 | /* We've just finished processing a lambda; if the containing scope is also | |
328 | a lambda, insert any capture proxies that were created while processing | |
329 | the nested lambda. */ | |
330 | ||
331 | void | |
332 | insert_pending_capture_proxies (void) | |
333 | { | |
334 | tree lam; | |
335 | vec<tree, va_gc> *proxies; | |
336 | unsigned i; | |
337 | ||
338 | if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl)) | |
339 | return; | |
340 | ||
341 | lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl)); | |
342 | proxies = LAMBDA_EXPR_PENDING_PROXIES (lam); | |
343 | for (i = 0; i < vec_safe_length (proxies); ++i) | |
344 | { | |
345 | tree var = (*proxies)[i]; | |
346 | insert_capture_proxy (var); | |
347 | } | |
348 | release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam)); | |
349 | LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL; | |
350 | } | |
351 | ||
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  Returns error_mark_node if REF is erroneous.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* The field's type isn't known yet (dependent); defer with a
	 DECLTYPE_TYPE that remembers REF for later resolution.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  /* A pack capture's proxy type is itself a pack expansion.  */
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
378 | ||
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  INIT is the capture's initializer, used for a normal
   capture to recover the captured variable.  Returns the proxy VAR_DECL.  */

static tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !INDIRECT_TYPE_P (type))
    {
      /* Presumably a by-value capture of *this: the field holds the
	 object itself, so expose it through a const pointer to keep
	 'this' pointer semantics -- confirm against add_capture.  */
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  complete_type (type);

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      /* Strip wrappers from INIT down to the variable actually captured
	 and remember it in DECL_CAPTURED_VARIABLE.  */
      if (DECL_VLA_CAPTURE_P (member))
	{
	  init = CONSTRUCTOR_ELT (init, 0)->value;
	  init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
	  init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
	}
      else
	{
	  if (PACK_EXPANSION_P (init))
	    init = PACK_EXPANSION_PATTERN (init);
	}

      if (INDIRECT_REF_P (init))
	init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);

      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      /* Chase through proxies of enclosing lambdas to the real
	 variable.  */
      while (is_normal_capture_proxy (init))
	init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  /* If we're not inside the operator() yet, queue the proxy for
     insert_pending_capture_proxies.  */
  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
474 | ||
/* Cached identifiers for the two fields of the VLA capture struct;
   created lazily on first use and GC-rooted.  */
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type)
{
  /* Build an anonymous record { element *ptr; size_t max; }.  */
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
499 | ||
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  Returns the capture proxy once
   the closure class has been started, NULL_TREE for explicit captures
   added before the body, or error_mark_node on error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* For a pack capture, work on the pattern and remember to re-expand.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      /* Other variably-modified types (not N3639 VLAs) are unsupported.  */
      sorry ("capture of variably-modified type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      return error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
					by_reference_p);
      if (type == error_mark_node)
	return error_mark_node;

      /* Capturing *this by value: store the object, not the pointer.  */
      if (id == this_identifier && !by_reference_p)
	{
	  gcc_assert (INDIRECT_TYPE_P (type));
	  type = TREE_TYPE (type);
	  initializer = cp_build_fold_indirect_ref (initializer);
	}

      if (dependent_type_p (type))
	;
      else if (id != this_identifier && by_reference_p)
	{
	  /* By-reference capture needs an lvalue to bind to.  */
	  if (!lvalue_p (initializer))
	    {
	      error ("cannot capture %qE by reference", initializer);
	      return error_mark_node;
	    }
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
	internal_error ("trying to capture %qD in instantiation of "
			"generic lambda", id);
      finish_member_declaration (member);
    }

  /* Prepend to the capture list (so the list is in reverse order of
     addition; register_capture_members compensates).  */
  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
  return NULL_TREE;
}
648 | ||
649 | /* Register all the capture members on the list CAPTURES, which is the | |
650 | LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */ | |
651 | ||
652 | void | |
653 | register_capture_members (tree captures) | |
654 | { | |
655 | if (captures == NULL_TREE) | |
656 | return; | |
657 | ||
658 | register_capture_members (TREE_CHAIN (captures)); | |
6dcf5c5f | 659 | |
660 | tree field = TREE_PURPOSE (captures); | |
661 | if (PACK_EXPANSION_P (field)) | |
662 | field = PACK_EXPANSION_PATTERN (field); | |
663 | ||
6dcf5c5f | 664 | finish_member_declaration (field); |
5d9fd871 | 665 | } |
666 | ||
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);
  tree var = NULL_TREE;
  tree saved_class_type = current_class_type;

  /* Walk from the outermost lambda inward, capturing into each closure
     in turn; each inner lambda captures the outer lambda's proxy.  */
  for (tree node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      /* add_capture adds the field to the current class.  */
      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (this_capture_p
			  || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      /* The proxy becomes the initializer for the next (inner) lambda.  */
      initializer = convert_from_reference (var);

      /* Warn about deprecated implicit capture of this via [=].  */
      if (cxx_dialect >= cxx2a
	  && this_capture_p
	  && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY
	  && !in_system_header_at (LAMBDA_EXPR_LOCATION (lambda)))
	{
	  if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
			  "implicit capture of %qE via %<[=]%> is deprecated "
			  "in C++20", this_identifier))
	    inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
		    "%<*this%> capture");
	}
    }

  current_class_type = saved_class_type;

  return var;
}
715 | ||
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is nonzero.  If ADD_CAPTURE_P is negative,
   try to capture but don't complain if we can't.  */

tree
lambda_expr_this_capture (tree lambda, int add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture)
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
	 1. a non-lambda function or NSDMI,
	 2. a lambda function capturing 'this', or
	 3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  if (add_capture_p
	      && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* tlambda won't let us capture 'this'.  */
	    break;

	  if (add_capture_p)
	    /* Accumulate the lambdas that will need a capture added,
	       outermost first, for add_default_capture.  */
	    lambda_stack = tree_cons (NULL_TREE,
				      tlambda,
				      lambda_stack);

	  tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
	  tree containing_function
	    = decl_function_context (TYPE_NAME (closure));

	  tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
	  if (ex && TREE_CODE (ex) == FIELD_DECL)
	    {
	      /* Lambda in an NSDMI.  We don't have a function to look up
		 'this' in, but we can find (or rebuild) the fake one from
		 inject_this_parameter.  */
	      if (!containing_function && !COMPLETE_TYPE_P (closure))
		/* If we're parsing a lambda in a non-local class,
		   we can find the fake 'this' in scope_chain.  */
		init = scope_chain->x_current_class_ptr;
	      else
		/* Otherwise it's either gone or buried in
		   function_context_stack, so make another.  */
		init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
					TYPE_UNQUALIFIED);
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }
	}

      if (init)
	{
	  if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      /* Only complain when the caller asked for a hard capture
	 (ADD_CAPTURE_P == 1); negative means best-effort.  */
      if (add_capture_p == 1)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
	 ensures that the transformed expression is an rvalue. ]  */
      result = rvalue (result);
    }

  return result;
}
843 | ||
80fdc40f | 844 | /* Return the innermost LAMBDA_EXPR we're currently in, if any. */ |
845 | ||
846 | tree | |
847 | current_lambda_expr (void) | |
848 | { | |
849 | tree type = current_class_type; | |
850 | while (type && !LAMBDA_TYPE_P (type)) | |
851 | type = decl_type_context (TYPE_NAME (type)); | |
852 | if (type) | |
853 | return CLASSTYPE_LAMBDA_EXPR (type); | |
854 | else | |
855 | return NULL_TREE; | |
856 | } | |
857 | ||
/* Return the current LAMBDA_EXPR, if this is a resolvable dummy
   object.  NULL_TREE otherwise.  */

static tree
resolvable_dummy_lambda (tree object)
{
  if (!is_dummy_object (object))
    return NULL_TREE;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  /* The dummy is resolvable if we're inside a lambda call operator whose
     innermost enclosing non-lambda method belongs to a class derived from
     TYPE; then the captured 'this' can stand in for the dummy object.  */
  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, nonlambda_method_basetype()))
    return CLASSTYPE_LAMBDA_EXPR (current_class_type);

  return NULL_TREE;
}
879 | ||
/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  ADD_CAPTURE_P is passed on to lambda_expr_this_capture;
   if false, only an already-existing capture is used.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
      if (cap != error_mark_node)
	/* Replace the dummy with '*this-capture'.  */
	object = build_fold_indirect_ref (cap);

  return object;
}
895 | ||
/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
	/* We've not yet captured, so look at the function set of
	   interest.  */
	if (BASELINK_P (fns))
	  fns = BASELINK_FUNCTIONS (fns);
	/* A TEMPLATE_ID_EXPR carries the underlying overload set in
	   operand 0.  */
	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
	if (id_expr)
	  fns = TREE_OPERAND (fns, 0);

	for (lkp_iterator iter (fns); iter; ++iter)
	  /* For a template-id only template declarations can match;
	     otherwise consider everything except unresolved USING_DECLs.  */
	  if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
	       || TREE_CODE (*iter) == TEMPLATE_DECL)
	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
	    {
	      /* Found a non-static member.  Capture this.  */
	      lambda_expr_this_capture (lam, /*maybe*/-1);
	      break;
	    }
      }
}
928 | ||
ed7bf2d1 | 929 | /* Returns the innermost non-lambda function. */ |
930 | ||
931 | tree | |
932 | current_nonlambda_function (void) | |
933 | { | |
934 | tree fn = current_function_decl; | |
935 | while (fn && LAMBDA_FUNCTION_P (fn)) | |
936 | fn = decl_function_context (fn); | |
937 | return fn; | |
938 | } | |
939 | ||
/* Returns the method basetype of the innermost non-lambda function, including
   a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  if (!current_class_ref)
    return NULL_TREE;

  tree type = current_class_type;
  if (!type || !LAMBDA_TYPE_P (type))
    /* Not inside a lambda; the current class (possibly NULL_TREE) is the
       answer directly.  */
    return type;

  /* Walk outward through nested closure types.  */
  while (true)
    {
      tree lam = CLASSTYPE_LAMBDA_EXPR (type);
      tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
      if (ex && TREE_CODE (ex) == FIELD_DECL)
	/* Lambda in an NSDMI.  */
	return DECL_CONTEXT (ex);

      tree fn = TYPE_CONTEXT (type);
      if (!fn || TREE_CODE (fn) != FUNCTION_DECL
	  || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
	/* No enclosing non-lambda method.  */
	return NULL_TREE;
      if (!LAMBDA_FUNCTION_P (fn))
	/* Found an enclosing non-lambda method.  */
	return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
      type = DECL_CONTEXT (fn);
    }
}
972 | ||
d05ba3ef | 973 | /* Like current_scope, but looking through lambdas. */ |
974 | ||
975 | tree | |
976 | current_nonlambda_scope (void) | |
977 | { | |
978 | tree scope = current_scope (); | |
979 | for (;;) | |
980 | { | |
981 | if (TREE_CODE (scope) == FUNCTION_DECL | |
982 | && LAMBDA_FUNCTION_P (scope)) | |
983 | { | |
984 | scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope)); | |
985 | continue; | |
986 | } | |
987 | else if (LAMBDA_TYPE_P (scope)) | |
988 | { | |
989 | scope = CP_TYPE_CONTEXT (scope); | |
990 | continue; | |
991 | } | |
992 | break; | |
993 | } | |
994 | return scope; | |
995 | } | |
996 | ||
814b90ef | 997 | /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with |
998 | indicated FN and NARGS, but do not initialize the return type or any of the | |
999 | argument slots. */ | |
1000 | ||
1001 | static tree | |
1002 | prepare_op_call (tree fn, int nargs) | |
1003 | { | |
1004 | tree t; | |
1005 | ||
1006 | t = build_vl_exp (CALL_EXPR, nargs + 3); | |
1007 | CALL_EXPR_FN (t) = fn; | |
1008 | CALL_EXPR_STATIC_CHAIN (t) = NULL; | |
1009 | ||
1010 | return t; | |
1011 | } | |
1012 | ||
3311d302 | 1013 | /* Return true iff CALLOP is the op() for a generic lambda. */ |
1014 | ||
1015 | bool | |
1016 | generic_lambda_fn_p (tree callop) | |
1017 | { | |
1018 | return (LAMBDA_FUNCTION_P (callop) | |
1019 | && DECL_TEMPLATE_INFO (callop) | |
1020 | && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop))); | |
1021 | } | |
1022 | ||
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  Only capture-less lambdas get this conversion; we synthesize
   both the conversion operator and the static "_FUN" thunk it returns.  */

void
maybe_add_lambda_conv_op (tree type)
{
  /* Whether we're currently inside another function's body.  */
  bool nested = (cfun != NULL);
  /* Whether the closure type was defined inside a function.  */
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  /* Only capture-less lambdas convert to a function pointer.  */
  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  /* A null 'this' pointer of the closure-pointer type; the thunk
     deliberately calls op() with a null object argument.  */
  tree thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = cp_build_fold_indirect_ref (thisarg);
      tree objfn = lookup_template_function (DECL_NAME (callop),
					     DECL_TI_ARGS (callop));
      objfn = build_min (COMPONENT_REF, NULL_TREE,
			 instance, objfn, NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    /* Skip the 'this' parameter; _FUN has no object argument.  */
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
	tree new_node = copy_node (src);

	/* Clear TREE_ADDRESSABLE on thunk arguments.  */
	TREE_ADDRESSABLE (new_node) = 0;

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    tree a = tgt;
	    if (DECL_PACK_P (tgt))
	      {
		a = make_pack_expansion (a);
		PACK_EXPANSION_LOCAL_P (a) = true;
	      }
	    CALL_EXPR_ARG (call, ix) = a;

	    if (decltype_call)
	      {
		/* Avoid capturing variables in this context.  */
		++cp_unevaluated_operand;
		CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
		--cp_unevaluated_operand;
	      }

	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  /* Deduce the return type from the decltype of the call.  */
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
	      (stattype, TYPE_ATTRIBUTES (optype)));
  /* Propagate a noexcept op() into the function type (C++17).  */
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  SET_DECL_LANGUAGE (convfn, lang_cplusplus);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
  SET_DECL_LANGUAGE (statfn, lang_cplusplus);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
1302 | ||
33603066 | 1303 | /* True if FN is the static function "_FUN" that gets returned from the lambda |
1304 | conversion operator. */ | |
1305 | ||
1306 | bool | |
1307 | lambda_static_thunk_p (tree fn) | |
1308 | { | |
1309 | return (fn && TREE_CODE (fn) == FUNCTION_DECL | |
1310 | && DECL_ARTIFICIAL (fn) | |
1311 | && DECL_STATIC_FUNCTION_P (fn) | |
1312 | && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn))); | |
1313 | } | |
1314 | ||
5d9fd871 | 1315 | /* Returns true iff VAL is a lambda-related declaration which should |
1316 | be ignored by unqualified lookup. */ | |
1317 | ||
1318 | bool | |
1319 | is_lambda_ignored_entity (tree val) | |
1320 | { | |
6f20c785 | 1321 | /* Look past normal capture proxies. */ |
1322 | if (is_normal_capture_proxy (val)) | |
5d9fd871 | 1323 | return true; |
1324 | ||
1325 | /* Always ignore lambda fields, their names are only for debugging. */ | |
1326 | if (TREE_CODE (val) == FIELD_DECL | |
1327 | && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val))) | |
1328 | return true; | |
1329 | ||
1330 | /* None of the lookups that use qualify_lookup want the op() from the | |
1331 | lambda; they want the one from the enclosing class. */ | |
1332 | if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val)) | |
1333 | return true; | |
1334 | ||
1335 | return false; | |
1336 | } | |
5407f1e9 | 1337 | |
/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
/* The declaration whose scope currently encloses any lambdas being
   parsed, or NULL_TREE.  */
static GTY(()) tree lambda_scope;
/* Discriminator counter for lambdas within LAMBDA_SCOPE.  */
static GTY(()) int lambda_count;
/* A saved (scope, count) pair, for the save/restore stack below.  */
struct GTY(()) tree_int
{
  tree t;
  int i;
};
/* Stack of saved (lambda_scope, lambda_count) pairs, pushed by
   start_lambda_scope and popped by finish_lambda_scope.  */
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
1349 | ||
1350 | void | |
1351 | start_lambda_scope (tree decl) | |
1352 | { | |
1353 | tree_int ti; | |
1354 | gcc_assert (decl); | |
1355 | /* Once we're inside a function, we ignore variable scope and just push | |
1356 | the function again so that popping works properly. */ | |
1357 | if (current_function_decl && TREE_CODE (decl) == VAR_DECL) | |
1358 | decl = current_function_decl; | |
1359 | ti.t = lambda_scope; | |
1360 | ti.i = lambda_count; | |
1361 | vec_safe_push (lambda_scope_stack, ti); | |
1362 | if (lambda_scope != decl) | |
1363 | { | |
1364 | /* Don't reset the count if we're still in the same function. */ | |
1365 | lambda_scope = decl; | |
1366 | lambda_count = 0; | |
1367 | } | |
1368 | } | |
1369 | ||
/* Stamp LAMBDA with the current mangling scope and the next discriminator
   within that scope.  */

void
record_lambda_scope (tree lambda)
{
  LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
  LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
}
1376 | ||
/* This lambda is an instantiation of a lambda in a template default argument
   that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either.  But we do
   need to use and increment the global count to avoid collisions.  */

void
record_null_lambda_scope (tree lambda)
{
  if (vec_safe_is_empty (lambda_scope_stack))
    record_lambda_scope (lambda);
  else
    {
      /* Use the scope/count at the bottom of the stack, i.e. the outermost
	 saved state, which is expected to have a null scope.  */
      tree_int *p = lambda_scope_stack->begin();
      LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
      LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
    }
  /* Either way the lambda must have ended up with no extra scope.  */
  gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
}
1394 | ||
52e76545 | 1395 | void |
1396 | finish_lambda_scope (void) | |
1397 | { | |
1398 | tree_int *p = &lambda_scope_stack->last (); | |
1399 | if (lambda_scope != p->t) | |
1400 | { | |
1401 | lambda_scope = p->t; | |
1402 | lambda_count = p->i; | |
1403 | } | |
1404 | lambda_scope_stack->pop (); | |
1405 | } | |
1406 | ||
/* Begin parsing the body of the call operator FCO of LAMBDA_EXPR's closure;
   returns the function body to pass to finish_lambda_function.  */

tree
start_lambda_function (tree fco, tree lambda_expr)
{
  /* Let the front end know that we are going to be defining this
     function.  */
  start_preparsed_function (fco,
			    NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);

  tree body = begin_function_body ();

  /* Push the proxies for any explicit captures.  */
  for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
       cap = TREE_CHAIN (cap))
    build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));

  return body;
}
1425 | ||
80fdc40f | 1426 | /* Subroutine of prune_lambda_captures: CAP is a node in |
1427 | LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we | |
1428 | might optimize away the capture, or NULL_TREE if there is no such | |
1429 | variable. */ | |
1430 | ||
1431 | static tree | |
1432 | var_to_maybe_prune (tree cap) | |
1433 | { | |
1434 | if (LAMBDA_CAPTURE_EXPLICIT_P (cap)) | |
1435 | /* Don't prune explicit captures. */ | |
1436 | return NULL_TREE; | |
1437 | ||
1438 | tree mem = TREE_PURPOSE (cap); | |
1439 | if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem)) | |
1440 | /* Packs and init-captures aren't captures of constant vars. */ | |
1441 | return NULL_TREE; | |
1442 | ||
1443 | tree init = TREE_VALUE (cap); | |
1444 | if (is_normal_capture_proxy (init)) | |
1445 | init = DECL_CAPTURED_VARIABLE (init); | |
1446 | if (decl_constant_var_p (init)) | |
1447 | return init; | |
1448 | ||
1449 | return NULL_TREE; | |
1450 | } | |
1451 | ||
/* walk_tree helper for prune_lambda_captures: Remember which capture proxies
   for constant variables are actually used in the lambda body.

   There will always be a DECL_EXPR for the capture proxy; remember it when we
   see it, but replace it with any other use.  */

static tree
mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
{
  /* DATA maps each captured constant variable to the remembered use.  */
  hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;

  tree var = NULL_TREE;
  if (TREE_CODE (*t) == DECL_EXPR)
    {
      tree decl = DECL_EXPR_DECL (*t);
      if (is_constant_capture_proxy (decl))
	{
	  var = DECL_CAPTURED_VARIABLE (decl);
	  /* Don't walk into the declaration itself.  */
	  *walk_subtrees = 0;
	}
    }
  else if (is_constant_capture_proxy (*t))
    var = DECL_CAPTURED_VARIABLE (*t);

  if (var)
    {
      /* Record the DECL_EXPR if it's the first thing we see, but let any
	 actual use of the proxy (a VAR_DECL reference) override it.  */
      tree *&slot = const_vars.get_or_insert (var);
      if (!slot || VAR_P (*t))
	slot = t;
    }

  return NULL_TREE;
}
1485 | ||
/* We're at the end of processing a lambda; go back and remove any captures of
   constant variables for which we've folded away all uses.  */

static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.  */
    return;

  /* Map from captured constant variable to its remembered use (see
     mark_const_cap_r); if only the DECL_EXPR remains, all real uses
     were folded away.  */
  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
	{
	  tree **use = const_vars.get (var);
	  if (use && TREE_CODE (**use) == DECL_EXPR)
	    {
	      /* All uses of this capture were folded away, leaving only the
		 proxy declaration.  */

	      /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
	      *capp = TREE_CHAIN (cap);

	      /* And out of TYPE_FIELDS.  */
	      tree field = TREE_PURPOSE (cap);
	      while (*fieldp != field)
		fieldp = &DECL_CHAIN (*fieldp);
	      *fieldp = DECL_CHAIN (*fieldp);

	      /* And remove the capture proxy declaration.  */
	      **use = void_node;
	      continue;
	    }
	}

      capp = &TREE_CHAIN (cap);
    }
}
1534 | ||
/* Finish parsing the lambda call operator whose statement-tree is BODY;
   counterpart to start_lambda_function.  */

void
finish_lambda_function (tree body)
{
  finish_function_body (body);

  /* Remove captures of constant variables whose every use was folded.  */
  prune_lambda_captures (body);

  /* Finish the function and generate code for it if necessary.  */
  tree fn = finish_function (/*inline_p=*/true);

  /* Only expand if the call op is not a template.  */
  if (!DECL_TEMPLATE_INFO (fn))
    expand_or_defer_fn (fn);
}
1549 | ||
5407f1e9 | 1550 | #include "gt-cp-lambda.h" |