]>
Commit | Line | Data |
---|---|---|
5d9fd871 | 1 | /* Perform the semantic phase of lambda parsing, i.e., the process of |
2 | building tree structure, checking semantic consistency, and | |
3 | building RTL. These routines are used both during actual parsing | |
4 | and during the instantiation of template functions. | |
5 | ||
d353bf18 | 6 | Copyright (C) 1998-2015 Free Software Foundation, Inc. |
5d9fd871 | 7 | |
8 | This file is part of GCC. | |
9 | ||
10 | GCC is free software; you can redistribute it and/or modify it | |
11 | under the terms of the GNU General Public License as published by | |
12 | the Free Software Foundation; either version 3, or (at your option) | |
13 | any later version. | |
14 | ||
15 | GCC is distributed in the hope that it will be useful, but | |
16 | WITHOUT ANY WARRANTY; without even the implied warranty of | |
17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
18 | General Public License for more details. | |
19 | ||
20 | You should have received a copy of the GNU General Public License | |
21 | along with GCC; see the file COPYING3. If not see | |
22 | <http://www.gnu.org/licenses/>. */ | |
23 | ||
24 | #include "config.h" | |
25 | #include "system.h" | |
26 | #include "coretypes.h" | |
b20a8bb4 | 27 | #include "alias.h" |
5d9fd871 | 28 | #include "tree.h" |
9ef16211 | 29 | #include "options.h" |
9ed99284 | 30 | #include "stringpool.h" |
1140c305 | 31 | #include "tm.h" |
32 | #include "hard-reg-set.h" | |
1140c305 | 33 | #include "function.h" |
5d9fd871 | 34 | #include "cgraph.h" |
35 | #include "tree-iterator.h" | |
36 | #include "cp-tree.h" | |
37 | #include "toplev.h" | |
5d9fd871 | 38 | |
39 | /* Constructor for a lambda expression. */ | |
40 | ||
41 | tree | |
42 | build_lambda_expr (void) | |
43 | { | |
44 | tree lambda = make_node (LAMBDA_EXPR); | |
45 | LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE; | |
46 | LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE; | |
47 | LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE; | |
48 | LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL; | |
49 | LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE; | |
50 | LAMBDA_EXPR_MUTABLE_P (lambda) = false; | |
51 | return lambda; | |
52 | } | |
53 | ||
/* Create the closure object for a LAMBDA_EXPR: an aggregate whose
   elements initialize the capture fields of the closure type.  Returns
   the initialized object (a compound literal of the closure type), the
   LAMBDA_EXPR itself when still in a template, or error_mark_node on
   error.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  /* In a template, defer building the object until instantiation.  */
  if (processing_template_decl)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  /* Walk the capture list; each TREE_LIST node pairs a capture FIELD_DECL
     (TREE_PURPOSE) with its initializer expression (TREE_VALUE).  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  /* Restore the location saved above, on both the success and error
     paths.  */
  input_location = saved_loc;
  return expr;
}
126 | ||
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  Returns
   error_mark_node if the tag could not be created.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_ANONYMOUS_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}
165 | ||
/* Returns the type to use for the return type of the operator() of a
   closure class.  EXPR is the (possibly NULL) expression the lambda
   body returns.  */

tree
lambda_return_type (tree expr)
{
  /* No return expression: the call operator returns void.  */
  if (expr == NULL_TREE)
    return void_type_node;
  /* An expression with no determinable type (e.g. an overload set) or a
     braced-init-list cannot drive deduction; diagnose and recover with
     void.  */
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return void_type_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  /* Deduced return type: the decayed, cv-unqualified type of EXPR.  */
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}
183 | ||
/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
   closure type, or NULL_TREE if it cannot (yet) be looked up.  */

tree
lambda_function (tree lambda)
{
  tree type;
  /* Accept either the expression or the closure type itself.  */
  if (TREE_CODE (lambda) == LAMBDA_EXPR)
    type = LAMBDA_EXPR_CLOSURE (lambda);
  else
    type = lambda;
  gcc_assert (LAMBDA_TYPE_P (type));
  /* Don't let debug_tree cause instantiation.  */
  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
      && !COMPLETE_OR_OPEN_TYPE_P (type))
    return NULL_TREE;
  lambda = lookup_member (type, ansi_opname (CALL_EXPR),
			  /*protect=*/0, /*want_type=*/false,
			  tf_warning_or_error);
  if (lambda)
    /* For a generic lambda op() is a template; strip down to the
       underlying function decl.  */
    lambda = STRIP_TEMPLATE (get_first_fn (lambda));
  return lambda;
}
207 | ||
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P is true for an init-capture
   ([x = expr]), whose type is deduced as if by auto.
   The caller should add REFERENCE_TYPE for capture by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p)
{
  tree type;
  if (explicit_init_p)
    {
      /* Init-capture: deduce the field type from the initializer.  */
      type = make_auto ();
      type = do_auto_deduction (type, expr, type);
    }
  else
    type = non_reference (unlowered_expr_type (expr));
  /* For a dependent expression (other than 'this') we cannot compute the
     type yet; represent it as a DECLTYPE_TYPE to be resolved at
     instantiation time.  */
  if (type_dependent_expression_p (expr)
      && !is_this_parameter (tree_strip_nop_conversions (expr)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  return type;
}
234 | ||
235 | /* Returns true iff DECL is a lambda capture proxy variable created by | |
236 | build_capture_proxy. */ | |
237 | ||
238 | bool | |
239 | is_capture_proxy (tree decl) | |
240 | { | |
241 | return (VAR_P (decl) | |
242 | && DECL_HAS_VALUE_EXPR_P (decl) | |
243 | && !DECL_ANON_UNION_VAR_P (decl) | |
244 | && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl))); | |
245 | } | |
246 | ||
/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  /* The value expression is a COMPONENT_REF into the closure object;
     operand 1 is the capture FIELD_DECL, which records whether the
     capture was a normal one.  */
  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}
270 | ||
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  cp_binding_level *b;
  tree stmt_list;

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  b = current_binding_level;
  /* Walk out to the binding level just inside the function-parameter
     scope, i.e. the outermost block of the function body.  */
  for (;;)
    {
      cp_binding_level *n = b->level_chain;
      if (n->kind == sk_function_parms)
	break;
      b = n;
    }
  pushdecl_with_scope (var, b, false);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  /* Index 1 is presumably the statement list of that extra body block;
     NOTE(review): relies on the parser's stmt_list_stack layout —
     confirm against cp_parser_lambda_body.  */
  stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
298 | ||
/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  /* Nothing to do unless we are inside a lambda's operator().  */
  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  /* All pending proxies have been inserted; release the vector.  */
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}
323 | ||
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  Returns error_mark_node if REF is erroneous.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  /* Look through an implicit dereference of a reference capture.  */
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  /* If the field type is still dependent, defer via a DECLTYPE_TYPE
     that wraps the COMPONENT_REF itself.  */
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  /* A pack capture's proxy is itself a pack expansion.  */
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
350 | ||
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging: a VAR_DECL whose DECL_VALUE_EXPR forwards to the member.
   Returns the new proxy variable.  */

tree
build_capture_proxy (tree member)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  /* For the 'this' capture, the proxy replaces the member as the
     recorded capture.  */
  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  /* If we're already inside the operator(), insert the proxy now;
     otherwise queue it (see insert_pending_capture_proxies).  */
  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
412 | ||
/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  ARRAY_TYPE is the VLA's type; the struct
   has fields "ptr" (pointer to the element type) and "max" (sizetype).  */

static tree
vla_capture_type (tree array_type)
{
  /* The field identifiers are interned once and reused for every VLA
     capture type built afterwards.  */
  static tree ptr_id, max_id;
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
435 | ||
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  EXPLICIT_INIT_P is true for an init-capture.
   Returns the capture proxy if the closure body has been started,
   NULL_TREE if the proxy is deferred or the capture was a duplicate,
   or error_mark_node on error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* For a pack capture, work on the expansion pattern and remember to
     re-wrap things as expansions below.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST)
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      /* Variable-size types other than N3639 runtime-bound arrays cannot
	 be captured at all.  */
      error ("capture of variable-size type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      type = error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p);
      if (by_reference_p)
	{
	  type = build_reference_type (type);
	  if (!dependent_type_p (type) && !real_lvalue_p (initializer))
	    error ("cannot capture %qE by reference", initializer);
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!dependent_type_p (type) && !COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
	{
	  pedwarn (input_location, 0,
		   "already captured %qD in lambda expression", id);
	  return NULL_TREE;
	}
      /* The mark is cleared again in register_capture_members.  */
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    finish_member_declaration (member);

  /* Record the capture on the capture list, as a pack expansion again
     if this was a pack capture.  */
  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  return NULL_TREE;
}
573 | ||
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  /* Recurse on the tail first: the capture list was built by consing
     onto the front (see add_capture), so this declares the members in
     source order.  */
  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}
593 | ||
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  /* current_class_type is temporarily retargeted at each closure type
     in turn so finish_member_declaration works; restore it at the end.  */
  tree saved_class_type = current_class_type;

  tree node;

  /* Walk from the outermost lambda on the stack inward, capturing in
     each one and using the resulting capture as the initializer for the
     next (inner) lambda.  */
  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
			 id,
			 initializer,
			 /*by_reference_p=*/
			 (!this_capture_p
			  && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
			      == CPLD_REFERENCE)),
			 /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}
633 | ||
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  Returns error_mark_node (after
   diagnosing) if 'this' is not captured and ADD_CAPTURE_P holds, or
   NULL_TREE if not captured and not capturing.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
	  || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
	   1. a non-lambda function or NSDMI,
	   2. a lambda function capturing 'this', or
	   3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  /* Accumulate the enclosing lambdas so add_default_capture can
	     capture 'this' through each level.  */
	  lambda_stack = tree_cons (NULL_TREE,
				    tlambda,
				    lambda_stack);

	  if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)
	      && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL)
	    {
	      /* In an NSDMI, we don't have a function to look up the decl in,
		 but the fake 'this' pointer that we're using for parsing is
		 in scope_chain.  */
	      init = scope_chain->x_current_class_ptr;
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda));
	  tree containing_function = decl_function_context (closure_decl);

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  tlambda
	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }

	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* An outer lambda won't let us capture 'this'.  */
	    break;
	}

      if (init)
	{
	  if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
						/*id=*/this_identifier,
						init);
	  else
	    this_capture = init;
	}
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
	 ensures that the transformed expression is an rvalue. ]  */
      result = rvalue (result);
    }

  return result;
}
751 | ||
/* We don't want to capture 'this' until we know we need it, i.e. after
   overload resolution has chosen a non-static member function.  At that
   point we call this function to turn a dummy object into a use of the
   'this' capture.  ADD_CAPTURE_P says whether we may add the capture if
   it doesn't exist yet.  */

tree
maybe_resolve_dummy (tree object, bool add_capture_p)
{
  /* A real object needs no resolution.  */
  if (!is_dummy_object (object))
    return object;

  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
  gcc_assert (!TYPE_PTR_P (type));

  /* Only rewrite the dummy when we are inside a lambda whose enclosing
     non-lambda class derives from the dummy's type.  */
  if (type != current_class_type
      && current_class_type
      && LAMBDA_TYPE_P (current_class_type)
      && lambda_function (current_class_type)
      && DERIVED_FROM_P (type, current_nonlambda_class_type ()))
    {
      /* In a lambda, need to go through 'this' capture.  */
      tree lam = CLASSTYPE_LAMBDA_EXPR (current_class_type);
      tree cap = lambda_expr_this_capture (lam, add_capture_p);
      if (cap && cap != error_mark_node)
	object = build_x_indirect_ref (EXPR_LOCATION (object), cap,
				       RO_NULL, tf_warning_or_error);
    }

  return object;
}
782 | ||
ed7bf2d1 | 783 | /* Returns the innermost non-lambda function. */ |
784 | ||
785 | tree | |
786 | current_nonlambda_function (void) | |
787 | { | |
788 | tree fn = current_function_decl; | |
789 | while (fn && LAMBDA_FUNCTION_P (fn)) | |
790 | fn = decl_function_context (fn); | |
791 | return fn; | |
792 | } | |
793 | ||
/* Returns the method basetype of the innermost non-lambda function, or
   NULL_TREE if none.  */

tree
nonlambda_method_basetype (void)
{
  tree fn, type;
  /* No object expression at all: not in a member function.  */
  if (!current_class_ref)
    return NULL_TREE;

  type = current_class_type;
  /* Not inside a lambda: the current class is the answer.  */
  if (!LAMBDA_TYPE_P (type))
    return type;

  /* Find the nearest enclosing non-lambda function.  */
  fn = TYPE_NAME (type);
  do
    fn = decl_function_context (fn);
  while (fn && LAMBDA_FUNCTION_P (fn));

  if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
    return NULL_TREE;

  return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
}
819 | ||
814b90ef | 820 | /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with |
821 | indicated FN and NARGS, but do not initialize the return type or any of the | |
822 | argument slots. */ | |
823 | ||
824 | static tree | |
825 | prepare_op_call (tree fn, int nargs) | |
826 | { | |
827 | tree t; | |
828 | ||
829 | t = build_vl_exp (CALL_EXPR, nargs + 3); | |
830 | CALL_EXPR_FN (t) = fn; | |
831 | CALL_EXPR_STATIC_CHAIN (t) = NULL; | |
832 | ||
833 | return t; | |
834 | } | |
835 | ||
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  Only captureless lambdas qualify; a lambda with captures
   returns early below.  For a generic lambda the conversion op and the
   "_FUN" thunk are built as templates.  */

void
maybe_add_lambda_conv_op (tree type)
{
  /* True when we are nested inside compilation of another function;
     controls push/pop_function_context vs. bare function_depth bumps.  */
  bool nested = (cfun != NULL);
  /* True when the closure type itself was declared inside a function.  */
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);

  /* Only capture-less lambdas convert to a function pointer.  */
  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  /* A generic lambda's op() is itself a template; detect that by checking
     that CALLOP is the primary template's own result.  */
  bool const generic_lambda_p
    = (DECL_TEMPLATE_INFO (callop)
       && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  /* Declared return type of op(); may be replaced below when deduced.  */
  tree fn_result = TREE_TYPE (TREE_TYPE (callop));

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      /* A null 'this' of closure type; the object is never actually used.  */
      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      /* Argument count excluding the implicit 'this'.  */
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      /* Slot 0 is a null 'this' pointer of the op()'s 'this' type.  */
      direct_argvec->quick_push (build1 (NOP_EXPR,
					 TREE_TYPE (DECL_ARGUMENTS (callop)),
					 null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    /* Skip the implicit 'this' parameter.  */
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
	tree new_node = copy_node (src);

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	/* Suppress unused-but-set warnings on the forwarded parm.  */
	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    if (DECL_PACK_P (tgt))
	      {
		tree a = make_pack_expansion (tgt);
		/* The decltype copy is made BEFORE setting
		   PACK_EXPANSION_LOCAL_P, so only the body CALL carries
		   the flag (see the comment block above).  */
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
		PACK_EXPANSION_LOCAL_P (a) = true;
		CALL_EXPR_ARG (call, ix) = a;
	      }
	    else
	      {
		tree a = convert_from_reference (tgt);
		CALL_EXPR_ARG (call, ix) = a;
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
	      }
	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }


  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  /* Temporarily re-enter template processing so the decltype of
	     the dependent call is computed, not instantiated.  */
	  ++processing_template_decl;
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	  --processing_template_decl;
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  /* Type of the static "_FUN" thunk: op()'s signature minus 'this'.  */
  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  /* On targets that stash the ptr-to-member-function virtual bit in the
     low bit of the pointer, member functions need 2-byte alignment.  */
  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      /* For the non-template case the call was built above; propagate
	 noreturn/throw flags and wrap class-type results in a target expr.
	 The generic case defers this to instantiation.  */
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  /* The conversion op simply returns the address of "_FUN".  */
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Balance the push_function_context / function_depth bump above.  */
  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
1102 | ||
1103 | /* Returns true iff VAL is a lambda-related declaration which should | |
1104 | be ignored by unqualified lookup. */ | |
1105 | ||
1106 | bool | |
1107 | is_lambda_ignored_entity (tree val) | |
1108 | { | |
1109 | /* In unevaluated context, look past normal capture proxies. */ | |
1110 | if (cp_unevaluated_operand && is_normal_capture_proxy (val)) | |
1111 | return true; | |
1112 | ||
1113 | /* Always ignore lambda fields, their names are only for debugging. */ | |
1114 | if (TREE_CODE (val) == FIELD_DECL | |
1115 | && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val))) | |
1116 | return true; | |
1117 | ||
1118 | /* None of the lookups that use qualify_lookup want the op() from the | |
1119 | lambda; they want the one from the enclosing class. */ | |
1120 | if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val)) | |
1121 | return true; | |
1122 | ||
1123 | return false; | |
1124 | } |