1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2    building tree structure, checking semantic consistency, and
3    building RTL.  These routines are used both during actual parsing
4    and during the instantiation of template functions.
5 
6    Copyright (C) 1998-2021 Free Software Foundation, Inc.
7 
8    This file is part of GCC.
9 
10    GCC is free software; you can redistribute it and/or modify it
11    under the terms of the GNU General Public License as published by
12    the Free Software Foundation; either version 3, or (at your option)
13    any later version.
14 
15    GCC is distributed in the hope that it will be useful, but
16    WITHOUT ANY WARRANTY; without even the implied warranty of
17    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18    General Public License for more details.
19 
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3.  If not see
22 <http://www.gnu.org/licenses/>.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33 #include "target.h"
34 
35 /* Constructor for a lambda expression.  */
36 
37 tree
38 build_lambda_expr (void)
39 {
40   tree lambda = make_node (LAMBDA_EXPR);
41   LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
42   LAMBDA_EXPR_CAPTURE_LIST         (lambda) = NULL_TREE;
43   LAMBDA_EXPR_THIS_CAPTURE         (lambda) = NULL_TREE;
44   LAMBDA_EXPR_REGEN_INFO           (lambda) = NULL_TREE;
45   LAMBDA_EXPR_PENDING_PROXIES      (lambda) = NULL;
46   LAMBDA_EXPR_MUTABLE_P            (lambda) = false;
47   return lambda;
48 }
49 
50 /* Create the closure object for a LAMBDA_EXPR.  */
51 
52 tree
53 build_lambda_object (tree lambda_expr)
54 {
55   /* Build aggregate constructor call.
56      - cp_parser_braced_list
57      - cp_parser_functional_cast  */
58   vec<constructor_elt, va_gc> *elts = NULL;
59   tree node, expr, type;
60   location_t saved_loc;
61 
62   if (processing_template_decl || lambda_expr == error_mark_node)
63     return lambda_expr;
64 
65   /* Make sure any error messages refer to the lambda-introducer.  */
66   saved_loc = input_location;
67   input_location = LAMBDA_EXPR_LOCATION (lambda_expr);
68 
69   for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
70        node;
71        node = TREE_CHAIN (node))
72     {
73       tree field = TREE_PURPOSE (node);
74       tree val = TREE_VALUE (node);
75 
76       if (field == error_mark_node)
77 	{
78 	  expr = error_mark_node;
79 	  goto out;
80 	}
81 
82       if (TREE_CODE (val) == TREE_LIST)
83 	val = build_x_compound_expr_from_list (val, ELK_INIT,
84 					       tf_warning_or_error);
85 
86       if (DECL_P (val))
87 	mark_used (val);
88 
89       /* Mere mortals can't copy arrays with aggregate initialization, so
90 	 do some magic to make it work here.  */
91       if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
92 	val = build_array_copy (val);
93       else if (DECL_NORMAL_CAPTURE_P (field)
94 	       && !DECL_VLA_CAPTURE_P (field)
95 	       && !TYPE_REF_P (TREE_TYPE (field)))
96 	{
97 	  /* "the entities that are captured by copy are used to
98 	     direct-initialize each corresponding non-static data
99 	     member of the resulting closure object."
100 
101 	     There's normally no way to express direct-initialization
102 	     from an element of a CONSTRUCTOR, so we build up a special
103 	     TARGET_EXPR to bypass the usual copy-initialization.  */
104 	  val = force_rvalue (val, tf_warning_or_error);
105 	  if (TREE_CODE (val) == TARGET_EXPR)
106 	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
107 	}
108 
109       CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
110     }
111 
112   expr = build_constructor (init_list_type_node, elts);
113   CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;
114 
115   /* N2927: "[The closure] class type is not an aggregate."
116      But we briefly treat it as an aggregate to make this simpler.  */
117   type = LAMBDA_EXPR_CLOSURE (lambda_expr);
118   CLASSTYPE_NON_AGGREGATE (type) = 0;
119   expr = finish_compound_literal (type, expr, tf_warning_or_error);
120   CLASSTYPE_NON_AGGREGATE (type) = 1;
121 
122  out:
123   input_location = saved_loc;
124   return expr;
125 }
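
/* For example, given

     int a = 1;
     int b[2] = { 2, 3 };
     auto l = [a, b] { };

   the closure object is built roughly as the direct-initialization
   { a, <copy of b> }: the scalar capture becomes a TARGET_EXPR marked
   TARGET_EXPR_DIRECT_INIT_P, while the array capture goes through
   build_array_copy, since aggregate initialization cannot copy arrays.  */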
126 
127 /* Return an initialized RECORD_TYPE for LAMBDA.
128    LAMBDA must have its explicit captures already.  */
129 
130 tree
131 begin_lambda_type (tree lambda)
132 {
133   /* Lambda names are nearly but not quite anonymous.  */
134   tree name = make_anon_name ();
135   IDENTIFIER_LAMBDA_P (name) = true;
136 
137   /* Create the new RECORD_TYPE for this lambda.  */
138   tree type = xref_tag (/*tag_code=*/record_type, name);
139   if (type == error_mark_node)
140     return error_mark_node;
141 
142   /* Designate it as a struct so that we can use aggregate initialization.  */
143   CLASSTYPE_DECLARED_CLASS (type) = false;
144 
145   /* Cross-reference the expression and the type.  */
146   LAMBDA_EXPR_CLOSURE (lambda) = type;
147   CLASSTYPE_LAMBDA_EXPR (type) = lambda;
148 
149   /* In C++17, assume the closure is literal; we'll clear the flag later if
150      necessary.  */
151   if (cxx_dialect >= cxx17)
152     CLASSTYPE_LITERAL_P (type) = true;
153 
154   /* Clear base types.  */
155   xref_basetypes (type, /*bases=*/NULL_TREE);
156 
157   /* Start the class.  */
158   type = begin_class_definition (type);
159 
160   return type;
161 }
162 
163 /* Returns the type to use for the return type of the operator() of a
164    closure class.  */
165 
166 tree
167 lambda_return_type (tree expr)
168 {
169   if (expr == NULL_TREE)
170     return void_type_node;
171   if (type_unknown_p (expr)
172       || BRACE_ENCLOSED_INITIALIZER_P (expr))
173     {
174       cxx_incomplete_type_error (expr, TREE_TYPE (expr));
175       return error_mark_node;
176     }
177   gcc_checking_assert (!type_dependent_expression_p (expr));
178   return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
179 }
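
/* For example, [] { } deduces void and [] { return 42; } deduces int:
   the operand's type is decayed and stripped of top-level cv-qualifiers.
   An operand of unknown type (an overload set) or a braced-init-list is
   diagnosed above instead of being deduced.  */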
180 
181 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
182    closure type.  */
183 
184 tree
185 lambda_function (tree lambda)
186 {
187   tree type;
188   if (TREE_CODE (lambda) == LAMBDA_EXPR)
189     type = LAMBDA_EXPR_CLOSURE (lambda);
190   else
191     type = lambda;
192   gcc_assert (LAMBDA_TYPE_P (type));
193   /* Don't let debug_tree cause instantiation.  */
194   if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
195       && !COMPLETE_OR_OPEN_TYPE_P (type))
196     return NULL_TREE;
197   lambda = lookup_member (type, call_op_identifier,
198 			  /*protect=*/0, /*want_type=*/false,
199 			  tf_warning_or_error);
200   if (lambda)
201     lambda = STRIP_TEMPLATE (get_first_fn (lambda));
202   return lambda;
203 }
204 
205 /* Returns the type to use for the FIELD_DECL corresponding to the
206    capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
207    C++14 init capture, and BY_REFERENCE_P indicates whether we're
208    capturing by reference.  */
209 
210 tree
211 lambda_capture_field_type (tree expr, bool explicit_init_p,
212 			   bool by_reference_p)
213 {
214   tree type;
215   bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));
216 
217   if (!is_this && explicit_init_p)
218     {
219       tree auto_node = make_auto ();
220 
221       type = auto_node;
222       if (by_reference_p)
223 	/* Add the reference now, so deduction doesn't lose
224 	   outermost CV qualifiers of EXPR.  */
225 	type = build_reference_type (type);
226       if (uses_parameter_packs (expr))
227 	/* Stick with 'auto' even if the type could be deduced.  */;
228       else
229 	type = do_auto_deduction (type, expr, auto_node);
230     }
231   else if (!is_this && type_dependent_expression_p (expr))
232     {
233       type = cxx_make_type (DECLTYPE_TYPE);
234       DECLTYPE_TYPE_EXPR (type) = expr;
235       DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
236       DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
237       SET_TYPE_STRUCTURAL_EQUALITY (type);
238     }
239   else
240     {
241       type = non_reference (unlowered_expr_type (expr));
242 
243       if (!is_this
244 	  && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
245 	type = build_reference_type (type);
246     }
247 
248   return type;
249 }
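
/* For example, [x = 1.0] gives the field type double by auto deduction,
   [&r = x] deduces through auto& so the cv-qualifiers of x survive, and
   a type-dependent capture in a template gets a DECLTYPE_TYPE placeholder
   that is resolved at instantiation time.  */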
250 
251 /* Returns true iff DECL is a lambda capture proxy variable created by
252    build_capture_proxy.  */
253 
254 bool
255 is_capture_proxy (tree decl)
256 {
257   return (VAR_P (decl)
258 	  && DECL_HAS_VALUE_EXPR_P (decl)
259 	  && !DECL_ANON_UNION_VAR_P (decl)
260 	  && !DECL_DECOMPOSITION_P (decl)
261 	  && !DECL_FNAME_P (decl)
262 	  && !(DECL_ARTIFICIAL (decl)
263 	       && DECL_LANG_SPECIFIC (decl)
264 	       && DECL_OMP_PRIVATIZED_MEMBER (decl))
265 	  && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
266 }
267 
268 /* Returns true iff DECL is a capture proxy for a normal capture
269    (i.e. without explicit initializer).  */
270 
271 bool
272 is_normal_capture_proxy (tree decl)
273 {
274   if (!is_capture_proxy (decl))
275     /* It's not a capture proxy.  */
276     return false;
277 
278   return (DECL_LANG_SPECIFIC (decl)
279 	  && DECL_CAPTURED_VARIABLE (decl));
280 }
281 
282 /* Returns true iff DECL is a capture proxy for a normal capture
283    of a constant variable.  */
284 
285 bool
286 is_constant_capture_proxy (tree decl)
287 {
288   if (is_normal_capture_proxy (decl))
289     return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
290   return false;
291 }
292 
293 /* VAR is a capture proxy created by build_capture_proxy; add it to the
294    current function, which is the operator() for the appropriate lambda.  */
295 
296 void
297 insert_capture_proxy (tree var)
298 {
299   if (is_normal_capture_proxy (var))
300     {
301       tree cap = DECL_CAPTURED_VARIABLE (var);
302       if (CHECKING_P)
303 	{
304 	  gcc_assert (!is_normal_capture_proxy (cap));
305 	  tree old = retrieve_local_specialization (cap);
306 	  if (old)
307 	    gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
308 	}
309       register_local_specialization (var, cap);
310     }
311 
312   /* Put the capture proxy in the extra body block so that it won't clash
313      with a later local variable.  */
314   pushdecl_outermost_localscope (var);
315 
316   /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
317   var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
318   tree stmt_list = (*stmt_list_stack)[1];
319   gcc_assert (stmt_list);
320   append_to_statement_list_force (var, &stmt_list);
321 }
322 
323 /* We've just finished processing a lambda; if the containing scope is also
324    a lambda, insert any capture proxies that were created while processing
325    the nested lambda.  */
326 
327 void
328 insert_pending_capture_proxies (void)
329 {
330   tree lam;
331   vec<tree, va_gc> *proxies;
332   unsigned i;
333 
334   if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
335     return;
336 
337   lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
338   proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
339   for (i = 0; i < vec_safe_length (proxies); ++i)
340     {
341       tree var = (*proxies)[i];
342       insert_capture_proxy (var);
343     }
344   release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
345   LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
346 }
347 
348 /* Given REF, a COMPONENT_REF designating a field in the lambda closure,
349    return the type we want the proxy to have: the type of the field itself,
350    with added const-qualification if the lambda isn't mutable and the
351    capture is by value.  */
352 
353 tree
354 lambda_proxy_type (tree ref)
355 {
356   tree type;
357   if (ref == error_mark_node)
358     return error_mark_node;
359   if (REFERENCE_REF_P (ref))
360     ref = TREE_OPERAND (ref, 0);
361   gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
362   type = TREE_TYPE (ref);
363   if (!type || WILDCARD_TYPE_P (non_reference (type)))
364     {
365       type = cxx_make_type (DECLTYPE_TYPE);
366       DECLTYPE_TYPE_EXPR (type) = ref;
367       DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
368       SET_TYPE_STRUCTURAL_EQUALITY (type);
369     }
370   if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
371     type = make_pack_expansion (type);
372   return type;
373 }
374 
375 /* MEMBER is a capture field in a lambda closure class.  Now that we're
376    inside the operator(), build a placeholder var for future lookups and
377    debugging.  */
378 
379 static tree
380 build_capture_proxy (tree member, tree init)
381 {
382   tree var, object, fn, closure, name, lam, type;
383 
384   if (PACK_EXPANSION_P (member))
385     member = PACK_EXPANSION_PATTERN (member);
386 
387   closure = DECL_CONTEXT (member);
388   fn = lambda_function (closure);
389   lam = CLASSTYPE_LAMBDA_EXPR (closure);
390 
391   /* The proxy variable forwards to the capture field.  */
392   object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
393   object = finish_non_static_data_member (member, object, NULL_TREE);
394   if (REFERENCE_REF_P (object))
395     object = TREE_OPERAND (object, 0);
396 
397   /* Remove the __ inserted by add_capture.  */
398   name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);
399 
400   type = lambda_proxy_type (object);
401 
402   if (name == this_identifier && !INDIRECT_TYPE_P (type))
403     {
404       type = build_pointer_type (type);
405       type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
406       object = build_fold_addr_expr_with_type (object, type);
407     }
408 
409   if (DECL_VLA_CAPTURE_P (member))
410     {
411       /* Rebuild the VLA type from the pointer and maxindex.  */
412       tree field = next_initializable_field (TYPE_FIELDS (type));
413       tree ptr = build_simple_component_ref (object, field);
414       field = next_initializable_field (DECL_CHAIN (field));
415       tree max = build_simple_component_ref (object, field);
416       type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
417 				     build_index_type (max));
418       type = build_reference_type (type);
419       object = convert (type, ptr);
420     }
421 
422   complete_type (type);
423 
424   var = build_decl (input_location, VAR_DECL, name, type);
425   SET_DECL_VALUE_EXPR (var, object);
426   DECL_HAS_VALUE_EXPR_P (var) = 1;
427   DECL_ARTIFICIAL (var) = 1;
428   TREE_USED (var) = 1;
429   DECL_CONTEXT (var) = fn;
430 
431   if (DECL_NORMAL_CAPTURE_P (member))
432     {
433       if (DECL_VLA_CAPTURE_P (member))
434 	{
435 	  init = CONSTRUCTOR_ELT (init, 0)->value;
436 	  init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
437 	  init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
438 	}
439       else
440 	{
441 	  if (PACK_EXPANSION_P (init))
442 	    init = PACK_EXPANSION_PATTERN (init);
443 	}
444 
445       if (INDIRECT_REF_P (init))
446 	init = TREE_OPERAND (init, 0);
447       STRIP_NOPS (init);
448 
449       gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
450       while (is_normal_capture_proxy (init))
451 	init = DECL_CAPTURED_VARIABLE (init);
452       retrofit_lang_decl (var);
453       DECL_CAPTURED_VARIABLE (var) = init;
454     }
455 
456   if (name == this_identifier)
457     {
458       gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
459       LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
460     }
461 
462   if (fn == current_function_decl)
463     insert_capture_proxy (var);
464   else
465     vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);
466 
467   return var;
468 }
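
/* For example, for

     int x = 1;
     auto l = [x] (int i) { return x + i; };

   the closure type has a field "__x"; inside operator() the name 'x'
   resolves to a proxy VAR_DECL whose DECL_VALUE_EXPR is roughly
   (*this).__x, so uses of the capture in the body read the field.  */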
469 
470 static GTY(()) tree ptr_id;
471 static GTY(()) tree max_id;
472 
473 /* Return a struct containing a pointer and a length for lambda capture of
474    an array of runtime length.  */
475 
476 static tree
477 vla_capture_type (tree array_type)
478 {
479   tree type = xref_tag (record_type, make_anon_name ());
480   xref_basetypes (type, NULL_TREE);
481   type = begin_class_definition (type);
482   if (!ptr_id)
483     {
484       ptr_id = get_identifier ("ptr");
485       max_id = get_identifier ("max");
486     }
487   tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
488   tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
489   finish_member_declaration (field);
490   field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
491   finish_member_declaration (field);
492   return finish_struct (type, NULL_TREE);
493 }
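
/* The record built above is roughly

     struct { <elt> *ptr; size_t max; };

   build_capture_proxy later rebuilds a reference to the captured VLA
   type <elt>[max + 1] from these two fields.  */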
494 
495 /* From an ID and INITIALIZER, create a capture (by reference if
496    BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
497    and return it.  If ID is `this', BY_REFERENCE_P says whether
498    `*this' is captured by reference.  */
499 
500 tree
501 add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
502 	     bool explicit_init_p)
503 {
504   char *buf;
505   tree type, member, name;
506   bool vla = false;
507   bool variadic = false;
508   tree initializer = orig_init;
509 
510   if (PACK_EXPANSION_P (initializer))
511     {
512       initializer = PACK_EXPANSION_PATTERN (initializer);
513       variadic = true;
514     }
515 
516   if (TREE_CODE (initializer) == TREE_LIST
517       /* A pack expansion might end up with multiple elements.  */
518       && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
519     initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
520 						   tf_warning_or_error);
521   type = TREE_TYPE (initializer);
522   if (type == error_mark_node)
523     return error_mark_node;
524 
525   if (!dependent_type_p (type) && array_of_runtime_bound_p (type))
526     {
527       vla = true;
528       if (!by_reference_p)
529 	error ("array of runtime bound cannot be captured by copy, "
530 	       "only by reference");
531 
532       /* For a VLA, we capture the address of the first element and the
533 	 maximum index, and then reconstruct the VLA for the proxy.  */
534       tree elt = cp_build_array_ref (input_location, initializer,
535 				     integer_zero_node, tf_warning_or_error);
536       initializer = build_constructor_va (init_list_type_node, 2,
537 					  NULL_TREE, build_address (elt),
538 					  NULL_TREE, array_type_nelts (type));
539       type = vla_capture_type (type);
540     }
541   else if (!dependent_type_p (type)
542 	   && variably_modified_type_p (type, NULL_TREE))
543     {
544       sorry ("capture of variably-modified type %qT that is not an N3639 array "
545 	     "of runtime bound", type);
546       if (TREE_CODE (type) == ARRAY_TYPE
547 	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
548 	inform (input_location, "because the array element type %qT has "
549 		"variable size", TREE_TYPE (type));
550       return error_mark_node;
551     }
552   else
553     {
554       type = lambda_capture_field_type (initializer, explicit_init_p,
555 					by_reference_p);
556       if (type == error_mark_node)
557 	return error_mark_node;
558 
559       if (id == this_identifier && !by_reference_p)
560 	{
561 	  gcc_assert (INDIRECT_TYPE_P (type));
562 	  type = TREE_TYPE (type);
563 	  initializer = cp_build_fold_indirect_ref (initializer);
564 	}
565 
566       if (dependent_type_p (type))
567 	;
568       else if (id != this_identifier && by_reference_p)
569 	{
570 	  if (!lvalue_p (initializer))
571 	    {
572 	      error ("cannot capture %qE by reference", initializer);
573 	      return error_mark_node;
574 	    }
575 	}
576       else
577 	{
578 	  /* Capture by copy requires a complete type.  */
579 	  type = complete_type (type);
580 	  if (!COMPLETE_TYPE_P (type))
581 	    {
582 	      error ("capture by copy of incomplete type %qT", type);
583 	      cxx_incomplete_type_inform (type);
584 	      return error_mark_node;
585 	    }
586 	  else if (!verify_type_context (input_location,
587 					 TCTX_CAPTURE_BY_COPY, type))
588 	    return error_mark_node;
589 	}
590     }
591 
592   /* Add __ to the beginning of the field name so that user code
593      won't find the field with name lookup.  We can't just leave the name
594      unset because template instantiation uses the name to find
595      instantiated fields.  */
596   buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
597   buf[1] = buf[0] = '_';
598   memcpy (buf + 2, IDENTIFIER_POINTER (id),
599 	  IDENTIFIER_LENGTH (id) + 1);
600   name = get_identifier (buf);
601 
602   if (variadic)
603     {
604       type = make_pack_expansion (type);
605       if (explicit_init_p)
606 	/* With an explicit initializer 'type' is auto, which isn't really a
607 	   parameter pack in this context.  We will want as many fields as we
608 	   have elements in the expansion of the initializer, so use its packs
609 	   instead.  */
610 	{
611 	  PACK_EXPANSION_PARAMETER_PACKS (type)
612 	    = uses_parameter_packs (initializer);
613 	  PACK_EXPANSION_AUTO_P (type) = true;
614 	}
615     }
616 
617   /* Make member variable.  */
618   member = build_decl (input_location, FIELD_DECL, name, type);
619   DECL_VLA_CAPTURE_P (member) = vla;
620 
621   if (!explicit_init_p)
622     /* Normal captures are invisible to name lookup but uses are replaced
623        with references to the capture field; we implement this by only
624        really making them invisible in unevaluated context; see
625        qualify_lookup.  For now, let's make explicitly initialized captures
626        always visible.  */
627     DECL_NORMAL_CAPTURE_P (member) = true;
628 
629   if (id == this_identifier)
630     LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;
631 
632   /* Add it to the appropriate closure class if we've started it.  */
633   if (current_class_type
634       && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
635     {
636       if (COMPLETE_TYPE_P (current_class_type))
637 	internal_error ("trying to capture %qD in instantiation of "
638 			"generic lambda", id);
639       finish_member_declaration (member);
640     }
641 
642   tree listmem = member;
643   if (variadic)
644     {
645       listmem = make_pack_expansion (member);
646       initializer = orig_init;
647     }
648   LAMBDA_EXPR_CAPTURE_LIST (lambda)
649     = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));
650 
651   if (LAMBDA_EXPR_CLOSURE (lambda))
652     return build_capture_proxy (member, initializer);
653   /* For explicit captures we haven't started the function yet, so we wait
654      and build the proxy from cp_parser_lambda_body.  */
655   LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
656   return NULL_TREE;
657 }
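
/* For example, [n] stores the capture in a FIELD_DECL named "__n", so
   ordinary name lookup in user code never finds it, while instantiation
   can still look the field up by name.  A pack capture such as
   [...xs = pack] becomes a pack-expansion field, and a reference capture
   of an array of runtime bound stores { &array[0], max index } rather
   than the array itself.  */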
658 
659 /* Register all the capture members on the list CAPTURES, which is the
660    LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */
661 
662 void
663 register_capture_members (tree captures)
664 {
665   if (captures == NULL_TREE)
666     return;
667 
668   register_capture_members (TREE_CHAIN (captures));
669 
670   tree field = TREE_PURPOSE (captures);
671   if (PACK_EXPANSION_P (field))
672     field = PACK_EXPANSION_PATTERN (field);
673 
674   finish_member_declaration (field);
675 }
676 
677 /* Similar to add_capture, except this works on a stack of nested lambdas.
678    BY_REFERENCE_P in this case is derived from the default capture mode.
679    Returns the capture for the lambda at the bottom of the stack.  */
680 
681 tree
682 add_default_capture (tree lambda_stack, tree id, tree initializer)
683 {
684   bool this_capture_p = (id == this_identifier);
685   tree var = NULL_TREE;
686   tree saved_class_type = current_class_type;
687 
688   for (tree node = lambda_stack;
689        node;
690        node = TREE_CHAIN (node))
691     {
692       tree lambda = TREE_VALUE (node);
693 
694       current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
695       if (DECL_PACK_P (initializer))
696 	initializer = make_pack_expansion (initializer);
697       var = add_capture (lambda,
698                             id,
699                             initializer,
700                             /*by_reference_p=*/
701 			    (this_capture_p
702 			     || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
703 				 == CPLD_REFERENCE)),
704 			    /*explicit_init_p=*/false);
705       initializer = convert_from_reference (var);
706 
707       /* Warn about deprecated implicit capture of this via [=].  */
708       if (cxx_dialect >= cxx20
709 	  && this_capture_p
710 	  && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY)
711 	{
712 	  if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
713 			  "implicit capture of %qE via %<[=]%> is deprecated "
714 			  "in C++20", this_identifier))
715 	    inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
716 		    "%<*this%> capture");
717 	}
718     }
719 
720   current_class_type = saved_class_type;
721 
722   return var;
723 }
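
/* For example, in

     void f (int v)
     {
       [&] { [&] { return v; }; };
     }

   a use of 'v' in the inner lambda captures 'v' in the outer lambda
   first and then captures the resulting proxy in the inner lambda,
   since LAMBDA_STACK is walked from the outermost lambda inward.  */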
724 
725 /* Return the capture pertaining to a use of 'this' in LAMBDA, in the
726    form of an INDIRECT_REF, possibly adding it through default
727    capturing, if ADD_CAPTURE_P is nonzero.  If ADD_CAPTURE_P is negative,
728    try to capture but don't complain if we can't.  */
729 
730 tree
731 lambda_expr_this_capture (tree lambda, int add_capture_p)
732 {
733   tree result;
734 
735   tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);
736 
737   /* In unevaluated context this isn't an odr-use, so don't capture.  */
738   if (cp_unevaluated_operand)
739     add_capture_p = false;
740 
741   /* Try to default capture 'this' if we can.  */
742   if (!this_capture)
743     {
744       tree lambda_stack = NULL_TREE;
745       tree init = NULL_TREE;
746 
747       /* If we are in a lambda function, we can move out until we hit:
748            1. a non-lambda function or NSDMI,
749            2. a lambda function capturing 'this', or
750            3. a non-default capturing lambda function.  */
751       for (tree tlambda = lambda; ;)
752 	{
753 	  if (add_capture_p
754 	      && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
755 	    /* tlambda won't let us capture 'this'.  */
756 	    break;
757 
758 	  if (add_capture_p)
759 	    lambda_stack = tree_cons (NULL_TREE,
760 				      tlambda,
761 				      lambda_stack);
762 
763 	  tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
764 	  tree containing_function
765 	    = decl_function_context (TYPE_NAME (closure));
766 
767 	  tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
768 	  if (ex && TREE_CODE (ex) == FIELD_DECL)
769 	    {
770 	      /* Lambda in an NSDMI.  We don't have a function to look up
771 		 'this' in, but we can find (or rebuild) the fake one from
772 		 inject_this_parameter.  */
773 	      if (!containing_function && !COMPLETE_TYPE_P (closure))
774 		/* If we're parsing a lambda in a non-local class,
775 		   we can find the fake 'this' in scope_chain.  */
776 		init = scope_chain->x_current_class_ptr;
777 	      else
778 		/* Otherwise it's either gone or buried in
779 		   function_context_stack, so make another.  */
780 		init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
781 					TYPE_UNQUALIFIED);
782 	      gcc_checking_assert
783 		(init && (TREE_TYPE (TREE_TYPE (init))
784 			  == current_nonlambda_class_type ()));
785 	      break;
786 	    }
787 
788 	  if (containing_function == NULL_TREE)
789 	    /* We ran out of scopes; there's no 'this' to capture.  */
790 	    break;
791 
792 	  if (!LAMBDA_FUNCTION_P (containing_function))
793 	    {
794 	      /* We found a non-lambda function.  */
795 	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
796 		/* First parameter is 'this'.  */
797 		init = DECL_ARGUMENTS (containing_function);
798 	      break;
799 	    }
800 
801 	  tlambda
802             = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));
803 
804           if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
805 	    {
806 	      /* An outer lambda has already captured 'this'.  */
807 	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
808 	      break;
809 	    }
810 	}
811 
812       if (init)
813         {
814           if (add_capture_p)
815 	    this_capture = add_default_capture (lambda_stack,
816 					        /*id=*/this_identifier,
817 					        init);
818           else
819 	    this_capture = init;
820         }
821     }
822 
823   if (cp_unevaluated_operand)
824     result = this_capture;
825   else if (!this_capture)
826     {
827       if (add_capture_p == 1)
828 	{
829 	  error ("%<this%> was not captured for this lambda function");
830 	  result = error_mark_node;
831 	}
832       else
833 	result = NULL_TREE;
834     }
835   else
836     {
837       /* To make sure that current_class_ref is for the lambda.  */
838       gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
839 		  == LAMBDA_EXPR_CLOSURE (lambda));
840 
841       result = this_capture;
842 
843       /* If 'this' is captured, each use of 'this' is transformed into an
844 	 access to the corresponding unnamed data member of the closure
845 	 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
846 	 ensures that the transformed expression is an rvalue. ] */
847       result = rvalue (result);
848     }
849 
850   return result;
851 }
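
/* For example, in

     struct S
     {
       void g ();
       void f () { [&] { g (); }; }
     };

   the call to the non-static member g needs 'this'; the walk above moves
   out of the lambda, finds f's 'this' parameter, and adds a default
   capture of it.  The capture is returned as an rvalue, matching the
   cast described in [expr.prim.lambda].  */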
852 
853 /* Return the innermost LAMBDA_EXPR we're currently in, if any.  */
854 
855 tree
856 current_lambda_expr (void)
857 {
858   tree type = current_class_type;
859   while (type && !LAMBDA_TYPE_P (type))
860     type = decl_type_context (TYPE_NAME (type));
861   if (type)
862     return CLASSTYPE_LAMBDA_EXPR (type);
863   else
864     return NULL_TREE;
865 }
866 
867 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
868    object.  NULL otherwise.  */
869 
870 static tree
871 resolvable_dummy_lambda (tree object)
872 {
873   if (!is_dummy_object (object))
874     return NULL_TREE;
875 
876   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
877   gcc_assert (!TYPE_PTR_P (type));
878 
879   if (type != current_class_type
880       && current_class_type
881       && LAMBDA_TYPE_P (current_class_type)
882       && lambda_function (current_class_type)
883       && DERIVED_FROM_P (type, nonlambda_method_basetype()))
884     return CLASSTYPE_LAMBDA_EXPR (current_class_type);
885 
886   return NULL_TREE;
887 }
888 
889 /* We don't want to capture 'this' until we know we need it, i.e. after
890    overload resolution has chosen a non-static member function.  At that
891    point we call this function to turn a dummy object into a use of the
892    'this' capture.  */
893 
894 tree
895 maybe_resolve_dummy (tree object, bool add_capture_p)
896 {
897   if (tree lam = resolvable_dummy_lambda (object))
898     if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
899       if (cap != error_mark_node)
900 	object = build_fold_indirect_ref (cap);
901 
902   return object;
903 }
904 
905 /* When parsing a generic lambda containing an argument-dependent
906    member function call we defer overload resolution to instantiation
907    time.  But we have to know now whether to capture this or not.
908    Do that if FNS contains any non-static fns.
909    The std doesn't anticipate this case, but I expect this to be the
910    outcome of discussion.  */
911 
912 void
913 maybe_generic_this_capture (tree object, tree fns)
914 {
915   if (tree lam = resolvable_dummy_lambda (object))
916     if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
917       {
918 	/* We've not yet captured, so look at the function set of
919 	   interest.  */
920 	if (BASELINK_P (fns))
921 	  fns = BASELINK_FUNCTIONS (fns);
922 	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
923 	if (id_expr)
924 	  fns = TREE_OPERAND (fns, 0);
925 
926 	for (lkp_iterator iter (fns); iter; ++iter)
927 	  if (((!id_expr && TREE_CODE (*iter) != USING_DECL)
928 	       || TREE_CODE (*iter) == TEMPLATE_DECL)
929 	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
930 	    {
931 	      /* Found a non-static member.  Capture this.  */
932 	      lambda_expr_this_capture (lam, /*maybe*/-1);
933 	      break;
934 	    }
935       }
936 }
937 
938 /* Returns the innermost non-lambda function.  */
939 
940 tree
941 current_nonlambda_function (void)
942 {
943   tree fn = current_function_decl;
944   while (fn && LAMBDA_FUNCTION_P (fn))
945     fn = decl_function_context (fn);
946   return fn;
947 }
948 
949 /* Returns the method basetype of the innermost non-lambda function, including
950    a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */
951 
952 tree
953 nonlambda_method_basetype (void)
954 {
955   if (!current_class_ref)
956     return NULL_TREE;
957 
958   tree type = current_class_type;
959   if (!type || !LAMBDA_TYPE_P (type))
960     return type;
961 
962   while (true)
963     {
964       tree lam = CLASSTYPE_LAMBDA_EXPR (type);
965       tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
966       if (ex && TREE_CODE (ex) == FIELD_DECL)
967 	/* Lambda in an NSDMI.  */
968 	return DECL_CONTEXT (ex);
969 
970       tree fn = TYPE_CONTEXT (type);
971       if (!fn || TREE_CODE (fn) != FUNCTION_DECL
972 	  || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
973 	/* No enclosing non-lambda method.  */
974 	return NULL_TREE;
975       if (!LAMBDA_FUNCTION_P (fn))
976 	/* Found an enclosing non-lambda method.  */
977 	return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
978       type = DECL_CONTEXT (fn);
979     }
980 }
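
/* For example, for a lambda appearing in an NSDMI,

     struct A { int i = [this] { return 42; } (); };

   there is no enclosing function, but the basetype is still A, taken
   from the FIELD_DECL recorded as the lambda's extra scope.  */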
981 
982 /* Like current_scope, but looking through lambdas.  */
983 
984 tree
985 current_nonlambda_scope (void)
986 {
987   tree scope = current_scope ();
988   for (;;)
989     {
990       if (TREE_CODE (scope) == FUNCTION_DECL
991 	  && LAMBDA_FUNCTION_P (scope))
992 	{
993 	  scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
994 	  continue;
995 	}
996       else if (LAMBDA_TYPE_P (scope))
997 	{
998 	  scope = CP_TYPE_CONTEXT (scope);
999 	  continue;
1000 	}
1001       break;
1002     }
1003   return scope;
1004 }
1005 
1006 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
1007    indicated FN and NARGS, but do not initialize the return type or any of the
1008    argument slots.  */
1009 
1010 static tree
1011 prepare_op_call (tree fn, int nargs)
1012 {
1013   tree t;
1014 
1015   t = build_vl_exp (CALL_EXPR, nargs + 3);
1016   CALL_EXPR_FN (t) = fn;
1017   CALL_EXPR_STATIC_CHAIN (t) = NULL;
1018 
1019   return t;
1020 }
1021 
1022 /* Return true iff CALLOP is the op() for a generic lambda.  */
1023 
1024 bool
1025 generic_lambda_fn_p (tree callop)
1026 {
1027   return (LAMBDA_FUNCTION_P (callop)
1028 	  && DECL_TEMPLATE_INFO (callop)
1029 	  && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
1030 }
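
/* For example, the operator() of [] (auto x) { return x; } is a member
   function template, so this returns true; for [] (int x) { return x; }
   it is an ordinary member function and this returns false.  */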
1031 
1032 /* If the closure TYPE has a static op(), also add a conversion to function
1033    pointer.  */
1034 
1035 void
1036 maybe_add_lambda_conv_op (tree type)
1037 {
1038   bool nested = (cfun != NULL);
1039   bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
1040   tree callop = lambda_function (type);
1041   tree lam = CLASSTYPE_LAMBDA_EXPR (type);
1042 
1043   if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
1044       || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
1045     return;
1046 
1047   if (processing_template_decl)
1048     return;
1049 
1050   bool const generic_lambda_p = generic_lambda_fn_p (callop);
1051 
1052   if (!generic_lambda_p && undeduced_auto_decl (callop))
1053     {
1054       /* If the op() wasn't deduced due to errors, give up.  */
1055       gcc_assert (errorcount || sorrycount);
1056       return;
1057     }
1058 
1059   /* Non-generic non-capturing lambdas only have a conversion function to
1060      pointer to function when the trailing requires-clause's constraints are
1061      satisfied.  */
1062   if (!generic_lambda_p && !constraints_satisfied_p (callop))
1063     return;
1064 
1065   /* Non-template conversion operators are defined directly with build_call_a
1066      and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
1067      deferred and the CALL is built in-place.  In the case of a deduced return
1068      call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
1069      the return type is also built in-place.  The arguments of DECLTYPE_CALL in
1070      the return expression may differ in flags from those in the body CALL.  In
1071      particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
1072      the body CALL, but not in DECLTYPE_CALL.  */
1073 
1074   vec<tree, va_gc> *direct_argvec = 0;
1075   tree decltype_call = 0, call = 0;
1076   tree optype = TREE_TYPE (callop);
1077   tree fn_result = TREE_TYPE (optype);
1078 
1079   tree thisarg = build_int_cst (TREE_TYPE (DECL_ARGUMENTS (callop)), 0);
1080   if (generic_lambda_p)
1081     {
1082       ++processing_template_decl;
1083 
1084       /* Prepare the dependent member call for the static member function
1085 	 '_FUN' and, potentially, prepare another call to be used in a decltype
1086 	 return expression for a deduced return call op to allow for simple
1087 	 implementation of the conversion operator.  */
1088 
1089       tree instance = cp_build_fold_indirect_ref (thisarg);
1090       tree objfn = lookup_template_function (DECL_NAME (callop),
1091 					     DECL_TI_ARGS (callop));
1092       objfn = build_min (COMPONENT_REF, NULL_TREE,
1093 			 instance, objfn, NULL_TREE);
1094       int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;
1095 
1096       call = prepare_op_call (objfn, nargs);
1097       if (type_uses_auto (fn_result))
1098 	decltype_call = prepare_op_call (objfn, nargs);
1099     }
1100   else
1101     {
1102       direct_argvec = make_tree_vector ();
1103       direct_argvec->quick_push (thisarg);
1104     }
1105 
1106   /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
1107      declare the static member function "_FUN" below.  For each arg append to
1108      DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
1109      call args (for the template case).  If a parameter pack is found, expand
1110      it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */
1111 
1112   tree fn_args = NULL_TREE;
1113   {
1114     int ix = 0;
1115     tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
1116     tree tgt = NULL;
1117 
1118     while (src)
1119       {
1120 	tree new_node = copy_node (src);
1121 	/* We set DECL_CONTEXT of NEW_NODE to the statfn below.
1122 	   Notice this is creating a recursive type!  */
1123 
1124 	/* Clear TREE_ADDRESSABLE on thunk arguments.  */
1125 	TREE_ADDRESSABLE (new_node) = 0;
1126 
1127 	if (!fn_args)
1128 	  fn_args = tgt = new_node;
1129 	else
1130 	  {
1131 	    TREE_CHAIN (tgt) = new_node;
1132 	    tgt = new_node;
1133 	  }
1134 
1135 	mark_exp_read (tgt);
1136 
1137 	if (generic_lambda_p)
1138 	  {
1139 	    tree a = tgt;
1140 	    if (DECL_PACK_P (tgt))
1141 	      {
1142 		a = make_pack_expansion (a);
1143 		PACK_EXPANSION_LOCAL_P (a) = true;
1144 	      }
1145 	    CALL_EXPR_ARG (call, ix) = a;
1146 
1147 	    if (decltype_call)
1148 	      {
1149 		/* Avoid capturing variables in this context.  */
1150 		++cp_unevaluated_operand;
1151 		CALL_EXPR_ARG (decltype_call, ix) = forward_parm (tgt);
1152 		--cp_unevaluated_operand;
1153 	      }
1154 
1155 	    ++ix;
1156 	  }
1157 	else
1158 	  vec_safe_push (direct_argvec, tgt);
1159 
1160 	src = TREE_CHAIN (src);
1161       }
1162   }
1163 
1164   if (generic_lambda_p)
1165     {
1166       if (decltype_call)
1167 	{
1168 	  fn_result = finish_decltype_type
1169 	    (decltype_call, /*id_expression_or_member_access_p=*/false,
1170 	     tf_warning_or_error);
1171 	}
1172     }
1173   else
1174     call = build_call_a (callop,
1175 			 direct_argvec->length (),
1176 			 direct_argvec->address ());
1177 
1178   CALL_FROM_THUNK_P (call) = 1;
1179   SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);
1180 
1181   tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
1182   stattype = (cp_build_type_attribute_variant
1183 	      (stattype, TYPE_ATTRIBUTES (optype)));
1184   if (flag_noexcept_type
1185       && TYPE_NOTHROW_P (TREE_TYPE (callop)))
1186     stattype = build_exception_variant (stattype, noexcept_true_spec);
1187 
1188   if (generic_lambda_p)
1189     --processing_template_decl;
1190 
1191   /* First build up the conversion op.  */
1192 
1193   tree rettype = build_pointer_type (stattype);
1194   tree name = make_conv_op_name (rettype);
1195   tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
1196   tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
1197   /* DR 1722: The conversion function should be noexcept.  */
1198   fntype = build_exception_variant (fntype, noexcept_true_spec);
1199   tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
1200   SET_DECL_LANGUAGE (convfn, lang_cplusplus);
1201   tree fn = convfn;
1202   DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1203   SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
1204   grokclassfn (type, fn, NO_SPECIAL);
1205   set_linkage_according_to_type (type, fn);
1206   rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1207   DECL_IN_AGGR_P (fn) = 1;
1208   DECL_ARTIFICIAL (fn) = 1;
1209   DECL_NOT_REALLY_EXTERN (fn) = 1;
1210   DECL_DECLARED_INLINE_P (fn) = 1;
1211   DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1212   if (DECL_IMMEDIATE_FUNCTION_P (callop))
1213     SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1214   DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);
1215 
1216   if (nested_def)
1217     DECL_INTERFACE_KNOWN (fn) = 1;
1218 
1219   if (generic_lambda_p)
1220     fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1221 
1222   add_method (type, fn, false);
1223 
1224   /* Generic thunk code fails for varargs; we'll complain in mark_used if
1225      the conversion op is used.  */
1226   if (varargs_function_p (callop))
1227     {
1228       DECL_DELETED_FN (fn) = 1;
1229       return;
1230     }
1231 
1232   /* Now build up the thunk to be returned.  */
1233 
1234   tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
1235   SET_DECL_LANGUAGE (statfn, lang_cplusplus);
1236   fn = statfn;
1237   DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
1238   grokclassfn (type, fn, NO_SPECIAL);
1239   set_linkage_according_to_type (type, fn);
1240   rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
1241   DECL_IN_AGGR_P (fn) = 1;
1242   DECL_ARTIFICIAL (fn) = 1;
1243   DECL_NOT_REALLY_EXTERN (fn) = 1;
1244   DECL_DECLARED_INLINE_P (fn) = 1;
1245   DECL_STATIC_FUNCTION_P (fn) = 1;
1246   DECL_DECLARED_CONSTEXPR_P (fn) = DECL_DECLARED_CONSTEXPR_P (callop);
1247   if (DECL_IMMEDIATE_FUNCTION_P (callop))
1248     SET_DECL_IMMEDIATE_FUNCTION_P (fn);
1249   DECL_ARGUMENTS (fn) = fn_args;
1250   for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
1251     {
1252       /* Avoid duplicate -Wshadow warnings.  */
1253       DECL_NAME (arg) = NULL_TREE;
1254       DECL_CONTEXT (arg) = fn;
1255     }
1256   if (nested_def)
1257     DECL_INTERFACE_KNOWN (fn) = 1;
1258 
1259   if (generic_lambda_p)
1260     fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));
1261 
1262   if (flag_sanitize & SANITIZE_NULL)
1263     /* Don't UBsan this function; we're deliberately calling op() with a null
1264        object argument.  */
1265     add_no_sanitize_value (fn, SANITIZE_UNDEFINED);
1266 
1267   add_method (type, fn, false);
1268 
1269   if (nested)
1270     push_function_context ();
1271   else
1272     /* Still increment function_depth so that we don't GC in the
1273        middle of an expression.  */
1274     ++function_depth;
1275 
1276   /* Generate the body of the thunk.  */
1277 
1278   start_preparsed_function (statfn, NULL_TREE,
1279 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1280   tree body = begin_function_body ();
1281   tree compound_stmt = begin_compound_stmt (0);
1282   if (!generic_lambda_p)
1283     {
1284       set_flags_from_callee (call);
1285       if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
1286 	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
1287     }
1288   call = convert_from_reference (call);
1289   finish_return_stmt (call);
1290 
1291   finish_compound_stmt (compound_stmt);
1292   finish_function_body (body);
1293 
1294   fn = finish_function (/*inline_p=*/true);
1295   if (!generic_lambda_p)
1296     expand_or_defer_fn (fn);
1297 
1298   /* Generate the body of the conversion op.  */
1299 
1300   start_preparsed_function (convfn, NULL_TREE,
1301 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1302   body = begin_function_body ();
1303   compound_stmt = begin_compound_stmt (0);
1304 
1305   /* decl_needed_p needs to see that it's used.  */
1306   TREE_USED (statfn) = 1;
1307   finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));
1308 
1309   finish_compound_stmt (compound_stmt);
1310   finish_function_body (body);
1311 
1312   fn = finish_function (/*inline_p=*/true);
1313   if (!generic_lambda_p)
1314     expand_or_defer_fn (fn);
1315 
1316   if (nested)
1317     pop_function_context ();
1318   else
1319     --function_depth;
1320 }
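
/* For example, the functions built above are what allow

     int (*fp) (int) = [] (int i) { return i + 1; };

   The conversion operator returns the static member "_FUN", whose body
   simply forwards to operator() with a null object argument (hence the
   SANITIZE_NULL suppression above).  */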
1321 
1322 /* True if FN is the static function "_FUN" that gets returned from the lambda
1323    conversion operator.  */
1324 
1325 bool
1326 lambda_static_thunk_p (tree fn)
1327 {
1328   return (fn && TREE_CODE (fn) == FUNCTION_DECL
1329 	  && DECL_ARTIFICIAL (fn)
1330 	  && DECL_STATIC_FUNCTION_P (fn)
1331 	  && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1332 }
1333 
1334 bool
1335 call_from_lambda_thunk_p (tree call)
1336 {
1337   return (CALL_FROM_THUNK_P (call)
1338 	  && lambda_static_thunk_p (current_function_decl));
1339 }
1340 
1341 /* Returns true iff VAL is a lambda-related declaration which should
1342    be ignored by unqualified lookup.  */
1343 
1344 bool
1345 is_lambda_ignored_entity (tree val)
1346 {
1347   /* Look past normal, non-VLA capture proxies.  */
1348   if (is_normal_capture_proxy (val)
1349       && !variably_modified_type_p (TREE_TYPE (val), NULL_TREE))
1350     return true;
1351 
1352   /* Always ignore lambda fields, their names are only for debugging.  */
1353   if (TREE_CODE (val) == FIELD_DECL
1354       && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1355     return true;
1356 
1357   /* None of the lookups that use qualify_lookup want the op() from the
1358      lambda; they want the one from the enclosing class.  */
1359   val = OVL_FIRST (val);
1360   if (LAMBDA_FUNCTION_P (val))
1361     return true;
1362 
1363   return false;
1364 }
1365 
1366 /* Lambdas that appear in variable initializer or default argument scope
1367    get that in their mangling, so we need to record it.  We might as well
1368    use the count for function and namespace scopes as well.  */
1369 static GTY(()) tree lambda_scope;
1370 static GTY(()) int lambda_count;
1371 struct GTY(()) tree_int
1372 {
1373   tree t;
1374   int i;
1375 };
1376 static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
1377 
1378 void
1379 start_lambda_scope (tree decl)
1380 {
1381   tree_int ti;
1382   gcc_assert (decl);
1383   /* Once we're inside a function, we ignore variable scope and just push
1384      the function again so that popping works properly.  */
1385   if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
1386     decl = current_function_decl;
1387   ti.t = lambda_scope;
1388   ti.i = lambda_count;
1389   vec_safe_push (lambda_scope_stack, ti);
1390   if (lambda_scope != decl)
1391     {
1392       /* Don't reset the count if we're still in the same function.  */
1393       lambda_scope = decl;
1394       lambda_count = 0;
1395     }
1396 }
1397 
1398 void
1399 record_lambda_scope (tree lambda)
1400 {
1401   LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
1402   LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
1403   if (lambda_scope)
1404     {
1405       tree closure = LAMBDA_EXPR_CLOSURE (lambda);
1406       gcc_checking_assert (closure);
1407       maybe_attach_decl (lambda_scope, TYPE_NAME (closure));
1408     }
1409 }
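
/* For example, the two lambdas in

     int a = [] { return 1; } () + [] { return 2; } ();

   (at namespace scope) record 'a' as their extra scope and receive
   discriminators 0 and 1, keeping their mangled closure types distinct.  */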
1410 
1411 /* This lambda is an instantiation of a lambda in a template default argument
1412    that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either.  But we do
1413    need to use and increment the global count to avoid collisions.  */
1414 
1415 void
1416 record_null_lambda_scope (tree lambda)
1417 {
1418   if (vec_safe_is_empty (lambda_scope_stack))
1419     record_lambda_scope (lambda);
1420   else
1421     {
1422       tree_int *p = lambda_scope_stack->begin();
1423       LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
1424       LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
1425     }
1426   gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
1427 }
1428 
1429 void
1430 finish_lambda_scope (void)
1431 {
1432   tree_int *p = &lambda_scope_stack->last ();
1433   if (lambda_scope != p->t)
1434     {
1435       lambda_scope = p->t;
1436       lambda_count = p->i;
1437     }
1438   lambda_scope_stack->pop ();
1439 }
1440 
1441 tree
1442 start_lambda_function (tree fco, tree lambda_expr)
1443 {
1444   /* Let the front end know that we are going to be defining this
1445      function.  */
1446   start_preparsed_function (fco,
1447 			    NULL_TREE,
1448 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1449 
1450   tree body = begin_function_body ();
1451 
1452   /* Push the proxies for any explicit captures.  */
1453   for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1454        cap = TREE_CHAIN (cap))
1455     build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1456 
1457   return body;
1458 }
1459 
1460 /* Subroutine of prune_lambda_captures: CAP is a node in
1461    LAMBDA_EXPR_CAPTURE_LIST.  Return the variable it captures for which we
1462    might optimize away the capture, or NULL_TREE if there is no such
1463    variable.  */
1464 
1465 static tree
1466 var_to_maybe_prune (tree cap)
1467 {
1468   if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1469     /* Don't prune explicit captures.  */
1470     return NULL_TREE;
1471 
1472   tree mem = TREE_PURPOSE (cap);
1473   if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1474     /* Packs and init-captures aren't captures of constant vars.  */
1475     return NULL_TREE;
1476 
1477   tree init = TREE_VALUE (cap);
1478   if (is_normal_capture_proxy (init))
1479     init = DECL_CAPTURED_VARIABLE (init);
1480   if (decl_constant_var_p (init))
1481     return init;
1482 
1483   return NULL_TREE;
1484 }
1485 
1486 /* walk_tree helper for prune_lambda_captures: Remember which capture proxies
1487    for constant variables are actually used in the lambda body.
1488 
1489    There will always be a DECL_EXPR for the capture proxy; remember it when we
1490    see it, but replace it with any other use.  */
1491 
1492 static tree
1493 mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
1494 {
1495   hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;
1496 
1497   tree var = NULL_TREE;
1498   if (TREE_CODE (*t) == DECL_EXPR)
1499     {
1500       tree decl = DECL_EXPR_DECL (*t);
1501       if (is_constant_capture_proxy (decl))
1502 	{
1503 	  var = DECL_CAPTURED_VARIABLE (decl);
1504 	  *walk_subtrees = 0;
1505 	}
1506     }
1507   else if (is_constant_capture_proxy (*t))
1508     var = DECL_CAPTURED_VARIABLE (*t);
1509 
1510   if (var)
1511     {
1512       tree *&slot = const_vars.get_or_insert (var);
1513       if (!slot || VAR_P (*t))
1514 	slot = t;
1515     }
1516 
1517   return NULL_TREE;
1518 }
1519 
1520 /* We're at the end of processing a lambda; go back and remove any captures of
1521    constant variables for which we've folded away all uses.  */
1522 
1523 static void
1524 prune_lambda_captures (tree body)
1525 {
1526   tree lam = current_lambda_expr ();
1527   if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
1528     /* No uses were optimized away.  */
1529     return;
1530   if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
1531     /* No default captures, and we don't prune explicit captures.  */
1532     return;
1533 
1534   hash_map<tree,tree*> const_vars;
1535 
1536   cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);
1537 
1538   tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
1539   for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
1540     {
1541       tree cap = *capp;
1542       if (tree var = var_to_maybe_prune (cap))
1543 	{
1544 	  tree **use = const_vars.get (var);
1545 	  if (use && TREE_CODE (**use) == DECL_EXPR)
1546 	    {
1547 	      /* All uses of this capture were folded away, leaving only the
1548 		 proxy declaration.  */
1549 
1550 	      /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
1551 	      *capp = TREE_CHAIN (cap);
1552 
1553 	      /* And out of TYPE_FIELDS.  */
1554 	      tree field = TREE_PURPOSE (cap);
1555 	      while (*fieldp != field)
1556 		fieldp = &DECL_CHAIN (*fieldp);
1557 	      *fieldp = DECL_CHAIN (*fieldp);
1558 
1559 	      /* And remove the capture proxy declaration.  */
1560 	      **use = void_node;
1561 	      continue;
1562 	    }
1563 	}
1564 
1565       capp = &TREE_CHAIN (cap);
1566     }
1567 }
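
/* For example, in

     const int i = 3;
     auto l = [=] { return i; };

   the use of 'i' folds to the constant 3, leaving only the proxy's
   DECL_EXPR in the body, so the capture of 'i' and its "__i" field are
   spliced out above and the closure type stays empty.  */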
1568 
1569 void
1570 finish_lambda_function (tree body)
1571 {
1572   finish_function_body (body);
1573 
1574   prune_lambda_captures (body);
1575 
1576   /* Finish the function and generate code for it if necessary.  */
1577   tree fn = finish_function (/*inline_p=*/true);
1578 
1579   /* Only expand if the call op is not a template.  */
1580   if (!DECL_TEMPLATE_INFO (fn))
1581     expand_or_defer_fn (fn);
1582 }
1583 
1584 #include "gt-cp-lambda.h"
1585