xref: /dragonfly/contrib/gcc-8.0/gcc/cp/lambda.c (revision 6e316fcd)
1 /* Perform the semantic phase of lambda parsing, i.e., the process of
2    building tree structure, checking semantic consistency, and
3    building RTL.  These routines are used both during actual parsing
4    and during the instantiation of template functions.
5 
6    Copyright (C) 1998-2018 Free Software Foundation, Inc.
7 
8    This file is part of GCC.
9 
10    GCC is free software; you can redistribute it and/or modify it
11    under the terms of the GNU General Public License as published by
12    the Free Software Foundation; either version 3, or (at your option)
13    any later version.
14 
15    GCC is distributed in the hope that it will be useful, but
16    WITHOUT ANY WARRANTY; without even the implied warranty of
17    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18    General Public License for more details.
19 
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3.  If not see
22 <http://www.gnu.org/licenses/>.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "cp-tree.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "tree-iterator.h"
31 #include "toplev.h"
32 #include "gimplify.h"
33 
34 /* Constructor for a lambda expression.  */
35 
36 tree
37 build_lambda_expr (void)
38 {
39   tree lambda = make_node (LAMBDA_EXPR);
40   LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
41   LAMBDA_EXPR_CAPTURE_LIST         (lambda) = NULL_TREE;
42   LAMBDA_EXPR_THIS_CAPTURE         (lambda) = NULL_TREE;
43   LAMBDA_EXPR_PENDING_PROXIES      (lambda) = NULL;
44   LAMBDA_EXPR_MUTABLE_P            (lambda) = false;
45   return lambda;
46 }
47 
/* Create the closure object for a LAMBDA_EXPR: an aggregate-initialized
   instance of the closure type with one constructor element per capture.
   Returns LAMBDA_EXPR unchanged while processing a template, and
   error_mark_node if any capture field is erroneous.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast  */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  /* Walk the capture list; each node pairs a capture FIELD_DECL
     (TREE_PURPOSE) with its initializer (TREE_VALUE).  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
	{
	  expr = error_mark_node;
	  goto out;
	}

      /* A TREE_LIST initializer comes from a parenthesized init-capture
	 with multiple expressions; fold it into one compound expr.  */
      if (TREE_CODE (val) == TREE_LIST)
	val = build_x_compound_expr_from_list (val, ELK_INIT,
					       tf_warning_or_error);

      if (DECL_P (val))
	mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
	 do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
	       && !DECL_VLA_CAPTURE_P (field)
	       && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
	{
	  /* "the entities that are captured by copy are used to
	     direct-initialize each corresponding non-static data
	     member of the resulting closure object."

	     There's normally no way to express direct-initialization
	     from an element of a CONSTRUCTOR, so we build up a special
	     TARGET_EXPR to bypass the usual copy-initialization.  */
	  val = force_rvalue (val, tf_warning_or_error);
	  if (TREE_CODE (val) == TARGET_EXPR)
	    TARGET_EXPR_DIRECT_INIT_P (val) = true;
	}

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  /* Restore the location saved above on every exit path.  */
  input_location = saved_loc;
  return expr;
}
124 
/* Return an initialized RECORD_TYPE for LAMBDA.
   LAMBDA must have its explicit captures already.  Returns
   error_mark_node if the tag reference fails.  */

tree
begin_lambda_type (tree lambda)
{
  tree type;

  {
    /* Unique name.  This is just like an unnamed class, but we cannot use
       make_anon_name because of certain checks against TYPE_UNNAMED_P.  */
    tree name;
    name = make_lambda_name ();

    /* Create the new RECORD_TYPE for this lambda.  */
    type = xref_tag (/*tag_code=*/record_type,
                     name,
                     /*scope=*/ts_lambda,
                     /*template_header_p=*/false);
    if (type == error_mark_node)
      return error_mark_node;
  }

  /* Designate it as a struct so that we can use aggregate initialization.  */
  CLASSTYPE_DECLARED_CLASS (type) = false;

  /* Cross-reference the expression and the type.  */
  LAMBDA_EXPR_CLOSURE (lambda) = type;
  CLASSTYPE_LAMBDA_EXPR (type) = lambda;

  /* In C++17, assume the closure is literal; we'll clear the flag later if
     necessary.  */
  if (cxx_dialect >= cxx17)
    CLASSTYPE_LITERAL_P (type) = true;

  /* Clear base types.  */
  xref_basetypes (type, /*bases=*/NULL_TREE);

  /* Start the class.  */
  type = begin_class_definition (type);

  return type;
}
168 
/* Returns the type to use for the return type of the operator() of a
   closure class, deduced from the returned expression EXPR.  A null
   EXPR means a return with no operand, i.e. void.  */

tree
lambda_return_type (tree expr)
{
  if (expr == NULL_TREE)
    return void_type_node;
  /* An unresolved overload set or a braced-init-list has no type to
     deduce from; diagnose and bail out.  */
  if (type_unknown_p (expr)
      || BRACE_ENCLOSED_INITIALIZER_P (expr))
    {
      cxx_incomplete_type_error (expr, TREE_TYPE (expr));
      return error_mark_node;
    }
  gcc_checking_assert (!type_dependent_expression_p (expr));
  /* Apply the lvalue-to-rvalue/array/function decay and drop cv-quals,
     mirroring template argument deduction for auto.  */
  return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
}
186 
187 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
188    closure type.  */
189 
190 tree
191 lambda_function (tree lambda)
192 {
193   tree type;
194   if (TREE_CODE (lambda) == LAMBDA_EXPR)
195     type = LAMBDA_EXPR_CLOSURE (lambda);
196   else
197     type = lambda;
198   gcc_assert (LAMBDA_TYPE_P (type));
199   /* Don't let debug_tree cause instantiation.  */
200   if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
201       && !COMPLETE_OR_OPEN_TYPE_P (type))
202     return NULL_TREE;
203   lambda = lookup_member (type, call_op_identifier,
204 			  /*protect=*/0, /*want_type=*/false,
205 			  tf_warning_or_error);
206   if (lambda)
207     lambda = STRIP_TEMPLATE (get_first_fn (lambda));
208   return lambda;
209 }
210 
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
			   bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && type_dependent_expression_p (expr))
    {
      /* In a template, defer the computation: record the expression and
	 the capture flags in a DECLTYPE_TYPE to be resolved at
	 instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      /* Init capture: deduce the field type from the initializer the
	 same way 'auto x = expr' (or 'auto &x = expr') would.  */
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
	/* Add the reference now, so deduction doesn't lose
	   outermost CV qualifiers of EXPR.  */
	type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else
    {
      /* Plain capture (or 'this'): start from the non-reference type of
	 the expression.  */
      type = non_reference (unlowered_expr_type (expr));

      if (!is_this
	  && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
	type = build_reference_type (type);
    }

  return type;
}
254 
255 /* Returns true iff DECL is a lambda capture proxy variable created by
256    build_capture_proxy.  */
257 
258 bool
259 is_capture_proxy (tree decl)
260 {
261   return (VAR_P (decl)
262 	  && DECL_HAS_VALUE_EXPR_P (decl)
263 	  && !DECL_ANON_UNION_VAR_P (decl)
264 	  && !DECL_DECOMPOSITION_P (decl)
265 	  && !(DECL_ARTIFICIAL (decl)
266 	       && DECL_LANG_SPECIFIC (decl)
267 	       && DECL_OMP_PRIVATIZED_MEMBER (decl))
268 	  && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
269 }
270 
/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  /* For a reference capture the value expr takes the field's address;
     strip that to reach the COMPONENT_REF.  */
  if (TREE_CODE (val) == ADDR_EXPR)
    val = TREE_OPERAND (val, 0);
  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  /* Operand 1 of the COMPONENT_REF is the capture FIELD_DECL; its flag
     records whether this was a normal (non-init) capture.  */
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}
296 
297 /* Returns true iff DECL is a capture proxy for a normal capture
298    of a constant variable.  */
299 
300 bool
301 is_constant_capture_proxy (tree decl)
302 {
303   if (is_normal_capture_proxy (decl))
304     return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
305   return false;
306 }
307 
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      /* Record the proxy as the local specialization of the captured
	 variable so uses of the variable inside the lambda body are
	 redirected to the proxy during instantiation.  */
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
	{
	  /* The captured variable must be the ultimate source, never
	     itself a proxy; and any previous specialization must belong
	     to a different (enclosing) function.  */
	  gcc_assert (!is_normal_capture_proxy (cap));
	  tree old = retrieve_local_specialization (cap);
	  if (old)
	    gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
	}
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  /* Index 1 is the statement list of the outermost (extra) body block.  */
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
337 
/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  /* Only meaningful when we're back inside an enclosing lambda's op().  */
  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  /* Re-check the length each iteration rather than caching it, in case
     inserting a proxy grows the vector.  */
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}
362 
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  Returns error_mark_node if REF is erroneous.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  /* Look through the implicit dereference of a reference capture.  */
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* Dependent type: wrap REF in a DECLTYPE_TYPE to be resolved at
	 instantiation time.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  /* A capture of a parameter pack yields a pack of proxies.  */
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
389 
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  INIT is the capture's initializer, used to record the
   ultimate captured variable for a normal capture.  */

static tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  Build
     (*this).member using op()'s 'this' parameter.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !POINTER_TYPE_P (type))
    {
      /* *this capture: the proxy for 'this' is a const pointer to the
	 captured object.  */
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
				     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  complete_type (type);

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      /* Dig the originally captured variable/parameter out of INIT and
	 remember it on the proxy.  */
      if (DECL_VLA_CAPTURE_P (member))
	{
	  init = CONSTRUCTOR_ELT (init, 0)->value;
	  init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
	  init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
	}
      else
	{
	  if (PACK_EXPANSION_P (init))
	    init = PACK_EXPANSION_PATTERN (init);
	}

      if (INDIRECT_REF_P (init))
	init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);

      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      /* For a capture of an enclosing lambda's proxy, chase down to the
	 underlying variable.  */
      while (is_normal_capture_proxy (init))
	init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  /* If we're not yet inside the op() (explicit capture parsed in the
     introducer of an enclosing lambda), queue the proxy for later.  */
  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
485 
/* Cached identifiers for the two fields of the VLA capture struct.  */
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;

/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  */

static tree
vla_capture_type (tree array_type)
{
  /* Build a fresh anonymous struct { T *ptr; size_t max; }.  */
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
510 
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  Returns the capture proxy when
   the closure body has been started, NULL_TREE for explicit captures
   added during the introducer, or error_mark_node on error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
	     bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  /* Work on the pattern of a pack-expansion capture; remember to
     re-expand the member and initializer below.  */
  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
						   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
	error ("array of runtime bound cannot be captured by copy, "
	       "only by reference");

      /* For a VLA, we capture the address of the first element and the
	 maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
				     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
					  NULL_TREE, build_address (elt),
					  NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
	   && variably_modified_type_p (type, NULL_TREE))
    {
      /* Other variably-modified types (e.g. pointer-to-VLA) aren't
	 supported; diagnose and bail out.  */
      sorry ("capture of variably-modified type %qT that is not an N3639 array "
	     "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
	  && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
	inform (input_location, "because the array element type %qT has "
		"variable size", TREE_TYPE (type));
      return error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
					by_reference_p);
      if (type == error_mark_node)
	return error_mark_node;

      if (id == this_identifier && !by_reference_p)
	{
	  /* Capturing '*this' by copy: the field holds the object, not
	     the pointer.  */
	  gcc_assert (POINTER_TYPE_P (type));
	  type = TREE_TYPE (type);
	  initializer = cp_build_fold_indirect_ref (initializer);
	}

      if (dependent_type_p (type))
	;
      else if (id != this_identifier && by_reference_p)
	{
	  if (!lvalue_p (initializer))
	    {
	      error ("cannot capture %qE by reference", initializer);
	      return error_mark_node;
	    }
	}
      else
	{
	  /* Capture by copy requires a complete type.  */
	  type = complete_type (type);
	  if (!COMPLETE_TYPE_P (type))
	    {
	      error ("capture by copy of incomplete type %qT", type);
	      cxx_incomplete_type_inform (type);
	      return error_mark_node;
	    }
	}
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
	  IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
	{
	  pedwarn (input_location, 0,
		   "already captured %qD in lambda expression", id);
	  return NULL_TREE;
	}
      /* Unmarked again by register_capture_members.  */
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
	internal_error ("trying to capture %qD in instantiation of "
			"generic lambda", id);
      finish_member_declaration (member);
    }

  /* Record the (re-expanded, for a pack) member and initializer on the
     capture list.  */
  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
  return NULL_TREE;
}
672 
/* Register all the capture members on the list CAPTURES, which is the
   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */

void
register_capture_members (tree captures)
{
  if (captures == NULL_TREE)
    return;

  /* Recurse first: the capture list is built in reverse, so this
     declares the members in source order.  */
  register_capture_members (TREE_CHAIN (captures));

  tree field = TREE_PURPOSE (captures);
  if (PACK_EXPANSION_P (field))
    field = PACK_EXPANSION_PATTERN (field);

  /* We set this in add_capture to avoid duplicates.  */
  IDENTIFIER_MARKED (DECL_NAME (field)) = false;
  finish_member_declaration (field);
}
692 
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);

  tree var = NULL_TREE;

  tree saved_class_type = current_class_type;

  tree node;

  /* LAMBDA_STACK goes from outermost to innermost lambda; capture in
     each one in turn, feeding each capture's proxy in as the next
     lambda's initializer.  */
  for (node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      /* add_capture needs the closure as the current class.  */
      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
	initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                            id,
                            initializer,
                            /*by_reference_p=*/
			    (this_capture_p
			     || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
				 == CPLD_REFERENCE)),
			    /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);
    }

  current_class_type = saved_class_type;

  return var;
}
732 
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  Returns error_mark_node if
   'this' is needed but was not captured, NULL_TREE if it is simply
   unavailable and ADD_CAPTURE_P is false.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture
      && (!add_capture_p
          || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE))
    {
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
           1. a non-lambda function or NSDMI,
           2. a lambda function capturing 'this', or
           3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
	{
	  /* Accumulate the chain of lambdas that will each need a
	     'this' capture, outermost first.  */
          lambda_stack = tree_cons (NULL_TREE,
                                    tlambda,
                                    lambda_stack);

	  tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
	  tree containing_function
	    = decl_function_context (TYPE_NAME (closure));

	  tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
	  if (ex && TREE_CODE (ex) == FIELD_DECL)
	    {
	      /* Lambda in an NSDMI.  We don't have a function to look up
		 'this' in, but we can find (or rebuild) the fake one from
		 inject_this_parameter.  */
	      if (!containing_function && !COMPLETE_TYPE_P (closure))
		/* If we're parsing a lambda in a non-local class,
		   we can find the fake 'this' in scope_chain.  */
		init = scope_chain->x_current_class_ptr;
	      else
		/* Otherwise it's either gone or buried in
		   function_context_stack, so make another.  */
		init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
					TYPE_UNQUALIFIED);
	      gcc_checking_assert
		(init && (TREE_TYPE (TREE_TYPE (init))
			  == current_nonlambda_class_type ()));
	      break;
	    }

	  if (containing_function == NULL_TREE)
	    /* We ran out of scopes; there's no 'this' to capture.  */
	    break;

	  if (!LAMBDA_FUNCTION_P (containing_function))
	    {
	      /* We found a non-lambda function.  */
	      if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
		/* First parameter is 'this'.  */
		init = DECL_ARGUMENTS (containing_function);
	      break;
	    }

	  tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
	    {
	      /* An outer lambda has already captured 'this'.  */
	      init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
	      break;
	    }

	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
	    /* An outer lambda won't let us capture 'this'.  */
	    break;
	}

      if (init)
        {
          if (add_capture_p)
	    this_capture = add_default_capture (lambda_stack,
					        /*id=*/this_identifier,
					        init);
          else
	    this_capture = init;
        }
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
	{
	  error ("%<this%> was not captured for this lambda function");
	  result = error_mark_node;
	}
      else
	result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
		  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
	 access to the corresponding unnamed data member of the closure
	 type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
	 ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
859 
860 /* Return the innermost LAMBDA_EXPR we're currently in, if any.  */
861 
862 tree
863 current_lambda_expr (void)
864 {
865   tree type = current_class_type;
866   while (type && !LAMBDA_TYPE_P (type))
867     type = decl_type_context (TYPE_NAME (type));
868   if (type)
869     return CLASSTYPE_LAMBDA_EXPR (type);
870   else
871     return NULL_TREE;
872 }
873 
874 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
875    object.  NULL otherwise..  */
876 
877 static tree
878 resolvable_dummy_lambda (tree object)
879 {
880   if (!is_dummy_object (object))
881     return NULL_TREE;
882 
883   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
884   gcc_assert (!TYPE_PTR_P (type));
885 
886   if (type != current_class_type
887       && current_class_type
888       && LAMBDA_TYPE_P (current_class_type)
889       && lambda_function (current_class_type)
890       && DERIVED_FROM_P (type, nonlambda_method_basetype()))
891     return CLASSTYPE_LAMBDA_EXPR (current_class_type);
892 
893   return NULL_TREE;
894 }
895 
896 /* We don't want to capture 'this' until we know we need it, i.e. after
897    overload resolution has chosen a non-static member function.  At that
898    point we call this function to turn a dummy object into a use of the
899    'this' capture.  */
900 
901 tree
902 maybe_resolve_dummy (tree object, bool add_capture_p)
903 {
904   if (tree lam = resolvable_dummy_lambda (object))
905     if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
906       if (cap != error_mark_node)
907 	object = build_fold_indirect_ref (cap);
908 
909   return object;
910 }
911 
/* When parsing a generic lambda containing an argument-dependent
   member function call we defer overload resolution to instantiation
   time.  But we have to know now whether to capture this or not.
   Do that if FNS contains any non-static fns.
   The std doesn't anticipate this case, but I expect this to be the
   outcome of discussion.  */

void
maybe_generic_this_capture (tree object, tree fns)
{
  if (tree lam = resolvable_dummy_lambda (object))
    if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
      {
	/* We've not yet captured, so look at the function set of
	   interest.  */
	if (BASELINK_P (fns))
	  fns = BASELINK_FUNCTIONS (fns);
	/* For a template-id, look at the underlying overload set but
	   only consider the templates in it.  */
	bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
	if (id_expr)
	  fns = TREE_OPERAND (fns, 0);

	for (lkp_iterator iter (fns); iter; ++iter)
	  if ((!id_expr || TREE_CODE (*iter) == TEMPLATE_DECL)
	      && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
	    {
	      /* Found a non-static member.  Capture this.  */
	      lambda_expr_this_capture (lam, true);
	      break;
	    }
      }
}
943 
944 /* Returns the innermost non-lambda function.  */
945 
946 tree
947 current_nonlambda_function (void)
948 {
949   tree fn = current_function_decl;
950   while (fn && LAMBDA_FUNCTION_P (fn))
951     fn = decl_function_context (fn);
952   return fn;
953 }
954 
955 /* Returns the method basetype of the innermost non-lambda function, including
956    a hypothetical constructor if inside an NSDMI, or NULL_TREE if none.  */
957 
958 tree
959 nonlambda_method_basetype (void)
960 {
961   if (!current_class_ref)
962     return NULL_TREE;
963 
964   tree type = current_class_type;
965   if (!type || !LAMBDA_TYPE_P (type))
966     return type;
967 
968   while (true)
969     {
970       tree lam = CLASSTYPE_LAMBDA_EXPR (type);
971       tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
972       if (ex && TREE_CODE (ex) == FIELD_DECL)
973 	/* Lambda in an NSDMI.  */
974 	return DECL_CONTEXT (ex);
975 
976       tree fn = TYPE_CONTEXT (type);
977       if (!fn || TREE_CODE (fn) != FUNCTION_DECL
978 	  || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
979 	/* No enclosing non-lambda method.  */
980 	return NULL_TREE;
981       if (!LAMBDA_FUNCTION_P (fn))
982 	/* Found an enclosing non-lambda method.  */
983 	return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
984       type = DECL_CONTEXT (fn);
985     }
986 }
987 
988 /* Like current_scope, but looking through lambdas.  */
989 
990 tree
991 current_nonlambda_scope (void)
992 {
993   tree scope = current_scope ();
994   for (;;)
995     {
996       if (TREE_CODE (scope) == FUNCTION_DECL
997 	  && LAMBDA_FUNCTION_P (scope))
998 	{
999 	  scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
1000 	  continue;
1001 	}
1002       else if (LAMBDA_TYPE_P (scope))
1003 	{
1004 	  scope = CP_TYPE_CONTEXT (scope);
1005 	  continue;
1006 	}
1007       break;
1008     }
1009   return scope;
1010 }
1011 
1012 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
1013    indicated FN and NARGS, but do not initialize the return type or any of the
1014    argument slots.  */
1015 
1016 static tree
1017 prepare_op_call (tree fn, int nargs)
1018 {
1019   tree t;
1020 
1021   t = build_vl_exp (CALL_EXPR, nargs + 3);
1022   CALL_EXPR_FN (t) = fn;
1023   CALL_EXPR_STATIC_CHAIN (t) = NULL;
1024 
1025   return t;
1026 }
1027 
1028 /* Return true iff CALLOP is the op() for a generic lambda.  */
1029 
1030 bool
1031 generic_lambda_fn_p (tree callop)
1032 {
1033   return (LAMBDA_FUNCTION_P (callop)
1034 	  && DECL_TEMPLATE_INFO (callop)
1035 	  && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
1036 }
1037 
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  Synthesizes two members on TYPE: a static "_FUN" thunk with the
   op()'s signature whose body calls op() with a null object argument, and a
   conversion operator returning _FUN's address.  Bails out if the lambda has
   any captures or a capture-default.  */

void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  /* Only capture-less lambdas get the conversion operator.  */
  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  /* The object argument is a null pointer of op()'s 'this' type, since the
     lambda has no captures to read.  */
  tree thisarg = build_nop (TREE_TYPE (DECL_ARGUMENTS (callop)),
			    null_pointer_node);
  if (generic_lambda_p)
    {
      /* Balanced by the matching decrement below, after STATTYPE is built.  */
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = cp_build_fold_indirect_ref (thisarg);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    /* Skip the 'this' parameter; _FUN is static.  */
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
	tree new_node = copy_node (src);

	/* Clear TREE_ADDRESSABLE on thunk arguments.  */
	TREE_ADDRESSABLE (new_node) = 0;

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    /* Avoid capturing variables in this context.  */
	    ++cp_unevaluated_operand;
	    tree a = forward_parm (tgt);
	    --cp_unevaluated_operand;

	    CALL_EXPR_ARG (call, ix) = a;
	    if (decltype_call)
	      CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);

	    if (PACK_EXPANSION_P (a))
	      /* Set this after unsharing so it's not in decltype_call.  */
	      PACK_EXPANSION_LOCAL_P (a) = true;

	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  /* Deduced return type: compute it from the decltype call.  */
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  /* STATTYPE is the type of the static _FUN thunk: op()'s return and
     parameter types, without the implicit object parameter.  */
  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
	      (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  SET_DECL_LANGUAGE (convfn, lang_cplusplus);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  SET_DECL_LANGUAGE (statfn, lang_cplusplus);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
1314 
1315 /* True if FN is the static function "_FUN" that gets returned from the lambda
1316    conversion operator.  */
1317 
1318 bool
1319 lambda_static_thunk_p (tree fn)
1320 {
1321   return (fn && TREE_CODE (fn) == FUNCTION_DECL
1322 	  && DECL_ARTIFICIAL (fn)
1323 	  && DECL_STATIC_FUNCTION_P (fn)
1324 	  && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
1325 }
1326 
1327 /* Returns true iff VAL is a lambda-related declaration which should
1328    be ignored by unqualified lookup.  */
1329 
1330 bool
1331 is_lambda_ignored_entity (tree val)
1332 {
1333   /* Look past normal capture proxies.  */
1334   if (is_normal_capture_proxy (val))
1335     return true;
1336 
1337   /* Always ignore lambda fields, their names are only for debugging.  */
1338   if (TREE_CODE (val) == FIELD_DECL
1339       && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
1340     return true;
1341 
1342   /* None of the lookups that use qualify_lookup want the op() from the
1343      lambda; they want the one from the enclosing class.  */
1344   if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
1345     return true;
1346 
1347   return false;
1348 }
1349 
/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
/* Current scope lambdas are being counted against, and the count within it.
   GTY(()) so the garbage collector tracks the rooted tree.  */
static GTY(()) tree lambda_scope;
static GTY(()) int lambda_count;
/* A saved (scope, count) pair, one stack entry per start_lambda_scope.  */
struct GTY(()) tree_int
{
  tree t;  /* Saved lambda_scope.  */
  int i;   /* Saved lambda_count.  */
};
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
1361 
1362 void
1363 start_lambda_scope (tree decl)
1364 {
1365   tree_int ti;
1366   gcc_assert (decl);
1367   /* Once we're inside a function, we ignore variable scope and just push
1368      the function again so that popping works properly.  */
1369   if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
1370     decl = current_function_decl;
1371   ti.t = lambda_scope;
1372   ti.i = lambda_count;
1373   vec_safe_push (lambda_scope_stack, ti);
1374   if (lambda_scope != decl)
1375     {
1376       /* Don't reset the count if we're still in the same function.  */
1377       lambda_scope = decl;
1378       lambda_count = 0;
1379     }
1380 }
1381 
/* Stamp LAMBDA with the current scope and the next discriminator within that
   scope (both feed its mangling — see the comment above lambda_scope).  */

void
record_lambda_scope (tree lambda)
{
  LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
  LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
}
1388 
/* This lambda is an instantiation of a lambda in a template default argument
   that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either.  But we do
   need to use and increment the global count to avoid collisions.  */

void
record_null_lambda_scope (tree lambda)
{
  if (vec_safe_is_empty (lambda_scope_stack))
    record_lambda_scope (lambda);
  else
    {
      /* Use the bottom stack entry, i.e. the outermost (global) scope and
	 count rather than the current ones.  */
      tree_int *p = lambda_scope_stack->begin();
      LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
      LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
    }
  /* Either path must have recorded a null scope: lambda_scope when the stack
     is empty, the bottom entry's saved scope otherwise.  */
  gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
}
1406 
1407 void
1408 finish_lambda_scope (void)
1409 {
1410   tree_int *p = &lambda_scope_stack->last ();
1411   if (lambda_scope != p->t)
1412     {
1413       lambda_scope = p->t;
1414       lambda_count = p->i;
1415     }
1416   lambda_scope_stack->pop ();
1417 }
1418 
1419 tree
1420 start_lambda_function (tree fco, tree lambda_expr)
1421 {
1422   /* Let the front end know that we are going to be defining this
1423      function.  */
1424   start_preparsed_function (fco,
1425 			    NULL_TREE,
1426 			    SF_PRE_PARSED | SF_INCLASS_INLINE);
1427 
1428   tree body = begin_function_body ();
1429 
1430   /* Push the proxies for any explicit captures.  */
1431   for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
1432        cap = TREE_CHAIN (cap))
1433     build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
1434 
1435   return body;
1436 }
1437 
1438 /* Subroutine of prune_lambda_captures: CAP is a node in
1439    LAMBDA_EXPR_CAPTURE_LIST.  Return the variable it captures for which we
1440    might optimize away the capture, or NULL_TREE if there is no such
1441    variable.  */
1442 
1443 static tree
1444 var_to_maybe_prune (tree cap)
1445 {
1446   if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
1447     /* Don't prune explicit captures.  */
1448     return NULL_TREE;
1449 
1450   tree mem = TREE_PURPOSE (cap);
1451   if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
1452     /* Packs and init-captures aren't captures of constant vars.  */
1453     return NULL_TREE;
1454 
1455   tree init = TREE_VALUE (cap);
1456   if (is_normal_capture_proxy (init))
1457     init = DECL_CAPTURED_VARIABLE (init);
1458   if (decl_constant_var_p (init))
1459     return init;
1460 
1461   return NULL_TREE;
1462 }
1463 
/* walk_tree helper for prune_lambda_captures: Remember which capture proxies
   for constant variables are actually used in the lambda body.

   There will always be a DECL_EXPR for the capture proxy; remember it when we
   see it, but replace it with any other use.  */

static tree
mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
{
  /* DATA maps each captured constant variable to the location of the
     reference to it we will inspect later.  */
  hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;

  tree var = NULL_TREE;
  if (TREE_CODE (*t) == DECL_EXPR)
    {
      tree decl = DECL_EXPR_DECL (*t);
      if (is_constant_capture_proxy (decl))
	{
	  var = DECL_CAPTURED_VARIABLE (decl);
	  /* Don't descend into the DECL_EXPR itself.  */
	  *walk_subtrees = 0;
	}
    }
  else if (is_constant_capture_proxy (*t))
    /* A direct use of the proxy outside its declaration.  */
    var = DECL_CAPTURED_VARIABLE (*t);

  if (var)
    {
      /* SLOT aliases the map entry, so the assignment below updates the map
	 in place.  A direct use (VAR_P) overrides a stored DECL_EXPR; thus a
	 DECL_EXPR survives in the map only if it is the sole reference.  */
      tree *&slot = const_vars.get_or_insert (var);
      if (!slot || VAR_P (*t))
	slot = t;
    }

  return NULL_TREE;
}
1497 
/* We're at the end of processing a lambda; go back and remove any captures of
   constant variables for which we've folded away all uses.  BODY is the
   function body of the call operator.  */

static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.  */
    return;

  /* Map from captured constant variable to its surviving reference in BODY;
     filled in by mark_const_cap_r.  */
  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  /* FIELDP advances monotonically through TYPE_FIELDS; captures appear in
     the same order in both lists.  */
  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
	{
	  tree **use = const_vars.get (var);
	  /* A remaining DECL_EXPR means the proxy declaration is the only
	     reference left (mark_const_cap_r replaces it with any real use).  */
	  if (use && TREE_CODE (**use) == DECL_EXPR)
	    {
	      /* All uses of this capture were folded away, leaving only the
		 proxy declaration.  */

	      /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
	      *capp = TREE_CHAIN (cap);

	      /* And out of TYPE_FIELDS.  */
	      tree field = TREE_PURPOSE (cap);
	      while (*fieldp != field)
		fieldp = &DECL_CHAIN (*fieldp);
	      *fieldp = DECL_CHAIN (*fieldp);

	      /* And remove the capture proxy declaration.  */
	      **use = void_node;
	      continue;
	    }
	}

      capp = &TREE_CHAIN (cap);
    }
}
1546 
1547 void
1548 finish_lambda_function (tree body)
1549 {
1550   finish_function_body (body);
1551 
1552   prune_lambda_captures (body);
1553 
1554   /* Finish the function and generate code for it if necessary.  */
1555   tree fn = finish_function (/*inline_p=*/true);
1556 
1557   /* Only expand if the call op is not a template.  */
1558   if (!DECL_TEMPLATE_INFO (fn))
1559     expand_or_defer_fn (fn);
1560 }
1561 
1562 #include "gt-cp-lambda.h"
1563