/* Language independent return value optimizations
   Copyright (C) 2004-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "internal-fn.h"

/* This file implements return value optimizations for functions which
   return aggregate types.

   Basically this pass searches the function for return statements which
   return a local aggregate.  When converted to RTL such statements will
   generate a copy from the local aggregate to the final return value
   destination mandated by the target's ABI.

   That copy can often be avoided by directly constructing the return value
   into the final destination mandated by the target's ABI.

   This is basically a generic equivalent to the C++ front-end's
   Named Return Value optimization.  */
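
/* For illustration only (hypothetical source), a candidate looks like:

     struct S { int a[64]; };

     struct S
     make_s (int v)
     {
       struct S s;
       for (int i = 0; i < 64; i++)
         s.a[i] = v;
       return s;
     }

   assuming the target returns `struct S' in memory.  Without NRV the
   object is built in the local `s' and then copied into the return
   slot; this pass replaces `s' with the RESULT_DECL so the object is
   constructed directly in the return slot.  */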

struct nrv_data_t
{
  /* This is the temporary (a VAR_DECL) which appears in all of
     this function's RETURN_EXPR statements.  */
  tree var;

  /* This is the function's RESULT_DECL.  We will replace all occurrences
     of VAR with RESULT_DECL when we apply this optimization.  */
  tree result;
  int modified;
};

static tree finalize_nrv_r (tree *, int *, void *);

/* Callback for the tree walker.

   If TP refers to a RETURN_EXPR, then set the expression being returned
   to nrv_data->result.

   If TP refers to nrv_data->var, then replace nrv_data->var with
   nrv_data->result.

   If we reach a node where we know all the subtrees are uninteresting,
   then set *WALK_SUBTREES to zero.  */

static tree
finalize_nrv_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nrv_data_t *dp = (struct nrv_data_t *) wi->info;

  /* No need to walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  /* Otherwise replace all occurrences of VAR with RESULT.  */
  else if (*tp == dp->var)
    {
      *tp = dp->result;
      dp->modified = 1;
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Main entry point for return value optimizations.

   If this function always returns the same local variable, and that
   local variable is an aggregate type, then replace the variable with
   the function's DECL_RESULT.

   This is the equivalent of the C++ named return value optimization
   applied to optimized trees in a language independent form.  If we
   ever encounter languages which prevent this kind of optimization,
   then we could either have the languages register the optimization or
   we could change the gating function to check the current language.  */
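
/* As a hypothetical sketch, assuming `struct S' is returned in memory
   and `s' is a suitable local, the GIMPLE

     s.a = 1;
     s.b = 2;
     <retval> = s;
     return <retval>;

   becomes

     <retval>.a = 1;
     <retval>.b = 2;
     return <retval>;

   i.e. the copy into the RESULT_DECL is removed and all other
   references to `s' are rewritten to use the RESULT_DECL.  */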

namespace {

const pass_data pass_data_nrv =
{
  GIMPLE_PASS, /* type */
  "nrv", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_NRV, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nrv : public gimple_opt_pass
{
public:
  pass_nrv (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nrv, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return optimize > 0; }

  virtual unsigned int execute (function *);

}; // class pass_nrv

unsigned int
pass_nrv::execute (function *fun)
{
  tree result = DECL_RESULT (current_function_decl);
  tree result_type = TREE_TYPE (result);
  tree found = NULL;
  basic_block bb;
  gimple_stmt_iterator gsi;
  struct nrv_data_t data;

  /* If this function does not return an aggregate type in memory, then
     there is nothing to do.  */
  if (!aggregate_value_p (result, current_function_decl))
    return 0;

  /* If a GIMPLE type is returned in memory, finalize_nrv_r might create
     non-GIMPLE.  */
  if (is_gimple_reg_type (result_type))
    return 0;

  /* If the front end already did something like this, don't do it here.  */
  if (DECL_NAME (result))
    return 0;

  /* If the result has its address taken then it might be modified
     by means not detected in the following loop.  Bail out in this
     case.  */
  if (TREE_ADDRESSABLE (result))
    return 0;

  /* Look through each block for assignments to the RESULT_DECL.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          tree ret_val;

          if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              /* In a function with an aggregate return value, the
                 gimplifier has changed all non-empty RETURN_EXPRs to
                 return the RESULT_DECL.  */
              ret_val = gimple_return_retval (return_stmt);
              if (ret_val)
                gcc_assert (ret_val == result);
            }
          else if (gimple_has_lhs (stmt)
                   && gimple_get_lhs (stmt) == result)
            {
              tree rhs;

              if (!gimple_assign_copy_p (stmt))
                return 0;

              rhs = gimple_assign_rhs1 (stmt);

              /* Now verify that this return statement uses the same value
                 as any previously encountered return statement.  */
              if (found != NULL)
                {
                  /* If we found a return statement using a different variable
                     than previous return statements, then we cannot perform
                     NRV optimizations.  */
                  if (found != rhs)
                    return 0;
                }
              else
                found = rhs;

              /* The returned value must be a local automatic variable of the
                 same type and alignment as the function's result.  */
              if (!VAR_P (found)
                  || TREE_THIS_VOLATILE (found)
                  || !auto_var_in_fn_p (found, current_function_decl)
                  || TREE_ADDRESSABLE (found)
                  || DECL_ALIGN (found) > DECL_ALIGN (result)
                  || !useless_type_conversion_p (result_type,
                                                 TREE_TYPE (found)))
                return 0;
            }
          else if (gimple_has_lhs (stmt))
            {
              tree addr = get_base_address (gimple_get_lhs (stmt));
              /* If there's any MODIFY of a component of RESULT,
                 then bail out.  */
              if (addr && addr == result)
                return 0;
            }
        }
    }

  if (!found)
    return 0;

  /* If dumping details, note the NRV replacement once and only once.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "NRV Replaced: ");
      print_generic_expr (dump_file, found, dump_flags);
      fprintf (dump_file, "  with: ");
      print_generic_expr (dump_file, result, dump_flags);
      fprintf (dump_file, "\n");
    }

  /* At this point we know that all the return statements return the
     same local which has suitable attributes for NRV.  Copy debugging
     information from FOUND to RESULT if it will be useful.  But don't set
     DECL_ABSTRACT_ORIGIN to point at another function.  */
  if (!DECL_IGNORED_P (found)
      && !(DECL_ABSTRACT_ORIGIN (found)
	   && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (found)) != current_function_decl))
    {
      DECL_NAME (result) = DECL_NAME (found);
      DECL_SOURCE_LOCATION (result) = DECL_SOURCE_LOCATION (found);
      DECL_ABSTRACT_ORIGIN (result) = DECL_ABSTRACT_ORIGIN (found);
    }

  TREE_ADDRESSABLE (result) |= TREE_ADDRESSABLE (found);

  /* Now walk through the function changing all references to VAR to be
     RESULT.  */
  data.var = found;
  data.result = result;
  FOR_EACH_BB_FN (bb, fun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple *stmt = gsi_stmt (gsi);
          /* If this is a copy from VAR to RESULT, remove it.  */
          if (gimple_assign_copy_p (stmt)
              && gimple_assign_lhs (stmt) == result
              && gimple_assign_rhs1 (stmt) == found)
            {
              unlink_stmt_vdef (stmt);
              gsi_remove (&gsi, true);
              release_defs (stmt);
            }
          else
            {
              struct walk_stmt_info wi;
              memset (&wi, 0, sizeof (wi));
              wi.info = &data;
              data.modified = 0;
              walk_gimple_op (stmt, finalize_nrv_r, &wi);
              if (data.modified)
                update_stmt (stmt);
              gsi_next (&gsi);
            }
        }
    }

  SET_DECL_VALUE_EXPR (found, result);
  DECL_HAS_VALUE_EXPR_P (found) = 1;

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nrv (gcc::context *ctxt)
{
  return new pass_nrv (ctxt);
}

/* Determine (pessimistically) whether DEST is available for NRV
   optimization, where DEST is expected to be the LHS of a call to a
   function returning an aggregate in memory.

   DEST is available if it is not clobbered or used by the call.  */
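
/* For illustration only (hypothetical source): in

     struct S d;
     d = foo (&d);

   the address of `d' escapes to the callee, so the call may read or
   clobber `d', and dest_safe_for_nrv_p would reject `d' as a
   return slot.  */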

static bool
dest_safe_for_nrv_p (gcall *call)
{
  tree dest = gimple_call_lhs (call);

  dest = get_base_address (dest);
  if (! dest)
    return false;

  if (TREE_CODE (dest) == SSA_NAME)
    return true;

  if (call_may_clobber_ref_p (call, dest)
      || ref_maybe_used_by_stmt_p (call, dest))
    return false;

  return true;
}

/* Walk through the function looking for calls that return an aggregate
   in memory and have an LHS.  For each of these, determine whether it
   is safe to pass the address of the LHS as the return slot, and mark
   the call appropriately if so.

   The NRV shares the return slot with a local variable in the callee; this
   optimization shares the return slot with the target of the call within
   the caller.  If the NRV is performed (which we can't know in general),
   this optimization is safe if the address of the target has not
   escaped prior to the call.  If it has, modifications to the local
   variable will produce visible changes elsewhere, as in PR c++/19317.  */
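
/* A minimal sketch (hypothetical source) of the case this pass enables:

     struct S d;
     ...
     d = make_s ();

   If the address of `d' has not escaped before the call, the call may
   construct its result directly in `d' instead of in a temporary that
   is copied into `d' afterwards; the pass records this by setting the
   return slot flag on the call statement.  */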

namespace {

const pass_data pass_data_return_slot =
{
  GIMPLE_PASS, /* type */
  "retslot", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_return_slot : public gimple_opt_pass
{
public:
  pass_return_slot (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_return_slot, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_return_slot

unsigned int
pass_return_slot::execute (function *fun)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gcall *stmt;
          bool slot_opt_p;

          stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
          if (stmt
              && gimple_call_lhs (stmt)
              && !gimple_call_return_slot_opt_p (stmt)
              /* Ignore internal functions; they are expanded specially,
                 and calling aggregate_value_p on their result might
                 trigger undesirable warnings with some backends.  */
              && !gimple_call_internal_p (stmt)
              && aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
                                    gimple_call_fndecl (stmt)))
            {
              /* Check if the location being assigned to is
                 clobbered by the call.  */
              slot_opt_p = dest_safe_for_nrv_p (stmt);
              gimple_call_set_return_slot_opt (stmt, slot_opt_p);
            }
        }
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_return_slot (gcc::context *ctxt)
{
  return new pass_return_slot (ctxt);
}