/* Language independent return value optimizations
   Copyright (C) 2004-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"

/* This file implements return value optimizations for functions which
   return aggregate types.

   Basically this pass searches the function for return statements which
   return a local aggregate.  When converted to RTL such statements will
   generate a copy from the local aggregate to the final return value
   destination mandated by the target's ABI.

   That copy can often be avoided by directly constructing the return value
   into the final destination mandated by the target's ABI.

   This is basically a generic equivalent to the C++ front-end's
   Named Return Value optimization.  */
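
/* For illustration, a typical candidate (a sketch, not taken from any
   particular testcase, and assuming struct S is returned in memory on
   the target) looks like:

     struct S { int a[32]; };

     struct S
     make_s (int x)
     {
       struct S tmp;
       tmp.a[0] = x;
       return tmp;
     }

   Without this pass, TMP is built in a local slot and then copied into
   the ABI-mandated return slot; with it, TMP is replaced by the
   function's RESULT_DECL so the value is built directly in the return
   slot and the copy disappears.  */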

struct nrv_data_t
{
  /* This is the temporary (a VAR_DECL) which appears in all of
     this function's RETURN_EXPR statements.  */
  tree var;

  /* This is the function's RESULT_DECL.  We will replace all occurrences
     of VAR with RESULT_DECL when we apply this optimization.  */
  tree result;
  int modified;
};

static tree finalize_nrv_r (tree *, int *, void *);

/* Callback for the tree walker.

   If TP refers to a RETURN_EXPR, then set the expression being returned
   to nrv_data->result.

   If TP refers to nrv_data->var, then replace nrv_data->var with
   nrv_data->result.

   If we reach a node where we know all the subtrees are uninteresting,
   then set *WALK_SUBTREES to zero.  */

static tree
finalize_nrv_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nrv_data_t *dp = (struct nrv_data_t *) wi->info;

  /* No need to walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  /* Otherwise replace all occurrences of VAR with RESULT.  */
  else if (*tp == dp->var)
    {
      *tp = dp->result;
      dp->modified = 1;
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Main entry point for return value optimizations.

   If this function always returns the same local variable, and that
   local variable is an aggregate type, then replace the variable with
   the function's DECL_RESULT.

   This is the equivalent of the C++ named return value optimization
   applied to optimized trees in a language independent form.  If we
   ever encounter languages which prevent this kind of optimization,
   then we could either have the languages register the optimization or
   we could change the gating function to check the current language.  */
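
/* As a rough sketch (the GIMPLE spelling below is abbreviated, not a
   literal dump), the transformation turns

     tmp.a[0] = x_1;
     <retval> = tmp;
     return <retval>;

   into

     <retval>.a[0] = x_1;
     return <retval>;

   by rewriting every use of TMP into the RESULT_DECL and deleting the
   now-redundant copy.  */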

namespace {

const pass_data pass_data_nrv =
{
  GIMPLE_PASS, /* type */
  "nrv", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_NRV, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nrv : public gimple_opt_pass
{
public:
  pass_nrv (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nrv, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return optimize > 0; }

  virtual unsigned int execute (function *);

}; // class pass_nrv

unsigned int
pass_nrv::execute (function *fun)
{
  tree result = DECL_RESULT (current_function_decl);
  tree result_type = TREE_TYPE (result);
  tree found = NULL;
  basic_block bb;
  gimple_stmt_iterator gsi;
  struct nrv_data_t data;

  /* If this function does not return an aggregate type in memory, then
     there is nothing to do.  */
  if (!aggregate_value_p (result, current_function_decl))
    return 0;

  /* If a GIMPLE type is returned in memory, finalize_nrv_r might create
     non-GIMPLE.  */
  if (is_gimple_reg_type (result_type))
    return 0;

  /* If the front end already did something like this, don't do it here.  */
  if (DECL_NAME (result))
    return 0;

  /* If the result has its address taken then it might be modified
     by means not detected in the following loop.  Bail out in this
     case.  */
  if (TREE_ADDRESSABLE (result))
    return 0;

  /* Look through each block for assignments to the RESULT_DECL.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree ret_val;

	  if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
	    {
	      /* In a function with an aggregate return value, the
		 gimplifier has changed all non-empty RETURN_EXPRs to
		 return the RESULT_DECL.  */
	      ret_val = gimple_return_retval (return_stmt);
	      if (ret_val)
		gcc_assert (ret_val == result);
	    }
	  else if (gimple_has_lhs (stmt)
		   && gimple_get_lhs (stmt) == result)
	    {
	      tree rhs;

	      if (!gimple_assign_copy_p (stmt))
		return 0;

	      rhs = gimple_assign_rhs1 (stmt);

	      /* Now verify that this return statement uses the same value
		 as any previously encountered return statement.  */
	      if (found != NULL)
		{
		  /* If we found a return statement using a different variable
		     than previous return statements, then we cannot perform
		     the NRV optimization.  */
		  if (found != rhs)
		    return 0;
		}
	      else
		found = rhs;

	      /* The returned value must be a local automatic variable of the
		 same type and alignment as the function's result.  */
	      if (TREE_CODE (found) != VAR_DECL
		  || TREE_THIS_VOLATILE (found)
		  || !auto_var_in_fn_p (found, current_function_decl)
		  || TREE_ADDRESSABLE (found)
		  || DECL_ALIGN (found) > DECL_ALIGN (result)
		  || !useless_type_conversion_p (result_type,
						 TREE_TYPE (found)))
		return 0;
	    }
	  else if (gimple_has_lhs (stmt))
	    {
	      tree addr = get_base_address (gimple_get_lhs (stmt));
	      /* If there's any MODIFY of a component of RESULT,
		 then bail out.  */
	      if (addr && addr == result)
		return 0;
	    }
	}
    }

  if (!found)
    return 0;

  /* If dumping details, note the NRV replacement once and only once.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "NRV Replaced: ");
      print_generic_expr (dump_file, found, dump_flags);
      fprintf (dump_file, "  with: ");
      print_generic_expr (dump_file, result, dump_flags);
      fprintf (dump_file, "\n");
    }

  /* At this point we know that all the return statements return the
     same local which has suitable attributes for NRV.  Copy debugging
     information from FOUND to RESULT if it will be useful.  But don't set
     DECL_ABSTRACT_ORIGIN to point at another function.  */
  if (!DECL_IGNORED_P (found)
      && !(DECL_ABSTRACT_ORIGIN (found)
	   && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (found)) != current_function_decl))
    {
      DECL_NAME (result) = DECL_NAME (found);
      DECL_SOURCE_LOCATION (result) = DECL_SOURCE_LOCATION (found);
      DECL_ABSTRACT_ORIGIN (result) = DECL_ABSTRACT_ORIGIN (found);
    }

  TREE_ADDRESSABLE (result) |= TREE_ADDRESSABLE (found);

  /* Now walk through the function changing all references to VAR to be
     RESULT.  */
  data.var = found;
  data.result = result;
  FOR_EACH_BB_FN (bb, fun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* If this is a copy from VAR to RESULT, remove it.  */
	  if (gimple_assign_copy_p (stmt)
	      && gimple_assign_lhs (stmt) == result
	      && gimple_assign_rhs1 (stmt) == found)
	    {
	      unlink_stmt_vdef (stmt);
	      gsi_remove (&gsi, true);
	      release_defs (stmt);
	    }
	  else
	    {
	      struct walk_stmt_info wi;
	      memset (&wi, 0, sizeof (wi));
	      wi.info = &data;
	      data.modified = 0;
	      walk_gimple_op (stmt, finalize_nrv_r, &wi);
	      if (data.modified)
		update_stmt (stmt);
	      gsi_next (&gsi);
	    }
	}
    }

  SET_DECL_VALUE_EXPR (found, result);
  DECL_HAS_VALUE_EXPR_P (found) = 1;

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nrv (gcc::context *ctxt)
{
  return new pass_nrv (ctxt);
}

/* Determine (pessimistically) whether DEST is available for NRV
   optimization, where DEST is expected to be the LHS of a modify
   expression where the RHS is a function returning an aggregate.

   DEST is available if it is not clobbered or used by the call.  */
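
/* For example (an illustrative sketch, not an exhaustive list of the
   cases the alias oracle queries cover), in

     d = foo (&d);

   the call may read or write D through the pointer argument, so D is
   not available as the return slot; whereas in a plain

     d = foo ();

   with no other way for FOO to reach D, it typically is.  */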

static bool
dest_safe_for_nrv_p (gcall *call)
{
  tree dest = gimple_call_lhs (call);

  dest = get_base_address (dest);
  if (! dest)
    return false;

  if (TREE_CODE (dest) == SSA_NAME)
    return true;

  if (call_may_clobber_ref_p (call, dest)
      || ref_maybe_used_by_stmt_p (call, dest))
    return false;

  return true;
}

/* Walk through the function looking for GIMPLE_ASSIGNs with calls that
   return in memory on the RHS.  For each of these, determine whether it is
   safe to pass the address of the LHS as the return slot, and mark the
   call appropriately if so.

   The NRV shares the return slot with a local variable in the callee; this
   optimization shares the return slot with the target of the call within
   the caller.  If the NRV is performed (which we can't know in general),
   this optimization is safe if the address of the target has not
   escaped prior to the call.  If it has, modifications to the local
   variable will produce visible changes elsewhere, as in PR c++/19317.  */
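
/* A sketch of the problematic case (modeled loosely on the PR, not
   copied from it): if the caller has done

     global_ptr = &a;
     ...
     a = foo ();

   and FOO constructs its NRV variable directly in the return slot, then
   stores into that local become visible through *GLOBAL_PTR while FOO is
   still running, which can change observable behavior.  */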

namespace {

const pass_data pass_data_return_slot =
{
  GIMPLE_PASS, /* type */
  "retslot", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_return_slot : public gimple_opt_pass
{
public:
  pass_return_slot (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_return_slot, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_return_slot

unsigned int
pass_return_slot::execute (function *fun)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  bool slot_opt_p;

	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (stmt
	      && gimple_call_lhs (stmt)
	      && !gimple_call_return_slot_opt_p (stmt)
	      && aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
				    gimple_call_fndecl (stmt)))
	    {
	      /* Check if the location being assigned to is
		 clobbered by the call.  */
	      slot_opt_p = dest_safe_for_nrv_p (stmt);
	      gimple_call_set_return_slot_opt (stmt, slot_opt_p);
	    }
	}
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_return_slot (gcc::context *ctxt)
{
  return new pass_return_slot (ctxt);
}