/* Detect paths through the CFG which can never be executed in a conforming
   program and isolate them.

   Copyright (C) 2013-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "tree-cfg.h"
#include "intl.h"


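/* Set to true whenever this pass changes the CFG (by isolating a path
   or inserting a trap); checked at the end of the pass to decide
   whether dominance information must be recomputed and a CFG cleanup
   scheduled.  */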
static bool cfg_altered;

/* Callback for walk_stmt_load_store_ops.

   Return TRUE if OP will dereference the tree stored in DATA, FALSE
   otherwise.  As a side effect, when a match is found, mark the
   dereference volatile so that later passes will not remove it.

   This routine only makes a superficial check for a dereference.  Thus,
   it must only be used if it is safe to return a false negative.  */
static bool
check_loadstore (gimple *stmt, tree op, tree, void *data)
{
  if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
      && operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
    {
      TREE_THIS_VOLATILE (op) = 1;
      TREE_SIDE_EFFECTS (op) = 1;
      update_stmt (stmt);
      return true;
    }
  return false;
}
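
/* For instance, given a statement such as the following (illustrative
   GIMPLE, not taken from an actual dump):

       MEM[(int *)p_1] = 42;

   walking its stores with check_loadstore and DATA == p_1 marks the
   MEM_REF volatile, so later optimizers will not delete the
   dereference we intend to keep.  */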

/* Insert a trap at SI_P (after the statement when it is a NULL pointer
   dereference, otherwise before it) and split the block after the trap.  */

static void
insert_trap (gimple_stmt_iterator *si_p, tree op)
{
  /* We want the NULL pointer dereference to actually occur so that
     code that wishes to catch the signal can do so.

     If the dereference is a load, then there's nothing to do as the
     LHS will be a throw-away SSA_NAME and the RHS is the NULL dereference.

     If the dereference is a store and we can easily transform the RHS,
     then simplify the RHS to enable more DCE.  Note that we require the
     statement to be a GIMPLE_ASSIGN which filters out calls on the RHS.  */
  gimple *stmt = gsi_stmt (*si_p);
  if (walk_stmt_load_store_ops (stmt, (void *)op, NULL, check_loadstore)
      && is_gimple_assign (stmt)
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
    {
      /* We just need to turn the RHS into zero converted to the proper
	 type.  */
      tree type = TREE_TYPE (gimple_assign_lhs (stmt));
      gimple_assign_set_rhs_code (stmt, INTEGER_CST);
      gimple_assign_set_rhs1 (stmt, fold_convert (type, integer_zero_node));
      update_stmt (stmt);
    }

  gcall *new_stmt
    = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  gimple_seq seq = NULL;
  gimple_seq_add_stmt (&seq, new_stmt);

  /* If we had a NULL pointer dereference, then we want to insert the
     __builtin_trap after the statement; for the other cases we want
     to insert it before the statement.  */
  if (walk_stmt_load_store_ops (stmt, (void *)op,
				check_loadstore,
				check_loadstore))
    {
      gsi_insert_after (si_p, seq, GSI_NEW_STMT);
      if (stmt_ends_bb_p (stmt))
	{
	  split_block (gimple_bb (stmt), stmt);
	  return;
	}
    }
  else
    gsi_insert_before (si_p, seq, GSI_NEW_STMT);

  split_block (gimple_bb (new_stmt), new_stmt);
  *si_p = gsi_for_stmt (stmt);
}
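
/* As an illustration (hypothetical GIMPLE, not from an actual dump),
   insert_trap turns a store through a NULL pointer such as

       *p_1 = x_2;

   into

       *p_1 = 0;
       __builtin_trap ();

   keeping the faulting dereference so a signal handler can still catch
   it, while the zeroed RHS exposes more dead code upstream.  */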

/* BB when reached via incoming edge E will exhibit undefined behavior
   at STMT.  Isolate and optimize the path which exhibits undefined
   behavior.

   Isolation is simple.  Duplicate BB and redirect E to BB'.

   Optimization is simple as well.  Replace STMT in BB' with an
   unconditional trap and remove all outgoing edges from BB'.

   If RET_ZERO, do not trap, only return NULL.

   DUPLICATE is a pre-existing duplicate, use it as BB' if it exists.

   Return BB'.  */

basic_block
isolate_path (basic_block bb, basic_block duplicate,
	      edge e, gimple *stmt, tree op, bool ret_zero)
{
  gimple_stmt_iterator si, si2;
  edge_iterator ei;
  edge e2;

  /* First duplicate BB if we have not done so already and remove all
     the duplicate's outgoing edges as duplicate is going to unconditionally
     trap.  Removing the outgoing edges is both an optimization and ensures
     we don't need to do any PHI node updates.  */
  if (!duplicate)
    {
      duplicate = duplicate_block (bb, NULL, NULL);
      if (!ret_zero)
	for (ei = ei_start (duplicate->succs); (e2 = ei_safe_edge (ei)); )
	  remove_edge (e2);
    }

  /* Complete the isolation step by redirecting E to reach DUPLICATE.  */
  e2 = redirect_edge_and_branch (e, duplicate);
  if (e2)
    flush_pending_stmts (e2);

  /* There may be more than one statement in DUPLICATE which exhibits
     undefined behavior.  Ultimately we want the first such statement in
     DUPLICATE so that we're able to delete as much code as possible.

     So each time we discover undefined behavior in DUPLICATE, search for
     the statement which triggers undefined behavior.  If found, then
     transform the statement into a trap and delete everything after the
     statement.  If not found, then this particular instance was subsumed by
     an earlier instance of undefined behavior and there's nothing to do.

     This is made more complicated by the fact that we have STMT, which is in
     BB rather than in DUPLICATE.  So we set up two iterators, one for each
     block and walk forward looking for STMT in BB, advancing each iterator at
     each step.

     When we find STMT the second iterator should point to STMT's equivalent in
     duplicate.  If DUPLICATE ends before STMT is found in BB, then there's
     nothing to do.

     Ignore labels and debug statements.  */
  si = gsi_start_nondebug_after_labels_bb (bb);
  si2 = gsi_start_nondebug_after_labels_bb (duplicate);
  while (!gsi_end_p (si) && !gsi_end_p (si2) && gsi_stmt (si) != stmt)
    {
      gsi_next_nondebug (&si);
      gsi_next_nondebug (&si2);
    }

  /* This would be an indicator that we never found STMT in BB, which should
     never happen.  */
  gcc_assert (!gsi_end_p (si));

  /* If we did not run to the end of DUPLICATE, then SI points to STMT and
     SI2 points to the duplicate of STMT in DUPLICATE.  Insert a trap
     before SI2 and remove SI2 and all trailing statements.  */
  if (!gsi_end_p (si2))
    {
      if (ret_zero)
	{
	  greturn *ret = as_a <greturn *> (gsi_stmt (si2));
	  tree zero = build_zero_cst (TREE_TYPE (gimple_return_retval (ret)));
	  gimple_return_set_retval (ret, zero);
	  update_stmt (ret);
	}
      else
	insert_trap (&si2, op);
    }

  return duplicate;
}
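
/* A sketch of the isolation step above (hypothetical CFG).  If only the
   path through edge E triggers undefined behavior in BB:

       pred1 --E--> BB              pred1 --E--> BB' (traps, no succs)
                          becomes
       pred2 -----> BB              pred2 -----> BB

   so the well-defined paths through BB are left untouched.  */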

/* Look for PHI nodes which feed statements in the same block where
   the value of the PHI node implies the statement is erroneous.

   For example, a NULL PHI arg value which then feeds a pointer
   dereference.

   When found isolate and optimize the path associated with the PHI
   argument feeding the erroneous statement.  */
static void
find_implicit_erroneous_behavior (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator si;

      /* Out of an abundance of caution, do not isolate paths to a
	 block where the block has any abnormal outgoing edges.

	 We might be able to relax this in the future.  We have to detect
	 when we have to split the block with the NULL dereference and
	 the trap we insert.  We have to preserve abnormal edges out
	 of the isolated block which in turn means updating PHIs at
	 the targets of those abnormal outgoing edges.  */
      if (has_abnormal_or_eh_outgoing_edge_p (bb))
	continue;

      /* First look for a PHI which sets a pointer to NULL and which
	 is then dereferenced within BB.  This is somewhat overly
	 conservative, but probably catches most of the interesting
	 cases.  */
      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gphi *phi = si.phi ();
	  tree lhs = gimple_phi_result (phi);

	  /* If the result is not a pointer, then there is no need to
	     examine the arguments.  */
	  if (!POINTER_TYPE_P (TREE_TYPE (lhs)))
	    continue;

	  /* PHI produces a pointer result.  See if any of the PHI's
	     arguments are NULL.

	     When we remove an edge, we want to reprocess the current
	     index, hence the ugly way we update I for each iteration.  */
	  basic_block duplicate = NULL;
	  for (unsigned i = 0, next_i = 0;
	       i < gimple_phi_num_args (phi);
	       i = next_i)
	    {
	      tree op = gimple_phi_arg_def (phi, i);
	      edge e = gimple_phi_arg_edge (phi, i);
	      imm_use_iterator iter;
	      gimple *use_stmt;

	      next_i = i + 1;

	      if (TREE_CODE (op) == ADDR_EXPR)
		{
		  tree valbase = get_base_address (TREE_OPERAND (op, 0));
		  if ((TREE_CODE (valbase) == VAR_DECL
		       && !is_global_var (valbase))
		      || TREE_CODE (valbase) == PARM_DECL)
		    {
		      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
			{
			  greturn *return_stmt
			    = dyn_cast <greturn *> (use_stmt);
			  if (!return_stmt)
			    continue;

			  if (gimple_return_retval (return_stmt) != lhs)
			    continue;

			  if (warning_at (gimple_location (use_stmt),
					  OPT_Wreturn_local_addr,
					  "function may return address "
					  "of local variable"))
			    inform (DECL_SOURCE_LOCATION (valbase),
				    "declared here");

			  if (gimple_bb (use_stmt) == bb)
			    {
			      duplicate = isolate_path (bb, duplicate, e,
							use_stmt, lhs, true);

			      /* When we remove an incoming edge, we need to
				 reprocess the Ith element.  */
			      next_i = i;
			      cfg_altered = true;
			    }
			}
		    }
		}

	      if (!integer_zerop (op))
		continue;

	      /* We've got a NULL PHI argument.  Now see if the
		 PHI's result is dereferenced within BB.  */
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		{
		  /* We only care about uses in BB.  Catching cases in
		     other blocks would require more complex path
		     isolation code.  */
		  if (gimple_bb (use_stmt) != bb)
		    continue;

		  bool by_dereference
		    = infer_nonnull_range_by_dereference (use_stmt, lhs);

		  if (by_dereference
		      || infer_nonnull_range_by_attribute (use_stmt, lhs))
		    {
		      location_t loc = gimple_location (use_stmt)
			? gimple_location (use_stmt)
			: gimple_phi_arg_location (phi, i);

		      if (by_dereference)
			{
			  warning_at (loc, OPT_Wnull_dereference,
				      "potential null pointer dereference");
			  if (!flag_isolate_erroneous_paths_dereference)
			    continue;
			}
		      else
			{
			  if (!flag_isolate_erroneous_paths_attribute)
			    continue;
			}

		      duplicate = isolate_path (bb, duplicate, e,
						use_stmt, lhs, false);

		      /* When we remove an incoming edge, we need to
			 reprocess the Ith element.  */
		      next_i = i;
		      cfg_altered = true;
		    }
		}
	    }
	}
    }
}
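
/* A minimal C example of the implicit case (an editorial illustration,
   not from GCC's testsuite):

       int
       foo (int x, int *q)
       {
         int *p = x ? q : 0;
         return *p;
       }

   The PHI for P has a zero argument flowing in from the X == 0 arm,
   and the PHI result is dereferenced in the same block, so that path
   is duplicated and replaced with a trap when
   -fisolate-erroneous-paths-dereference is enabled.  */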

/* Look for statements which exhibit erroneous behavior.  For example
   a NULL pointer dereference.

   When found, optimize the block containing the erroneous behavior.  */
static void
find_explicit_erroneous_behavior (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator si;

      /* Out of an abundance of caution, do not isolate paths to a
	 block where the block has any abnormal outgoing edges.

	 We might be able to relax this in the future.  We have to detect
	 when we have to split the block with the NULL dereference and
	 the trap we insert.  We have to preserve abnormal edges out
	 of the isolated block which in turn means updating PHIs at
	 the targets of those abnormal outgoing edges.  */
      if (has_abnormal_or_eh_outgoing_edge_p (bb))
	continue;

      /* Now look at the statements in the block and see if any of
	 them explicitly dereference a NULL pointer.  This happens
	 because of jump threading and constant propagation.  */
      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);

	  /* By passing null_pointer_node, we can use the
	     infer_nonnull_range functions to detect explicit NULL
	     pointer dereferences and other uses where a non-NULL
	     value is required.  */

	  bool by_dereference
	    = infer_nonnull_range_by_dereference (stmt, null_pointer_node);
	  if (by_dereference
	      || infer_nonnull_range_by_attribute (stmt, null_pointer_node))
	    {
	      if (by_dereference)
		{
		  warning_at (gimple_location (stmt), OPT_Wnull_dereference,
			      "null pointer dereference");
		  if (!flag_isolate_erroneous_paths_dereference)
		    continue;
		}
	      else
		{
		  if (!flag_isolate_erroneous_paths_attribute)
		    continue;
		}

	      insert_trap (&si, null_pointer_node);
	      bb = gimple_bb (gsi_stmt (si));

	      /* Ignore any more operands on this statement and
		 continue the statement iterator (which should
		 terminate its loop immediately).  */
	      cfg_altered = true;
	      break;
	    }

	  /* Detect returning the address of a local variable.  This only
	     becomes undefined behavior if the result is used, so we do not
	     insert a trap and only return NULL instead.  */
	  if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
	    {
	      tree val = gimple_return_retval (return_stmt);
	      if (val && TREE_CODE (val) == ADDR_EXPR)
		{
		  tree valbase = get_base_address (TREE_OPERAND (val, 0));
		  if ((TREE_CODE (valbase) == VAR_DECL
		       && !is_global_var (valbase))
		      || TREE_CODE (valbase) == PARM_DECL)
		    {
		      /* Post-dominator info is only needed for this
			 particular warning, so compute it on demand.  */
		      calculate_dominance_info (CDI_POST_DOMINATORS);
		      const char *msg;
		      bool always_executed = dominated_by_p
			(CDI_POST_DOMINATORS,
			 single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)), bb);
		      if (always_executed)
			msg = N_("function returns address of local variable");
		      else
			msg = N_("function may return address of "
				 "local variable");

		      if (warning_at (gimple_location (stmt),
				      OPT_Wreturn_local_addr, msg))
			inform (DECL_SOURCE_LOCATION (valbase), "declared here");
		      tree zero = build_zero_cst (TREE_TYPE (val));
		      gimple_return_set_retval (return_stmt, zero);
		      update_stmt (stmt);
		    }
		}
	    }
	}
    }
}
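
/* A minimal C example of the explicit case (an editorial illustration,
   not from GCC's testsuite).  After constant propagation,

       int *p = 0;
       *p = 1;

   leaves a store through a literal NULL that the loop above rewrites
   into a trap, while

       int *
       bar (void)
       {
         int local;
         return &local;
       }

   is diagnosed with -Wreturn-local-addr and rewritten to return NULL.  */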

/* Search the function for statements which, if executed, would cause
   the program to fault, such as a dereference of a NULL pointer.

   According to the ISO standards, no conforming program can be valid
   if such a statement were to execute.

   We detect explicit NULL pointer dereferences as well as those implied
   by a PHI argument having a NULL value which unconditionally flows into
   a dereference in the same block as the PHI.

   In the former case we replace the offending statement with an
   unconditional trap and eliminate the outgoing edges from the statement's
   basic block.  This may expose secondary optimization opportunities.

   In the latter case, we isolate the path(s) with the NULL PHI
   feeding the dereference.  We can then replace the offending statement
   and eliminate the outgoing edges in the duplicate.  Again, this may
   expose secondary optimization opportunities.

   A warning for both cases may be advisable as well.

   Other statically detectable violations of the ISO standard could be
   handled in a similar way, such as out-of-bounds array indexing.  */

static unsigned int
gimple_ssa_isolate_erroneous_paths (void)
{
  initialize_original_copy_tables ();

  /* Search all the blocks for edges which, if traversed, will
     result in undefined behavior.  */
  cfg_altered = false;

  /* First handle cases where traversal of a particular edge
     triggers undefined behavior.  These cases require creating
     duplicate blocks and thus new SSA_NAMEs.

     We want that process complete prior to the phase where we start
     removing edges from the CFG.  Edge removal may ultimately result in
     removal of PHI nodes and thus releasing SSA_NAMEs back to the
     name manager.

     If the two processes ran in parallel we could release an SSA_NAME
     back to the manager but still have dangling references to the
     released SSA_NAME in unreachable blocks.  Running the two phases
     sequentially ensures that any released names do not have dangling
     references in the IL.  */
  find_implicit_erroneous_behavior ();
  find_explicit_erroneous_behavior ();

  free_original_copy_tables ();

  /* We scramble the CFG and loop structures a bit, so clean up
     appropriately.  We really should incrementally update the
     loop structures; in theory it shouldn't be that hard.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  if (cfg_altered)
    {
      free_dominance_info (CDI_DOMINATORS);
      loops_state_set (LOOPS_NEED_FIXUP);
      return TODO_cleanup_cfg | TODO_update_ssa;
    }
  return 0;
}

namespace {
const pass_data pass_data_isolate_erroneous_paths =
{
  GIMPLE_PASS, /* type */
  "isolate-paths", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ISOLATE_ERRONEOUS_PATHS, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_isolate_erroneous_paths : public gimple_opt_pass
{
public:
  pass_isolate_erroneous_paths (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_isolate_erroneous_paths, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_isolate_erroneous_paths (m_ctxt); }
  virtual bool gate (function *)
    {
      /* Run the pass when isolation of erroneous paths is requested
	 or when we want to warn about NULL pointer dereferences.  */
      return (flag_isolate_erroneous_paths_dereference != 0
	      || flag_isolate_erroneous_paths_attribute != 0
	      || warn_null_dereference);
    }

  virtual unsigned int execute (function *)
    {
      return gimple_ssa_isolate_erroneous_paths ();
    }

}; // class pass_isolate_erroneous_paths
} // anon namespace

gimple_opt_pass *
make_pass_isolate_erroneous_paths (gcc::context *ctxt)
{
  return new pass_isolate_erroneous_paths (ctxt);
}