/* Pointer Bounds Checker instrumentation pass.
2    Copyright (C) 2014-2016 Free Software Foundation, Inc.
3    Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "diagnostic.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "varasm.h"
37 #include "tree-iterator.h"
38 #include "tree-cfg.h"
39 #include "langhooks.h"
40 #include "tree-ssa-address.h"
41 #include "tree-ssa-loop-niter.h"
42 #include "gimple-pretty-print.h"
43 #include "gimple-iterator.h"
44 #include "gimplify.h"
45 #include "gimplify-me.h"
46 #include "print-tree.h"
47 #include "calls.h"
48 #include "expr.h"
49 #include "tree-ssa-propagate.h"
50 #include "tree-chkp.h"
51 #include "gimple-walk.h"
52 #include "tree-dfa.h"
53 #include "ipa-chkp.h"
54 #include "params.h"
55 
56 /*  Pointer Bounds Checker instruments code with memory checks to find
57     out-of-bounds memory accesses.  Checks are performed by computing
58     bounds for each pointer and then comparing address of accessed
59     memory before pointer dereferencing.
60 
61     1. Function clones.
62 
63     See ipa-chkp.c.
64 
65     2. Instrumentation.
66 
67     There are few things to instrument:
68 
    a) Memory accesses - add checker calls to check address of accessed memory
    against bounds of dereferenced pointer.  Obviously safe memory
    accesses like static variable accesses do not have to be instrumented
    with checks.
73 
74     Example:
75 
76       val_2 = *p_1;
77 
78       with 4 bytes access is transformed into:
79 
80       __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
81       D.1_4 = p_1 + 3;
82       __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
83       val_2 = *p_1;
84 
85       where __bound_tmp.1_3 are bounds computed for pointer p_1,
86       __builtin___chkp_bndcl is a lower bound check and
87       __builtin___chkp_bndcu is an upper bound check.
88 
89     b) Pointer stores.
90 
91     When pointer is stored in memory we need to store its bounds.  To
92     achieve compatibility of instrumented code with regular codes
93     we have to keep data layout and store bounds in special bound tables
94     via special checker call.  Implementation of bounds table may vary for
95     different platforms.  It has to associate pointer value and its
96     location (it is required because we may have two equal pointers
97     with different bounds stored in different places) with bounds.
98     Another checker builtin allows to get bounds for specified pointer
99     loaded from specified location.
100 
101     Example:
102 
103       buf1[i_1] = &buf2;
104 
105       is transformed into:
106 
107       buf1[i_1] = &buf2;
108       D.1_2 = &buf1[i_1];
109       __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
110 
111       where __bound_tmp.1_2 are bounds of &buf2.
112 
113     c) Static initialization.
114 
115     The special case of pointer store is static pointer initialization.
116     Bounds initialization is performed in a few steps:
117       - register all static initializations in front-end using
118       chkp_register_var_initializer
119       - when file compilation finishes we create functions with special
120       attribute 'chkp ctor' and put explicit initialization code
121       (assignments) for all statically initialized pointers.
122       - when checker constructor is compiled checker pass adds required
123       bounds initialization for all statically initialized pointers
124       - since we do not actually need excess pointers initialization
125       in checker constructor we remove such assignments from them
126 
127     d) Calls.
128 
129     For each call in the code we add additional arguments to pass
130     bounds for pointer arguments.  We determine type of call arguments
131     using arguments list from function declaration; if function
132     declaration is not available we use function type; otherwise
133     (e.g. for unnamed arguments) we use type of passed value. Function
134     declaration/type is replaced with the instrumented one.
135 
136     Example:
137 
138       val_1 = foo (&buf1, &buf2, &buf1, 0);
139 
140       is translated into:
141 
142       val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
143                         &buf1, __bound_tmp.1_2, 0);
144 
145     e) Returns.
146 
147     If function returns a pointer value we have to return bounds also.
148     A new operand was added for return statement to hold returned bounds.
149 
150     Example:
151 
152       return &_buf1;
153 
154       is transformed into
155 
156       return &_buf1, __bound_tmp.1_1;
157 
158     3. Bounds computation.
159 
160     Compiler is fully responsible for computing bounds to be used for each
161     memory access.  The first step for bounds computation is to find the
162     origin of pointer dereferenced for memory access.  Basing on pointer
163     origin we define a way to compute its bounds.  There are just few
164     possible cases:
165 
166     a) Pointer is returned by call.
167 
168     In this case we use corresponding checker builtin method to obtain returned
169     bounds.
170 
171     Example:
172 
173       buf_1 = malloc (size_2);
174       foo (buf_1);
175 
176       is translated into:
177 
178       buf_1 = malloc (size_2);
179       __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
180       foo (buf_1, __bound_tmp.1_3);
181 
182     b) Pointer is an address of an object.
183 
184     In this case compiler tries to compute objects size and create corresponding
185     bounds.  If object has incomplete type then special checker builtin is used to
186     obtain its size at runtime.
187 
188     Example:
189 
190       foo ()
191       {
192         <unnamed type> __bound_tmp.3;
193 	static int buf[100];
194 
195 	<bb 3>:
196 	__bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
197 
198 	<bb 2>:
199 	return &buf, __bound_tmp.3_2;
200       }
201 
202     Example:
203 
204       Address of an object 'extern int buf[]' with incomplete type is
205       returned.
206 
207       foo ()
208       {
209         <unnamed type> __bound_tmp.4;
210 	long unsigned int __size_tmp.3;
211 
212 	<bb 3>:
213 	__size_tmp.3_4 = __builtin_ia32_sizeof (buf);
214 	__bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
215 
216 	<bb 2>:
217 	return &buf, __bound_tmp.4_3;
218       }
219 
220     c) Pointer is the result of object narrowing.
221 
222     It happens when we use pointer to an object to compute pointer to a part
223     of an object.  E.g. we take pointer to a field of a structure. In this
224     case we perform bounds intersection using bounds of original object and
225     bounds of object's part (which are computed basing on its type).
226 
227     There may be some debatable questions about when narrowing should occur
228     and when it should not.  To avoid false bound violations in correct
229     programs we do not perform narrowing when address of an array element is
230     obtained (it has address of the whole array) and when address of the first
231     structure field is obtained (because it is guaranteed to be equal to
232     address of the whole structure and it is legal to cast it back to structure).
233 
234     Default narrowing behavior may be changed using compiler flags.
235 
236     Example:
237 
238       In this example address of the second structure field is returned.
239 
240       foo (struct A * p, __bounds_type __bounds_of_p)
241       {
242         <unnamed type> __bound_tmp.3;
243 	int * _2;
244 	int * _5;
245 
246 	<bb 2>:
247 	_5 = &p_1(D)->second_field;
248 	__bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
249 	__bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
250 	                                              __bounds_of_p_3(D));
251 	_2 = &p_1(D)->second_field;
252 	return _2, __bound_tmp.3_8;
253       }
254 
255     Example:
256 
257       In this example address of the first field of array element is returned.
258 
259       foo (struct A * p, __bounds_type __bounds_of_p, int i)
260       {
261 	long unsigned int _3;
262 	long unsigned int _4;
263 	struct A * _6;
264 	int * _7;
265 
266 	<bb 2>:
267 	_3 = (long unsigned int) i_1(D);
268 	_4 = _3 * 8;
269 	_6 = p_5(D) + _4;
270 	_7 = &_6->first_field;
271 	return _7, __bounds_of_p_2(D);
272       }
273 
274 
275     d) Pointer is the result of pointer arithmetic or type cast.
276 
277     In this case bounds of the base pointer are used.  In case of binary
278     operation producing a pointer we are analyzing data flow further
279     looking for operand's bounds.  One operand is considered as a base
280     if it has some valid bounds.  If we fall into a case when none of
281     operands (or both of them) has valid bounds, a default bounds value
282     is used.
283 
284     Trying to find out bounds for binary operations we may fall into
285     cyclic dependencies for pointers.  To avoid infinite recursion all
286     walked phi nodes instantly obtain corresponding bounds but created
287     bounds are marked as incomplete.  It helps us to stop DF walk during
288     bounds search.
289 
290     When we reach pointer source, some args of incomplete bounds phi obtain
291     valid bounds and those values are propagated further through phi nodes.
292     If no valid bounds were found for phi node then we mark its result as
293     invalid bounds.  Process stops when all incomplete bounds become either
294     valid or invalid and we are able to choose a pointer base.
295 
296     e) Pointer is loaded from the memory.
297 
298     In this case we just need to load bounds from the bounds table.
299 
300     Example:
301 
302       foo ()
303       {
304         <unnamed type> __bound_tmp.3;
305 	static int * buf;
306 	int * _2;
307 
308 	<bb 2>:
309 	_2 = buf;
310 	__bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
311 	return _2, __bound_tmp.3_4;
312       }
313 
314 */
315 
316 typedef void (*assign_handler)(tree, tree, void *);
317 
318 static tree chkp_get_zero_bounds ();
319 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
320 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
321 				     gimple_stmt_iterator *iter);
322 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
323 						tree *elt, bool *safe,
324 						bool *bitfield,
325 						tree *bounds,
326 						gimple_stmt_iterator *iter,
327 						bool innermost_bounds);
328 
329 #define chkp_bndldx_fndecl \
330   (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
331 #define chkp_bndstx_fndecl \
332   (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
333 #define chkp_checkl_fndecl \
334   (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
335 #define chkp_checku_fndecl \
336   (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
337 #define chkp_bndmk_fndecl \
338   (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
339 #define chkp_ret_bnd_fndecl \
340   (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
341 #define chkp_intersect_fndecl \
342   (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
343 #define chkp_narrow_bounds_fndecl \
344   (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
345 #define chkp_sizeof_fndecl \
346   (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
347 #define chkp_extract_lower_fndecl \
348   (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
349 #define chkp_extract_upper_fndecl \
350   (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
351 
352 static GTY (()) tree chkp_uintptr_type;
353 
354 static GTY (()) tree chkp_zero_bounds_var;
355 static GTY (()) tree chkp_none_bounds_var;
356 
357 static GTY (()) basic_block entry_block;
358 static GTY (()) tree zero_bounds;
359 static GTY (()) tree none_bounds;
360 static GTY (()) tree incomplete_bounds;
361 static GTY (()) tree tmp_var;
362 static GTY (()) tree size_tmp_var;
363 static GTY (()) bitmap chkp_abnormal_copies;
364 
365 struct hash_set<tree> *chkp_invalid_bounds;
366 struct hash_set<tree> *chkp_completed_bounds_set;
367 struct hash_map<tree, tree> *chkp_reg_bounds;
368 struct hash_map<tree, tree> *chkp_bound_vars;
369 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
370 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
371 struct hash_map<tree, tree> *chkp_bounds_map;
372 struct hash_map<tree, tree> *chkp_static_var_bounds;
373 
374 static bool in_chkp_pass;
375 
376 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
377 #define CHKP_SIZE_TMP_NAME "__size_tmp"
378 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
379 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
380 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
381 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
382 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
383 
384 /* Static checker constructors may become very large and their
385    compilation with optimization may take too much time.
386    Therefore we put a limit to number of statements in one
387    constructor.  Tests with 100 000 statically initialized
388    pointers showed following compilation times on Sandy Bridge
389    server (used -O2):
390    limit    100 => ~18 sec.
391    limit    300 => ~22 sec.
392    limit   1000 => ~30 sec.
393    limit   3000 => ~49 sec.
394    limit   5000 => ~55 sec.
395    limit  10000 => ~76 sec.
396    limit 100000 => ~532 sec.  */
397 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
398 
399 struct chkp_ctor_stmt_list
400 {
401   tree stmts;
402   int avail;
403 };
404 
405 /* Return 1 if function FNDECL is instrumented by Pointer
406    Bounds Checker.  */
407 bool
408 chkp_function_instrumented_p (tree fndecl)
409 {
410   return fndecl
411     && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
412 }
413 
414 /* Mark function FNDECL as instrumented.  */
415 void
416 chkp_function_mark_instrumented (tree fndecl)
417 {
418   if (chkp_function_instrumented_p (fndecl))
419     return;
420 
421   DECL_ATTRIBUTES (fndecl)
422     = tree_cons (get_identifier ("chkp instrumented"), NULL,
423 		 DECL_ATTRIBUTES (fndecl));
424 }
425 
426 /* Return true when STMT is builtin call to instrumentation function
427    corresponding to CODE.  */
428 
429 bool
430 chkp_gimple_call_builtin_p (gimple *call,
431 			    enum built_in_function code)
432 {
433   tree fndecl;
434   if (is_gimple_call (call)
435       && (fndecl = targetm.builtin_chkp_function (code))
436       && gimple_call_fndecl (call) == fndecl)
437     return true;
438   return false;
439 }
440 
441 /* Emit code to build zero bounds and return RTL holding
442    the result.  */
443 rtx
444 chkp_expand_zero_bounds ()
445 {
446   tree zero_bnd;
447 
448   if (flag_chkp_use_static_const_bounds)
449     zero_bnd = chkp_get_zero_bounds_var ();
450   else
451     zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
452 					    integer_zero_node);
453   return expand_normal (zero_bnd);
454 }
455 
456 /* Emit code to store zero bounds for PTR located at MEM.  */
457 void
458 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
459 {
460   tree zero_bnd, bnd, addr, bndstx;
461 
462   if (flag_chkp_use_static_const_bounds)
463     zero_bnd = chkp_get_zero_bounds_var ();
464   else
465     zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
466 					    integer_zero_node);
467   bnd = make_tree (pointer_bounds_type_node,
468 		   assign_temp (pointer_bounds_type_node, 0, 1));
469   addr = build1 (ADDR_EXPR,
470 		 build_pointer_type (TREE_TYPE (mem)), mem);
471   bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
472 
473   expand_assignment (bnd, zero_bnd, false);
474   expand_normal (bndstx);
475 }
476 
477 /* Build retbnd call for returned value RETVAL.
478 
479    If BNDVAL is not NULL then result is stored
480    in it.  Otherwise a temporary is created to
481    hold returned value.
482 
483    GSI points to a position for a retbnd call
484    and is set to created stmt.
485 
486    Cgraph edge is created for a new call if
487    UPDATE_EDGE is 1.
488 
489    Obtained bounds are returned.  */
490 tree
491 chkp_insert_retbnd_call (tree bndval, tree retval,
492 			 gimple_stmt_iterator *gsi)
493 {
494   gimple *call;
495 
496   if (!bndval)
497     bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
498 
499   call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
500   gimple_call_set_lhs (call, bndval);
501   gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
502 
503   return bndval;
504 }
505 
506 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
507    arguments.  */
508 
509 gcall *
510 chkp_copy_call_skip_bounds (gcall *call)
511 {
512   bitmap bounds;
513   unsigned i;
514 
515   bitmap_obstack_initialize (NULL);
516   bounds = BITMAP_ALLOC (NULL);
517 
518   for (i = 0; i < gimple_call_num_args (call); i++)
519     if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
520       bitmap_set_bit (bounds, i);
521 
522   if (!bitmap_empty_p (bounds))
523     call = gimple_call_copy_skip_args (call, bounds);
524   gimple_call_set_with_bounds (call, false);
525 
526   BITMAP_FREE (bounds);
527   bitmap_obstack_release (NULL);
528 
529   return call;
530 }
531 
532 /* Redirect edge E to the correct node according to call_stmt.
533    Return 1 if bounds removal from call_stmt should be done
534    instead of redirection.  */
535 
536 bool
537 chkp_redirect_edge (cgraph_edge *e)
538 {
539   bool instrumented = false;
540   tree decl = e->callee->decl;
541 
542   if (e->callee->instrumentation_clone
543       || chkp_function_instrumented_p (decl))
544     instrumented = true;
545 
546   if (instrumented
547       && !gimple_call_with_bounds_p (e->call_stmt))
548     e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
549   else if (!instrumented
550 	   && gimple_call_with_bounds_p (e->call_stmt)
551 	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
552 	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
553 	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
554     {
555       if (e->callee->instrumented_version)
556 	e->redirect_callee (e->callee->instrumented_version);
557       else
558 	{
559 	  tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
560 	  /* Avoid bounds removal if all args will be removed.  */
561 	  if (!args || TREE_VALUE (args) != void_type_node)
562 	    return true;
563 	  else
564 	    gimple_call_set_with_bounds (e->call_stmt, false);
565 	}
566     }
567 
568   return false;
569 }
570 
571 /* Mark statement S to not be instrumented.  */
572 static void
573 chkp_mark_stmt (gimple *s)
574 {
575   gimple_set_plf (s, GF_PLF_1, true);
576 }
577 
578 /* Mark statement S to be instrumented.  */
579 static void
580 chkp_unmark_stmt (gimple *s)
581 {
582   gimple_set_plf (s, GF_PLF_1, false);
583 }
584 
585 /* Return 1 if statement S should not be instrumented.  */
586 static bool
587 chkp_marked_stmt_p (gimple *s)
588 {
589   return gimple_plf (s, GF_PLF_1);
590 }
591 
592 /* Get var to be used for bound temps.  */
593 static tree
594 chkp_get_tmp_var (void)
595 {
596   if (!tmp_var)
597     tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
598 
599   return tmp_var;
600 }
601 
602 /* Get SSA_NAME to be used as temp.  */
603 static tree
604 chkp_get_tmp_reg (gimple *stmt)
605 {
606   if (in_chkp_pass)
607     return make_ssa_name (chkp_get_tmp_var (), stmt);
608 
609   return make_temp_ssa_name (pointer_bounds_type_node, stmt,
610 			     CHKP_BOUND_TMP_NAME);
611 }
612 
613 /* Get var to be used for size temps.  */
614 static tree
615 chkp_get_size_tmp_var (void)
616 {
617   if (!size_tmp_var)
618     size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
619 
620   return size_tmp_var;
621 }
622 
623 /* Register bounds BND for address of OBJ.  */
624 static void
625 chkp_register_addr_bounds (tree obj, tree bnd)
626 {
627   if (bnd == incomplete_bounds)
628     return;
629 
630   chkp_reg_addr_bounds->put (obj, bnd);
631 
632   if (dump_file && (dump_flags & TDF_DETAILS))
633     {
634       fprintf (dump_file, "Regsitered bound ");
635       print_generic_expr (dump_file, bnd, 0);
636       fprintf (dump_file, " for address of ");
637       print_generic_expr (dump_file, obj, 0);
638       fprintf (dump_file, "\n");
639     }
640 }
641 
642 /* Return bounds registered for address of OBJ.  */
643 static tree
644 chkp_get_registered_addr_bounds (tree obj)
645 {
646   tree *slot = chkp_reg_addr_bounds->get (obj);
647   return slot ? *slot : NULL_TREE;
648 }
649 
650 /* Mark BOUNDS as completed.  */
651 static void
652 chkp_mark_completed_bounds (tree bounds)
653 {
654   chkp_completed_bounds_set->add (bounds);
655 
656   if (dump_file && (dump_flags & TDF_DETAILS))
657     {
658       fprintf (dump_file, "Marked bounds ");
659       print_generic_expr (dump_file, bounds, 0);
660       fprintf (dump_file, " as completed\n");
661     }
662 }
663 
664 /* Return 1 if BOUNDS were marked as completed and 0 otherwise.  */
665 static bool
666 chkp_completed_bounds (tree bounds)
667 {
668   return chkp_completed_bounds_set->contains (bounds);
669 }
670 
671 /* Clear comleted bound marks.  */
672 static void
673 chkp_erase_completed_bounds (void)
674 {
675   delete chkp_completed_bounds_set;
676   chkp_completed_bounds_set = new hash_set<tree>;
677 }
678 
679 /* Mark BOUNDS associated with PTR as incomplete.  */
680 static void
681 chkp_register_incomplete_bounds (tree bounds, tree ptr)
682 {
683   chkp_incomplete_bounds_map->put (bounds, ptr);
684 
685   if (dump_file && (dump_flags & TDF_DETAILS))
686     {
687       fprintf (dump_file, "Regsitered incomplete bounds ");
688       print_generic_expr (dump_file, bounds, 0);
689       fprintf (dump_file, " for ");
690       print_generic_expr (dump_file, ptr, 0);
691       fprintf (dump_file, "\n");
692     }
693 }
694 
695 /* Return 1 if BOUNDS are incomplete and 0 otherwise.  */
696 static bool
697 chkp_incomplete_bounds (tree bounds)
698 {
699   if (bounds == incomplete_bounds)
700     return true;
701 
702   if (chkp_completed_bounds (bounds))
703     return false;
704 
705   return chkp_incomplete_bounds_map->get (bounds) != NULL;
706 }
707 
708 /* Clear incomleted bound marks.  */
709 static void
710 chkp_erase_incomplete_bounds (void)
711 {
712   delete chkp_incomplete_bounds_map;
713   chkp_incomplete_bounds_map = new hash_map<tree, tree>;
714 }
715 
716 /* Build and return bndmk call which creates bounds for structure
717    pointed by PTR.  Structure should have complete type.  */
718 tree
719 chkp_make_bounds_for_struct_addr (tree ptr)
720 {
721   tree type = TREE_TYPE (ptr);
722   tree size;
723 
724   gcc_assert (POINTER_TYPE_P (type));
725 
726   size = TYPE_SIZE (TREE_TYPE (type));
727 
728   gcc_assert (size);
729 
730   return build_call_nary (pointer_bounds_type_node,
731 			  build_fold_addr_expr (chkp_bndmk_fndecl),
732 			  2, ptr, size);
733 }
734 
735 /* Traversal function for chkp_may_finish_incomplete_bounds.
736    Set RES to 0 if at least one argument of phi statement
737    defining bounds (passed in KEY arg) is unknown.
738    Traversal stops when first unknown phi argument is found.  */
739 bool
740 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
741 			      bool *res)
742 {
743   gimple *phi;
744   unsigned i;
745 
746   gcc_assert (TREE_CODE (bounds) == SSA_NAME);
747 
748   phi = SSA_NAME_DEF_STMT (bounds);
749 
750   gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
751 
752   for (i = 0; i < gimple_phi_num_args (phi); i++)
753     {
754       tree phi_arg = gimple_phi_arg_def (phi, i);
755       if (!phi_arg)
756 	{
757 	  *res = false;
758 	  /* Do not need to traverse further.  */
759 	  return false;
760 	}
761     }
762 
763   return true;
764 }
765 
766 /* Return 1 if all phi nodes created for bounds have their
767    arguments computed.  */
768 static bool
769 chkp_may_finish_incomplete_bounds (void)
770 {
771   bool res = true;
772 
773   chkp_incomplete_bounds_map
774     ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
775 
776   return res;
777 }
778 
779 /* Helper function for chkp_finish_incomplete_bounds.
780    Recompute args for bounds phi node.  */
781 bool
782 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
783 			   void *res ATTRIBUTE_UNUSED)
784 {
785   tree ptr = *slot;
786   gphi *bounds_phi;
787   gphi *ptr_phi;
788   unsigned i;
789 
790   gcc_assert (TREE_CODE (bounds) == SSA_NAME);
791   gcc_assert (TREE_CODE (ptr) == SSA_NAME);
792 
793   bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
794   ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
795 
796   for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
797     {
798       tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
799       tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
800 
801       add_phi_arg (bounds_phi, bound_arg,
802 		   gimple_phi_arg_edge (ptr_phi, i),
803 		   UNKNOWN_LOCATION);
804     }
805 
806   return true;
807 }
808 
809 /* Mark BOUNDS as invalid.  */
810 static void
811 chkp_mark_invalid_bounds (tree bounds)
812 {
813   chkp_invalid_bounds->add (bounds);
814 
815   if (dump_file && (dump_flags & TDF_DETAILS))
816     {
817       fprintf (dump_file, "Marked bounds ");
818       print_generic_expr (dump_file, bounds, 0);
819       fprintf (dump_file, " as invalid\n");
820     }
821 }
822 
823 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise.  */
824 static bool
825 chkp_valid_bounds (tree bounds)
826 {
827   if (bounds == zero_bounds || bounds == none_bounds)
828     return false;
829 
830   return !chkp_invalid_bounds->contains (bounds);
831 }
832 
833 /* Helper function for chkp_finish_incomplete_bounds.
834    Check all arguments of phi nodes trying to find
835    valid completed bounds.  If there is at least one
836    such arg then bounds produced by phi node are marked
837    as valid completed bounds and all phi args are
838    recomputed.  */
839 bool
840 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
841 {
842   gimple *phi;
843   unsigned i;
844 
845   gcc_assert (TREE_CODE (bounds) == SSA_NAME);
846 
847   if (chkp_completed_bounds (bounds))
848     return true;
849 
850   phi = SSA_NAME_DEF_STMT (bounds);
851 
852   gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
853 
854   for (i = 0; i < gimple_phi_num_args (phi); i++)
855     {
856       tree phi_arg = gimple_phi_arg_def (phi, i);
857 
858       gcc_assert (phi_arg);
859 
860       if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
861 	{
862 	  *res = true;
863 	  chkp_mark_completed_bounds (bounds);
864 	  chkp_recompute_phi_bounds (bounds, slot, NULL);
865 	  return true;
866 	}
867     }
868 
869   return true;
870 }
871 
872 /* Helper function for chkp_finish_incomplete_bounds.
873    Marks all incompleted bounds as invalid.  */
874 bool
875 chkp_mark_invalid_bounds_walker (tree const &bounds,
876 				 tree *slot ATTRIBUTE_UNUSED,
877 				 void *res ATTRIBUTE_UNUSED)
878 {
879   if (!chkp_completed_bounds (bounds))
880     {
881       chkp_mark_invalid_bounds (bounds);
882       chkp_mark_completed_bounds (bounds);
883     }
884   return true;
885 }
886 
887 /* When all bound phi nodes have all their args computed
888    we have enough info to find valid bounds.  We iterate
889    through all incompleted bounds searching for valid
890    bounds.  Found valid bounds are marked as completed
891    and all remaining incompleted bounds are recomputed.
892    Process continues until no new valid bounds may be
893    found.  All remained incompleted bounds are marked as
894    invalid (i.e. have no valid source of bounds).  */
895 static void
896 chkp_finish_incomplete_bounds (void)
897 {
898   bool found_valid = true;
899 
900   while (found_valid)
901     {
902       found_valid = false;
903 
904       chkp_incomplete_bounds_map->
905 	traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
906 
907       if (found_valid)
908 	chkp_incomplete_bounds_map->
909 	  traverse<void *, chkp_recompute_phi_bounds> (NULL);
910     }
911 
912   chkp_incomplete_bounds_map->
913     traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
914   chkp_incomplete_bounds_map->
915     traverse<void *, chkp_recompute_phi_bounds> (NULL);
916 
917   chkp_erase_completed_bounds ();
918   chkp_erase_incomplete_bounds ();
919 }
920 
921 /* Return 1 if type TYPE is a pointer type or a
922    structure having a pointer type as one of its fields.
923    Otherwise return 0.  */
924 bool
925 chkp_type_has_pointer (const_tree type)
926 {
927   bool res = false;
928 
929   if (BOUNDED_TYPE_P (type))
930     res = true;
931   else if (RECORD_OR_UNION_TYPE_P (type))
932     {
933       tree field;
934 
935       for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
936 	if (TREE_CODE (field) == FIELD_DECL)
937 	  res = res || chkp_type_has_pointer (TREE_TYPE (field));
938     }
939   else if (TREE_CODE (type) == ARRAY_TYPE)
940     res = chkp_type_has_pointer (TREE_TYPE (type));
941 
942   return res;
943 }
944 
945 unsigned
946 chkp_type_bounds_count (const_tree type)
947 {
948   unsigned res = 0;
949 
950   if (!type)
951     res = 0;
952   else if (BOUNDED_TYPE_P (type))
953     res = 1;
954   else if (RECORD_OR_UNION_TYPE_P (type))
955     {
956       bitmap have_bound;
957 
958       bitmap_obstack_initialize (NULL);
959       have_bound = BITMAP_ALLOC (NULL);
960       chkp_find_bound_slots (type, have_bound);
961       res = bitmap_count_bits (have_bound);
962       BITMAP_FREE (have_bound);
963       bitmap_obstack_release (NULL);
964     }
965 
966   return res;
967 }
968 
969 /* Get bounds associated with NODE via
970    chkp_set_bounds call.  */
971 tree
972 chkp_get_bounds (tree node)
973 {
974   tree *slot;
975 
976   if (!chkp_bounds_map)
977     return NULL_TREE;
978 
979   slot = chkp_bounds_map->get (node);
980   return slot ? *slot : NULL_TREE;
981 }
982 
983 /* Associate bounds VAL with NODE.  */
984 void
985 chkp_set_bounds (tree node, tree val)
986 {
987   if (!chkp_bounds_map)
988     chkp_bounds_map = new hash_map<tree, tree>;
989 
990   chkp_bounds_map->put (node, val);
991 }
992 
993 /* Check if statically initialized variable VAR require
994    static bounds initialization.  If VAR is added into
995    bounds initlization list then 1 is returned. Otherwise
996    return 0.  */
997 extern bool
998 chkp_register_var_initializer (tree var)
999 {
1000   if (!flag_check_pointer_bounds
1001       || DECL_INITIAL (var) == error_mark_node)
1002     return false;
1003 
1004   gcc_assert (TREE_CODE (var) == VAR_DECL);
1005   gcc_assert (DECL_INITIAL (var));
1006 
1007   if (TREE_STATIC (var)
1008       && chkp_type_has_pointer (TREE_TYPE (var)))
1009     {
1010       varpool_node::get_create (var)->need_bounds_init = 1;
1011       return true;
1012     }
1013 
1014   return false;
1015 }
1016 
1017 /* Helper function for chkp_finish_file.
1018 
1019    Add new modification statement (RHS is assigned to LHS)
1020    into list of static initializer statementes (passed in ARG).
1021    If statements list becomes too big, emit checker constructor
1022    and start the new one.  */
1023 static void
1024 chkp_add_modification_to_stmt_list (tree lhs,
1025 				    tree rhs,
1026 				    void *arg)
1027 {
1028   struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1029   tree modify;
1030 
1031   if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1032     rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1033 
1034   modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1035   append_to_statement_list (modify, &stmts->stmts);
1036 
1037   stmts->avail--;
1038 }
1039 
1040 /* Build and return ADDR_EXPR for specified object OBJ.  */
1041 static tree
1042 chkp_build_addr_expr (tree obj)
1043 {
1044   return TREE_CODE (obj) == TARGET_MEM_REF
1045     ? tree_mem_ref_addr (ptr_type_node, obj)
1046     : build_fold_addr_expr (obj);
1047 }
1048 
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  /* In every branch below SIZE is computed as the object size
     minus one, so that UB = LB + SIZE is the last valid byte.  */
  if (TREE_CODE (var) == STRING_CST)
    {
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      /* Build a call to the runtime size helper for VAR.  */
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* If the dynamic size turns out to be zero, substitute
	     0 - LB so the resulting upper bound covers the whole
	     remaining address space.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  ub = size_binop (PLUS_EXPR, lb, size);
  /* The target hook emits initialization statements and returns
     the number of statement slots consumed.  */
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  if (stmts->avail <= 0)
    {
      /* Current constructor is full: emit it and start collecting
	 statements for a fresh one.  */
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1106 
1107 /* Return entry block to be used for checker initilization code.
1108    Create new block if required.  */
1109 static basic_block
1110 chkp_get_entry_block (void)
1111 {
1112   if (!entry_block)
1113     entry_block
1114       = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1115 
1116   return entry_block;
1117 }
1118 
1119 /* Return a bounds var to be used for pointer var PTR_VAR.  */
1120 static tree
1121 chkp_get_bounds_var (tree ptr_var)
1122 {
1123   tree bnd_var;
1124   tree *slot;
1125 
1126   slot = chkp_bound_vars->get (ptr_var);
1127   if (slot)
1128     bnd_var = *slot;
1129   else
1130     {
1131       bnd_var = create_tmp_reg (pointer_bounds_type_node,
1132 				CHKP_BOUND_TMP_NAME);
1133       chkp_bound_vars->put (ptr_var, bnd_var);
1134     }
1135 
1136   return bnd_var;
1137 }
1138 
1139 /* If BND is an abnormal bounds copy, return a copied value.
1140    Otherwise return BND.  */
1141 static tree
1142 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1143 {
1144   if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1145     {
1146       gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1147       gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1148       bnd = gimple_assign_rhs1 (bnd_def);
1149     }
1150 
1151   return bnd;
1152 }
1153 
/* Register bounds BND for object PTR in global bounds table.
   A copy of bounds may be created for abnormal ssa names.
   Returns bounds to use for PTR.  */
static tree
chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
{
  bool abnormal_ptr;

  /* No registration table yet - nothing to record.  */
  if (!chkp_reg_bounds)
    return bnd;

  /* Do nothing if bounds are incomplete_bounds
     because it means bounds will be recomputed.  */
  if (bnd == incomplete_bounds)
    return bnd;

  /* PTR is "abnormal" when it is an SSA name appearing in an
     abnormal PHI but not defined by a PHI itself.  */
  abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
		  && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);

  /* A single bounds value may be reused multiple times for
     different pointer values.  It may cause coalescing issues
     for abnormal SSA names.  To avoid it we create a bounds
     copy in case it is computed for abnormal SSA name.

     We also cannot reuse such created copies for other pointers  */
  if (abnormal_ptr
      || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
    {
      tree bnd_var = NULL_TREE;

      /* Pick a bounds variable to base the copy on.  */
      if (abnormal_ptr)
	{
	  if (SSA_NAME_VAR (ptr))
	    bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
	}
      else
	bnd_var = chkp_get_tmp_var ();

      /* For abnormal copies we may just find original
	 bounds and use them.  */
      if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
	bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
      /* For undefined values we usually use none bounds
	 value but in case of abnormal edge it may cause
	 coalescing failures.  Use default definition of
	 bounds variable instead to avoid it.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	       && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
	{
	  bnd = get_or_create_ssa_default_def (cfun, bnd_var);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Using default def bounds ");
	      print_generic_expr (dump_file, bnd, 0);
	      fprintf (dump_file, " for abnormal default def SSA name ");
	      print_generic_expr (dump_file, ptr, 0);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	{
	  /* General case: materialize an explicit copy assignment
	     of the bounds and insert it after the point where both
	     the pointer and the bounds are available.  */
	  tree copy;
	  gimple *def = SSA_NAME_DEF_STMT (ptr);
	  gimple *assign;
	  gimple_stmt_iterator gsi;

	  if (bnd_var)
	    copy = make_ssa_name (bnd_var);
	  else
	    copy = make_temp_ssa_name (pointer_bounds_type_node,
				       NULL,
				       CHKP_BOUND_TMP_NAME);
	  bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
	  assign = gimple_build_assign (copy, bnd);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Creating a copy of bounds ");
	      print_generic_expr (dump_file, bnd, 0);
	      fprintf (dump_file, " for abnormal SSA name ");
	      print_generic_expr (dump_file, ptr, 0);
	      fprintf (dump_file, "\n");
	    }

	  if (gimple_code (def) == GIMPLE_NOP)
	    {
	      /* PTR has no defining statement; put the copy into
		 the entry block, before any control statement.  */
	      gsi = gsi_last_bb (chkp_get_entry_block ());
	      if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
		gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
	      else
		gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
	    }
	  else
	    {
	      gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
	      /* Sometimes (e.g. when we load a pointer from a
		 memory) bounds are produced later than a pointer.
		 We need to insert bounds copy appropriately.  */
	      if (gimple_code (bnd_def) != GIMPLE_NOP
		  && stmt_dominates_stmt_p (def, bnd_def))
		gsi = gsi_for_stmt (bnd_def);
	      else
		gsi = gsi_for_stmt (def);
	      gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
	    }

	  bnd = copy;
	}

      /* Remember the copy so it is never reused for another
	 pointer (see the comment above).  */
      if (abnormal_ptr)
	bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
    }

  chkp_reg_bounds->put (ptr, bnd);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Regsitered bound ");
      print_generic_expr (dump_file, bnd, 0);
      fprintf (dump_file, " for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, "\n");
    }

  return bnd;
}
1282 
1283 /* Get bounds registered for object PTR in global bounds table.  */
1284 static tree
1285 chkp_get_registered_bounds (tree ptr)
1286 {
1287   tree *slot;
1288 
1289   if (!chkp_reg_bounds)
1290     return NULL_TREE;
1291 
1292   slot = chkp_reg_bounds->get (ptr);
1293   return slot ? *slot : NULL_TREE;
1294 }
1295 
1296 /* Add bound retvals to return statement pointed by GSI.  */
1297 
1298 static void
1299 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1300 {
1301   greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1302   tree retval = gimple_return_retval (ret);
1303   tree ret_decl = DECL_RESULT (cfun->decl);
1304   tree bounds;
1305 
1306   if (!retval)
1307     return;
1308 
1309   if (BOUNDED_P (ret_decl))
1310     {
1311       bounds = chkp_find_bounds (retval, gsi);
1312       bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1313       gimple_return_set_retbnd (ret, bounds);
1314     }
1315 
1316   update_stmt (ret);
1317 }
1318 
1319 /* Force OP to be suitable for using as an argument for call.
1320    New statements (if any) go to SEQ.  */
1321 static tree
1322 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1323 {
1324   gimple_seq stmts;
1325   gimple_stmt_iterator si;
1326 
1327   op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1328 
1329   for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1330     chkp_mark_stmt (gsi_stmt (si));
1331 
1332   gimple_seq_add_seq (seq, stmts);
1333 
1334   return op;
1335 }
1336 
1337 /* Generate lower bound check for memory access by ADDR.
1338    Check is inserted before the position pointed by ITER.
1339    DIRFLAG indicates whether memory access is load or store.  */
1340 static void
1341 chkp_check_lower (tree addr, tree bounds,
1342 		  gimple_stmt_iterator iter,
1343 		  location_t location,
1344 		  tree dirflag)
1345 {
1346   gimple_seq seq;
1347   gimple *check;
1348   tree node;
1349 
1350   if (!chkp_function_instrumented_p (current_function_decl)
1351       && bounds == chkp_get_zero_bounds ())
1352     return;
1353 
1354   if (dirflag == integer_zero_node
1355       && !flag_chkp_check_read)
1356     return;
1357 
1358   if (dirflag == integer_one_node
1359       && !flag_chkp_check_write)
1360     return;
1361 
1362   seq = NULL;
1363 
1364   node = chkp_force_gimple_call_op (addr, &seq);
1365 
1366   check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1367   chkp_mark_stmt (check);
1368   gimple_call_set_with_bounds (check, true);
1369   gimple_set_location (check, location);
1370   gimple_seq_add_stmt (&seq, check);
1371 
1372   gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1373 
1374   if (dump_file && (dump_flags & TDF_DETAILS))
1375     {
1376       gimple *before = gsi_stmt (iter);
1377       fprintf (dump_file, "Generated lower bound check for statement ");
1378       print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1379       fprintf (dump_file, "  ");
1380       print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1381     }
1382 }
1383 
1384 /* Generate upper bound check for memory access by ADDR.
1385    Check is inserted before the position pointed by ITER.
1386    DIRFLAG indicates whether memory access is load or store.  */
1387 static void
1388 chkp_check_upper (tree addr, tree bounds,
1389 		  gimple_stmt_iterator iter,
1390 		  location_t location,
1391 		  tree dirflag)
1392 {
1393   gimple_seq seq;
1394   gimple *check;
1395   tree node;
1396 
1397   if (!chkp_function_instrumented_p (current_function_decl)
1398       && bounds == chkp_get_zero_bounds ())
1399     return;
1400 
1401   if (dirflag == integer_zero_node
1402       && !flag_chkp_check_read)
1403     return;
1404 
1405   if (dirflag == integer_one_node
1406       && !flag_chkp_check_write)
1407     return;
1408 
1409   seq = NULL;
1410 
1411   node = chkp_force_gimple_call_op (addr, &seq);
1412 
1413   check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1414   chkp_mark_stmt (check);
1415   gimple_call_set_with_bounds (check, true);
1416   gimple_set_location (check, location);
1417   gimple_seq_add_stmt (&seq, check);
1418 
1419   gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1420 
1421   if (dump_file && (dump_flags & TDF_DETAILS))
1422     {
1423       gimple *before = gsi_stmt (iter);
1424       fprintf (dump_file, "Generated upper bound check for statement ");
1425       print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1426       fprintf (dump_file, "  ");
1427       print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1428     }
1429 }
1430 
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  /* Lower bound is checked against the first accessed byte,
     upper bound against the last one.  */
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1444 
/* Replace call to _bnd_chk_* pointed by GSI with
   bndcu and bndcl calls.  DIRFLAG determines whether
   check is for read or write.  */

void
chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
				    tree dirflag)
{
  /* Remember the original call position so the builtin can be
     removed after the checks are emitted.  */
  gimple_stmt_iterator call_iter = *gsi;
  gimple *call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);

  /* Lower bound check is emitted for *_LBOUNDS and *_BOUNDS.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);

  /* Upper bound check of ADDR itself for *_UBOUNDS.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);

  /* For *_BOUNDS the upper check covers the whole accessed
     object: the checked address is ADDR + SIZE - 1.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    {
      tree size = gimple_call_arg (call, 1);
      addr = fold_build_pointer_plus (addr, size);
      addr = fold_build_pointer_plus_hwi (addr, -1);
      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
    }

  /* The user builtin is fully replaced by the generated checks.  */
  gsi_remove (&call_iter, true);
}
1476 
1477 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1478    corresponding bounds extract call.  */
1479 
1480 void
1481 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1482 {
1483   gimple *call = gsi_stmt (*gsi);
1484   tree fndecl = gimple_call_fndecl (call);
1485   tree addr = gimple_call_arg (call, 0);
1486   tree bounds = chkp_find_bounds (addr, gsi);
1487   gimple *extract;
1488 
1489   if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1490     fndecl = chkp_extract_lower_fndecl;
1491   else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1492     fndecl = chkp_extract_upper_fndecl;
1493   else
1494     gcc_unreachable ();
1495 
1496   extract = gimple_build_call (fndecl, 1, bounds);
1497   gimple_call_set_lhs (extract, gimple_call_lhs (call));
1498   chkp_mark_stmt (extract);
1499 
1500   gsi_replace (gsi, extract, false);
1501 }
1502 
1503 /* Return COMPONENT_REF accessing FIELD in OBJ.  */
1504 static tree
1505 chkp_build_component_ref (tree obj, tree field)
1506 {
1507   tree res;
1508 
1509   /* If object is TMR then we do not use component_ref but
1510      add offset instead.  We need it to be able to get addr
1511      of the reasult later.  */
1512   if (TREE_CODE (obj) == TARGET_MEM_REF)
1513     {
1514       tree offs = TMR_OFFSET (obj);
1515       offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1516 				      offs, DECL_FIELD_OFFSET (field));
1517 
1518       gcc_assert (offs);
1519 
1520       res = copy_node (obj);
1521       TREE_TYPE (res) = TREE_TYPE (field);
1522       TMR_OFFSET (res) = offs;
1523     }
1524   else
1525     res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1526 
1527   return res;
1528 }
1529 
1530 /* Return ARRAY_REF for array ARR and index IDX with
1531    specified element type ETYPE and element size ESIZE.  */
1532 static tree
1533 chkp_build_array_ref (tree arr, tree etype, tree esize,
1534 		      unsigned HOST_WIDE_INT idx)
1535 {
1536   tree index = build_int_cst (size_type_node, idx);
1537   tree res;
1538 
1539   /* If object is TMR then we do not use array_ref but
1540      add offset instead.  We need it to be able to get addr
1541      of the reasult later.  */
1542   if (TREE_CODE (arr) == TARGET_MEM_REF)
1543     {
1544       tree offs = TMR_OFFSET (arr);
1545 
1546       esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1547 				     esize, index);
1548       gcc_assert(esize);
1549 
1550       offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1551 				    offs, esize);
1552       gcc_assert (offs);
1553 
1554       res = copy_node (arr);
1555       TREE_TYPE (res) = etype;
1556       TMR_OFFSET (res) = offs;
1557     }
1558   else
1559     res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1560 
1561   return res;
1562 }
1563 
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the pointer into a temporary SSA name first
	     so its bounds can be looked up.  */
	  tree temp = make_temp_ssa_name (type, NULL, "");
	  gimple *assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field at its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    /* Field position is DECL_FIELD_OFFSET bytes plus
	       DECL_FIELD_BIT_OFFSET bits.  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Nothing to scan for flexible or zero-length arrays.  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      /* Recurse into every element at its bit offset.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
1635 
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* A pointer occupies exactly one bound slot.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field at its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    /* Field position is DECL_FIELD_OFFSET bytes plus
	       DECL_FIELD_BIT_OFFSET bits.  */
	    HOST_WIDE_INT field_offs = 0;
	    if (DECL_FIELD_BIT_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      /* The object type is an array of complete type, i.e., other
	 than a flexible array.  */
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays without a usable constant upper bound
	 (flexible or zero-length arrays).  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1685 
1686 /* Fill bitmap RES with information about bounds for
1687    type TYPE.  See chkp_find_bound_slots_1 for more
1688    details.  */
1689 void
1690 chkp_find_bound_slots (const_tree type, bitmap res)
1691 {
1692   bitmap_clear (res);
1693   chkp_find_bound_slots_1 (type, res, 0);
1694 }
1695 
1696 /* Return 1 if call to FNDECL should be instrumented
1697    and 0 otherwise.  */
1698 
1699 static bool
1700 chkp_instrument_normal_builtin (tree fndecl)
1701 {
1702   switch (DECL_FUNCTION_CODE (fndecl))
1703     {
1704     case BUILT_IN_STRLEN:
1705     case BUILT_IN_STRCPY:
1706     case BUILT_IN_STRNCPY:
1707     case BUILT_IN_STPCPY:
1708     case BUILT_IN_STPNCPY:
1709     case BUILT_IN_STRCAT:
1710     case BUILT_IN_STRNCAT:
1711     case BUILT_IN_MEMCPY:
1712     case BUILT_IN_MEMPCPY:
1713     case BUILT_IN_MEMSET:
1714     case BUILT_IN_MEMMOVE:
1715     case BUILT_IN_BZERO:
1716     case BUILT_IN_STRCMP:
1717     case BUILT_IN_STRNCMP:
1718     case BUILT_IN_BCMP:
1719     case BUILT_IN_MEMCMP:
1720     case BUILT_IN_MEMCPY_CHK:
1721     case BUILT_IN_MEMPCPY_CHK:
1722     case BUILT_IN_MEMMOVE_CHK:
1723     case BUILT_IN_MEMSET_CHK:
1724     case BUILT_IN_STRCPY_CHK:
1725     case BUILT_IN_STRNCPY_CHK:
1726     case BUILT_IN_STPCPY_CHK:
1727     case BUILT_IN_STPNCPY_CHK:
1728     case BUILT_IN_STRCAT_CHK:
1729     case BUILT_IN_STRNCAT_CHK:
1730     case BUILT_IN_MALLOC:
1731     case BUILT_IN_CALLOC:
1732     case BUILT_IN_REALLOC:
1733       return 1;
1734 
1735     default:
1736       return 0;
1737     }
1738 }
1739 
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl
      && DECL_ARGUMENTS (fndecl)
      && gimple_call_fntype (call) == TREE_TYPE (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  Note the unbraced cascade:
	 each 'else' below binds to the nearest 'if', which is
	 the intended pairing here.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      /* A pointer argument (or one passed by reference) gets a
	 single bounds argument appended right after it.  */
      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* An aggregate with pointers inside gets one bounds
	     argument per contained pointer slot.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

           free (all_bounds);
	}
    }

  /* If no bounds arguments were added, keep the original call
     statement; otherwise build a new one with the full vector.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      /* In case of a type cast we should modify used function
	 type instead of using type of new fndecl.  */
      if (gimple_call_fntype (call) != TREE_TYPE (fndecl))
	{
	  tree type = gimple_call_fntype (call);
	  type = chkp_copy_function_type_adding_bounds (type);
	  gimple_call_set_fntype (new_call, type);
	}
      else
	gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old call at the new statement
	 before swapping it in.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
1968 
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exist then a new var is created with specified NAME.
   The var is made one-only (via make_decl_one_only) so that duplicates
   emitted by different units are merged by the linker.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  /* STATIC and PUBLIC must be set before the symtab lookup below
     so the decl's assembler name matches a previously created var.  */
  var  = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		     pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* Let the target encode [LB, UB] into the initializer.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
2017 
2018 /* Generate code to make bounds with specified lower bound LB and SIZE.
2019    if AFTER is 1 then code is inserted after position pointed by ITER
2020    otherwise code is inserted before position pointed by ITER.
2021    If ITER is NULL then code is added to entry block.  */
2022 static tree
2023 chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
2024 {
2025   gimple_seq seq;
2026   gimple_stmt_iterator gsi;
2027   gimple *stmt;
2028   tree bounds;
2029 
2030   if (iter)
2031     gsi = *iter;
2032   else
2033     gsi = gsi_start_bb (chkp_get_entry_block ());
2034 
2035   seq = NULL;
2036 
2037   lb = chkp_force_gimple_call_op (lb, &seq);
2038   size = chkp_force_gimple_call_op (size, &seq);
2039 
2040   stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
2041   chkp_mark_stmt (stmt);
2042 
2043   bounds = chkp_get_tmp_reg (stmt);
2044   gimple_call_set_lhs (stmt, bounds);
2045 
2046   gimple_seq_add_stmt (&seq, stmt);
2047 
2048   if (iter && after)
2049     gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
2050   else
2051     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
2052 
2053   if (dump_file && (dump_flags & TDF_DETAILS))
2054     {
2055       fprintf (dump_file, "Made bounds: ");
2056       print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2057       if (iter)
2058 	{
2059 	  fprintf (dump_file, "  inserted before statement: ");
2060 	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
2061 	}
2062       else
2063 	fprintf (dump_file, "  at function entry\n");
2064     }
2065 
2066   /* update_stmt (stmt); */
2067 
2068   return bounds;
2069 }
2070 
2071 /* Return var holding zero bounds.  */
2072 tree
2073 chkp_get_zero_bounds_var (void)
2074 {
2075   if (!chkp_zero_bounds_var)
2076     chkp_zero_bounds_var
2077       = chkp_make_static_const_bounds (0, -1,
2078 				       CHKP_ZERO_BOUNDS_VAR_NAME);
2079   return chkp_zero_bounds_var;
2080 }
2081 
2082 /* Return var holding none bounds.  */
2083 tree
2084 chkp_get_none_bounds_var (void)
2085 {
2086   if (!chkp_none_bounds_var)
2087     chkp_none_bounds_var
2088       = chkp_make_static_const_bounds (-1, 0,
2089 				       CHKP_NONE_BOUNDS_VAR_NAME);
2090   return chkp_none_bounds_var;
2091 }
2092 
2093 /* Return SSA_NAME used to represent zero bounds.  */
2094 static tree
2095 chkp_get_zero_bounds (void)
2096 {
2097   if (zero_bounds)
2098     return zero_bounds;
2099 
2100   if (dump_file && (dump_flags & TDF_DETAILS))
2101     fprintf (dump_file, "Creating zero bounds...");
2102 
2103   if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2104       || flag_chkp_use_static_const_bounds > 0)
2105     {
2106       gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2107       gimple *stmt;
2108 
2109       zero_bounds = chkp_get_tmp_reg (NULL);
2110       stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2111       gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2112     }
2113   else
2114     zero_bounds = chkp_make_bounds (integer_zero_node,
2115 				    integer_zero_node,
2116 				    NULL,
2117 				    false);
2118 
2119   return zero_bounds;
2120 }
2121 
2122 /* Return SSA_NAME used to represent none bounds.  */
2123 static tree
2124 chkp_get_none_bounds (void)
2125 {
2126   if (none_bounds)
2127     return none_bounds;
2128 
2129   if (dump_file && (dump_flags & TDF_DETAILS))
2130     fprintf (dump_file, "Creating none bounds...");
2131 
2132 
2133   if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2134       || flag_chkp_use_static_const_bounds > 0)
2135     {
2136       gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2137       gimple *stmt;
2138 
2139       none_bounds = chkp_get_tmp_reg (NULL);
2140       stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2141       gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2142     }
2143   else
2144     none_bounds = chkp_make_bounds (integer_minus_one_node,
2145 				    build_int_cst (size_type_node, 2),
2146 				    NULL,
2147 				    false);
2148 
2149   return none_bounds;
2150 }
2151 
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  Currently
   zero bounds are used for such results.  */
static tree
chkp_get_invalid_op_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2159 
/* Return bounds to be used for loads of non-pointer values.
   Currently zero bounds are used for such loads.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2166 
/* Return 1 if may use bndret call to get bounds for pointer
   returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  /* Among internal functions only IFN_VA_ARG returns bounds.  */
  if (gimple_call_internal_p (call))
    {
      if (gimple_call_internal_fn (call) == IFN_VA_ARG)
	return true;
      return false;
    }

  /* Both instrumented and not-instrumented variants of the narrow
     builtin produce bounds for their result.  */
  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* Already instrumented calls pass and return bounds.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Machine-dependent builtins never return bounds.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  /* Functions we may not instrument do not return bounds.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* A non-instrumented normal builtin returns bounds only when
	 it is always_inline and its clone actually has a body.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  /* Any other (user) function is assumed instrumented and thus
     returns bounds for its pointer result.  */
  return true;
}
2208 
/* Build bounds for the value returned by CALL and register them
   for the call's LHS.  Return the computed bounds.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple *stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;
  tree lhs = gimple_call_lhs (call);

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* For alloca bounds are built from the returned pointer
	 and the requested size.  */
      tree size = gimple_call_arg (call, 0);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lhs, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts original (non-bounds) args; skip over the
	     interleaved bounds args to find the real argument.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call)
	   && BOUNDED_P (lhs))
    {
      gcc_assert (TREE_CODE (lhs) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    /* The call does not return bounds; use zero bounds.  */
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (lhs, bounds);

  return bounds;
}
2316 
/* Return the retbnd call (call to chkp_ret_bnd_fndecl) consuming the
   value VAL produced by a call statement, or NULL if there is none.
   VAL must be an SSA name defined by a GIMPLE_CALL.  */
gcall *
chkp_retbnd_call_by_val (tree val)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL;

  gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);

  /* Scan immediate uses of VAL for a call to chkp_ret_bnd_fndecl.  */
  imm_use_iterator use_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
    if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
	&& gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
      return as_a <gcall *> (USE_STMT (use_p));

  return NULL;
}
2336 
2337 /* Check the next parameter for the given PARM is bounds
2338    and return it's default SSA_NAME (create if required).  */
2339 static tree
2340 chkp_get_next_bounds_parm (tree parm)
2341 {
2342   tree bounds = TREE_CHAIN (parm);
2343   gcc_assert (POINTER_BOUNDS_P (bounds));
2344   bounds = ssa_default_def (cfun, bounds);
2345   if (!bounds)
2346     {
2347       bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2348       set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2349     }
2350   return bounds;
2351 }
2352 
/* Return bounds to be used for input argument PARM, which must be
   the default SSA name of a PARM_DECL.  Looks up registered bounds
   first and falls back to computing them from the parameter kind.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Try bounds registered for the SSA name, then for the decl.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Pointer params receive bounds through the following
	     bounds parameter added by instrumentation.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	/* Non-pointer params get zero bounds.  */
	bounds = chkp_get_zero_bounds ();
    }

  /* Cache the result for the SSA name to avoid recomputation.  */
  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2416 
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments ADDR and PTR.  (The original comment wrongly said
   "bndstx"; this builds the bounds-load call.)  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call so it is not instrumented again.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
2430 
2431 /* Insert code to load bounds for PTR located by ADDR.
2432    Code is inserted after position pointed by GSI.
2433    Loaded bounds are returned.  */
2434 static tree
2435 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2436 {
2437   gimple_seq seq;
2438   gimple *stmt;
2439   tree bounds;
2440 
2441   seq = NULL;
2442 
2443   addr = chkp_force_gimple_call_op (addr, &seq);
2444   ptr = chkp_force_gimple_call_op (ptr, &seq);
2445 
2446   stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2447   chkp_mark_stmt (stmt);
2448   bounds = chkp_get_tmp_reg (stmt);
2449   gimple_call_set_lhs (stmt, bounds);
2450 
2451   gimple_seq_add_stmt (&seq, stmt);
2452 
2453   gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2454 
2455   if (dump_file && (dump_flags & TDF_DETAILS))
2456     {
2457       fprintf (dump_file, "Generated bndldx for pointer ");
2458       print_generic_expr (dump_file, ptr, 0);
2459       fprintf (dump_file, ": ");
2460       print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2461     }
2462 
2463   return bounds;
2464 }
2465 
2466 /* Build and return CALL_EXPR for bndstx builtin with specified
2467    arguments.  */
2468 tree
2469 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2470 {
2471   tree fn = build1 (ADDR_EXPR,
2472 		    build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2473 		    chkp_bndstx_fndecl);
2474   tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2475 			       fn, 3, ptr, bounds, addr);
2476   CALL_WITH_BOUNDS_P (call) = true;
2477   return call;
2478 }
2479 
2480 /* Insert code to store BOUNDS for PTR stored by ADDR.
2481    New statements are inserted after position pointed
2482    by GSI.  */
2483 void
2484 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2485 		   gimple_stmt_iterator *gsi)
2486 {
2487   gimple_seq seq;
2488   gimple *stmt;
2489 
2490   seq = NULL;
2491 
2492   addr = chkp_force_gimple_call_op (addr, &seq);
2493   ptr = chkp_force_gimple_call_op (ptr, &seq);
2494 
2495   stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2496   chkp_mark_stmt (stmt);
2497   gimple_call_set_with_bounds (stmt, true);
2498 
2499   gimple_seq_add_stmt (&seq, stmt);
2500 
2501   gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2502 
2503   if (dump_file && (dump_flags & TDF_DETAILS))
2504     {
2505       fprintf (dump_file, "Generated bndstx for pointer store ");
2506       print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2507       print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2508     }
2509 }
2510 
/* This function is called when call statement
   is inlined and therefore we can't use bndret
   for its LHS anymore.  Function fixes bndret
   call using new RHS value if possible.  */
void
chkp_fixup_inlined_call (tree lhs, tree rhs)
{
  tree addr, bounds;
  gcall *retbnd, *bndldx;

  /* Nothing to do unless LHS actually carries bounds.  */
  if (!BOUNDED_P (lhs))
    return;

  /* Search for retbnd call.  */
  retbnd = chkp_retbnd_call_by_val (lhs);
  if (!retbnd)
    return;

  /* Currently only handle cases when call is replaced
     with a memory access.  In this case bndret call
     may be replaced with bndldx call.  Otherwise we
     have to search for bounds which may cause wrong
     result due to various optimizations applied.  */
  switch (TREE_CODE (rhs))
    {
    case VAR_DECL:
      /* Register vars have no memory address to load bounds from.  */
      if (DECL_REGISTER (rhs))
	return;
      break;

    case MEM_REF:
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Handle only refs whose base is addressable memory.  */
      addr = get_base_address (rhs);
      if (!DECL_P (addr)
	  && TREE_CODE (addr) != MEM_REF)
	return;
      if (DECL_P (addr) && DECL_REGISTER (addr))
	return;
      break;

    default:
      return;
    }

  /* Create a new statements sequence with bndldx call.  */
  gimple_stmt_iterator gsi = gsi_for_stmt (retbnd);
  addr = build_fold_addr_expr (rhs);
  chkp_build_bndldx (addr, lhs, &gsi);
  /* chkp_build_bndldx inserts after GSI, so GSI now points
     to the last inserted statement — the bndldx call.  */
  bndldx = as_a <gcall *> (gsi_stmt (gsi));

  /* Remove bndret call.  */
  bounds = gimple_call_lhs (retbnd);
  gsi = gsi_for_stmt (retbnd);
  gsi_remove (&gsi, true);

  /* Link new bndldx call.  */
  gimple_call_set_lhs (bndldx, bounds);
  update_stmt (bndldx);
}
2573 
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.
   NODE may be NULL_TREE, in which case the bounds are not
   registered for it.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple *assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);
  /* BASE is set to the operand whose bounds we reuse, so that
     abnormal-PHI coalescing issues can be detected below.  */
  tree base = NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      base = rhs1;
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* Taking the param's address above requires it to be
	     addressable.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();

	base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple *stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime using the same condition.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Pick the bounds of whichever operand wins the min/max.  */
	    gimple *stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  /* We may reuse bounds of other pointer we copy/modify.  But it is not
     allowed for abnormal ssa names.  If we produced a pointer using
     abnormal ssa name, we better make a bounds copy to avoid coalescing
     issues.  */
  if (base
      && TREE_CODE (base) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
    {
      gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
      gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
      bounds = gimple_assign_lhs (stmt);
    }

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
2796 
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   For PHI definitions *ITER is set to the created bounds PHI so the
   caller can fill in its arguments later.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: bounds depend on what NODE's var is.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a known non-zero
	       constant size to build bounds for it.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* For abnormal SSA names use a bounds var tied to the original
	 var (or a fresh temp) to avoid coalescing problems.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    NULL,
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* Asm output pointers get zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2909 
2910 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE.  */
2911 tree
2912 chkp_build_make_bounds_call (tree lower_bound, tree size)
2913 {
2914   tree call = build1 (ADDR_EXPR,
2915 		      build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2916 		      chkp_bndmk_fndecl);
2917   return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2918 			  call, 2, lower_bound, size);
2919 }
2920 
2921 /* Create static bounds var of specfified OBJ which is
2922    is either VAR_DECL or string constant.  */
2923 static tree
2924 chkp_make_static_bounds (tree obj)
2925 {
2926   static int string_id = 1;
2927   static int var_id = 1;
2928   tree *slot;
2929   const char *var_name;
2930   char *bnd_var_name;
2931   tree bnd_var;
2932 
2933   /* First check if we already have required var.  */
2934   if (chkp_static_var_bounds)
2935     {
2936       /* For vars we use assembler name as a key in
2937 	 chkp_static_var_bounds map.  It allows to
2938 	 avoid duplicating bound vars for decls
2939 	 sharing assembler name.  */
2940       if (TREE_CODE (obj) == VAR_DECL)
2941 	{
2942 	  tree name = DECL_ASSEMBLER_NAME (obj);
2943 	  slot = chkp_static_var_bounds->get (name);
2944 	  if (slot)
2945 	    return *slot;
2946 	}
2947       else
2948 	{
2949 	  slot = chkp_static_var_bounds->get (obj);
2950 	  if (slot)
2951 	    return *slot;
2952 	}
2953     }
2954 
2955   /* Build decl for bounds var.  */
2956   if (TREE_CODE (obj) == VAR_DECL)
2957     {
2958       if (DECL_IGNORED_P (obj))
2959 	{
2960 	  bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2961 	  sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2962 	}
2963       else
2964 	{
2965 	  var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2966 
2967 	  /* For hidden symbols we want to skip first '*' char.  */
2968 	  if (*var_name == '*')
2969 	    var_name++;
2970 
2971 	  bnd_var_name = (char *) xmalloc (strlen (var_name)
2972 					   + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2973 	  strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2974 	  strcat (bnd_var_name, var_name);
2975 	}
2976 
2977       bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2978 			    get_identifier (bnd_var_name),
2979 			    pointer_bounds_type_node);
2980 
2981       /* Address of the obj will be used as lower bound.  */
2982       TREE_ADDRESSABLE (obj) = 1;
2983     }
2984   else
2985     {
2986       bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2987       sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2988 
2989       bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2990 			    get_identifier (bnd_var_name),
2991 			    pointer_bounds_type_node);
2992     }
2993 
2994   free (bnd_var_name);
2995 
2996   TREE_PUBLIC (bnd_var) = 0;
2997   TREE_USED (bnd_var) = 1;
2998   TREE_READONLY (bnd_var) = 0;
2999   TREE_STATIC (bnd_var) = 1;
3000   TREE_ADDRESSABLE (bnd_var) = 0;
3001   DECL_ARTIFICIAL (bnd_var) = 1;
3002   DECL_COMMON (bnd_var) = 1;
3003   DECL_COMDAT (bnd_var) = 1;
3004   DECL_READ_P (bnd_var) = 1;
3005   DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
3006   /* Force output similar to constant bounds.
3007      See chkp_make_static_const_bounds. */
3008   varpool_node::get_create (bnd_var)->force_output = 1;
3009   /* Mark symbol as requiring bounds initialization.  */
3010   varpool_node::get_create (bnd_var)->need_bounds_init = 1;
3011   varpool_node::finalize_decl (bnd_var);
3012 
3013   /* Add created var to the map to use it for other references
3014      to obj.  */
3015   if (!chkp_static_var_bounds)
3016     chkp_static_var_bounds = new hash_map<tree, tree>;
3017 
3018   if (TREE_CODE (obj) == VAR_DECL)
3019     {
3020       tree name = DECL_ASSEMBLER_NAME (obj);
3021       chkp_static_var_bounds->put (name, bnd_var);
3022     }
3023   else
3024     chkp_static_var_bounds->put (obj, bnd_var);
3025 
3026   return bnd_var;
3027 }
3028 
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple *stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* Emit a call to the sizeof builtin; its result lands in
     SIZE_RELOC.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  /* The address of VAR serves as the lower bound.  */
  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.
	 MAX_SIZE = 0 - LB covers everything from LB up to the end
	 of the address space.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      /* SIZE = SIZE_RELOC != 0 ? SIZE_RELOC : MAX_SIZE.  */
      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* All size computations are emitted in the entry block.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
3089 
3090 /* Return 1 if TYPE has fields with zero size or fields
3091    marked with chkp_variable_size attribute.  */
3092 bool
3093 chkp_variable_size_type (tree type)
3094 {
3095   bool res = false;
3096   tree field;
3097 
3098   if (RECORD_OR_UNION_TYPE_P (type))
3099     for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3100       {
3101 	if (TREE_CODE (field) == FIELD_DECL)
3102 	  res = res
3103 	    || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3104 	    || chkp_variable_size_type (TREE_TYPE (field));
3105       }
3106   else
3107     res = !TYPE_SIZE (type)
3108       || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3109       || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3110 
3111   return res;
3112 }
3113 
3114 /* Compute and return bounds for address of DECL which is
3115    one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
3116 static tree
3117 chkp_get_bounds_for_decl_addr (tree decl)
3118 {
3119   tree bounds;
3120 
3121   gcc_assert (TREE_CODE (decl) == VAR_DECL
3122 	      || TREE_CODE (decl) == PARM_DECL
3123 	      || TREE_CODE (decl) == RESULT_DECL);
3124 
3125   bounds = chkp_get_registered_addr_bounds (decl);
3126 
3127   if (bounds)
3128     return bounds;
3129 
3130   if (dump_file && (dump_flags & TDF_DETAILS))
3131     {
3132       fprintf (dump_file, "Building bounds for address of decl ");
3133       print_generic_expr (dump_file, decl, 0);
3134       fprintf (dump_file, "\n");
3135     }
3136 
3137   /* Use zero bounds if size is unknown and checks for
3138      unknown sizes are restricted.  */
3139   if ((!DECL_SIZE (decl)
3140        || (chkp_variable_size_type (TREE_TYPE (decl))
3141 	   && (TREE_STATIC (decl)
3142 	       || DECL_EXTERNAL (decl)
3143 	       || TREE_PUBLIC (decl))))
3144       && !flag_chkp_incomplete_type)
3145       return chkp_get_zero_bounds ();
3146 
3147   if (VOID_TYPE_P (TREE_TYPE (decl)))
3148     return chkp_get_zero_bounds ();
3149 
3150   if (flag_chkp_use_static_bounds
3151       && TREE_CODE (decl) == VAR_DECL
3152       && (TREE_STATIC (decl)
3153 	      || DECL_EXTERNAL (decl)
3154 	      || TREE_PUBLIC (decl))
3155       && !DECL_THREAD_LOCAL_P (decl))
3156     {
3157       tree bnd_var = chkp_make_static_bounds (decl);
3158       gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3159       gimple *stmt;
3160 
3161       bounds = chkp_get_tmp_reg (NULL);
3162       stmt = gimple_build_assign (bounds, bnd_var);
3163       gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3164     }
3165   else if (!DECL_SIZE (decl)
3166       || (chkp_variable_size_type (TREE_TYPE (decl))
3167 	  && (TREE_STATIC (decl)
3168 	      || DECL_EXTERNAL (decl)
3169 	      || TREE_PUBLIC (decl))))
3170     {
3171       gcc_assert (TREE_CODE (decl) == VAR_DECL);
3172       bounds = chkp_generate_extern_var_bounds (decl);
3173     }
3174   else
3175     {
3176       tree lb = chkp_build_addr_expr (decl);
3177       bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3178     }
3179 
3180   return bounds;
3181 }
3182 
3183 /* Compute and return bounds for constant string.  */
3184 static tree
3185 chkp_get_bounds_for_string_cst (tree cst)
3186 {
3187   tree bounds;
3188   tree lb;
3189   tree size;
3190 
3191   gcc_assert (TREE_CODE (cst) == STRING_CST);
3192 
3193   bounds = chkp_get_registered_bounds (cst);
3194 
3195   if (bounds)
3196     return bounds;
3197 
3198   if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3199       || flag_chkp_use_static_const_bounds > 0)
3200     {
3201       tree bnd_var = chkp_make_static_bounds (cst);
3202       gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3203       gimple *stmt;
3204 
3205       bounds = chkp_get_tmp_reg (NULL);
3206       stmt = gimple_build_assign (bounds, bnd_var);
3207       gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3208     }
3209   else
3210     {
3211       lb = chkp_build_addr_expr (cst);
3212       size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3213       bounds = chkp_make_bounds (lb, size, NULL, false);
3214     }
3215 
3216   bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3217 
3218   return bounds;
3219 }
3220 
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* A missing or zero-bounds operand acts as a neutral element:
     the other operand is returned unchanged and no code is
     emitted.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple *stmt;
      tree bounds;

      seq = NULL;

      /* Emit a call to the intersection builtin; its result is
	 the narrowed bounds.  */
      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3271 
3272 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3273    and 0 othersize.  */
3274 static bool
3275 chkp_may_narrow_to_field (tree field)
3276 {
3277   return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3278     && tree_to_uhwi (DECL_SIZE (field)) != 0
3279     && (!DECL_FIELD_OFFSET (field)
3280 	|| TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3281     && (!DECL_FIELD_BIT_OFFSET (field)
3282 	|| TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3283     && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3284     && !chkp_variable_size_type (TREE_TYPE (field));
3285 }
3286 
3287 /* Return 1 if bounds for FIELD should be narrowed to
3288    field's own size.  */
3289 static bool
3290 chkp_narrow_bounds_for_field (tree field)
3291 {
3292   HOST_WIDE_INT offs;
3293   HOST_WIDE_INT bit_offs;
3294 
3295   if (!chkp_may_narrow_to_field (field))
3296     return false;
3297 
3298   /* Accesse to compiler generated fields should not cause
3299      bounds narrowing.  */
3300   if (DECL_ARTIFICIAL (field))
3301     return false;
3302 
3303   offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3304   bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3305 
3306   return (flag_chkp_narrow_bounds
3307 	  && (flag_chkp_first_field_has_own_bounds
3308 	      || offs
3309 	      || bit_offs));
3310 }
3311 
3312 /* Perform narrowing for BOUNDS using bounds computed for field
3313    access COMPONENT.  ITER meaning is the same as for
3314    chkp_intersect_bounds.  */
3315 static tree
3316 chkp_narrow_bounds_to_field (tree bounds, tree component,
3317 			    gimple_stmt_iterator *iter)
3318 {
3319   tree field = TREE_OPERAND (component, 1);
3320   tree size = DECL_SIZE_UNIT (field);
3321   tree field_ptr = chkp_build_addr_expr (component);
3322   tree field_bounds;
3323 
3324   field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3325 
3326   return chkp_intersect_bounds (field_bounds, bounds, iter);
3327 }
3328 
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to true if a bitfield is
   accessed and to false otherwise.  If it is true then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to true if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is true then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all nodes into the NODES vector
     in reversed order (NODES[0] is the base, NODES[len-1] is
     NODE itself).  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      /* No indirection: the base must be a decl, string constant
	 or SSA name and its address is the outermost pointer.  */
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      /* Rule 1: narrow to the component enclosing the
		 leftmost array access (if any).  */
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      /* Narrow to each array-typed field as we pass it;
		 any previously chosen component is superseded.  */
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  /* Fall back to bounds of the outermost object if no narrowing
     produced bounds but the caller requires them.  */
  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3470 
/* Compute and return bounds for address of OBJ.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Parse the access chain; BOUNDS is narrowed to the
	   innermost accessed component.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Functions and labels get zero bounds, i.e. no checks.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* The address of *P has the same bounds as P itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Use bounds of the underlying complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  /* Cache the result for subsequent queries for the same OBJ.  */
  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3538 
3539 /* Compute bounds for pointer PTR loaded from PTR_SRC.  Generate statements
3540    to compute bounds if required.  Computed bounds should be available at
3541    position pointed by ITER.
3542 
3543    If PTR_SRC is NULL_TREE then pointer definition is identified.
3544 
3545    If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3546    PTR.  If PTR is a any memory reference then ITER points to a statement
3547    after which bndldx will be inserterd.  In both cases ITER will be updated
3548    to point to the inserted bndldx statement.  */
3549 
3550 static tree
3551 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3552 {
3553   tree addr = NULL_TREE;
3554   tree bounds = NULL_TREE;
3555 
3556   if (!ptr_src)
3557     ptr_src = ptr;
3558 
3559   bounds = chkp_get_registered_bounds (ptr_src);
3560 
3561   if (bounds)
3562     return bounds;
3563 
3564   switch (TREE_CODE (ptr_src))
3565     {
3566     case MEM_REF:
3567     case VAR_DECL:
3568       if (BOUNDED_P (ptr_src))
3569 	if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3570 	  bounds = chkp_get_zero_bounds ();
3571 	else
3572 	  {
3573 	    addr = chkp_build_addr_expr (ptr_src);
3574 	    bounds = chkp_build_bndldx (addr, ptr, iter);
3575 	  }
3576       else
3577 	bounds = chkp_get_nonpointer_load_bounds ();
3578       break;
3579 
3580     case ARRAY_REF:
3581     case COMPONENT_REF:
3582       addr = get_base_address (ptr_src);
3583       if (DECL_P (addr)
3584 	  || TREE_CODE (addr) == MEM_REF
3585 	  || TREE_CODE (addr) == TARGET_MEM_REF)
3586 	{
3587 	  if (BOUNDED_P (ptr_src))
3588 	    if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3589 	      bounds = chkp_get_zero_bounds ();
3590 	    else
3591 	      {
3592 		addr = chkp_build_addr_expr (ptr_src);
3593 		bounds = chkp_build_bndldx (addr, ptr, iter);
3594 	      }
3595 	  else
3596 	    bounds = chkp_get_nonpointer_load_bounds ();
3597 	}
3598       else
3599 	{
3600 	  gcc_assert (TREE_CODE (addr) == SSA_NAME);
3601 	  bounds = chkp_find_bounds (addr, iter);
3602 	}
3603       break;
3604 
3605     case PARM_DECL:
3606       /* Handled above but failed.  */
3607       bounds = chkp_get_invalid_op_bounds ();
3608       break;
3609 
3610     case TARGET_MEM_REF:
3611       addr = chkp_build_addr_expr (ptr_src);
3612       bounds = chkp_build_bndldx (addr, ptr, iter);
3613       break;
3614 
3615     case SSA_NAME:
3616       bounds = chkp_get_registered_bounds (ptr_src);
3617       if (!bounds)
3618 	{
3619 	  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3620 	  gphi_iterator phi_iter;
3621 
3622 	  bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3623 
3624 	  gcc_assert (bounds);
3625 
3626 	  if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3627 	    {
3628 	      unsigned i;
3629 
3630 	      for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3631 		{
3632 		  tree arg = gimple_phi_arg_def (def_phi, i);
3633 		  tree arg_bnd;
3634 		  gphi *phi_bnd;
3635 
3636 		  arg_bnd = chkp_find_bounds (arg, NULL);
3637 
3638 		  /* chkp_get_bounds_by_definition created new phi
3639 		     statement and phi_iter points to it.
3640 
3641 		     Previous call to chkp_find_bounds could create
3642 		     new basic block and therefore change phi statement
3643 		     phi_iter points to.  */
3644 		  phi_bnd = phi_iter.phi ();
3645 
3646 		  add_phi_arg (phi_bnd, arg_bnd,
3647 			       gimple_phi_arg_edge (def_phi, i),
3648 			       UNKNOWN_LOCATION);
3649 		}
3650 
3651 	      /* If all bound phi nodes have their arg computed
3652 		 then we may finish its computation.  See
3653 		 chkp_finish_incomplete_bounds for more details.  */
3654 	      if (chkp_may_finish_incomplete_bounds ())
3655 		chkp_finish_incomplete_bounds ();
3656 	    }
3657 
3658 	  gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3659 		      || chkp_incomplete_bounds (bounds));
3660 	}
3661       break;
3662 
3663     case ADDR_EXPR:
3664     case WITH_SIZE_EXPR:
3665       bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3666       break;
3667 
3668     case INTEGER_CST:
3669     case COMPLEX_CST:
3670     case VECTOR_CST:
3671       if (integer_zerop (ptr_src))
3672 	bounds = chkp_get_none_bounds ();
3673       else
3674 	bounds = chkp_get_invalid_op_bounds ();
3675       break;
3676 
3677     default:
3678       if (dump_file && (dump_flags & TDF_DETAILS))
3679 	{
3680 	  fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3681 		   get_tree_code_name (TREE_CODE (ptr_src)));
3682 	  print_node (dump_file, "", ptr_src, 0);
3683 	}
3684       internal_error ("chkp_find_bounds: Unexpected tree code %s",
3685 		      get_tree_code_name (TREE_CODE (ptr_src)));
3686     }
3687 
3688   if (!bounds)
3689     {
3690       if (dump_file && (dump_flags & TDF_DETAILS))
3691 	{
3692 	  fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3693 	  print_node (dump_file, "", ptr_src, 0);
3694 	}
3695       internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3696     }
3697 
3698   return bounds;
3699 }
3700 
/* Normal case for bounds search without forced narrowing:
   a thin wrapper around chkp_find_bounds_1 with no separate
   load source.  */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}
3707 
/* Search bounds for pointer PTR loaded from PTR_SRC
   by statement *ITER points to.  Thin wrapper around
   chkp_find_bounds_1.  */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}
3715 
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* LHS is itself a pointer: handle this assignment directly.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  /* Recurse into each initialized field containing
	     pointers.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (field && chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Field-by-field copy: recurse into all pointer-containing
	   fields of both sides.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      /* A RANGE_EXPR purpose initializes all elements in
		 [lo_index, hi_index] with the same value.  */
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An INTEGER_CST purpose names an explicit index;
		     without a purpose, elements are sequential.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3814 
3815 /* Add code to copy bounds for assignment of RHS to LHS.
3816    ARG is an iterator pointing ne code position.  */
3817 static void
3818 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3819 {
3820   gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3821   tree bounds = chkp_find_bounds (rhs, iter);
3822   tree addr = chkp_build_addr_expr(lhs);
3823 
3824   chkp_build_bndstx (addr, rhs, bounds, iter);
3825 }
3826 
/* Emit static bound initializers and size vars.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Once the per-constructor statement budget is exhausted,
	   flush the accumulated statements into a constructor and
	   start a new list.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Flush any remaining statements.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* A bounds var's initial value is the address of the object
	   it describes (see chkp_make_static_bounds).  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* Release maps which are no longer needed.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3894 
3895 /* An instrumentation function which is called for each statement
3896    having memory access we want to instrument.  It inserts check
3897    code and bounds copy code.
3898 
3899    ITER points to statement to instrument.
3900 
3901    NODE holds memory access in statement to check.
3902 
3903    LOC holds the location information for statement.
3904 
3905    DIRFLAGS determines whether access is read or write.
3906 
3907    ACCESS_OFFS should be added to address used in NODE
3908    before check.
3909 
3910    ACCESS_SIZE holds size of checked access.
3911 
3912    SAFE indicates if NODE access is safe and should not be
3913    checked.  */
3914 static void
3915 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3916 		   location_t loc, tree dirflag,
3917 		   tree access_offs, tree access_size,
3918 		   bool safe)
3919 {
3920   tree node_type = TREE_TYPE (node);
3921   tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3922   tree addr_first = NULL_TREE; /* address of the first accessed byte */
3923   tree addr_last = NULL_TREE; /* address of the last accessed byte */
3924   tree ptr = NULL_TREE; /* a pointer used for dereference */
3925   tree bounds = NULL_TREE;
3926 
3927   /* We do not need instrumentation for clobbers.  */
3928   if (dirflag == integer_one_node
3929       && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3930       && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3931     return;
3932 
3933   switch (TREE_CODE (node))
3934     {
3935     case ARRAY_REF:
3936     case COMPONENT_REF:
3937       {
3938 	bool bitfield;
3939 	tree elt;
3940 
3941 	if (safe)
3942 	  {
3943 	    /* We are not going to generate any checks, so do not
3944 	       generate bounds as well.  */
3945 	    addr_first = chkp_build_addr_expr (node);
3946 	    break;
3947 	  }
3948 
3949 	chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3950 					    &bitfield, &bounds, iter, false);
3951 
3952 	/* Break if there is no dereference and operation is safe.  */
3953 
3954 	if (bitfield)
3955           {
3956             tree field = TREE_OPERAND (node, 1);
3957 
3958             if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3959               size = DECL_SIZE_UNIT (field);
3960 
3961 	    if (elt)
3962 	      elt = chkp_build_addr_expr (elt);
3963             addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3964             addr_first = fold_build_pointer_plus_loc (loc,
3965 						      addr_first,
3966 						      byte_position (field));
3967           }
3968         else
3969           addr_first = chkp_build_addr_expr (node);
3970       }
3971       break;
3972 
3973     case INDIRECT_REF:
3974       ptr = TREE_OPERAND (node, 0);
3975       addr_first = ptr;
3976       break;
3977 
3978     case MEM_REF:
3979       ptr = TREE_OPERAND (node, 0);
3980       addr_first = chkp_build_addr_expr (node);
3981       break;
3982 
3983     case TARGET_MEM_REF:
3984       ptr = TMR_BASE (node);
3985       addr_first = chkp_build_addr_expr (node);
3986       break;
3987 
3988     case ARRAY_RANGE_REF:
3989       printf("ARRAY_RANGE_REF\n");
3990       debug_gimple_stmt(gsi_stmt(*iter));
3991       debug_tree(node);
3992       gcc_unreachable ();
3993       break;
3994 
3995     case BIT_FIELD_REF:
3996       {
3997 	tree offs, rem, bpu;
3998 
3999 	gcc_assert (!access_offs);
4000 	gcc_assert (!access_size);
4001 
4002 	bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
4003 	offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
4004 	rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
4005 	offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
4006 
4007 	size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
4008         size = size_binop_loc (loc, PLUS_EXPR, size, rem);
4009         size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
4010         size = fold_convert (size_type_node, size);
4011 
4012 	chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
4013 			 dirflag, offs, size, safe);
4014 	return;
4015       }
4016       break;
4017 
4018     case VAR_DECL:
4019     case RESULT_DECL:
4020     case PARM_DECL:
4021       if (dirflag != integer_one_node
4022 	  || DECL_REGISTER (node))
4023 	return;
4024 
4025       safe = true;
4026       addr_first = chkp_build_addr_expr (node);
4027       break;
4028 
4029     default:
4030       return;
4031     }
4032 
4033   /* If addr_last was not computed then use (addr_first + size - 1)
4034      expression to compute it.  */
4035   if (!addr_last)
4036     {
4037       addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
4038       addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
4039     }
4040 
4041   /* Shift both first_addr and last_addr by access_offs if specified.  */
4042   if (access_offs)
4043     {
4044       addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
4045       addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
4046     }
4047 
4048   /* Generate bndcl/bndcu checks if memory access is not safe.  */
4049   if (!safe)
4050     {
4051       gimple_stmt_iterator stmt_iter = *iter;
4052 
4053       if (!bounds)
4054 	bounds = chkp_find_bounds (ptr, iter);
4055 
4056       chkp_check_mem_access (addr_first, addr_last, bounds,
4057 			     stmt_iter, loc, dirflag);
4058     }
4059 
4060   /* We need to store bounds in case pointer is stored.  */
4061   if (dirflag == integer_one_node
4062       && chkp_type_has_pointer (node_type)
4063       && flag_chkp_store_bounds)
4064     {
4065       gimple *stmt = gsi_stmt (*iter);
4066       tree rhs1 = gimple_assign_rhs1 (stmt);
4067       enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4068 
4069       if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
4070 	chkp_walk_pointer_assignments (node, rhs1, iter,
4071 				       chkp_copy_bounds_for_elem);
4072       else
4073 	{
4074 	  bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
4075 	  chkp_build_bndstx (addr_first, rhs1, bounds, iter);
4076 	}
4077     }
4078 }
4079 
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.

   Emits bndldx/bndstx calls for the copied pointers and creates
   call graph edges for every generated call so the cgraph stays
   consistent after inlining.  */
void
chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  /* Walk backwards from the current iterator position to ASSIGN,
     visiting every statement the walk above inserted.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple *stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only checker runtime calls are expected among the
	     statements generated by the walk.  */
	  gcc_assert (fndecl == chkp_bndstx_fndecl
		      || fndecl == chkp_bndldx_fndecl
		      || fndecl == chkp_ret_bnd_fndecl);

	  /* Inherit count/frequency from the inlined edge, then
	     recompute frequency from the statement's actual block.  */
	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
4118 
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.

   Specifically: statements may have been inserted after a
   statement that ends a basic block.  Such trailing statements
   are moved onto the fallthru edge here, once all bound phi
   nodes are complete.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements after a block-ending statement are the flaw
	   we are looking for.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move every trailing statement onto the fallthru edge.
	       gsi_remove leaves NEXT pointing at the following
	       statement, so this loop consumes them all.  */
	    while (!gsi_end_p (next))
	      {
		gimple *next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4176 
4177 /* Walker callback for chkp_replace_function_pointers.  Replaces
4178    function pointer in the specified operand with pointer to the
4179    instrumented function version.  */
4180 static tree
4181 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4182 			       void *data ATTRIBUTE_UNUSED)
4183 {
4184   if (TREE_CODE (*op) == FUNCTION_DECL
4185       && chkp_instrumentable_p (*op)
4186       && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4187 	  /* For builtins we replace pointers only for selected
4188 	     function and functions having definitions.  */
4189 	  || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4190 	      && (chkp_instrument_normal_builtin (*op)
4191 		  || gimple_has_body_p (*op)))))
4192     {
4193       struct cgraph_node *node = cgraph_node::get_create (*op);
4194       struct cgraph_node *clone = NULL;
4195 
4196       if (!node->instrumentation_clone)
4197 	clone = chkp_maybe_create_clone (*op);
4198 
4199       if (clone)
4200 	*op = clone->decl;
4201       *walk_subtrees = 0;
4202     }
4203 
4204   return NULL;
4205 }
4206 
4207 /* This function searches for function pointers in statement
4208    pointed by GSI and replaces them with pointers to instrumented
4209    function versions.  */
4210 static void
4211 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4212 {
4213   gimple *stmt = gsi_stmt (*gsi);
4214   /* For calls we want to walk call args only.  */
4215   if (gimple_code (stmt) == GIMPLE_CALL)
4216     {
4217       unsigned i;
4218       for (i = 0; i < gimple_call_num_args (stmt); i++)
4219 	walk_tree (gimple_call_arg_ptr (stmt, i),
4220 		   chkp_replace_function_pointer, NULL, NULL);
4221     }
4222   else
4223     walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4224 }
4225 
4226 /* This function instruments all statements working with memory,
4227    calls and rets.
4228 
4229    It also removes excess statements from static initializers.  */
4230 static void
4231 chkp_instrument_function (void)
4232 {
4233   basic_block bb, next;
4234   gimple_stmt_iterator i;
4235   enum gimple_rhs_class grhs_class;
4236   bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4237 
4238   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4239   do
4240     {
4241       next = bb->next_bb;
4242       for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4243         {
4244 	  gimple *s = gsi_stmt (i);
4245 
4246 	  /* Skip statement marked to not be instrumented.  */
4247 	  if (chkp_marked_stmt_p (s))
4248 	    {
4249 	      gsi_next (&i);
4250 	      continue;
4251 	    }
4252 
4253 	  chkp_replace_function_pointers (&i);
4254 
4255           switch (gimple_code (s))
4256             {
4257             case GIMPLE_ASSIGN:
4258 	      chkp_process_stmt (&i, gimple_assign_lhs (s),
4259 				 gimple_location (s), integer_one_node,
4260 				 NULL_TREE, NULL_TREE, safe);
4261 	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4262 				 gimple_location (s), integer_zero_node,
4263 				 NULL_TREE, NULL_TREE, safe);
4264 	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4265 	      if (grhs_class == GIMPLE_BINARY_RHS)
4266 		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4267 				   gimple_location (s), integer_zero_node,
4268 				   NULL_TREE, NULL_TREE, safe);
4269               break;
4270 
4271             case GIMPLE_RETURN:
4272 	      {
4273 		greturn *r = as_a <greturn *> (s);
4274 		if (gimple_return_retval (r) != NULL_TREE)
4275 		  {
4276 		    chkp_process_stmt (&i, gimple_return_retval (r),
4277 				       gimple_location (r),
4278 				       integer_zero_node,
4279 				       NULL_TREE, NULL_TREE, safe);
4280 
4281 		    /* Additionally we need to add bounds
4282 		       to return statement.  */
4283 		    chkp_add_bounds_to_ret_stmt (&i);
4284 		  }
4285 	      }
4286 	      break;
4287 
4288 	    case GIMPLE_CALL:
4289 	      chkp_add_bounds_to_call_stmt (&i);
4290 	      break;
4291 
4292             default:
4293               ;
4294             }
4295 
4296 	  gsi_next (&i);
4297 
4298 	  /* We do not need any actual pointer stores in checker
4299 	     static initializer.  */
4300 	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4301 	      && gimple_code (s) == GIMPLE_ASSIGN
4302 	      && gimple_store_p (s))
4303 	    {
4304 	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4305 	      gsi_remove (&del_iter, true);
4306 	      unlink_stmt_vdef (s);
4307 	      release_defs(s);
4308 	    }
4309         }
4310       bb = next;
4311     }
4312   while (bb);
4313 
4314   /* Some input params may have bounds and be address taken.  In this case
4315      we should store incoming bounds into bounds table.  */
4316   tree arg;
4317   if (flag_chkp_store_bounds)
4318     for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4319       if (TREE_ADDRESSABLE (arg))
4320 	{
4321 	  if (BOUNDED_P (arg))
4322 	    {
4323 	      tree bounds = chkp_get_next_bounds_parm (arg);
4324 	      tree def_ptr = ssa_default_def (cfun, arg);
4325 	      gimple_stmt_iterator iter
4326 		= gsi_start_bb (chkp_get_entry_block ());
4327 	      chkp_build_bndstx (chkp_build_addr_expr (arg),
4328 				 def_ptr ? def_ptr : arg,
4329 				 bounds, &iter);
4330 
4331 	      /* Skip bounds arg.  */
4332 	      arg = TREE_CHAIN (arg);
4333 	    }
4334 	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4335 	    {
4336 	      tree orig_arg = arg;
4337 	      bitmap slots = BITMAP_ALLOC (NULL);
4338 	      gimple_stmt_iterator iter
4339 		= gsi_start_bb (chkp_get_entry_block ());
4340 	      bitmap_iterator bi;
4341 	      unsigned bnd_no;
4342 
4343 	      chkp_find_bound_slots (TREE_TYPE (arg), slots);
4344 
4345 	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4346 		{
4347 		  tree bounds = chkp_get_next_bounds_parm (arg);
4348 		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4349 		  tree addr = chkp_build_addr_expr (orig_arg);
4350 		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4351 				     build_int_cst (ptr_type_node, offs));
4352 		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4353 				     bounds, &iter);
4354 
4355 		  arg = DECL_CHAIN (arg);
4356 		}
4357 	      BITMAP_FREE (slots);
4358 	    }
4359 	}
4360 }
4361 
/* Find init/null/copy_ptr_bounds calls and replace them
   with assignments.  It should allow better code
   optimization.  */

static void
chkp_remove_useless_builtins ()
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
	  gimple *stmt = gsi_stmt (gsi);
	  tree fndecl;
	  enum built_in_function fcode;

	  /* Find builtins returning first arg and replace
	     them with assignments.  Note the assignments inside
	     the condition: FNDECL and FCODE are captured for use
	     in the nested tests.  */
	  if (gimple_code (stmt) == GIMPLE_CALL
	      && (fndecl = gimple_call_fndecl (stmt))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && (fcode = DECL_FUNCTION_CODE (fndecl))
	      && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
	    {
	      /* These builtins all return their first argument,
		 so the call can become a plain assignment.  */
	      tree res = gimple_call_arg (stmt, 0);
	      update_call_from_tree (&gsi, res);
	      stmt = gsi_stmt (gsi);
	      update_stmt (stmt);
	    }
        }
    }
}
4399 
/* Initialize pass.

   Resets per-function checker state.  Note that some maps
   (chkp_reg_bounds, chkp_bound_vars, chkp_bounds_map) may survive
   from a previous function and are deleted before being recreated,
   while others are freed in chkp_fini and created fresh here.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear instrumentation marks possibly left by a previous run.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  /* integer_zero_node serves as the marker for not-yet-computed
     bounds.  */
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
4445 
/* Finalize instrumentation pass.

   Frees the per-function state created by chkp_init.  Maps that
   must survive until chkp_finish_file (e.g. chkp_bounds_map) are
   intentionally not deleted here.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
4466 
/* Main instrumentation pass function.

   Runs the full instrumentation pipeline for the current function;
   the steps are order-dependent (CFG fixup must follow
   instrumentation, finalization must come last).  Always returns 0
   (no additional TODO flags).  */
static unsigned int
chkp_execute (void)
{
  chkp_init ();

  chkp_instrument_function ();

  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
4485 
4486 /* Instrumentation pass gate.  */
4487 static bool
4488 chkp_gate (void)
4489 {
4490   cgraph_node *node = cgraph_node::get (cfun->decl);
4491   return ((node != NULL
4492 	   && node->instrumentation_clone)
4493 	   || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4494 }
4495 
namespace {

/* Pass descriptor for the chkp GIMPLE pass.  Requires SSA and CFG;
   IL verification and SSA update are requested after the pass.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* Pass wrapper delegating gate and execute to the chkp_*
   functions above.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
4538 
/* Create an instance of the chkp instrumentation pass.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4544 
4545 #include "gt-tree-chkp.h"
4546