xref: /dragonfly/contrib/gcc-8.0/gcc/tree-chkp.c (revision 0600465e)
/* Pointer Bounds Checker instrumentation pass.
2    Copyright (C) 2014-2018 Free Software Foundation, Inc.
3    Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "diagnostic.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "varasm.h"
37 #include "tree-iterator.h"
38 #include "tree-cfg.h"
39 #include "langhooks.h"
40 #include "tree-ssa-address.h"
41 #include "tree-ssa-loop-niter.h"
42 #include "gimple-pretty-print.h"
43 #include "gimple-iterator.h"
44 #include "gimplify.h"
45 #include "gimplify-me.h"
46 #include "print-tree.h"
47 #include "calls.h"
48 #include "expr.h"
49 #include "tree-ssa-propagate.h"
50 #include "tree-chkp.h"
51 #include "gimple-walk.h"
52 #include "tree-dfa.h"
53 #include "ipa-chkp.h"
54 #include "params.h"
55 #include "stringpool.h"
56 #include "attribs.h"
57 
58 /*  Pointer Bounds Checker instruments code with memory checks to find
59     out-of-bounds memory accesses.  Checks are performed by computing
60     bounds for each pointer and then comparing address of accessed
61     memory before pointer dereferencing.
62 
63     1. Function clones.
64 
65     See ipa-chkp.c.
66 
67     2. Instrumentation.
68 
69     There are few things to instrument:
70 
71     a) Memory accesses - add checker calls to check address of accessed memory
72     against bounds of dereferenced pointer.  Obviously safe memory
73     accesses like static variable access does not have to be instrumented
74     with checks.
75 
76     Example:
77 
78       val_2 = *p_1;
79 
80       with 4 bytes access is transformed into:
81 
82       __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
83       D.1_4 = p_1 + 3;
84       __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
85       val_2 = *p_1;
86 
87       where __bound_tmp.1_3 are bounds computed for pointer p_1,
88       __builtin___chkp_bndcl is a lower bound check and
89       __builtin___chkp_bndcu is an upper bound check.
90 
91     b) Pointer stores.
92 
93     When pointer is stored in memory we need to store its bounds.  To
94     achieve compatibility of instrumented code with regular codes
95     we have to keep data layout and store bounds in special bound tables
96     via special checker call.  Implementation of bounds table may vary for
97     different platforms.  It has to associate pointer value and its
98     location (it is required because we may have two equal pointers
99     with different bounds stored in different places) with bounds.
100     Another checker builtin allows to get bounds for specified pointer
101     loaded from specified location.
102 
103     Example:
104 
105       buf1[i_1] = &buf2;
106 
107       is transformed into:
108 
109       buf1[i_1] = &buf2;
110       D.1_2 = &buf1[i_1];
111       __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
112 
113       where __bound_tmp.1_2 are bounds of &buf2.
114 
115     c) Static initialization.
116 
117     The special case of pointer store is static pointer initialization.
118     Bounds initialization is performed in a few steps:
119       - register all static initializations in front-end using
120       chkp_register_var_initializer
121       - when file compilation finishes we create functions with special
122       attribute 'chkp ctor' and put explicit initialization code
123       (assignments) for all statically initialized pointers.
124       - when checker constructor is compiled checker pass adds required
125       bounds initialization for all statically initialized pointers
126       - since we do not actually need excess pointers initialization
127       in checker constructor we remove such assignments from them
128 
129     d) Calls.
130 
131     For each call in the code we add additional arguments to pass
132     bounds for pointer arguments.  We determine type of call arguments
133     using arguments list from function declaration; if function
134     declaration is not available we use function type; otherwise
135     (e.g. for unnamed arguments) we use type of passed value. Function
136     declaration/type is replaced with the instrumented one.
137 
138     Example:
139 
140       val_1 = foo (&buf1, &buf2, &buf1, 0);
141 
142       is translated into:
143 
144       val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
145                         &buf1, __bound_tmp.1_2, 0);
146 
147     e) Returns.
148 
149     If function returns a pointer value we have to return bounds also.
150     A new operand was added for return statement to hold returned bounds.
151 
152     Example:
153 
154       return &_buf1;
155 
156       is transformed into
157 
158       return &_buf1, __bound_tmp.1_1;
159 
160     3. Bounds computation.
161 
162     Compiler is fully responsible for computing bounds to be used for each
163     memory access.  The first step for bounds computation is to find the
164     origin of pointer dereferenced for memory access.  Basing on pointer
165     origin we define a way to compute its bounds.  There are just few
166     possible cases:
167 
168     a) Pointer is returned by call.
169 
170     In this case we use corresponding checker builtin method to obtain returned
171     bounds.
172 
173     Example:
174 
175       buf_1 = malloc (size_2);
176       foo (buf_1);
177 
178       is translated into:
179 
180       buf_1 = malloc (size_2);
181       __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
182       foo (buf_1, __bound_tmp.1_3);
183 
184     b) Pointer is an address of an object.
185 
186     In this case compiler tries to compute objects size and create corresponding
187     bounds.  If object has incomplete type then special checker builtin is used to
188     obtain its size at runtime.
189 
190     Example:
191 
192       foo ()
193       {
194         <unnamed type> __bound_tmp.3;
195 	static int buf[100];
196 
197 	<bb 3>:
198 	__bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
199 
200 	<bb 2>:
201 	return &buf, __bound_tmp.3_2;
202       }
203 
204     Example:
205 
206       Address of an object 'extern int buf[]' with incomplete type is
207       returned.
208 
209       foo ()
210       {
211         <unnamed type> __bound_tmp.4;
212 	long unsigned int __size_tmp.3;
213 
214 	<bb 3>:
215 	__size_tmp.3_4 = __builtin_ia32_sizeof (buf);
216 	__bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
217 
218 	<bb 2>:
219 	return &buf, __bound_tmp.4_3;
220       }
221 
222     c) Pointer is the result of object narrowing.
223 
224     It happens when we use pointer to an object to compute pointer to a part
225     of an object.  E.g. we take pointer to a field of a structure. In this
226     case we perform bounds intersection using bounds of original object and
227     bounds of object's part (which are computed basing on its type).
228 
229     There may be some debatable questions about when narrowing should occur
230     and when it should not.  To avoid false bound violations in correct
231     programs we do not perform narrowing when address of an array element is
232     obtained (it has address of the whole array) and when address of the first
233     structure field is obtained (because it is guaranteed to be equal to
234     address of the whole structure and it is legal to cast it back to structure).
235 
236     Default narrowing behavior may be changed using compiler flags.
237 
238     Example:
239 
240       In this example address of the second structure field is returned.
241 
242       foo (struct A * p, __bounds_type __bounds_of_p)
243       {
244         <unnamed type> __bound_tmp.3;
245 	int * _2;
246 	int * _5;
247 
248 	<bb 2>:
249 	_5 = &p_1(D)->second_field;
250 	__bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
251 	__bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
252 	                                              __bounds_of_p_3(D));
253 	_2 = &p_1(D)->second_field;
254 	return _2, __bound_tmp.3_8;
255       }
256 
257     Example:
258 
259       In this example address of the first field of array element is returned.
260 
261       foo (struct A * p, __bounds_type __bounds_of_p, int i)
262       {
263 	long unsigned int _3;
264 	long unsigned int _4;
265 	struct A * _6;
266 	int * _7;
267 
268 	<bb 2>:
269 	_3 = (long unsigned int) i_1(D);
270 	_4 = _3 * 8;
271 	_6 = p_5(D) + _4;
272 	_7 = &_6->first_field;
273 	return _7, __bounds_of_p_2(D);
274       }
275 
276 
277     d) Pointer is the result of pointer arithmetic or type cast.
278 
279     In this case bounds of the base pointer are used.  In case of binary
280     operation producing a pointer we are analyzing data flow further
281     looking for operand's bounds.  One operand is considered as a base
282     if it has some valid bounds.  If we fall into a case when none of
283     operands (or both of them) has valid bounds, a default bounds value
284     is used.
285 
286     Trying to find out bounds for binary operations we may fall into
287     cyclic dependencies for pointers.  To avoid infinite recursion all
288     walked phi nodes instantly obtain corresponding bounds but created
289     bounds are marked as incomplete.  It helps us to stop DF walk during
290     bounds search.
291 
292     When we reach pointer source, some args of incomplete bounds phi obtain
293     valid bounds and those values are propagated further through phi nodes.
294     If no valid bounds were found for phi node then we mark its result as
295     invalid bounds.  Process stops when all incomplete bounds become either
296     valid or invalid and we are able to choose a pointer base.
297 
298     e) Pointer is loaded from the memory.
299 
300     In this case we just need to load bounds from the bounds table.
301 
302     Example:
303 
304       foo ()
305       {
306         <unnamed type> __bound_tmp.3;
307 	static int * buf;
308 	int * _2;
309 
310 	<bb 2>:
311 	_2 = buf;
312 	__bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
313 	return _2, __bound_tmp.3_4;
314       }
315 
316 */
317 
318 typedef void (*assign_handler)(tree, tree, void *);
319 
320 static tree chkp_get_zero_bounds ();
321 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
322 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
323 				     gimple_stmt_iterator *iter);
324 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
325 						tree *elt, bool *safe,
326 						bool *bitfield,
327 						tree *bounds,
328 						gimple_stmt_iterator *iter,
329 						bool innermost_bounds);
330 static void chkp_parse_bit_field_ref (tree node, location_t loc,
331 				      tree *offset, tree *size);
332 static tree
333 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter);
334 
335 #define chkp_bndldx_fndecl \
336   (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
337 #define chkp_bndstx_fndecl \
338   (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
339 #define chkp_checkl_fndecl \
340   (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
341 #define chkp_checku_fndecl \
342   (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
343 #define chkp_bndmk_fndecl \
344   (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
345 #define chkp_ret_bnd_fndecl \
346   (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
347 #define chkp_intersect_fndecl \
348   (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
349 #define chkp_narrow_bounds_fndecl \
350   (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
351 #define chkp_sizeof_fndecl \
352   (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
353 #define chkp_extract_lower_fndecl \
354   (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
355 #define chkp_extract_upper_fndecl \
356   (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
357 
358 static GTY (()) tree chkp_uintptr_type;
359 
360 static GTY (()) tree chkp_zero_bounds_var;
361 static GTY (()) tree chkp_none_bounds_var;
362 
363 static GTY (()) basic_block entry_block;
364 static GTY (()) tree zero_bounds;
365 static GTY (()) tree none_bounds;
366 static GTY (()) tree incomplete_bounds;
367 static GTY (()) tree tmp_var;
368 static GTY (()) tree size_tmp_var;
369 static GTY (()) bitmap chkp_abnormal_copies;
370 
371 struct hash_set<tree> *chkp_invalid_bounds;
372 struct hash_set<tree> *chkp_completed_bounds_set;
373 struct hash_map<tree, tree> *chkp_reg_bounds;
374 struct hash_map<tree, tree> *chkp_bound_vars;
375 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
376 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
377 struct hash_map<tree, tree> *chkp_bounds_map;
378 struct hash_map<tree, tree> *chkp_static_var_bounds;
379 
380 static bool in_chkp_pass;
381 
382 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
383 #define CHKP_SIZE_TMP_NAME "__size_tmp"
384 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
385 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
386 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
387 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
388 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
389 
390 /* Static checker constructors may become very large and their
391    compilation with optimization may take too much time.
392    Therefore we put a limit to number of statements in one
393    constructor.  Tests with 100 000 statically initialized
394    pointers showed following compilation times on Sandy Bridge
395    server (used -O2):
396    limit    100 => ~18 sec.
397    limit    300 => ~22 sec.
398    limit   1000 => ~30 sec.
399    limit   3000 => ~49 sec.
400    limit   5000 => ~55 sec.
401    limit  10000 => ~76 sec.
402    limit 100000 => ~532 sec.  */
403 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
404 
/* Accumulator for statements emitted into a static checker
   constructor.  */
struct chkp_ctor_stmt_list
{
  /* Statement list collected so far.  */
  tree stmts;
  /* Remaining statement budget before the constructor must be
     flushed — presumably counts down from
     MAX_STMTS_IN_STATIC_CHKP_CTOR (see comment above); confirm
     against the code that fills this struct.  */
  int avail;
};
410 
411 /* Return 1 if function FNDECL is instrumented by Pointer
412    Bounds Checker.  */
413 bool
414 chkp_function_instrumented_p (tree fndecl)
415 {
416   return fndecl
417     && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
418 }
419 
420 /* Mark function FNDECL as instrumented.  */
421 void
422 chkp_function_mark_instrumented (tree fndecl)
423 {
424   if (chkp_function_instrumented_p (fndecl))
425     return;
426 
427   DECL_ATTRIBUTES (fndecl)
428     = tree_cons (get_identifier ("chkp instrumented"), NULL,
429 		 DECL_ATTRIBUTES (fndecl));
430 }
431 
432 /* Return true when STMT is builtin call to instrumentation function
433    corresponding to CODE.  */
434 
435 bool
436 chkp_gimple_call_builtin_p (gimple *call,
437 			    enum built_in_function code)
438 {
439   tree fndecl;
440   /* We are skipping the check for address-spaces, that's
441      why we don't use gimple_call_builtin_p directly here.  */
442   if (is_gimple_call (call)
443       && (fndecl = gimple_call_fndecl (call)) != NULL
444       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD
445       && (fndecl = targetm.builtin_chkp_function (code))
446       && (DECL_FUNCTION_CODE (gimple_call_fndecl (call))
447 	  == DECL_FUNCTION_CODE (fndecl)))
448     return true;
449   return false;
450 }
451 
452 /* Emit code to build zero bounds and return RTL holding
453    the result.  */
454 rtx
455 chkp_expand_zero_bounds ()
456 {
457   tree zero_bnd;
458 
459   if (flag_chkp_use_static_const_bounds)
460     zero_bnd = chkp_get_zero_bounds_var ();
461   else
462     zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
463 					    integer_zero_node);
464   return expand_normal (zero_bnd);
465 }
466 
467 /* Emit code to store zero bounds for PTR located at MEM.  */
468 void
469 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
470 {
471   tree zero_bnd, bnd, addr, bndstx;
472 
473   if (flag_chkp_use_static_const_bounds)
474     zero_bnd = chkp_get_zero_bounds_var ();
475   else
476     zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
477 					    integer_zero_node);
478   bnd = make_tree (pointer_bounds_type_node,
479 		   assign_temp (pointer_bounds_type_node, 0, 1));
480   addr = build1 (ADDR_EXPR,
481 		 build_pointer_type (TREE_TYPE (mem)), mem);
482   bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
483 
484   expand_assignment (bnd, zero_bnd, false);
485   expand_normal (bndstx);
486 }
487 
488 /* Build retbnd call for returned value RETVAL.
489 
490    If BNDVAL is not NULL then result is stored
491    in it.  Otherwise a temporary is created to
492    hold returned value.
493 
494    GSI points to a position for a retbnd call
495    and is set to created stmt.
496 
497    Cgraph edge is created for a new call if
498    UPDATE_EDGE is 1.
499 
500    Obtained bounds are returned.  */
501 tree
502 chkp_insert_retbnd_call (tree bndval, tree retval,
503 			 gimple_stmt_iterator *gsi)
504 {
505   gimple *call;
506 
507   if (!bndval)
508     bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
509 
510   call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
511   gimple_call_set_lhs (call, bndval);
512   gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
513 
514   return bndval;
515 }
516 
517 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
518    arguments.  */
519 
520 gcall *
521 chkp_copy_call_skip_bounds (gcall *call)
522 {
523   bitmap bounds;
524   unsigned i;
525 
526   bitmap_obstack_initialize (NULL);
527   bounds = BITMAP_ALLOC (NULL);
528 
529   for (i = 0; i < gimple_call_num_args (call); i++)
530     if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
531       bitmap_set_bit (bounds, i);
532 
533   if (!bitmap_empty_p (bounds))
534     call = gimple_call_copy_skip_args (call, bounds);
535   gimple_call_set_with_bounds (call, false);
536 
537   BITMAP_FREE (bounds);
538   bitmap_obstack_release (NULL);
539 
540   return call;
541 }
542 
/* Redirect edge E to the correct node according to call_stmt.
   Return 1 if bounds removal from call_stmt should be done
   instead of redirection.  */

bool
chkp_redirect_edge (cgraph_edge *e)
{
  bool instrumented = false;
  tree decl = e->callee->decl;

  /* The callee counts as instrumented if it is an instrumentation
     clone or carries the "chkp instrumented" attribute.  */
  if (e->callee->instrumentation_clone
      || chkp_function_instrumented_p (decl))
    instrumented = true;

  /* Instrumented callee, but the call passes no bounds: retarget
     the edge at the original (non-instrumented) declaration.  */
  if (instrumented
      && !gimple_call_with_bounds_p (e->call_stmt))
    e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
  else if (!instrumented
	   && gimple_call_with_bounds_p (e->call_stmt)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
    {
      /* A with-bounds call to a non-instrumented callee (and not one
	 of the checker builtins that legitimately take bounds):
	 prefer the instrumented version when one exists.  */
      if (e->callee->instrumented_version)
	e->redirect_callee (e->callee->instrumented_version);
      else
	{
	  tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
	  /* Avoid bounds removal if all args will be removed.  */
	  if (!args || TREE_VALUE (args) != void_type_node)
	    return true;
	  else
	    gimple_call_set_with_bounds (e->call_stmt, false);
	}
    }

  return false;
}
581 
/* Mark statement S to not be instrumented (sets pass-local
   flag GF_PLF_1; queried via chkp_marked_stmt_p).  */
static void
chkp_mark_stmt (gimple *s)
{
  gimple_set_plf (s, GF_PLF_1, true);
}
588 
/* Mark statement S to be instrumented (clears pass-local
   flag GF_PLF_1 set by chkp_mark_stmt).  */
static void
chkp_unmark_stmt (gimple *s)
{
  gimple_set_plf (s, GF_PLF_1, false);
}
595 
/* Return 1 if statement S should not be instrumented
   (i.e. chkp_mark_stmt was called on it).  */
static bool
chkp_marked_stmt_p (gimple *s)
{
  return gimple_plf (s, GF_PLF_1);
}
602 
603 /* Get var to be used for bound temps.  */
604 static tree
605 chkp_get_tmp_var (void)
606 {
607   if (!tmp_var)
608     tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
609 
610   return tmp_var;
611 }
612 
613 /* Get SSA_NAME to be used as temp.  */
614 static tree
615 chkp_get_tmp_reg (gimple *stmt)
616 {
617   if (in_chkp_pass)
618     return make_ssa_name (chkp_get_tmp_var (), stmt);
619 
620   return make_temp_ssa_name (pointer_bounds_type_node, stmt,
621 			     CHKP_BOUND_TMP_NAME);
622 }
623 
624 /* Get var to be used for size temps.  */
625 static tree
626 chkp_get_size_tmp_var (void)
627 {
628   if (!size_tmp_var)
629     size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
630 
631   return size_tmp_var;
632 }
633 
634 /* Register bounds BND for address of OBJ.  */
635 static void
636 chkp_register_addr_bounds (tree obj, tree bnd)
637 {
638   if (bnd == incomplete_bounds)
639     return;
640 
641   chkp_reg_addr_bounds->put (obj, bnd);
642 
643   if (dump_file && (dump_flags & TDF_DETAILS))
644     {
645       fprintf (dump_file, "Regsitered bound ");
646       print_generic_expr (dump_file, bnd);
647       fprintf (dump_file, " for address of ");
648       print_generic_expr (dump_file, obj);
649       fprintf (dump_file, "\n");
650     }
651 }
652 
653 /* Return bounds registered for address of OBJ.  */
654 static tree
655 chkp_get_registered_addr_bounds (tree obj)
656 {
657   tree *slot = chkp_reg_addr_bounds->get (obj);
658   return slot ? *slot : NULL_TREE;
659 }
660 
661 /* Mark BOUNDS as completed.  */
662 static void
663 chkp_mark_completed_bounds (tree bounds)
664 {
665   chkp_completed_bounds_set->add (bounds);
666 
667   if (dump_file && (dump_flags & TDF_DETAILS))
668     {
669       fprintf (dump_file, "Marked bounds ");
670       print_generic_expr (dump_file, bounds);
671       fprintf (dump_file, " as completed\n");
672     }
673 }
674 
/* Return 1 if BOUNDS were marked as completed (via
   chkp_mark_completed_bounds) and 0 otherwise.  */
static bool
chkp_completed_bounds (tree bounds)
{
  return chkp_completed_bounds_set->contains (bounds);
}
681 
/* Clear completed bound marks by recreating the set.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
689 
690 /* This function is used to provide a base address for
691    chkp_get_hard_register_fake_addr_expr.  */
692 static tree
693 chkp_get_hard_register_var_fake_base_address ()
694 {
695   int prec = TYPE_PRECISION (ptr_type_node);
696   return wide_int_to_tree (ptr_type_node, wi::min_value (prec, SIGNED));
697 }
698 
/* If we check bounds for a hard register variable, we cannot
   use its address - it is illegal, so instead of that we use
   this fake value.  Peels COMPONENT_REFs and ARRAY_REFs off OBJ,
   accumulating their offsets on top of the fake base address.  */
static tree
chkp_get_hard_register_fake_addr_expr (tree obj)
{
  tree addr = chkp_get_hard_register_var_fake_base_address ();
  tree outer = obj;
  /* Walk inward through field and array accesses, adding each
     level's byte offset to the fake address.  */
  while (TREE_CODE (outer) == COMPONENT_REF || TREE_CODE (outer) == ARRAY_REF)
    {
      if (TREE_CODE (outer) == COMPONENT_REF)
	{
	  addr = fold_build_pointer_plus (addr,
					  component_ref_field_offset (outer));
	  outer = TREE_OPERAND (outer, 0);
	}
      else if (TREE_CODE (outer) == ARRAY_REF)
	{
	  /* Offset of an array element = element size * index.  */
	  tree indx = fold_convert(size_type_node, TREE_OPERAND(outer, 1));
	  tree offset = size_binop (MULT_EXPR,
				    array_ref_element_size (outer), indx);
	  addr = fold_build_pointer_plus (addr, offset);
	  outer = TREE_OPERAND (outer, 0);
	}
    }

  return addr;
}
727 
728 /* Mark BOUNDS associated with PTR as incomplete.  */
729 static void
730 chkp_register_incomplete_bounds (tree bounds, tree ptr)
731 {
732   chkp_incomplete_bounds_map->put (bounds, ptr);
733 
734   if (dump_file && (dump_flags & TDF_DETAILS))
735     {
736       fprintf (dump_file, "Regsitered incomplete bounds ");
737       print_generic_expr (dump_file, bounds);
738       fprintf (dump_file, " for ");
739       print_generic_expr (dump_file, ptr);
740       fprintf (dump_file, "\n");
741     }
742 }
743 
/* Return 1 if BOUNDS are incomplete and 0 otherwise.  */
static bool
chkp_incomplete_bounds (tree bounds)
{
  /* The shared sentinel is always incomplete.  */
  if (bounds == incomplete_bounds)
    return true;

  /* Completion overrides a stale incomplete registration.  */
  if (chkp_completed_bounds (bounds))
    return false;

  return chkp_incomplete_bounds_map->get (bounds) != NULL;
}
756 
/* Clear incomplete bound marks by recreating the map.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}
764 
765 /* Build and return bndmk call which creates bounds for structure
766    pointed by PTR.  Structure should have complete type.  */
767 tree
768 chkp_make_bounds_for_struct_addr (tree ptr)
769 {
770   tree type = TREE_TYPE (ptr);
771   tree size;
772 
773   gcc_assert (POINTER_TYPE_P (type));
774 
775   size = TYPE_SIZE (TREE_TYPE (type));
776 
777   gcc_assert (size);
778 
779   return build_call_nary (pointer_bounds_type_node,
780 			  build_fold_addr_expr (chkp_bndmk_fndecl),
781 			  2, ptr, size);
782 }
783 
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of phi statement
   defining bounds (passed in KEY arg) is unknown.
   Traversal stops when first unknown phi argument is found.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple *phi;
  unsigned i;

  /* Incomplete bounds are always SSA names defined by phi nodes.  */
  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further.  */
	  return false;
	}
    }

  /* Keep traversing.  */
  return true;
}
814 
815 /* Return 1 if all phi nodes created for bounds have their
816    arguments computed.  */
817 static bool
818 chkp_may_finish_incomplete_bounds (void)
819 {
820   bool res = true;
821 
822   chkp_incomplete_bounds_map
823     ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
824 
825   return res;
826 }
827 
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  BOUNDS is the phi result,
   *SLOT the pointer it was registered for; both must be SSA names
   defined by phi nodes with matching argument positions.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  /* For each incoming edge of the pointer phi, look up bounds of
     the corresponding pointer argument and add them as the bounds
     phi argument on the same edge.  */
  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  /* Always continue the traversal.  */
  return true;
}
857 
858 /* Mark BOUNDS as invalid.  */
859 static void
860 chkp_mark_invalid_bounds (tree bounds)
861 {
862   chkp_invalid_bounds->add (bounds);
863 
864   if (dump_file && (dump_flags & TDF_DETAILS))
865     {
866       fprintf (dump_file, "Marked bounds ");
867       print_generic_expr (dump_file, bounds);
868       fprintf (dump_file, " as invalid\n");
869     }
870 }
871 
872 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise.  */
873 static bool
874 chkp_valid_bounds (tree bounds)
875 {
876   if (bounds == zero_bounds || bounds == none_bounds)
877     return false;
878 
879   return !chkp_invalid_bounds->contains (bounds);
880 }
881 
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  Sets *RES when progress was made.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple *phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Already resolved on a previous iteration — nothing to do.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      /* One valid, fully-computed argument is enough to anchor the
	 phi result; mark it complete and refresh all its args.  */
      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  /* Always continue the traversal.  */
  return true;
}
920 
921 /* Helper function for chkp_finish_incomplete_bounds.
922    Marks all incompleted bounds as invalid.  */
923 bool
924 chkp_mark_invalid_bounds_walker (tree const &bounds,
925 				 tree *slot ATTRIBUTE_UNUSED,
926 				 void *res ATTRIBUTE_UNUSED)
927 {
928   if (!chkp_completed_bounds (bounds))
929     {
930       chkp_mark_invalid_bounds (bounds);
931       chkp_mark_completed_bounds (bounds);
932     }
933   return true;
934 }
935 
/* When all bound phi nodes have all their args computed
   we have enough info to find valid bounds.  We iterate
   through all incompleted bounds searching for valid
   bounds.  Found valid bounds are marked as completed
   and all remaining incompleted bounds are recomputed.
   Process continues until no new valid bounds may be
   found.  All remained incompleted bounds are marked as
   invalid (i.e. have no valid source of bounds).  */
static void
chkp_finish_incomplete_bounds (void)
{
  bool found_valid = true;

  /* Fixed-point iteration: each pass may validate phis whose
     arguments became valid on the previous pass.  */
  while (found_valid)
    {
      found_valid = false;

      chkp_incomplete_bounds_map->
	traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);

      if (found_valid)
	chkp_incomplete_bounds_map->
	  traverse<void *, chkp_recompute_phi_bounds> (NULL);
    }

  /* Whatever is still incomplete has no valid source: mark invalid,
     then recompute args one last time.  */
  chkp_incomplete_bounds_map->
    traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
  chkp_incomplete_bounds_map->
    traverse<void *, chkp_recompute_phi_bounds> (NULL);

  /* Reset the bookkeeping for the next round.  */
  chkp_erase_completed_bounds ();
  chkp_erase_incomplete_bounds ();
}
969 
970 /* Return 1 if type TYPE is a pointer type or a
971    structure having a pointer type as one of its fields.
972    Otherwise return 0.  */
973 bool
974 chkp_type_has_pointer (const_tree type)
975 {
976   bool res = false;
977 
978   if (BOUNDED_TYPE_P (type))
979     res = true;
980   else if (RECORD_OR_UNION_TYPE_P (type))
981     {
982       tree field;
983 
984       for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
985 	if (TREE_CODE (field) == FIELD_DECL)
986 	  res = res || chkp_type_has_pointer (TREE_TYPE (field));
987     }
988   else if (TREE_CODE (type) == ARRAY_TYPE)
989     res = chkp_type_has_pointer (TREE_TYPE (type));
990 
991   return res;
992 }
993 
994 unsigned
995 chkp_type_bounds_count (const_tree type)
996 {
997   unsigned res = 0;
998 
999   if (!type)
1000     res = 0;
1001   else if (BOUNDED_TYPE_P (type))
1002     res = 1;
1003   else if (RECORD_OR_UNION_TYPE_P (type))
1004     {
1005       bitmap have_bound;
1006 
1007       bitmap_obstack_initialize (NULL);
1008       have_bound = BITMAP_ALLOC (NULL);
1009       chkp_find_bound_slots (type, have_bound);
1010       res = bitmap_count_bits (have_bound);
1011       BITMAP_FREE (have_bound);
1012       bitmap_obstack_release (NULL);
1013     }
1014 
1015   return res;
1016 }
1017 
1018 /* Get bounds associated with NODE via
1019    chkp_set_bounds call.  */
1020 tree
1021 chkp_get_bounds (tree node)
1022 {
1023   tree *slot;
1024 
1025   if (!chkp_bounds_map)
1026     return NULL_TREE;
1027 
1028   slot = chkp_bounds_map->get (node);
1029   return slot ? *slot : NULL_TREE;
1030 }
1031 
1032 /* Associate bounds VAL with NODE.  */
1033 void
1034 chkp_set_bounds (tree node, tree val)
1035 {
1036   if (!chkp_bounds_map)
1037     chkp_bounds_map = new hash_map<tree, tree>;
1038 
1039   chkp_bounds_map->put (node, val);
1040 }
1041 
1042 /* Check if statically initialized variable VAR require
1043    static bounds initialization.  If VAR is added into
1044    bounds initlization list then 1 is returned. Otherwise
1045    return 0.  */
1046 extern bool
1047 chkp_register_var_initializer (tree var)
1048 {
1049   if (!flag_check_pointer_bounds
1050       || DECL_INITIAL (var) == error_mark_node)
1051     return false;
1052 
1053   gcc_assert (VAR_P (var));
1054   gcc_assert (DECL_INITIAL (var));
1055 
1056   if (TREE_STATIC (var)
1057       && chkp_type_has_pointer (TREE_TYPE (var)))
1058     {
1059       varpool_node::get_create (var)->need_bounds_init = 1;
1060       return true;
1061     }
1062 
1063   return false;
1064 }
1065 
1066 /* Helper function for chkp_finish_file.
1067 
1068    Add new modification statement (RHS is assigned to LHS)
1069    into list of static initializer statementes (passed in ARG).
1070    If statements list becomes too big, emit checker constructor
1071    and start the new one.  */
1072 static void
1073 chkp_add_modification_to_stmt_list (tree lhs,
1074 				    tree rhs,
1075 				    void *arg)
1076 {
1077   struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1078   tree modify;
1079 
1080   if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1081     rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1082 
1083   modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1084   append_to_statement_list (modify, &stmts->stmts);
1085 
1086   stmts->avail--;
1087 }
1088 
1089 /* Build and return ADDR_EXPR for specified object OBJ.  */
1090 static tree
1091 chkp_build_addr_expr (tree obj)
1092 {
1093   /* We first check whether it is a "hard reg case".  */
1094   tree base = get_base_address (obj);
1095   if (VAR_P (base) && DECL_HARD_REGISTER (base))
1096     return chkp_get_hard_register_fake_addr_expr (obj);
1097 
1098   /* If not - return regular ADDR_EXPR.  */
1099   return TREE_CODE (obj) == TARGET_MEM_REF
1100     ? tree_mem_ref_addr (ptr_type_node, obj)
1101     : build_fold_addr_expr (obj);
1102 }
1103 
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* For string constants the size is known from the literal;
	 TREE_STRING_LENGTH includes the terminating NUL, hence -1
	 to get the offset of the last byte.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size: emit a call to the
	 checker's sizeof function to be evaluated at run time.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* A zero dynamic size is treated as "unknown": substitute
	     the maximum size reachable from LB (wraps to -lb).  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* Upper bound is the address of the last valid byte.  */
  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* When the statement budget is exhausted, flush the accumulated
     statements into a static constructor and start a fresh list.  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1161 
1162 /* Return entry block to be used for checker initilization code.
1163    Create new block if required.  */
1164 static basic_block
1165 chkp_get_entry_block (void)
1166 {
1167   if (!entry_block)
1168     entry_block
1169       = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1170 
1171   return entry_block;
1172 }
1173 
1174 /* Return a bounds var to be used for pointer var PTR_VAR.  */
1175 static tree
1176 chkp_get_bounds_var (tree ptr_var)
1177 {
1178   tree bnd_var;
1179   tree *slot;
1180 
1181   slot = chkp_bound_vars->get (ptr_var);
1182   if (slot)
1183     bnd_var = *slot;
1184   else
1185     {
1186       bnd_var = create_tmp_reg (pointer_bounds_type_node,
1187 				CHKP_BOUND_TMP_NAME);
1188       chkp_bound_vars->put (ptr_var, bnd_var);
1189     }
1190 
1191   return bnd_var;
1192 }
1193 
1194 /* If BND is an abnormal bounds copy, return a copied value.
1195    Otherwise return BND.  */
1196 static tree
1197 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1198 {
1199   if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1200     {
1201       gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1202       gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1203       bnd = gimple_assign_rhs1 (bnd_def);
1204     }
1205 
1206   return bnd;
1207 }
1208 
1209 /* Register bounds BND for object PTR in global bounds table.
1210    A copy of bounds may be created for abnormal ssa names.
1211    Returns bounds to use for PTR.  */
1212 static tree
1213 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1214 {
1215   bool abnormal_ptr;
1216 
1217   if (!chkp_reg_bounds)
1218     return bnd;
1219 
1220   /* Do nothing if bounds are incomplete_bounds
1221      because it means bounds will be recomputed.  */
1222   if (bnd == incomplete_bounds)
1223     return bnd;
1224 
1225   abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1226 		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1227 		  && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1228 
1229   /* A single bounds value may be reused multiple times for
1230      different pointer values.  It may cause coalescing issues
1231      for abnormal SSA names.  To avoid it we create a bounds
1232      copy in case it is computed for abnormal SSA name.
1233 
1234      We also cannot reuse such created copies for other pointers  */
1235   if (abnormal_ptr
1236       || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1237     {
1238       tree bnd_var = NULL_TREE;
1239 
1240       if (abnormal_ptr)
1241 	{
1242 	  if (SSA_NAME_VAR (ptr))
1243 	    bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1244 	}
1245       else
1246 	bnd_var = chkp_get_tmp_var ();
1247 
1248       /* For abnormal copies we may just find original
1249 	 bounds and use them.  */
1250       if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1251 	bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1252       /* For undefined values we usually use none bounds
1253 	 value but in case of abnormal edge it may cause
1254 	 coalescing failures.  Use default definition of
1255 	 bounds variable instead to avoid it.  */
1256       else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1257 	       && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1258 	{
1259 	  bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1260 
1261 	  if (dump_file && (dump_flags & TDF_DETAILS))
1262 	    {
1263 	      fprintf (dump_file, "Using default def bounds ");
1264 	      print_generic_expr (dump_file, bnd);
1265 	      fprintf (dump_file, " for abnormal default def SSA name ");
1266 	      print_generic_expr (dump_file, ptr);
1267 	      fprintf (dump_file, "\n");
1268 	    }
1269 	}
1270       else
1271 	{
1272 	  tree copy;
1273 	  gimple *def = SSA_NAME_DEF_STMT (ptr);
1274 	  gimple *assign;
1275 	  gimple_stmt_iterator gsi;
1276 
1277 	  if (bnd_var)
1278 	    copy = make_ssa_name (bnd_var);
1279 	  else
1280 	    copy = make_temp_ssa_name (pointer_bounds_type_node,
1281 				       NULL,
1282 				       CHKP_BOUND_TMP_NAME);
1283 	  bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1284 	  assign = gimple_build_assign (copy, bnd);
1285 
1286 	  if (dump_file && (dump_flags & TDF_DETAILS))
1287 	    {
1288 	      fprintf (dump_file, "Creating a copy of bounds ");
1289 	      print_generic_expr (dump_file, bnd);
1290 	      fprintf (dump_file, " for abnormal SSA name ");
1291 	      print_generic_expr (dump_file, ptr);
1292 	      fprintf (dump_file, "\n");
1293 	    }
1294 
1295 	  if (gimple_code (def) == GIMPLE_NOP)
1296 	    {
1297 	      gsi = gsi_last_bb (chkp_get_entry_block ());
1298 	      if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1299 		gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1300 	      else
1301 		gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1302 	    }
1303 	  else
1304 	    {
1305 	      gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1306 	      /* Sometimes (e.g. when we load a pointer from a
1307 		 memory) bounds are produced later than a pointer.
1308 		 We need to insert bounds copy appropriately.  */
1309 	      if (gimple_code (bnd_def) != GIMPLE_NOP
1310 		  && stmt_dominates_stmt_p (def, bnd_def))
1311 		gsi = gsi_for_stmt (bnd_def);
1312 	      else
1313 		gsi = gsi_for_stmt (def);
1314 	      gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1315 	    }
1316 
1317 	  bnd = copy;
1318 	}
1319 
1320       if (abnormal_ptr)
1321 	bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1322     }
1323 
1324   chkp_reg_bounds->put (ptr, bnd);
1325 
1326   if (dump_file && (dump_flags & TDF_DETAILS))
1327     {
1328       fprintf (dump_file, "Regsitered bound ");
1329       print_generic_expr (dump_file, bnd);
1330       fprintf (dump_file, " for pointer ");
1331       print_generic_expr (dump_file, ptr);
1332       fprintf (dump_file, "\n");
1333     }
1334 
1335   return bnd;
1336 }
1337 
1338 /* Get bounds registered for object PTR in global bounds table.  */
1339 static tree
1340 chkp_get_registered_bounds (tree ptr)
1341 {
1342   tree *slot;
1343 
1344   if (!chkp_reg_bounds)
1345     return NULL_TREE;
1346 
1347   slot = chkp_reg_bounds->get (ptr);
1348   return slot ? *slot : NULL_TREE;
1349 }
1350 
1351 /* Add bound retvals to return statement pointed by GSI.  */
1352 
1353 static void
1354 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1355 {
1356   greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1357   tree retval = gimple_return_retval (ret);
1358   tree ret_decl = DECL_RESULT (cfun->decl);
1359   tree bounds;
1360 
1361   if (!retval)
1362     return;
1363 
1364   if (BOUNDED_P (ret_decl))
1365     {
1366       bounds = chkp_find_bounds (retval, gsi);
1367       bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1368       gimple_return_set_retbnd (ret, bounds);
1369     }
1370 
1371   update_stmt (ret);
1372 }
1373 
1374 /* Force OP to be suitable for using as an argument for call.
1375    New statements (if any) go to SEQ.  */
1376 static tree
1377 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1378 {
1379   gimple_seq stmts;
1380   gimple_stmt_iterator si;
1381 
1382   op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1383 
1384   for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1385     chkp_mark_stmt (gsi_stmt (si));
1386 
1387   gimple_seq_add_seq (seq, stmts);
1388 
1389   return op;
1390 }
1391 
1392 /* Generate lower bound check for memory access by ADDR.
1393    Check is inserted before the position pointed by ITER.
1394    DIRFLAG indicates whether memory access is load or store.  */
1395 static void
1396 chkp_check_lower (tree addr, tree bounds,
1397 		  gimple_stmt_iterator iter,
1398 		  location_t location,
1399 		  tree dirflag)
1400 {
1401   gimple_seq seq;
1402   gimple *check;
1403   tree node;
1404 
1405   if (!chkp_function_instrumented_p (current_function_decl)
1406       && bounds == chkp_get_zero_bounds ())
1407     return;
1408 
1409   if (dirflag == integer_zero_node
1410       && !flag_chkp_check_read)
1411     return;
1412 
1413   if (dirflag == integer_one_node
1414       && !flag_chkp_check_write)
1415     return;
1416 
1417   seq = NULL;
1418 
1419   node = chkp_force_gimple_call_op (addr, &seq);
1420 
1421   check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1422   chkp_mark_stmt (check);
1423   gimple_call_set_with_bounds (check, true);
1424   gimple_set_location (check, location);
1425   gimple_seq_add_stmt (&seq, check);
1426 
1427   gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1428 
1429   if (dump_file && (dump_flags & TDF_DETAILS))
1430     {
1431       gimple *before = gsi_stmt (iter);
1432       fprintf (dump_file, "Generated lower bound check for statement ");
1433       print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1434       fprintf (dump_file, "  ");
1435       print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1436     }
1437 }
1438 
1439 /* Generate upper bound check for memory access by ADDR.
1440    Check is inserted before the position pointed by ITER.
1441    DIRFLAG indicates whether memory access is load or store.  */
1442 static void
1443 chkp_check_upper (tree addr, tree bounds,
1444 		  gimple_stmt_iterator iter,
1445 		  location_t location,
1446 		  tree dirflag)
1447 {
1448   gimple_seq seq;
1449   gimple *check;
1450   tree node;
1451 
1452   if (!chkp_function_instrumented_p (current_function_decl)
1453       && bounds == chkp_get_zero_bounds ())
1454     return;
1455 
1456   if (dirflag == integer_zero_node
1457       && !flag_chkp_check_read)
1458     return;
1459 
1460   if (dirflag == integer_one_node
1461       && !flag_chkp_check_write)
1462     return;
1463 
1464   seq = NULL;
1465 
1466   node = chkp_force_gimple_call_op (addr, &seq);
1467 
1468   check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1469   chkp_mark_stmt (check);
1470   gimple_call_set_with_bounds (check, true);
1471   gimple_set_location (check, location);
1472   gimple_seq_add_stmt (&seq, check);
1473 
1474   gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1475 
1476   if (dump_file && (dump_flags & TDF_DETAILS))
1477     {
1478       gimple *before = gsi_stmt (iter);
1479       fprintf (dump_file, "Generated upper bound check for statement ");
1480       print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1481       fprintf (dump_file, "  ");
1482       print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1483     }
1484 }
1485 
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1499 
1500 /* Replace call to _bnd_chk_* pointed by GSI with
1501    bndcu and bndcl calls.  DIRFLAG determines whether
1502    check is for read or write.  */
1503 
1504 void
1505 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1506 				    tree dirflag)
1507 {
1508   gimple_stmt_iterator call_iter = *gsi;
1509   gimple *call = gsi_stmt (*gsi);
1510   tree fndecl = gimple_call_fndecl (call);
1511   tree addr = gimple_call_arg (call, 0);
1512   tree bounds = chkp_find_bounds (addr, gsi);
1513 
1514   if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1515       || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1516     chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1517 
1518   if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1519     chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1520 
1521   if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1522     {
1523       tree size = gimple_call_arg (call, 1);
1524       addr = fold_build_pointer_plus (addr, size);
1525       addr = fold_build_pointer_plus_hwi (addr, -1);
1526       chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1527     }
1528 
1529   gsi_remove (&call_iter, true);
1530 }
1531 
1532 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1533    corresponding bounds extract call.  */
1534 
1535 void
1536 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1537 {
1538   gimple *call = gsi_stmt (*gsi);
1539   tree fndecl = gimple_call_fndecl (call);
1540   tree addr = gimple_call_arg (call, 0);
1541   tree bounds = chkp_find_bounds (addr, gsi);
1542   gimple *extract;
1543 
1544   if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1545     fndecl = chkp_extract_lower_fndecl;
1546   else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1547     fndecl = chkp_extract_upper_fndecl;
1548   else
1549     gcc_unreachable ();
1550 
1551   extract = gimple_build_call (fndecl, 1, bounds);
1552   gimple_call_set_lhs (extract, gimple_call_lhs (call));
1553   chkp_mark_stmt (extract);
1554 
1555   gsi_replace (gsi, extract, false);
1556 }
1557 
1558 /* Return COMPONENT_REF accessing FIELD in OBJ.  */
1559 static tree
1560 chkp_build_component_ref (tree obj, tree field)
1561 {
1562   tree res;
1563 
1564   /* If object is TMR then we do not use component_ref but
1565      add offset instead.  We need it to be able to get addr
1566      of the reasult later.  */
1567   if (TREE_CODE (obj) == TARGET_MEM_REF)
1568     {
1569       tree offs = TMR_OFFSET (obj);
1570       offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1571 				      offs, DECL_FIELD_OFFSET (field));
1572 
1573       gcc_assert (offs);
1574 
1575       res = copy_node (obj);
1576       TREE_TYPE (res) = TREE_TYPE (field);
1577       TMR_OFFSET (res) = offs;
1578     }
1579   else
1580     res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1581 
1582   return res;
1583 }
1584 
1585 /* Return ARRAY_REF for array ARR and index IDX with
1586    specified element type ETYPE and element size ESIZE.  */
1587 static tree
1588 chkp_build_array_ref (tree arr, tree etype, tree esize,
1589 		      unsigned HOST_WIDE_INT idx)
1590 {
1591   tree index = build_int_cst (size_type_node, idx);
1592   tree res;
1593 
1594   /* If object is TMR then we do not use array_ref but
1595      add offset instead.  We need it to be able to get addr
1596      of the reasult later.  */
1597   if (TREE_CODE (arr) == TARGET_MEM_REF)
1598     {
1599       tree offs = TMR_OFFSET (arr);
1600 
1601       esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1602 				     esize, index);
1603       gcc_assert(esize);
1604 
1605       offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1606 				    offs, esize);
1607       gcc_assert (offs);
1608 
1609       res = copy_node (arr);
1610       TREE_TYPE (res) = etype;
1611       TMR_OFFSET (res) = offs;
1612     }
1613   else
1614     res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1615 
1616   return res;
1617 }
1618 
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      /* A pointer element: compute its bounds once per slot.  */
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the element into a temporary SSA name so bounds
	     can be looked up for an SSA value.  */
	  tree temp = make_temp_ssa_name (type, NULL, "");
	  gimple *assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field at its bit offset within ELEM.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    /* Field offset = bit offset + byte offset * 8.  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip empty arrays (max index of -1 means zero elements).  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      /* Recurse into each array element at its bit offset.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
1690 
1691 /* Maximum number of elements to check in an array.  */
1692 
1693 #define CHKP_ARRAY_MAX_CHECK_STEPS    4096
1694 
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* A pointer occupies exactly one bound slot.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field at its bit offset within TYPE.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    /* Field offset = bit offset + byte offset * 8.  */
	    HOST_WIDE_INT field_offs = 0;
	    if (DECL_FIELD_BIT_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      /* The object type is an array of complete type, i.e., other
	 than a flexible array.  */
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with variable or unknown bounds and empty
	 arrays (max index of -1 means zero elements).  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      /* Recurse into elements, capped at CHKP_ARRAY_MAX_CHECK_STEPS
	 to bound compile time for huge arrays.  */
      for (cur = 0;
	  cur <= MIN (CHKP_ARRAY_MAX_CHECK_STEPS, TREE_INT_CST_LOW (maxval));
	  cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1746 
1747 /* Fill bitmap RES with information about bounds for
1748    type TYPE.  See chkp_find_bound_slots_1 for more
1749    details.  */
1750 void
1751 chkp_find_bound_slots (const_tree type, bitmap res)
1752 {
1753   bitmap_clear (res);
1754   chkp_find_bound_slots_1 (type, res, 0);
1755 }
1756 
1757 /* Return 1 if call to FNDECL should be instrumented
1758    and 0 otherwise.  */
1759 
1760 static bool
1761 chkp_instrument_normal_builtin (tree fndecl)
1762 {
1763   switch (DECL_FUNCTION_CODE (fndecl))
1764     {
1765     case BUILT_IN_STRLEN:
1766     case BUILT_IN_STRCPY:
1767     case BUILT_IN_STRNCPY:
1768     case BUILT_IN_STPCPY:
1769     case BUILT_IN_STPNCPY:
1770     case BUILT_IN_STRCAT:
1771     case BUILT_IN_STRNCAT:
1772     case BUILT_IN_MEMCPY:
1773     case BUILT_IN_MEMPCPY:
1774     case BUILT_IN_MEMSET:
1775     case BUILT_IN_MEMMOVE:
1776     case BUILT_IN_BZERO:
1777     case BUILT_IN_STRCMP:
1778     case BUILT_IN_STRNCMP:
1779     case BUILT_IN_BCMP:
1780     case BUILT_IN_MEMCMP:
1781     case BUILT_IN_MEMCPY_CHK:
1782     case BUILT_IN_MEMPCPY_CHK:
1783     case BUILT_IN_MEMMOVE_CHK:
1784     case BUILT_IN_MEMSET_CHK:
1785     case BUILT_IN_STRCPY_CHK:
1786     case BUILT_IN_STRNCPY_CHK:
1787     case BUILT_IN_STPCPY_CHK:
1788     case BUILT_IN_STPNCPY_CHK:
1789     case BUILT_IN_STRCAT_CHK:
1790     case BUILT_IN_STRNCAT_CHK:
1791     case BUILT_IN_MALLOC:
1792     case BUILT_IN_CALLOC:
1793     case BUILT_IN_REALLOC:
1794       return 1;
1795 
1796     default:
1797       return 0;
1798     }
1799 }
1800 
1801 /* Add bound arguments to call statement pointed by GSI.
1802    Also performs a replacement of user checker builtins calls
1803    with internal ones.  */
1804 
1805 static void
1806 chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
1807 {
1808   gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
1809   unsigned arg_no = 0;
1810   tree fndecl = gimple_call_fndecl (call);
1811   tree fntype;
1812   tree first_formal_arg;
1813   tree arg;
1814   bool use_fntype = false;
1815   tree op;
1816   ssa_op_iter iter;
1817   gcall *new_call;
1818 
1819   /* Do nothing for internal functions.  */
1820   if (gimple_call_internal_p (call))
1821     return;
1822 
1823   fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));
1824 
1825   /* Do nothing if back-end builtin is called.  */
1826   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
1827     return;
1828 
1829   /* Do nothing for some middle-end builtins.  */
1830   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1831       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
1832     return;
1833 
1834   /* Do nothing for calls to not instrumentable functions.  */
1835   if (fndecl && !chkp_instrumentable_p (fndecl))
1836     return;
1837 
1838   /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
1839      and CHKP_COPY_PTR_BOUNDS.  */
1840   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1841       && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
1842 	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
1843 	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
1844 	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
1845     return;
1846 
1847   /* Check user builtins are replaced with checks.  */
1848   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1849       && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1850 	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
1851 	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
1852     {
1853       chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
1854       return;
1855     }
1856 
1857   /* Check user builtins are replaced with bound extract.  */
1858   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1859       && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
1860 	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
1861     {
1862       chkp_replace_extract_builtin (gsi);
1863       return;
1864     }
1865 
1866   /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
1867      target narrow bounds call.  */
1868   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1869       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
1870     {
1871       tree arg = gimple_call_arg (call, 1);
1872       tree bounds = chkp_find_bounds (arg, gsi);
1873 
1874       gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
1875       gimple_call_set_arg (call, 1, bounds);
1876       update_stmt (call);
1877 
1878       return;
1879     }
1880 
1881   /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
1882      bndstx call.  */
1883   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1884       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
1885     {
1886       tree addr = gimple_call_arg (call, 0);
1887       tree ptr = gimple_call_arg (call, 1);
1888       tree bounds = chkp_find_bounds (ptr, gsi);
1889       gimple_stmt_iterator iter = gsi_for_stmt (call);
1890 
1891       chkp_build_bndstx (addr, ptr, bounds, gsi);
1892       gsi_remove (&iter, true);
1893 
1894       return;
1895     }
1896 
1897   if (!flag_chkp_instrument_calls)
1898     return;
1899 
1900   /* We instrument only some subset of builtins.  We also instrument
1901      builtin calls to be inlined.  */
1902   if (fndecl
1903       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1904       && !chkp_instrument_normal_builtin (fndecl))
1905     {
1906       if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
1907 	return;
1908 
1909       struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
1910       if (!clone
1911 	  || !gimple_has_body_p (clone->decl))
1912 	return;
1913     }
1914 
1915   /* If function decl is available then use it for
1916      formal arguments list.  Otherwise use function type.  */
1917   if (fndecl
1918       && DECL_ARGUMENTS (fndecl)
1919       && gimple_call_fntype (call) == TREE_TYPE (fndecl))
1920     first_formal_arg = DECL_ARGUMENTS (fndecl);
1921   else
1922     {
1923       first_formal_arg = TYPE_ARG_TYPES (fntype);
1924       use_fntype = true;
1925     }
1926 
1927   /* Fill vector of new call args.  */
1928   vec<tree> new_args = vNULL;
1929   new_args.create (gimple_call_num_args (call));
1930   arg = first_formal_arg;
1931   for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
1932     {
1933       tree call_arg = gimple_call_arg (call, arg_no);
1934       tree type;
1935 
1936       /* Get arg type using formal argument description
1937 	 or actual argument type.  */
1938       if (arg)
1939 	if (use_fntype)
1940 	  if (TREE_VALUE (arg) != void_type_node)
1941 	    {
1942 	      type = TREE_VALUE (arg);
1943 	      arg = TREE_CHAIN (arg);
1944 	    }
1945 	  else
1946 	    type = TREE_TYPE (call_arg);
1947 	else
1948 	  {
1949 	    type = TREE_TYPE (arg);
1950 	    arg = TREE_CHAIN (arg);
1951 	  }
1952       else
1953 	type = TREE_TYPE (call_arg);
1954 
1955       new_args.safe_push (call_arg);
1956 
1957       if (BOUNDED_TYPE_P (type)
1958 	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
1959 	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
1960       else if (chkp_type_has_pointer (type))
1961 	{
1962 	  HOST_WIDE_INT max_bounds
1963 	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
1964 	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
1965 	  HOST_WIDE_INT bnd_no;
1966 
1967 	  memset (all_bounds, 0, sizeof (tree) * max_bounds);
1968 
1969 	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);
1970 
1971 	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
1972 	    if (all_bounds[bnd_no])
1973 	      new_args.safe_push (all_bounds[bnd_no]);
1974 
1975            free (all_bounds);
1976 	}
1977     }
1978 
1979   if (new_args.length () == gimple_call_num_args (call))
1980     new_call = call;
1981   else
1982     {
1983       new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
1984       gimple_call_set_lhs (new_call, gimple_call_lhs (call));
1985       gimple_call_copy_flags (new_call, call);
1986       gimple_call_set_chain (new_call, gimple_call_chain (call));
1987     }
1988   new_args.release ();
1989 
1990   /* For direct calls fndecl is replaced with instrumented version.  */
1991   if (fndecl)
1992     {
1993       tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
1994       gimple_call_set_fndecl (new_call, new_decl);
1995       /* In case of a type cast we should modify used function
1996 	 type instead of using type of new fndecl.  */
1997       if (gimple_call_fntype (call) != TREE_TYPE (fndecl))
1998 	{
1999 	  tree type = gimple_call_fntype (call);
2000 	  type = chkp_copy_function_type_adding_bounds (type);
2001 	  gimple_call_set_fntype (new_call, type);
2002 	}
2003       else
2004 	gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
2005     }
2006   /* For indirect call we should fix function pointer type if
2007      pass some bounds.  */
2008   else if (new_call != call)
2009     {
2010       tree type = gimple_call_fntype (call);
2011       type = chkp_copy_function_type_adding_bounds (type);
2012       gimple_call_set_fntype (new_call, type);
2013     }
2014 
2015   /* replace old call statement with the new one.  */
2016   if (call != new_call)
2017     {
2018       FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
2019 	{
2020 	  SSA_NAME_DEF_STMT (op) = new_call;
2021 	}
2022       gsi_replace (gsi, new_call, true);
2023     }
2024   else
2025     update_stmt (new_call);
2026 
2027   gimple_call_set_with_bounds (new_call, true);
2028 }
2029 
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exist then a new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  /* Build a public static VAR_DECL of the bounds type; the name
     must match across units so LTO can share one copy.  */
  var  = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		     pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires %qs "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* Ask the target to build the constant initializer for [LB, UB].  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
2078 
/* Generate code to make bounds with specified lower bound LB and SIZE.
   If AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  tree bounds;

  /* Choose insertion point: the caller-provided iterator, or the
     start of the function's entry block.  */
  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* Make sure both builtin call operands are valid gimple values,
     accumulating any fixup statements into SEQ.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build a bndmk builtin call and mark it as instrumentation.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER is only honored when an explicit iterator was given.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
2131 
2132 /* Return var holding zero bounds.  */
2133 tree
2134 chkp_get_zero_bounds_var (void)
2135 {
2136   if (!chkp_zero_bounds_var)
2137     chkp_zero_bounds_var
2138       = chkp_make_static_const_bounds (0, -1,
2139 				       CHKP_ZERO_BOUNDS_VAR_NAME);
2140   return chkp_zero_bounds_var;
2141 }
2142 
2143 /* Return var holding none bounds.  */
2144 tree
2145 chkp_get_none_bounds_var (void)
2146 {
2147   if (!chkp_none_bounds_var)
2148     chkp_none_bounds_var
2149       = chkp_make_static_const_bounds (-1, 0,
2150 				       CHKP_NONE_BOUNDS_VAR_NAME);
2151   return chkp_none_bounds_var;
2152 }
2153 
/* Return SSA_NAME used to represent zero bounds.
   The result is cached in ZERO_BOUNDS for the current function.  */
static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  /* When static const bounds are in use, load the value from the
     static zero-bounds variable at function entry; otherwise
     build it with a bndmk call (lb 0, size 0).  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      zero_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
2182 
/* Return SSA_NAME used to represent none bounds.
   The result is cached in NONE_BOUNDS for the current function.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");


  /* When static const bounds are in use, load the value from the
     static none-bounds variable at function entry; otherwise
     build it with a bndmk call (lb -1, size 2).  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      none_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
2212 
/* Return bounds to be used as a result of operation which
   should not create a pointer (e.g. MULT_EXPR).
   Zero bounds are used for such values.  */
static tree
chkp_get_invalid_op_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2220 
/* Return bounds to be used for loads of non-pointer values.
   Zero bounds are used for such values.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2227 
/* Return 1 if may use bndret call to get bounds for pointer
   returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  if (gimple_call_internal_p (call))
    {
      /* Among internal functions only VA_ARG is handled here.  */
      if (gimple_call_internal_fn (call) == IFN_VA_ARG)
	return true;
      return false;
    }

  /* Narrowing builtins produce bounds.  */
  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* Calls already carrying bounds information do too.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Machine-dependent builtins are not instrumented.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  /* Neither are functions marked as not instrumentable.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      /* Only a subset of normal builtins is instrumented.  */
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* Other builtins return bounds only if an always_inline
	 instrumented clone with a body exists.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  return true;
}
2269 
/* Build bounds returned by CALL.  Special-cases alloca and the
   chkp builtins whose returned bounds are known; falls back to a
   bndret builtin call, or zero bounds when no bounds can be
   obtained.  Resulting bounds are registered for the call's LHS.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple *stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;
  tree lhs = gimple_call_lhs (call);

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    {
      /* alloca's result points to a fresh block: bounds are
	 (LHS, size) built right after the call.  */
      tree size = gimple_call_arg (call, 0);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lhs, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts non-bounds arguments only; translate it
	     into a real argument index by skipping bounds args.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call)
	   && BOUNDED_P (lhs))
    {
      gcc_assert (TREE_CODE (lhs) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS | TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (lhs, bounds);

  return bounds;
}
2376 
/* Return bounds used as returned by call
   which produced SSA name VAL.
   Returns the bndret call consuming VAL, or NULL if none found.  */
gcall *
chkp_retbnd_call_by_val (tree val)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL;

  gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);

  /* Scan immediate uses of VAL for a bndret builtin call.  */
  imm_use_iterator use_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
    if (chkp_gimple_call_builtin_p (USE_STMT (use_p), BUILT_IN_CHKP_BNDRET))
      return as_a <gcall *> (USE_STMT (use_p));

  return NULL;
}
2395 
2396 /* Check the next parameter for the given PARM is bounds
2397    and return it's default SSA_NAME (create if required).  */
2398 static tree
2399 chkp_get_next_bounds_parm (tree parm)
2400 {
2401   tree bounds = TREE_CHAIN (parm);
2402   gcc_assert (POINTER_BOUNDS_P (bounds));
2403   bounds = ssa_default_def (cfun, bounds);
2404   if (!bounds)
2405     {
2406       bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2407       set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2408     }
2409   return bounds;
2410 }
2411 
/* Return bounds to be used for input argument PARM.
   PARM is the default SSA_NAME of a PARM_DECL.  Bounds are looked
   up among already registered ones first, created otherwise, and
   registered for PARM before returning.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Look for bounds registered for the SSA name first,
     then for the underlying PARM_DECL.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && id_equal (DECL_ASSEMBLER_NAME (orig_decl), "main"))
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Bounds for a pointer parameter come from the following
	     bounds parameter in the instrumented signature.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  /* Make sure the bounds are registered for the SSA name itself.  */
  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm));
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2474 
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments.  (Original comment said "bndstx"; this builds a
   bounds-load call.)  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call as carrying bounds information.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
2488 
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple *stmt;
  tree bounds;

  seq = NULL;

  /* Make sure builtin call operands are valid gimple values.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  /* Build the bndldx call and mark it as instrumentation.  */
  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* GSI_CONTINUE_LINKING leaves GSI on the last inserted statement.  */
  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS | TDF_MEMSYMS);
    }

  return bounds;
}
2523 
/* Build and return CALL_EXPR for bndstx builtin with specified
   arguments.  Note the builtin argument order is (PTR, BOUNDS, ADDR).  */
tree
chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
		    chkp_bndstx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
			       fn, 3, ptr, bounds, addr);
  /* Mark the call as carrying bounds information.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
2537 
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple *stmt;

  seq = NULL;

  /* Make sure builtin call operands are valid gimple values.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  /* Builtin argument order is (PTR, BOUNDS, ADDR).  */
  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
2568 
/* This function is called when call statement
   is inlined and therefore we can't use bndret
   for its LHS anymore.  Function fixes bndret
   call using new RHS value if possible.  */
void
chkp_fixup_inlined_call (tree lhs, tree rhs)
{
  tree addr, bounds;
  gcall *retbnd, *bndldx;

  /* Nothing to do if LHS carries no bounds.  */
  if (!BOUNDED_P (lhs))
    return;

  /* Search for retbnd call.  */
  retbnd = chkp_retbnd_call_by_val (lhs);
  if (!retbnd)
    return;

  /* Currently only handle cases when call is replaced
     with a memory access.  In this case bndret call
     may be replaced with bndldx call.  Otherwise we
     have to search for bounds which may cause wrong
     result due to various optimizations applied.  */
  switch (TREE_CODE (rhs))
    {
    case VAR_DECL:
      /* Cannot take the address of a register variable.  */
      if (DECL_REGISTER (rhs))
	return;
      break;

    case MEM_REF:
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      /* The base must be addressable memory.  */
      addr = get_base_address (rhs);
      if (!DECL_P (addr)
	  && TREE_CODE (addr) != MEM_REF)
	return;
      if (DECL_P (addr) && DECL_REGISTER (addr))
	return;
      break;

    default:
      return;
    }

  /* Create a new statements sequence with bndldx call.  */
  gimple_stmt_iterator gsi = gsi_for_stmt (retbnd);
  addr = build_fold_addr_expr (rhs);
  chkp_build_bndldx (addr, lhs, &gsi);
  /* chkp_build_bndldx inserts with GSI_CONTINUE_LINKING, so GSI
     now points to the freshly inserted bndldx call.  */
  bndldx = as_a <gcall *> (gsi_stmt (gsi));

  /* Remove bndret call.  */
  bounds = gimple_call_lhs (retbnd);
  gsi = gsi_for_stmt (retbnd);
  gsi_remove (&gsi, true);

  /* Link new bndldx call.  */
  gimple_call_set_lhs (bndldx, bounds);
  update_stmt (bndldx);
}
2631 
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.
   Bounds are registered for NODE when it is non-NULL.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple *assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);
  /* RHS operand the resulting bounds were taken from (if any);
     used at the end to detect abnormal SSA name sources.  */
  tree base = NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      base = rhs1;
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* bndldx takes the param's address, so it must be
	     addressable.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();

	/* Remember which operand the chosen bounds came from.  */
	base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case POINTER_DIFF_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple *stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime using the same condition.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds matching the operand MAX/MIN selects.  */
	    gimple *stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  /* We may reuse bounds of other pointer we copy/modify.  But it is not
     allowed for abnormal ssa names.  If we produced a pointer using
     abnormal ssa name, we better make a bounds copy to avoid coalescing
     issues.  */
  if (base
      && TREE_CODE (base) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
    {
      gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
      gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
      bounds = gimple_assign_lhs (stmt);
    }

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
2855 
2856 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2857 
2858    There are just few statement codes allowed: NOP (for default ssa names),
2859    ASSIGN, CALL, PHI, ASM.
2860 
2861    Return computed bounds.  */
2862 static tree
2863 chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
2864 			       gphi_iterator *iter)
2865 {
2866   tree var, bounds;
2867   enum gimple_code code = gimple_code (def_stmt);
2868   gphi *stmt;
2869 
2870   if (dump_file && (dump_flags & TDF_DETAILS))
2871     {
2872       fprintf (dump_file, "Searching for bounds for node: ");
2873       print_generic_expr (dump_file, node);
2874 
2875       fprintf (dump_file, " using its definition: ");
2876       print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS | TDF_MEMSYMS);
2877     }
2878 
2879   switch (code)
2880     {
2881     case GIMPLE_NOP:
2882       var = SSA_NAME_VAR (node);
2883       switch (TREE_CODE (var))
2884 	{
2885 	case PARM_DECL:
2886 	  bounds = chkp_get_bound_for_parm (node);
2887 	  break;
2888 
2889 	case VAR_DECL:
2890 	  /* For uninitialized pointers use none bounds.  */
2891 	  bounds = chkp_get_none_bounds ();
2892 	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2893 	  break;
2894 
2895 	case RESULT_DECL:
2896 	  {
2897 	    tree base_type;
2898 
2899 	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2900 
2901 	    base_type = TREE_TYPE (TREE_TYPE (node));
2902 
2903 	    gcc_assert (TYPE_SIZE (base_type)
2904 			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2905 			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2906 
2907 	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2908 				       NULL, false);
2909 	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2910 	  }
2911 	  break;
2912 
2913 	default:
2914 	  if (dump_file && (dump_flags & TDF_DETAILS))
2915 	    {
2916 	      fprintf (dump_file, "Unexpected var with no definition\n");
2917 	      print_generic_expr (dump_file, var);
2918 	    }
2919 	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2920 			  get_tree_code_name (TREE_CODE (var)));
2921 	}
2922       break;
2923 
2924     case GIMPLE_ASSIGN:
2925       bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2926       break;
2927 
2928     case GIMPLE_CALL:
2929       bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2930       break;
2931 
2932     case GIMPLE_PHI:
2933       if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2934 	if (SSA_NAME_VAR (node))
2935 	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2936 	else
2937 	  var = make_temp_ssa_name (pointer_bounds_type_node,
2938 				    NULL,
2939 				    CHKP_BOUND_TMP_NAME);
2940       else
2941 	var = chkp_get_tmp_var ();
2942       stmt = create_phi_node (var, gimple_bb (def_stmt));
2943       bounds = gimple_phi_result (stmt);
2944       *iter = gsi_for_phi (stmt);
2945 
2946       bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2947 
2948       /* Created bounds do not have all phi args computed and
2949 	 therefore we do not know if there is a valid source
2950 	 of bounds for that node.  Therefore we mark bounds
2951 	 as incomplete and then recompute them when all phi
2952 	 args are computed.  */
2953       chkp_register_incomplete_bounds (bounds, node);
2954       break;
2955 
2956     case GIMPLE_ASM:
2957       bounds = chkp_get_zero_bounds ();
2958       bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2959       break;
2960 
2961     default:
2962       internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2963 		      gimple_code_name[code]);
2964     }
2965 
2966   return bounds;
2967 }
2968 
2969 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE.  */
2970 tree
2971 chkp_build_make_bounds_call (tree lower_bound, tree size)
2972 {
2973   tree call = build1 (ADDR_EXPR,
2974 		      build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2975 		      chkp_bndmk_fndecl);
2976   return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2977 			  call, 2, lower_bound, size);
2978 }
2979 
2980 /* Create static bounds var of specfified OBJ which is
2981    is either VAR_DECL or string constant.  */
2982 static tree
2983 chkp_make_static_bounds (tree obj)
2984 {
2985   static int string_id = 1;
2986   static int var_id = 1;
2987   tree *slot;
2988   const char *var_name;
2989   char *bnd_var_name;
2990   tree bnd_var;
2991 
2992   /* First check if we already have required var.  */
2993   if (chkp_static_var_bounds)
2994     {
2995       /* For vars we use assembler name as a key in
2996 	 chkp_static_var_bounds map.  It allows to
2997 	 avoid duplicating bound vars for decls
2998 	 sharing assembler name.  */
2999       if (VAR_P (obj))
3000 	{
3001 	  tree name = DECL_ASSEMBLER_NAME (obj);
3002 	  slot = chkp_static_var_bounds->get (name);
3003 	  if (slot)
3004 	    return *slot;
3005 	}
3006       else
3007 	{
3008 	  slot = chkp_static_var_bounds->get (obj);
3009 	  if (slot)
3010 	    return *slot;
3011 	}
3012     }
3013 
3014   /* Build decl for bounds var.  */
3015   if (VAR_P (obj))
3016     {
3017       if (DECL_IGNORED_P (obj))
3018 	{
3019 	  bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
3020 	  sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
3021 	}
3022       else
3023 	{
3024 	  var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
3025 
3026 	  /* For hidden symbols we want to skip first '*' char.  */
3027 	  if (*var_name == '*')
3028 	    var_name++;
3029 
3030 	  bnd_var_name = (char *) xmalloc (strlen (var_name)
3031 					   + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
3032 	  strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
3033 	  strcat (bnd_var_name, var_name);
3034 	}
3035 
3036       bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3037 			    get_identifier (bnd_var_name),
3038 			    pointer_bounds_type_node);
3039 
3040       /* Address of the obj will be used as lower bound.  */
3041       TREE_ADDRESSABLE (obj) = 1;
3042     }
3043   else
3044     {
3045       bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
3046       sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
3047 
3048       bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
3049 			    get_identifier (bnd_var_name),
3050 			    pointer_bounds_type_node);
3051     }
3052 
3053   free (bnd_var_name);
3054 
3055   TREE_PUBLIC (bnd_var) = 0;
3056   TREE_USED (bnd_var) = 1;
3057   TREE_READONLY (bnd_var) = 0;
3058   TREE_STATIC (bnd_var) = 1;
3059   TREE_ADDRESSABLE (bnd_var) = 0;
3060   DECL_ARTIFICIAL (bnd_var) = 1;
3061   DECL_COMMON (bnd_var) = 1;
3062   DECL_COMDAT (bnd_var) = 1;
3063   DECL_READ_P (bnd_var) = 1;
3064   DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
3065   /* Force output similar to constant bounds.
3066      See chkp_make_static_const_bounds. */
3067   varpool_node::get_create (bnd_var)->force_output = 1;
3068   /* Mark symbol as requiring bounds initialization.  */
3069   varpool_node::get_create (bnd_var)->need_bounds_init = 1;
3070   varpool_node::finalize_decl (bnd_var);
3071 
3072   /* Add created var to the map to use it for other references
3073      to obj.  */
3074   if (!chkp_static_var_bounds)
3075     chkp_static_var_bounds = new hash_map<tree, tree>;
3076 
3077   if (VAR_P (obj))
3078     {
3079       tree name = DECL_ASSEMBLER_NAME (obj);
3080       chkp_static_var_bounds->put (name, bnd_var);
3081     }
3082   else
3083     chkp_static_var_bounds->put (obj, bnd_var);
3084 
3085   return bnd_var;
3086 }
3087 
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.
   Generated statements are placed in the entry block.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple *stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var);
      fprintf (dump_file, "'\n");
    }

  /* Call the checker builtin to obtain VAR's size at runtime.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  /* Lower bound is the address of the var itself.  */
  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      /* size = (size_reloc != 0) ? size_reloc : max_size.  */
      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* Emit the whole size-computation sequence in the entry block.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
3148 
3149 /* Return 1 if TYPE has fields with zero size or fields
3150    marked with chkp_variable_size attribute.  */
3151 bool
3152 chkp_variable_size_type (tree type)
3153 {
3154   bool res = false;
3155   tree field;
3156 
3157   if (RECORD_OR_UNION_TYPE_P (type))
3158     for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3159       {
3160 	if (TREE_CODE (field) == FIELD_DECL)
3161 	  res = res
3162 	    || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3163 	    || chkp_variable_size_type (TREE_TYPE (field));
3164       }
3165   else
3166     res = !TYPE_SIZE (type)
3167       || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3168       || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3169 
3170   return res;
3171 }
3172 
3173 /* Compute and return bounds for address of DECL which is
3174    one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
3175 static tree
3176 chkp_get_bounds_for_decl_addr (tree decl)
3177 {
3178   tree bounds;
3179 
3180   gcc_assert (VAR_P (decl)
3181 	      || TREE_CODE (decl) == PARM_DECL
3182 	      || TREE_CODE (decl) == RESULT_DECL);
3183 
3184   bounds = chkp_get_registered_addr_bounds (decl);
3185 
3186   if (bounds)
3187     return bounds;
3188 
3189   if (dump_file && (dump_flags & TDF_DETAILS))
3190     {
3191       fprintf (dump_file, "Building bounds for address of decl ");
3192       print_generic_expr (dump_file, decl);
3193       fprintf (dump_file, "\n");
3194     }
3195 
3196   /* Use zero bounds if size is unknown and checks for
3197      unknown sizes are restricted.  */
3198   if ((!DECL_SIZE (decl)
3199        || (chkp_variable_size_type (TREE_TYPE (decl))
3200 	   && (TREE_STATIC (decl)
3201 	       || DECL_EXTERNAL (decl)
3202 	       || TREE_PUBLIC (decl))))
3203       && !flag_chkp_incomplete_type)
3204       return chkp_get_zero_bounds ();
3205 
3206   if (VOID_TYPE_P (TREE_TYPE (decl)))
3207     return chkp_get_zero_bounds ();
3208 
3209   if (flag_chkp_use_static_bounds
3210       && VAR_P (decl)
3211       && (TREE_STATIC (decl)
3212 	      || DECL_EXTERNAL (decl)
3213 	      || TREE_PUBLIC (decl))
3214       && !DECL_THREAD_LOCAL_P (decl))
3215     {
3216       tree bnd_var = chkp_make_static_bounds (decl);
3217       gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3218       gimple *stmt;
3219 
3220       bounds = chkp_get_tmp_reg (NULL);
3221       stmt = gimple_build_assign (bounds, bnd_var);
3222       gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3223     }
3224   else if (!DECL_SIZE (decl)
3225       || (chkp_variable_size_type (TREE_TYPE (decl))
3226 	  && (TREE_STATIC (decl)
3227 	      || DECL_EXTERNAL (decl)
3228 	      || TREE_PUBLIC (decl))))
3229     {
3230       gcc_assert (VAR_P (decl));
3231       bounds = chkp_generate_extern_var_bounds (decl);
3232     }
3233   else
3234     {
3235       tree lb = chkp_build_addr_expr (decl);
3236       bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3237     }
3238 
3239   return bounds;
3240 }
3241 
3242 /* Compute and return bounds for constant string.  */
3243 static tree
3244 chkp_get_bounds_for_string_cst (tree cst)
3245 {
3246   tree bounds;
3247   tree lb;
3248   tree size;
3249 
3250   gcc_assert (TREE_CODE (cst) == STRING_CST);
3251 
3252   bounds = chkp_get_registered_bounds (cst);
3253 
3254   if (bounds)
3255     return bounds;
3256 
3257   if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3258       || flag_chkp_use_static_const_bounds > 0)
3259     {
3260       tree bnd_var = chkp_make_static_bounds (cst);
3261       gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3262       gimple *stmt;
3263 
3264       bounds = chkp_get_tmp_reg (NULL);
3265       stmt = gimple_build_assign (bounds, bnd_var);
3266       gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3267     }
3268   else
3269     {
3270       lb = chkp_build_addr_expr (cst);
3271       size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3272       bounds = chkp_make_bounds (lb, size, NULL, false);
3273     }
3274 
3275   bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3276 
3277   return bounds;
3278 }
3279 
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before the position pointed to by ITER.  Otherwise code is added
   to the entry block.

   A NULL value or zero bounds (no checking) on either side makes
   the intersection trivial and no code is emitted.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple *stmt;
      tree bounds;

      seq = NULL;

      /* Emit a call to the bounds-intersection builtin.  */
      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      /* Mark as a checker-generated statement.  */
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3330 
3331 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3332    and 0 othersize.  REF is reference to the field.  */
3333 
3334 static bool
3335 chkp_may_narrow_to_field (tree ref, tree field)
3336 {
3337   return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3338     && tree_to_uhwi (DECL_SIZE (field)) != 0
3339     && !(flag_chkp_flexible_struct_trailing_arrays
3340 	 && array_at_struct_end_p (ref))
3341     && (!DECL_FIELD_OFFSET (field)
3342 	|| TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3343     && (!DECL_FIELD_BIT_OFFSET (field)
3344 	|| TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3345     && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3346     && !chkp_variable_size_type (TREE_TYPE (field));
3347 }
3348 
3349 /* Return 1 if bounds for FIELD should be narrowed to
3350    field's own size.  REF is reference to the field.  */
3351 
3352 static bool
3353 chkp_narrow_bounds_for_field (tree ref, tree field)
3354 {
3355   HOST_WIDE_INT offs;
3356   HOST_WIDE_INT bit_offs;
3357 
3358   if (!chkp_may_narrow_to_field (ref, field))
3359     return false;
3360 
3361   /* Access to compiler generated fields should not cause
3362      bounds narrowing.  */
3363   if (DECL_ARTIFICIAL (field))
3364     return false;
3365 
3366   offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3367   bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3368 
3369   return (flag_chkp_narrow_bounds
3370 	  && (flag_chkp_first_field_has_own_bounds
3371 	      || offs
3372 	      || bit_offs));
3373 }
3374 
3375 /* Perform narrowing for BOUNDS of an INNER reference.  Shift boundary
3376    by OFFSET bytes and limit to SIZE bytes.  Newly created statements are
3377    added to ITER.  */
3378 
3379 static tree
3380 chkp_narrow_size_and_offset (tree bounds, tree inner, tree offset,
3381 			     tree size, gimple_stmt_iterator *iter)
3382 {
3383   tree addr = chkp_build_addr_expr (unshare_expr (inner));
3384   tree t = TREE_TYPE (addr);
3385 
3386   gimple *stmt = gimple_build_assign (NULL_TREE, addr);
3387   addr = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
3388   gimple_assign_set_lhs (stmt, addr);
3389   gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);
3390 
3391   stmt = gimple_build_assign (NULL_TREE, POINTER_PLUS_EXPR, addr, offset);
3392   tree shifted = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
3393   gimple_assign_set_lhs (stmt, shifted);
3394   gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);
3395 
3396   tree bounds2 = chkp_make_bounds (shifted, size, iter, false);
3397 
3398   return chkp_intersect_bounds (bounds, bounds2, iter);
3399 }
3400 
3401 /* Perform narrowing for BOUNDS using bounds computed for field
3402    access COMPONENT.  ITER meaning is the same as for
3403    chkp_intersect_bounds.  */
3404 
3405 static tree
3406 chkp_narrow_bounds_to_field (tree bounds, tree component,
3407 			    gimple_stmt_iterator *iter)
3408 {
3409   tree field = TREE_OPERAND (component, 1);
3410   tree size = DECL_SIZE_UNIT (field);
3411   tree field_ptr = chkp_build_addr_expr (component);
3412   tree field_bounds;
3413 
3414   field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3415 
3416   return chkp_intersect_bounds (field_bounds, bounds, iter);
3417 }
3418 
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR
	 || TREE_CODE (var) == BIT_FIELD_REF)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  /* A bit field access is either a COMPONENT_REF of a bit field
     decl or an explicit BIT_FIELD_REF.  */
  *bitfield = ((TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)))
	       || TREE_CODE (node) == BIT_FIELD_REF);
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      gcc_assert (VAR_P (var)
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);

      /* For hard register cases chkp_build_addr_expr returns INTEGER_CST
	 and later on chkp_find_bounds will fail to find proper bounds.
	 In order to avoid that, we find/create bounds right aways using
	 the var itself.  */
      if (VAR_P (var) && DECL_HARD_REGISTER (var))
	*bounds = chkp_make_addressed_object_bounds (var, iter);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array indexing may go out of the object.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (var,
					       TREE_OPERAND (last_comp, 1))))
	    {
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (var, field))
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      /* Narrow right away to the array field and forget any
		 previously chosen component.  */
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == BIT_FIELD_REF)
	{
	  /* Narrow to the bytes covered by the bit field.  */
	  if (flag_chkp_narrow_bounds && bounds)
	    {
	      tree offset, size;
	      chkp_parse_bit_field_ref (var, UNKNOWN_LOCATION, &offset, &size);
	      *bounds
		= chkp_narrow_size_and_offset (*bounds, TREE_OPERAND (var, 0),
					       offset, size, iter);
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3581 
3582 /* Parse BIT_FIELD_REF to a NODE for a given location LOC.  Return OFFSET
3583    and SIZE in bytes.  */
3584 
3585 static
3586 void chkp_parse_bit_field_ref (tree node, location_t loc, tree *offset,
3587 			       tree *size)
3588 {
3589   tree bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3590   tree offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3591   tree rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3592   offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3593 
3594   tree s = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3595   s = size_binop_loc (loc, PLUS_EXPR, s, rem);
3596   s = size_binop_loc (loc, CEIL_DIV_EXPR, s, bpu);
3597   s = fold_convert (size_type_node, s);
3598 
3599   *offset = offs;
3600   *size = s;
3601 }
3602 
/* Compute and return bounds for address of OBJ.  ITER is used
   when new statements have to be emitted (e.g. for narrowing).  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  /* Reuse previously computed address bounds if any.  */
  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Bounds may get narrowed to the accessed component here
	   (innermost_bounds is true).  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Functions and labels get zero bounds (no checking).  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* Address of *p has the same bounds as p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Use bounds of the underlying complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  /* Cache the result for further queries about OBJ.  */
  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3671 
3672 /* Compute bounds for pointer PTR loaded from PTR_SRC.  Generate statements
3673    to compute bounds if required.  Computed bounds should be available at
3674    position pointed by ITER.
3675 
3676    If PTR_SRC is NULL_TREE then pointer definition is identified.
3677 
3678    If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3679    PTR.  If PTR is a any memory reference then ITER points to a statement
3680    after which bndldx will be inserterd.  In both cases ITER will be updated
3681    to point to the inserted bndldx statement.  */
3682 
3683 static tree
3684 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3685 {
3686   tree addr = NULL_TREE;
3687   tree bounds = NULL_TREE;
3688 
3689   if (!ptr_src)
3690     ptr_src = ptr;
3691 
3692   bounds = chkp_get_registered_bounds (ptr_src);
3693 
3694   if (bounds)
3695     return bounds;
3696 
3697   switch (TREE_CODE (ptr_src))
3698     {
3699     case MEM_REF:
3700     case VAR_DECL:
3701       if (BOUNDED_P (ptr_src))
3702 	if (VAR_P (ptr) && DECL_REGISTER (ptr))
3703 	  bounds = chkp_get_zero_bounds ();
3704 	else
3705 	  {
3706 	    addr = chkp_build_addr_expr (ptr_src);
3707 	    bounds = chkp_build_bndldx (addr, ptr, iter);
3708 	  }
3709       else
3710 	bounds = chkp_get_nonpointer_load_bounds ();
3711       break;
3712 
3713     case ARRAY_REF:
3714     case COMPONENT_REF:
3715       addr = get_base_address (ptr_src);
3716       if (VAR_P (addr) && DECL_HARD_REGISTER (addr))
3717 	{
3718 	  bounds = chkp_get_zero_bounds ();
3719 	  break;
3720 	}
3721       if (DECL_P (addr)
3722 	  || TREE_CODE (addr) == MEM_REF
3723 	  || TREE_CODE (addr) == TARGET_MEM_REF)
3724 	{
3725 	  if (BOUNDED_P (ptr_src))
3726 	    if (VAR_P (ptr) && DECL_REGISTER (ptr))
3727 	      bounds = chkp_get_zero_bounds ();
3728 	    else
3729 	      {
3730 		addr = chkp_build_addr_expr (ptr_src);
3731 		bounds = chkp_build_bndldx (addr, ptr, iter);
3732 	      }
3733 	  else
3734 	    bounds = chkp_get_nonpointer_load_bounds ();
3735 	}
3736       else
3737 	{
3738 	  gcc_assert (TREE_CODE (addr) == SSA_NAME);
3739 	  bounds = chkp_find_bounds (addr, iter);
3740 	}
3741       break;
3742 
3743     case PARM_DECL:
3744       /* Handled above but failed.  */
3745       bounds = chkp_get_invalid_op_bounds ();
3746       break;
3747 
3748     case TARGET_MEM_REF:
3749       addr = chkp_build_addr_expr (ptr_src);
3750       bounds = chkp_build_bndldx (addr, ptr, iter);
3751       break;
3752 
3753     case SSA_NAME:
3754       bounds = chkp_get_registered_bounds (ptr_src);
3755       if (!bounds)
3756 	{
3757 	  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3758 	  gphi_iterator phi_iter;
3759 
3760 	  bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3761 
3762 	  gcc_assert (bounds);
3763 
3764 	  if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3765 	    {
3766 	      unsigned i;
3767 
3768 	      for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3769 		{
3770 		  tree arg = gimple_phi_arg_def (def_phi, i);
3771 		  tree arg_bnd;
3772 		  gphi *phi_bnd;
3773 
3774 		  arg_bnd = chkp_find_bounds (arg, NULL);
3775 
3776 		  /* chkp_get_bounds_by_definition created new phi
3777 		     statement and phi_iter points to it.
3778 
3779 		     Previous call to chkp_find_bounds could create
3780 		     new basic block and therefore change phi statement
3781 		     phi_iter points to.  */
3782 		  phi_bnd = phi_iter.phi ();
3783 
3784 		  add_phi_arg (phi_bnd, arg_bnd,
3785 			       gimple_phi_arg_edge (def_phi, i),
3786 			       UNKNOWN_LOCATION);
3787 		}
3788 
3789 	      /* If all bound phi nodes have their arg computed
3790 		 then we may finish its computation.  See
3791 		 chkp_finish_incomplete_bounds for more details.  */
3792 	      if (chkp_may_finish_incomplete_bounds ())
3793 		chkp_finish_incomplete_bounds ();
3794 	    }
3795 
3796 	  gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3797 		      || chkp_incomplete_bounds (bounds));
3798 	}
3799       break;
3800 
3801     case ADDR_EXPR:
3802     case WITH_SIZE_EXPR:
3803       bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3804       break;
3805 
3806     case INTEGER_CST:
3807     case COMPLEX_CST:
3808     case VECTOR_CST:
3809       if (integer_zerop (ptr_src))
3810 	bounds = chkp_get_none_bounds ();
3811       else
3812 	bounds = chkp_get_invalid_op_bounds ();
3813       break;
3814 
3815     default:
3816       if (dump_file && (dump_flags & TDF_DETAILS))
3817 	{
3818 	  fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3819 		   get_tree_code_name (TREE_CODE (ptr_src)));
3820 	  print_node (dump_file, "", ptr_src, 0);
3821 	}
3822       internal_error ("chkp_find_bounds: Unexpected tree code %s",
3823 		      get_tree_code_name (TREE_CODE (ptr_src)));
3824     }
3825 
3826   if (!bounds)
3827     {
3828       if (dump_file && (dump_flags & TDF_DETAILS))
3829 	{
3830 	  fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3831 	  print_node (dump_file, "", ptr_src, 0);
3832 	}
3833       internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3834     }
3835 
3836   return bounds;
3837 }
3838 
3839 /* Normal case for bounds search without forced narrowing.  */
3840 static tree
3841 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3842 {
3843   return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3844 }
3845 
3846 /* Search bounds for pointer PTR loaded from PTR_SRC
3847    by statement *ITER points to.  */
3848 static tree
3849 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3850 {
3851   return chkp_find_bounds_1 (ptr, ptr_src, iter);
3852 }
3853 
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* LHS is itself a pointer; handle this single assignment.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  /* Recurse into every constructor element whose field
	     contains pointers.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (field && chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Plain copy: recurse on matching component refs of LHS and
	   RHS for each field that contains pointers.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      /* A RANGE_EXPR index initializes every element in
		 [lo_index, hi_index] with the same value.
		 NOTE(review): the (unsigned) casts truncate the
		 HOST_WIDE_INT index to unsigned int — presumably
		 static initializer ranges always fit; confirm.  */
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An INTEGER_CST index repositions CUR; elements
		     without an explicit index continue sequentially.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3952 
3953 /* Add code to copy bounds for assignment of RHS to LHS.
3954    ARG is an iterator pointing ne code position.  */
3955 static void
3956 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3957 {
3958   gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3959   tree bounds = chkp_find_bounds (rhs, iter);
3960   tree addr = chkp_build_addr_expr(lhs);
3961 
3962   chkp_build_bndstx (addr, rhs, bounds, iter);
3963 }
3964 
/* Emit static bound initializers and size vars.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Flush the constructor once the statement budget is spent,
	   then start a fresh list for the remaining variables.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Emit any remaining pointer bounds initializers.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* A static bounds var is expected to be initialized with the
	   address of the variable it was created for.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* Release global maps; the pass is done for this file.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
4032 
4033 /* An instrumentation function which is called for each statement
4034    having memory access we want to instrument.  It inserts check
4035    code and bounds copy code.
4036 
4037    ITER points to statement to instrument.
4038 
4039    NODE holds memory access in statement to check.
4040 
4041    LOC holds the location information for statement.
4042 
4043    DIRFLAGS determines whether access is read or write.
4044 
4045    ACCESS_OFFS should be added to address used in NODE
4046    before check.
4047 
4048    ACCESS_SIZE holds size of checked access.
4049 
4050    SAFE indicates if NODE access is safe and should not be
4051    checked.  */
4052 static void
4053 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
4054 		   location_t loc, tree dirflag,
4055 		   tree access_offs, tree access_size,
4056 		   bool safe)
4057 {
4058   tree node_type = TREE_TYPE (node);
4059   tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
4060   tree addr_first = NULL_TREE; /* address of the first accessed byte */
4061   tree addr_last = NULL_TREE; /* address of the last accessed byte */
4062   tree ptr = NULL_TREE; /* a pointer used for dereference */
4063   tree bounds = NULL_TREE;
4064   bool reg_store = false;
4065 
4066   /* We do not need instrumentation for clobbers.  */
4067   if (dirflag == integer_one_node
4068       && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
4069       && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
4070     return;
4071 
4072   switch (TREE_CODE (node))
4073     {
4074     case ARRAY_REF:
4075     case COMPONENT_REF:
4076       {
4077 	bool bitfield;
4078 	tree elt;
4079 
4080 	if (safe)
4081 	  {
4082 	    /* We are not going to generate any checks, so do not
4083 	       generate bounds as well.  */
4084 	    addr_first = chkp_build_addr_expr (node);
4085 	    break;
4086 	  }
4087 
4088 	chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
4089 					    &bitfield, &bounds, iter, false);
4090 
4091 	/* Break if there is no dereference and operation is safe.  */
4092 
4093 	if (bitfield)
4094           {
4095             tree field = TREE_OPERAND (node, 1);
4096 
4097             if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
4098               size = DECL_SIZE_UNIT (field);
4099 
4100 	    if (elt)
4101 	      elt = chkp_build_addr_expr (elt);
4102             addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
4103             addr_first = fold_build_pointer_plus_loc (loc,
4104 						      addr_first,
4105 						      byte_position (field));
4106           }
4107         else
4108           addr_first = chkp_build_addr_expr (node);
4109       }
4110       break;
4111 
4112     case INDIRECT_REF:
4113       ptr = TREE_OPERAND (node, 0);
4114       addr_first = ptr;
4115       break;
4116 
4117     case MEM_REF:
4118       ptr = TREE_OPERAND (node, 0);
4119       addr_first = chkp_build_addr_expr (node);
4120       break;
4121 
4122     case TARGET_MEM_REF:
4123       ptr = TMR_BASE (node);
4124       addr_first = chkp_build_addr_expr (node);
4125       break;
4126 
4127     case ARRAY_RANGE_REF:
4128       printf("ARRAY_RANGE_REF\n");
4129       debug_gimple_stmt(gsi_stmt(*iter));
4130       debug_tree(node);
4131       gcc_unreachable ();
4132       break;
4133 
4134     case BIT_FIELD_REF:
4135       {
4136 	tree offset, size;
4137 
4138 	gcc_assert (!access_offs);
4139 	gcc_assert (!access_size);
4140 
4141 	chkp_parse_bit_field_ref (node, loc, &offset, &size);
4142 
4143 	chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
4144 			   dirflag, offset, size, safe);
4145 	return;
4146       }
4147       break;
4148 
4149     case VAR_DECL:
4150     case RESULT_DECL:
4151     case PARM_DECL:
4152       if (dirflag != integer_one_node
4153 	  || DECL_REGISTER (node))
4154 	return;
4155 
4156       safe = true;
4157       addr_first = chkp_build_addr_expr (node);
4158       break;
4159 
4160     default:
4161       return;
4162     }
4163 
4164   /* If addr_last was not computed then use (addr_first + size - 1)
4165      expression to compute it.  */
4166   if (!addr_last)
4167     {
4168       addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
4169       addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
4170     }
4171 
4172   /* Shift both first_addr and last_addr by access_offs if specified.  */
4173   if (access_offs)
4174     {
4175       addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
4176       addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
4177     }
4178 
4179   if (dirflag == integer_one_node)
4180     {
4181       tree base = get_base_address (node);
4182       if (VAR_P (base) && DECL_HARD_REGISTER (base))
4183 	reg_store = true;
4184     }
4185 
4186   /* Generate bndcl/bndcu checks if memory access is not safe.  */
4187   if (!safe)
4188     {
4189       gimple_stmt_iterator stmt_iter = *iter;
4190 
4191       if (!bounds)
4192 	bounds = chkp_find_bounds (ptr, iter);
4193 
4194       chkp_check_mem_access (addr_first, addr_last, bounds,
4195 			     stmt_iter, loc, dirflag);
4196     }
4197 
4198   /* We need to store bounds in case pointer is stored.  */
4199   if (dirflag == integer_one_node
4200       && !reg_store
4201       && chkp_type_has_pointer (node_type)
4202       && flag_chkp_store_bounds)
4203     {
4204       gimple *stmt = gsi_stmt (*iter);
4205       tree rhs1 = gimple_assign_rhs1 (stmt);
4206       enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4207 
4208       if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
4209 	chkp_walk_pointer_assignments (node, rhs1, iter,
4210 				       chkp_copy_bounds_for_elem);
4211       else
4212 	{
4213 	  bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
4214 	  chkp_build_bndstx (addr_first, rhs1, bounds, iter);
4215 	}
4216     }
4217 }
4218 
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.  */
void
chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  /* Emit bounds copy code for every pointer contained in the
     assigned value; new statements go through ITER.  */
  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple *stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);

	  /* Only checker builtins are expected among the statements
	     created by the walk above.  */
	  gcc_assert (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDSTX)
		      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDLDX)
		      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET));

	  edge->caller->create_edge (callee, as_a <gcall *> (stmt), edge->count);
	}
      /* Walk backwards until ASSIGN is reached again; everything in
	 between was inserted by chkp_walk_pointer_assignments.  */
      gsi_prev (&iter);
    }
}
4251 
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements after a block-ending statement are misplaced;
	   move them onto the fallthru edge.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move every trailing statement onto the fallthru edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple *next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4309 
4310 /* Walker callback for chkp_replace_function_pointers.  Replaces
4311    function pointer in the specified operand with pointer to the
4312    instrumented function version.  */
4313 static tree
4314 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4315 			       void *data ATTRIBUTE_UNUSED)
4316 {
4317   if (TREE_CODE (*op) == FUNCTION_DECL
4318       && chkp_instrumentable_p (*op)
4319       && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4320 	  /* For builtins we replace pointers only for selected
4321 	     function and functions having definitions.  */
4322 	  || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4323 	      && (chkp_instrument_normal_builtin (*op)
4324 		  || gimple_has_body_p (*op)))))
4325     {
4326       struct cgraph_node *node = cgraph_node::get_create (*op);
4327       struct cgraph_node *clone = NULL;
4328 
4329       if (!node->instrumentation_clone)
4330 	clone = chkp_maybe_create_clone (*op);
4331 
4332       if (clone)
4333 	*op = clone->decl;
4334       *walk_subtrees = 0;
4335     }
4336 
4337   return NULL;
4338 }
4339 
4340 /* This function searches for function pointers in statement
4341    pointed by GSI and replaces them with pointers to instrumented
4342    function versions.  */
4343 static void
4344 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4345 {
4346   gimple *stmt = gsi_stmt (*gsi);
4347   /* For calls we want to walk call args only.  */
4348   if (gimple_code (stmt) == GIMPLE_CALL)
4349     {
4350       unsigned i;
4351       for (i = 0; i < gimple_call_num_args (stmt); i++)
4352 	walk_tree (gimple_call_arg_ptr (stmt, i),
4353 		   chkp_replace_function_pointer, NULL, NULL);
4354     }
4355   else
4356     walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4357 }
4358 
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* In checker constructors ("chkp ctor") accesses are treated as
     safe and get no checks.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Remember the following block up front; instrumentation may
	 change the CFG around BB.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
	  gimple *s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  /* Redirect function pointers in S to instrumented versions.  */
	  chkp_replace_function_pointers (&i);

          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
	      /* Instrument the store (lhs) and the loads (rhs ops).  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
              break;

            case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

            default:
              ;
            }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      /* I was already advanced past S above, so removing S
		 through a separate iterator keeps I valid.  */
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
        }
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      /* Find which pointer slots of the aggregate carry
		 bounds.  */
	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  /* Each bounds slot consumes one extra parameter.  */
		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
4494 
4495 /* Find init/null/copy_ptr_bounds calls and replace them
4496    with assignments.  It should allow better code
4497    optimization.  */
4498 
4499 static void
4500 chkp_remove_useless_builtins ()
4501 {
4502   basic_block bb;
4503   gimple_stmt_iterator gsi;
4504 
4505   FOR_EACH_BB_FN (bb, cfun)
4506     {
4507       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4508         {
4509 	  gimple *stmt = gsi_stmt (gsi);
4510 	  tree fndecl;
4511 	  enum built_in_function fcode;
4512 
4513 	  /* Find builtins returning first arg and replace
4514 	     them with assignments.  */
4515 	  if (gimple_code (stmt) == GIMPLE_CALL
4516 	      && (fndecl = gimple_call_fndecl (stmt))
4517 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4518 	      && (fcode = DECL_FUNCTION_CODE (fndecl))
4519 	      && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4520 		  || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4521 		  || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4522 		  || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4523 	    {
4524 	      tree res = gimple_call_arg (stmt, 0);
4525 	      update_call_from_tree (&gsi, res);
4526 	      stmt = gsi_stmt (gsi);
4527 	      update_stmt (stmt);
4528 	    }
4529         }
4530     }
4531 }
4532 
/* Initialize pass.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear the "do not instrument" mark from all statements.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* (Re)create pass sets and maps.  NOTE(review): only some maps are
     deleted before reallocation — presumably those may survive from a
     previously processed function; confirm before changing.  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  /* Reset cached per-function state.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
4578 
/* Finalize instrumentation pass.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  /* Release per-function sets and maps created by chkp_init.
     NOTE(review): chkp_reg_bounds, chkp_bound_vars and chkp_bounds_map
     are not freed here — chkp_bounds_map is freed later in
     chkp_finish_file; confirm the lifetime of the others.  */
  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  /* Drop cached entry block and constant bounds.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
4599 
/* Main instrumentation pass function.  */
static unsigned int
chkp_execute (void)
{
  /* Set up pass state and caches.  */
  chkp_init ();

  /* Instrument memory accesses, calls and returns.  */
  chkp_instrument_function ();

  /* Replace init/null/copy_ptr_bounds builtins with assignments.  */
  chkp_remove_useless_builtins ();

  /* Mark the function as instrumented.  */
  chkp_function_mark_instrumented (cfun->decl);

  /* Move statements inserted after block-ending statements onto
     fallthru edges.  */
  chkp_fix_cfg ();

  /* Release pass state.  */
  chkp_fini ();

  return 0;
}
4618 
4619 /* Instrumentation pass gate.  */
4620 static bool
4621 chkp_gate (void)
4622 {
4623   cgraph_node *node = cgraph_node::get (cfun->decl);
4624   return ((node != NULL
4625 	   && node->instrumentation_clone)
4626 	   || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4627 }
4628 
namespace {

/* Pass descriptor for the chkp instrumentation pass.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* The Pointer Bounds Checker instrumentation pass.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
4671 
/* Create an instance of the Pointer Bounds Checker
   instrumentation pass.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4677 
4678 #include "gt-tree-chkp.h"
4679