/* Copyright (C) 2013-2018 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3. If not see
   <http://www.gnu.org/licenses/>. */

/* Virtual Table Pointer Security Pass - Detect corruption of vtable pointers
   before using them for virtual method dispatches. */

/* This file is part of the vtable security feature implementation.
   The vtable security feature is designed to detect when a virtual
   call is about to be made through an invalid vtable pointer
   (possibly due to data corruption or malicious attacks). The
   compiler finds every virtual call, and inserts a verification call
   before the virtual call. The verification call takes the actual
   vtable pointer value in the object through which the virtual call
   is being made, and compares the vtable pointer against a set of all
   valid vtable pointers that the object could contain (this set is
   based on the declared type of the object). If the pointer is in
   the valid set, execution is allowed to continue; otherwise the
   program is halted.

   There are several pieces needed in order to make this work: 1. For
   every virtual class in the program (i.e. a class that contains
   virtual methods), we need to build the set of all possible valid
   vtables that an object of that class could point to. This includes
   vtables for any class(es) that inherit from the class under
   consideration. 2. For every such data set we build up, we need a
   way to find and reference the data set. This is complicated by the
   fact that the real vtable addresses are not known until runtime,
   when the program is loaded into memory, but we need to reference the
   sets at compile time when we are inserting verification calls into
   the program. 3. We need to find every virtual call in the program,
   and insert the verification call (with the appropriate arguments)
   before the virtual call. 4. We need some runtime library pieces:
   the code to build up the data sets at runtime; the code to actually
   perform the verification using the data sets; and some code to set
   protections on the data sets, so they themselves do not become
   hacker targets.

   To find and reference the set of valid vtable pointers for any given
   virtual class, we create a special global variable for each virtual
   class. We refer to this as the "vtable map variable" for that
   class. The vtable map variable has the type "void *", and is
   initialized by the compiler to NULL. At runtime when the set of
   valid vtable pointers for a virtual class, e.g. class Foo, is built,
   the vtable map variable for class Foo is made to point to the set.
   During compile time, when the compiler is inserting verification
   calls into the program, it passes the vtable map variable for the
   appropriate class to the verification call, so that at runtime the
   verification call can find the appropriate data set.
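
   As an illustrative sketch (the name below is invented for the
   example, not the mangled name the compiler actually emits), for a
   virtual class Foo the effect is as if the compiler had emitted

       void *__vtable_map_for_Foo = NULL;

   and every verification call made for an object whose static type is
   Foo is passed the address of this variable, so that at runtime the
   call can reach the data set the variable points to.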

   The actual set of valid vtable pointers for a virtual class,
   e.g. class Foo, cannot be built until runtime, when the vtables get
   loaded into memory and their addresses are known. But the knowledge
   about which vtables belong in which class' hierarchy is only known
   at compile time. Therefore at compile time we collect class
   hierarchy and vtable information about every virtual class, and we
   generate calls to build up the data sets at runtime. To build the
   data sets, we call one of the functions we add to the runtime
   library, __VLTRegisterPair. __VLTRegisterPair takes two arguments,
   a vtable map variable and the address of a vtable. If the vtable
   map variable is currently NULL, it creates a new data set (hash
   table), makes the vtable map variable point to the new data set, and
   inserts the vtable address into the data set. If the vtable map
   variable is not NULL, it just inserts the vtable address into the
   data set. In order to make sure that our data sets are built before
   any verification calls happen, we create a special constructor
   initialization function for each compilation unit, give it a very
   high initialization priority, and insert all of our calls to
   __VLTRegisterPair into our special constructor initialization
   function.
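
   As a rough sketch (the function name and vtable map variable names
   here are invented for the example; the real compiler-generated names
   are mangled), for a translation unit that uses a class Base and a
   class Derived that inherits from Base, the generated constructor
   initialization function behaves approximately like

       static void __VLT_register_sets (void)
       {
         __VLTRegisterPair (&__vtable_map_for_Base, &_ZTV4Base);
         __VLTRegisterPair (&__vtable_map_for_Base, &_ZTV7Derived);
         __VLTRegisterPair (&__vtable_map_for_Derived, &_ZTV7Derived);
       }

   i.e. each vtable map variable is registered with the vtable of its
   own class and with the vtables of every class that derives from it.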

   The vtable verification feature is controlled by the flag
   '-fvtable-verify='. There are three flavors of this:
   '-fvtable-verify=std', '-fvtable-verify=preinit', and
   '-fvtable-verify=none'. If the option '-fvtable-verify=preinit' is
   used, then our constructor initialization function gets put into the
   preinit array. This is necessary if there are data sets that need
   to be built very early in execution. If the constructor
   initialization function gets put into the preinit array, then we also
   add calls to __VLTChangePermission at the beginning and end of the
   function. The call at the beginning sets the permissions on the
   data sets and vtable map variables to read/write, and the one at the
   end makes them read-only. If the '-fvtable-verify=std' option is
   used, the constructor initialization functions are executed at their
   normal time, and the __VLTChangePermission calls are handled
   differently (see the comments in libstdc++-v3/libsupc++/vtv_rts.cc).
   The option '-fvtable-verify=none' turns off vtable verification.
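
   For example, compiling a C++ source file with something like

       g++ -fvtable-verify=std foo.cc -o foo

   enables the feature with the normal constructor-time registration,
   while substituting 'preinit' or 'none' for 'std' selects the other
   two behaviors described above.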

   This file contains code for the tree pass that goes through all the
   statements in each basic block, looking for virtual calls, and
   inserting a call to __VLTVerifyVtablePointer (with appropriate
   arguments) before each one. It also contains the hash table
   functions for the data structures used for collecting the class
   hierarchy data and building/maintaining the vtable map variable
   data, which are defined in gcc/vtable-verify.h. These data
   structures are shared with the code in the C++ front end that
   collects the class hierarchy & vtable information and generates the
   vtable map variables (see cp/vtable-class-hierarchy.c). This tree
   pass should run just before the gimple is converted to RTL.

   Some implementation details for this pass:

   To find all of the virtual calls, we iterate through all the
   gimple statements in each basic block, looking for any call
   statement with the code "OBJ_TYPE_REF". Once we have found the
   virtual call, we need to find the vtable pointer through which the
   call is being made, and the type of the object containing the
   pointer (to find the appropriate vtable map variable). We then use
   these to build a call to __VLTVerifyVtablePointer, passing the
   vtable map variable, and the vtable pointer. We insert the
   verification call just after the gimple statement that gets the
   vtable pointer out of the object, and we update the next
   statement to depend on the result returned from
   __VLTVerifyVtablePointer (the vtable pointer value), to ensure
   subsequent compiler phases don't remove or reorder the call (it's no
   good to have the verification occur after the virtual call, for
   example). To find the vtable pointer being used (and the type of
   the object) we search backwards through the def_stmts chain from the
   virtual call (see verify_bb_vtables for more details). */
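
/* As a schematic illustration of the transformation this pass performs
   (the SSA names and the vtable map variable name below are invented
   for the example), a virtual call whose gimple looks roughly like

       _2 = MEM[(struct Event *)this_1(D)]._vptr.Event;
       _3 = _2 + 16;
       _4 = *_3;
       OBJ_TYPE_REF (_4; this_1(D)->2) (this_1(D));

   is rewritten so that the users of the vptr value go through the
   verification call instead:

       _2 = MEM[(struct Event *)this_1(D)]._vptr.Event;
       VTV_5 = __VLTVerifyVtablePointer (&__vtable_map_for_Event, _2);
       _3 = VTV_5 + 16;
       _4 = *_3;
       OBJ_TYPE_REF (_4; this_1(D)->2) (this_1(D));  */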

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-iterator.h"

#include "vtable-verify.h"

unsigned num_vtable_map_nodes = 0;
int total_num_virtual_calls = 0;
int total_num_verified_vcalls = 0;

extern GTY(()) tree verify_vtbl_ptr_fndecl;
tree verify_vtbl_ptr_fndecl = NULL_TREE;

/* Keep track of whether or not any virtual calls were verified. */
static bool any_verification_calls_generated = false;

unsigned int vtable_verify_main (void);


/* The following few functions are for the vtbl pointer hash table
   in the 'registered' field of the struct vtable_map_node. The hash
   table keeps track of which vtable pointers have been used in
   calls to __VLTRegisterPair with that particular vtable map variable. */

/* This function checks to see if a particular VTABLE_DECL and OFFSET are
   already in the 'registered' hash table for NODE. */

bool
vtbl_map_node_registration_find (struct vtbl_map_node *node,
                                 tree vtable_decl,
                                 unsigned offset)
{
  struct vtable_registration key;
  struct vtable_registration **slot;

  gcc_assert (node && node->registered);

  key.vtable_decl = vtable_decl;
  slot = node->registered->find_slot (&key, NO_INSERT);

  if (slot && (*slot))
    {
      unsigned i;
      for (i = 0; i < ((*slot)->offsets).length (); ++i)
        if ((*slot)->offsets[i] == offset)
          return true;
    }

  return false;
}

/* This function inserts VTABLE_DECL and OFFSET into the 'registered'
   hash table for NODE. It returns a boolean indicating whether or not
   it actually inserted anything. */

bool
vtbl_map_node_registration_insert (struct vtbl_map_node *node,
                                   tree vtable_decl,
                                   unsigned offset)
{
  struct vtable_registration key;
  struct vtable_registration **slot;
  bool inserted_something = false;

  if (!node || !node->registered)
    return false;

  key.vtable_decl = vtable_decl;
  slot = node->registered->find_slot (&key, INSERT);

  if (! *slot)
    {
      struct vtable_registration *node;
      node = XNEW (struct vtable_registration);
      node->vtable_decl = vtable_decl;

      (node->offsets).create (10);
      (node->offsets).safe_push (offset);
      *slot = node;
      inserted_something = true;
    }
  else
    {
      /* We found the vtable_decl slot; we need to see if it already
         contains the offset. If not, we need to add the offset. */
      unsigned i;
      bool found = false;
      for (i = 0; i < ((*slot)->offsets).length () && !found; ++i)
        if ((*slot)->offsets[i] == offset)
          found = true;

      if (!found)
        {
          ((*slot)->offsets).safe_push (offset);
          inserted_something = true;
        }
    }
  return inserted_something;
}

/* Hashtable functions for vtable_registration hashtables. */

inline hashval_t
registration_hasher::hash (const vtable_registration *p)
{
  const struct vtable_registration *n = (const struct vtable_registration *) p;
  return (hashval_t) (DECL_UID (n->vtable_decl));
}

inline bool
registration_hasher::equal (const vtable_registration *p1,
                            const vtable_registration *p2)
{
  const struct vtable_registration *n1 =
    (const struct vtable_registration *) p1;
  const struct vtable_registration *n2 =
    (const struct vtable_registration *) p2;
  return (DECL_UID (n1->vtable_decl) == DECL_UID (n2->vtable_decl));
}

/* End of hashtable functions for "registered" hashtables. */


/* Hashtable definition and functions for vtbl_map_hash. */

struct vtbl_map_hasher : nofree_ptr_hash <struct vtbl_map_node>
{
  static inline hashval_t hash (const vtbl_map_node *);
  static inline bool equal (const vtbl_map_node *, const vtbl_map_node *);
};

/* Returns a hash code for P. */

inline hashval_t
vtbl_map_hasher::hash (const vtbl_map_node *p)
{
  const struct vtbl_map_node n = *((const struct vtbl_map_node *) p);
  return (hashval_t) IDENTIFIER_HASH_VALUE (n.class_name);
}

/* Returns nonzero if P1 and P2 are equal. */

inline bool
vtbl_map_hasher::equal (const vtbl_map_node *p1, const vtbl_map_node *p2)
{
  const struct vtbl_map_node n1 = *((const struct vtbl_map_node *) p1);
  const struct vtbl_map_node n2 = *((const struct vtbl_map_node *) p2);
  return (IDENTIFIER_HASH_VALUE (n1.class_name) ==
          IDENTIFIER_HASH_VALUE (n2.class_name));
}

/* Here are the two structures into which we insert vtable map nodes.
   We use two data structures because of the vastly different ways we need
   to find the nodes for various tasks (see comments in vtable-verify.h
   for more details). */

typedef hash_table<vtbl_map_hasher> vtbl_map_table_type;
typedef vtbl_map_table_type::iterator vtbl_map_iterator_type;

/* Vtable map variable nodes stored in a hash table. */
static vtbl_map_table_type *vtbl_map_hash;

/* Vtable map variable nodes stored in a vector. */
vec<struct vtbl_map_node *> vtbl_map_nodes_vec;

/* Vector of mangled names for anonymous classes. */
extern GTY(()) vec<tree, va_gc> *vtbl_mangled_name_types;
extern GTY(()) vec<tree, va_gc> *vtbl_mangled_name_ids;
vec<tree, va_gc> *vtbl_mangled_name_types;
vec<tree, va_gc> *vtbl_mangled_name_ids;

/* Look up class_type (a type decl for record types) in the vtbl_mangled_names_*
   vectors. This is a linear lookup. Return the associated mangled name for
   the class type. This is for handling types from anonymous namespaces, whose
   DECL_ASSEMBLER_NAME ends up being "<anon>", which is useless for our
   purposes.

   We use two vectors of trees to keep track of the mangled names: One is a
   vector of class types and the other is a vector of the mangled names. The
   assumption is that these two vectors are kept in perfect lock-step so that
   vtbl_mangled_name_ids[i] is the mangled name for
   vtbl_mangled_name_types[i]. */

static tree
vtbl_find_mangled_name (tree class_type)
{
  tree result = NULL_TREE;
  unsigned i;

  if (!vtbl_mangled_name_types or !vtbl_mangled_name_ids)
    return result;

  if (vtbl_mangled_name_types->length() != vtbl_mangled_name_ids->length())
    return result;

  for (i = 0; i < vtbl_mangled_name_types->length(); ++i)
    if ((*vtbl_mangled_name_types)[i] == class_type)
      {
        result = (*vtbl_mangled_name_ids)[i];
        break;
      }

  return result;
}

/* Store a class type decl and its mangled name, for an anonymous RECORD_TYPE,
   in the vtbl_mangled_names vector. Make sure there is not already an
   entry for the class type before adding it. */

void
vtbl_register_mangled_name (tree class_type, tree mangled_name)
{
  if (!vtbl_mangled_name_types)
    vec_alloc (vtbl_mangled_name_types, 10);

  if (!vtbl_mangled_name_ids)
    vec_alloc (vtbl_mangled_name_ids, 10);

  gcc_assert (vtbl_mangled_name_types->length() ==
              vtbl_mangled_name_ids->length());

  if (vtbl_find_mangled_name (class_type) == NULL_TREE)
    {
      vec_safe_push (vtbl_mangled_name_types, class_type);
      vec_safe_push (vtbl_mangled_name_ids, mangled_name);
    }
}

/* Return vtbl_map node for CLASS_NAME without creating a new one. */

struct vtbl_map_node *
vtbl_map_get_node (tree class_type)
{
  struct vtbl_map_node key;
  struct vtbl_map_node **slot;

  tree class_type_decl;
  tree class_name;
  unsigned int type_quals;

  if (!vtbl_map_hash)
    return NULL;

  gcc_assert (TREE_CODE (class_type) == RECORD_TYPE);

  /* Find the TYPE_DECL for the class. */
  class_type_decl = TYPE_NAME (class_type);

  /* Verify that there aren't any qualifiers on the type. */
  type_quals = TYPE_QUALS (TREE_TYPE (class_type_decl));
  gcc_assert (type_quals == TYPE_UNQUALIFIED);

  /* Get the mangled name for the unqualified type. */
  gcc_assert (HAS_DECL_ASSEMBLER_NAME_P (class_type_decl));
  class_name = DECL_ASSEMBLER_NAME (class_type_decl);

  if (strstr (IDENTIFIER_POINTER (class_name), "<anon>") != NULL)
    class_name = vtbl_find_mangled_name (class_type_decl);

  key.class_name = class_name;
  slot = (struct vtbl_map_node **) vtbl_map_hash->find_slot (&key, NO_INSERT);
  if (!slot)
    return NULL;
  return *slot;
}

/* Return vtbl_map node assigned to BASE_CLASS_TYPE. Create new one
   when needed. */

struct vtbl_map_node *
find_or_create_vtbl_map_node (tree base_class_type)
{
  struct vtbl_map_node key;
  struct vtbl_map_node *node;
  struct vtbl_map_node **slot;
  tree class_type_decl;
  unsigned int type_quals;

  if (!vtbl_map_hash)
    vtbl_map_hash = new vtbl_map_table_type (10);

  /* Find the TYPE_DECL for the class. */
  class_type_decl = TYPE_NAME (base_class_type);

  /* Verify that there aren't any type qualifiers on type. */
  type_quals = TYPE_QUALS (TREE_TYPE (class_type_decl));
  gcc_assert (type_quals == TYPE_UNQUALIFIED);

  gcc_assert (HAS_DECL_ASSEMBLER_NAME_P (class_type_decl));
  key.class_name = DECL_ASSEMBLER_NAME (class_type_decl);

  if (strstr (IDENTIFIER_POINTER (key.class_name), "<anon>") != NULL)
    key.class_name = vtbl_find_mangled_name (class_type_decl);

  slot = (struct vtbl_map_node **) vtbl_map_hash->find_slot (&key, INSERT);

  if (*slot)
    return *slot;

  node = XNEW (struct vtbl_map_node);
  node->vtbl_map_decl = NULL_TREE;
  node->class_name = key.class_name;
  node->uid = num_vtable_map_nodes++;

  node->class_info = XNEW (struct vtv_graph_node);
  node->class_info->class_type = base_class_type;
  node->class_info->class_uid = node->uid;
  node->class_info->num_processed_children = 0;

  (node->class_info->parents).create (4);
  (node->class_info->children).create (4);

  node->registered = new register_table_type (16);

  node->is_used = false;

  vtbl_map_nodes_vec.safe_push (node);
  gcc_assert (vtbl_map_nodes_vec[node->uid] == node);

  *slot = node;
  return node;
}

/* End of hashtable functions for vtable_map variables hash table. */

/* Given a gimple STMT, this function checks to see if the statement
   is an assignment, the rhs of which is getting the vtable pointer
   value out of an object. (i.e. it's the value we need to verify
   because it's the vtable pointer that will be used for a virtual
   call). */

static bool
is_vtable_assignment_stmt (gimple *stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;
  else
    {
      tree lhs = gimple_assign_lhs (stmt);
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (lhs) != SSA_NAME)
        return false;

      if (TREE_CODE (rhs) != COMPONENT_REF)
        return false;

      if (! (TREE_OPERAND (rhs, 1))
          || (TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL))
        return false;

      if (! DECL_VIRTUAL_P (TREE_OPERAND (rhs, 1)))
        return false;
    }

  return true;
}

/* This function attempts to recover the declared class of an object
   that is used in making a virtual call. We try to get the type from
   the type cast in the gimple assignment statement that extracts the
   vtable pointer from the object (DEF_STMT). The gimple statement
   usually looks something like this:

   D.2201_4 = MEM[(struct Event *)this_1(D)]._vptr.Event */

static tree
extract_object_class_type (tree rhs)
{
  tree result = NULL_TREE;

  /* Try to find and extract the type cast from that stmt. */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      if (TREE_CODE (op1) == FIELD_DECL
          && DECL_VIRTUAL_P (op1))
        {
          if (TREE_CODE (op0) == COMPONENT_REF
              && TREE_CODE (TREE_OPERAND (op0, 0)) == MEM_REF
              && TREE_CODE (TREE_TYPE (TREE_OPERAND (op0, 0))) == RECORD_TYPE)
            result = TREE_TYPE (TREE_OPERAND (op0, 0));
          else
            result = TREE_TYPE (op0);
        }
      else if (TREE_CODE (op0) == COMPONENT_REF)
        {
          result = extract_object_class_type (op0);
          if (result == NULL_TREE
              && TREE_CODE (op1) == COMPONENT_REF)
            result = extract_object_class_type (op1);
        }
    }

  return result;
}

/* This function traces forward through the def-use chain of an SSA
   variable to see if it ever gets used in a virtual function call. It
   returns a boolean indicating whether or not it found a virtual call in
   the use chain. */

static bool
var_is_used_for_virtual_call_p (tree lhs, int *mem_ref_depth,
                                int *recursion_depth)
{
  imm_use_iterator imm_iter;
  bool found_vcall = false;
  use_operand_p use_p;

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  if (*mem_ref_depth > 2)
    return false;

  if (*recursion_depth > 25)
    /* If we've recursed this far the chances are pretty good that
       we're not going to find what we're looking for, and that we've
       gone down a recursion black hole. Time to stop. */
    return false;

  *recursion_depth = *recursion_depth + 1;

  /* Iterate through the immediate uses of the current variable. If
     it's a virtual function call, we're done. Otherwise, if there's
     an LHS for the use stmt, add the ssa var to the work list
     (assuming it's not already in the list and is not a variable
     we've already examined). */

  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *stmt2 = USE_STMT (use_p);

      if (is_gimple_call (stmt2))
        {
          tree fncall = gimple_call_fn (stmt2);
          if (fncall && TREE_CODE (fncall) == OBJ_TYPE_REF)
            found_vcall = true;
          else
            return false;
        }
      else if (gimple_code (stmt2) == GIMPLE_PHI)
        {
          found_vcall = var_is_used_for_virtual_call_p
                          (gimple_phi_result (stmt2), mem_ref_depth,
                           recursion_depth);
        }
      else if (is_gimple_assign (stmt2))
        {
          tree rhs = gimple_assign_rhs1 (stmt2);
          if (TREE_CODE (rhs) == ADDR_EXPR
              || TREE_CODE (rhs) == MEM_REF)
            *mem_ref_depth = *mem_ref_depth + 1;

          if (TREE_CODE (rhs) == COMPONENT_REF)
            {
              while (TREE_CODE (TREE_OPERAND (rhs, 0)) == COMPONENT_REF)
                rhs = TREE_OPERAND (rhs, 0);

              if (TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
                  || TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF)
                *mem_ref_depth = *mem_ref_depth + 1;
            }

          if (*mem_ref_depth < 3)
            found_vcall = var_is_used_for_virtual_call_p
                            (gimple_assign_lhs (stmt2), mem_ref_depth,
                             recursion_depth);
        }
      else
        break;

      if (found_vcall)
        return true;
    }

  return false;
}

/* Search through all the statements in a basic block (BB), searching
   for virtual method calls. For each virtual method dispatch, find
   the vptr value used, and the statically declared type of the
   object; retrieve the vtable map variable for the type of the
   object; generate a call to __VLTVerifyVtablePointer; and insert the
   generated call into the basic block, after the point where the vptr
   value is gotten out of the object and before the virtual method
   dispatch. Make the virtual method dispatch depend on the return
   value from the verification call, so that subsequent optimizations
   cannot reorder the two calls. */

static void
verify_bb_vtables (basic_block bb)
{
  gimple_seq stmts;
  gimple *stmt = NULL;
  gimple_stmt_iterator gsi_vtbl_assign;
  gimple_stmt_iterator gsi_virtual_call;

  stmts = bb_seq (bb);
  gsi_virtual_call = gsi_start (stmts);
  for (; !gsi_end_p (gsi_virtual_call); gsi_next (&gsi_virtual_call))
    {
      stmt = gsi_stmt (gsi_virtual_call);

      /* Count virtual calls. */
      if (is_gimple_call (stmt))
        {
          tree fncall = gimple_call_fn (stmt);
          if (fncall && TREE_CODE (fncall) == OBJ_TYPE_REF)
            total_num_virtual_calls++;
        }

      if (is_vtable_assignment_stmt (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree vtbl_var_decl = NULL_TREE;
          struct vtbl_map_node *vtable_map_node;
          tree vtbl_decl = NULL_TREE;
          gcall *call_stmt;
          const char *vtable_name = "<unknown>";
          tree tmp0;
          bool found;
          int mem_ref_depth = 0;
          int recursion_depth = 0;

          /* Make sure this vptr field access is for a virtual call. */
          if (!var_is_used_for_virtual_call_p (lhs, &mem_ref_depth,
                                               &recursion_depth))
            continue;

          /* Now we have found the virtual method dispatch and
             the preceding access of the _vptr.* field... Next
             we need to find the statically declared type of
             the object, so we can find and use the right
             vtable map variable in the verification call. */
          tree class_type = extract_object_class_type
                              (gimple_assign_rhs1 (stmt));

          gsi_vtbl_assign = gsi_for_stmt (stmt);

          if (class_type
              && (TREE_CODE (class_type) == RECORD_TYPE)
              && TYPE_BINFO (class_type))
            {
              /* Get the vtable VAR_DECL for the type. */
              vtbl_var_decl = BINFO_VTABLE (TYPE_BINFO (class_type));

              if (TREE_CODE (vtbl_var_decl) == POINTER_PLUS_EXPR)
                vtbl_var_decl = TREE_OPERAND (TREE_OPERAND (vtbl_var_decl, 0),
                                              0);

              gcc_assert (vtbl_var_decl);

              vtbl_decl = vtbl_var_decl;
              vtable_map_node = vtbl_map_get_node
                                  (TYPE_MAIN_VARIANT (class_type));

              gcc_assert (verify_vtbl_ptr_fndecl);

              /* Given the vtable pointer for the base class of the
                 object, build the call to __VLTVerifyVtablePointer to
                 verify that the object's vtable pointer (contained in
                 lhs) is in the set of valid vtable pointers for the
                 base class. */

              if (vtable_map_node && vtable_map_node->vtbl_map_decl)
                {
                  vtable_map_node->is_used = true;
                  vtbl_var_decl = vtable_map_node->vtbl_map_decl;

                  if (VAR_P (vtbl_decl))
                    vtable_name = IDENTIFIER_POINTER (DECL_NAME (vtbl_decl));

                  /* Call different routines if we are interested in
                     trace information to debug problems. */
                  if (flag_vtv_debug)
                    {
                      int len1 = IDENTIFIER_LENGTH
                                   (DECL_NAME (vtbl_var_decl));
                      int len2 = strlen (vtable_name);

                      call_stmt = gimple_build_call
                                    (verify_vtbl_ptr_fndecl, 4,
                                     build1 (ADDR_EXPR,
                                             TYPE_POINTER_TO
                                               (TREE_TYPE (vtbl_var_decl)),
                                             vtbl_var_decl),
                                     lhs,
                                     build_string_literal
                                       (len1 + 1,
                                        IDENTIFIER_POINTER
                                          (DECL_NAME (vtbl_var_decl))),
                                     build_string_literal (len2 + 1,
                                                           vtable_name));
                    }
                  else
                    call_stmt = gimple_build_call
                                  (verify_vtbl_ptr_fndecl, 2,
                                   build1 (ADDR_EXPR,
                                           TYPE_POINTER_TO
                                             (TREE_TYPE (vtbl_var_decl)),
                                           vtbl_var_decl),
                                   lhs);

                  /* Create a new SSA_NAME var to hold the call's
                     return value, and make the call_stmt use the
                     variable for that purpose. */
                  tmp0 = make_temp_ssa_name (TREE_TYPE (lhs), NULL, "VTV");
                  gimple_call_set_lhs (call_stmt, tmp0);
                  update_stmt (call_stmt);

                  /* Replace all uses of lhs with tmp0. */
                  found = false;
                  imm_use_iterator iterator;
                  gimple *use_stmt;
                  FOR_EACH_IMM_USE_STMT (use_stmt, iterator, lhs)
                    {
                      use_operand_p use_p;
                      if (use_stmt == call_stmt)
                        continue;
                      FOR_EACH_IMM_USE_ON_STMT (use_p, iterator)
                        SET_USE (use_p, tmp0);
                      update_stmt (use_stmt);
                      found = true;
                    }

                  gcc_assert (found);

                  /* Insert the new verification call just after the
                     statement that gets the vtable pointer out of the
                     object. */
                  gcc_assert (gsi_stmt (gsi_vtbl_assign) == stmt);
                  gsi_insert_after (&gsi_vtbl_assign, call_stmt,
                                    GSI_NEW_STMT);

                  any_verification_calls_generated = true;
                  total_num_verified_vcalls++;
                }
            }
        }
    }
}

/* Definition of this optimization pass. */

namespace {

const pass_data pass_data_vtable_verify =
{
  GIMPLE_PASS, /* type */
  "vtable-verify", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_VTABLE_VERIFICATION, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_vtable_verify : public gimple_opt_pass
{
public:
  pass_vtable_verify (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_vtable_verify, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return (flag_vtable_verify); }
  virtual unsigned int execute (function *);

}; // class pass_vtable_verify

/* Loop through all the basic blocks in the current function, passing them to
   verify_bb_vtables, which searches for virtual calls, and inserts
   calls to __VLTVerifyVtablePointer. */

unsigned int
pass_vtable_verify::execute (function *fun)
{
  unsigned int ret = 1;
  basic_block bb;

  FOR_ALL_BB_FN (bb, fun)
    verify_bb_vtables (bb);

  return ret;
}

} // anon namespace

gimple_opt_pass *
make_pass_vtable_verify (gcc::context *ctxt)
{
  return new pass_vtable_verify (ctxt);
}

#include "gt-vtable-verify.h"