1 /* UndefinedBehaviorSanitizer, undefined behavior detector. 2 Copyright (C) 2013-2018 Free Software Foundation, Inc. 3 Contributed by Marek Polacek <polacek@redhat.com> 4 5 This file is part of GCC. 6 7 GCC is free software; you can redistribute it and/or modify it under 8 the terms of the GNU General Public License as published by the Free 9 Software Foundation; either version 3, or (at your option) any later 10 version. 11 12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY 13 WARRANTY; without even the implied warranty of MERCHANTABILITY or 14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 15 for more details. 16 17 You should have received a copy of the GNU General Public License 18 along with GCC; see the file COPYING3. If not see 19 <http://www.gnu.org/licenses/>. */ 20 21 #include "config.h" 22 #include "system.h" 23 #include "coretypes.h" 24 #include "backend.h" 25 #include "rtl.h" 26 #include "c-family/c-common.h" 27 #include "gimple.h" 28 #include "cfghooks.h" 29 #include "tree-pass.h" 30 #include "memmodel.h" 31 #include "tm_p.h" 32 #include "ssa.h" 33 #include "cgraph.h" 34 #include "tree-pretty-print.h" 35 #include "stor-layout.h" 36 #include "cfganal.h" 37 #include "gimple-iterator.h" 38 #include "output.h" 39 #include "cfgloop.h" 40 #include "ubsan.h" 41 #include "expr.h" 42 #include "stringpool.h" 43 #include "attribs.h" 44 #include "asan.h" 45 #include "gimplify-me.h" 46 #include "dfp.h" 47 #include "builtins.h" 48 #include "tree-object-size.h" 49 #include "tree-cfg.h" 50 #include "gimple-fold.h" 51 #include "varasm.h" 52 53 /* Map from a tree to a VAR_DECL tree. 
*/

/* A single cache entry: pairs a type (the key, in TYPE.FROM) with the
   VAR_DECL that holds the ubsan type descriptor emitted for it.  */

struct GTY((for_user)) tree_type_map {
  struct tree_map_base type;
  tree decl;
};

/* Hasher for the TYPE -> VAR_DECL cache.  Entries are hashed and compared
   by the TYPE_UID of the key type; across garbage collections an entry is
   kept only while its key type is still marked live (keep_cache_entry).  */

struct tree_type_map_cache_hasher : ggc_cache_ptr_hash<tree_type_map>
{
  static inline hashval_t
  hash (tree_type_map *t)
  {
    return TYPE_UID (t->type.from);
  }

  static inline bool
  equal (tree_type_map *a, tree_type_map *b)
  {
    return a->type.from == b->type.from;
  }

  static int
  keep_cache_entry (tree_type_map *&m)
  {
    return ggc_marked_p (m->type.from);
  }
};

/* The TYPE -> descriptor VAR_DECL cache itself; lazily created by
   decl_for_type_lookup.  */

static GTY ((cache))
     hash_table<tree_type_map_cache_hasher> *decl_tree_for_type;

/* Lookup a VAR_DECL for TYPE, and return it if we find one.  */

static tree
decl_for_type_lookup (tree type)
{
  /* If the hash table is not initialized yet, create it now.  */
  if (decl_tree_for_type == NULL)
    {
      decl_tree_for_type
	= hash_table<tree_type_map_cache_hasher>::create_ggc (10);
      /* That also means we don't have to bother with the lookup.  */
      return NULL_TREE;
    }

  struct tree_type_map *h, in;
  in.type.from = type;

  h = decl_tree_for_type->find_with_hash (&in, TYPE_UID (type));
  return h ? h->decl : NULL_TREE;
}

/* Insert a mapping TYPE->DECL in the VAR_DECL for type hashtable.
   Note: assumes decl_tree_for_type has already been created by a prior
   decl_for_type_lookup call.  */

static void
decl_for_type_insert (tree type, tree decl)
{
  struct tree_type_map *h;

  h = ggc_alloc<tree_type_map> ();
  h->type.from = type;
  h->decl = decl;
  *decl_tree_for_type->find_slot_with_hash (h, TYPE_UID (type), INSERT) = h;
}

/* Helper routine, which encodes a value in the pointer_sized_int_node.
   Arguments with precision <= POINTER_SIZE are passed directly,
   the rest is passed by reference.  T is a value we are to encode.
   PHASE determines when this function is called.  */

tree
ubsan_encode_value (tree t, enum ubsan_encode_value_phase phase)
{
  tree type = TREE_TYPE (t);
  scalar_mode mode = SCALAR_TYPE_MODE (type);
  const unsigned int bitsize = GET_MODE_BITSIZE (mode);
  if (bitsize <= POINTER_SIZE)
    /* Small values are passed directly, widened/converted to the
       pointer-sized integer type.  */
    switch (TREE_CODE (type))
      {
      case BOOLEAN_TYPE:
      case ENUMERAL_TYPE:
      case INTEGER_TYPE:
	return fold_build1 (NOP_EXPR, pointer_sized_int_node, t);
      case REAL_TYPE:
	{
	  /* Reinterpret the bits of the float as an unsigned integer of
	     the same width, then zero-extend to pointer size.  */
	  tree itype = build_nonstandard_integer_type (bitsize, true);
	  t = fold_build1 (VIEW_CONVERT_EXPR, itype, t);
	  return fold_convert (pointer_sized_int_node, t);
	}
      default:
	gcc_unreachable ();
      }
  else
    {
      /* Wide values are passed by reference: return the address of T,
	 or of a temporary holding a copy of T.  */
      if (!DECL_P (t) || !TREE_ADDRESSABLE (t))
	{
	  /* The reason for this is that we don't want to pessimize
	     code by making vars unnecessarily addressable.  */
	  tree var;
	  if (phase != UBSAN_ENCODE_VALUE_GENERIC)
	    {
	      var = create_tmp_var (type);
	      mark_addressable (var);
	    }
	  else
	    {
	      /* In the GENERIC phase there is no current gimplify context,
		 so build the temporary by hand.  */
	      var = create_tmp_var_raw (type);
	      TREE_ADDRESSABLE (var) = 1;
	      DECL_CONTEXT (var) = current_function_decl;
	    }
	  if (phase == UBSAN_ENCODE_VALUE_RTL)
	    {
	      /* During RTL expansion: back the temporary with a stack slot
		 and emit the store right away.  */
	      rtx mem = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
						    type);
	      SET_DECL_RTL (var, mem);
	      expand_assignment (var, t, false);
	      return build_fold_addr_expr (var);
	    }
	  if (phase != UBSAN_ENCODE_VALUE_GENERIC)
	    {
	      /* GIMPLE phase: emit "var = t, &var" as a COMPOUND_EXPR.  */
	      tree tem = build2 (MODIFY_EXPR, void_type_node, var, t);
	      t = build_fold_addr_expr (var);
	      return build2 (COMPOUND_EXPR, TREE_TYPE (t), tem, t);
	    }
	  else
	    {
	      /* GENERIC phase: wrap the initialization in a TARGET_EXPR so
		 the gimplifier materializes the temporary later.  */
	      var = build4 (TARGET_EXPR, type, var, t, NULL_TREE, NULL_TREE);
	      return build_fold_addr_expr (var);
	    }
	}
      else
	/* T is already an addressable decl; just take its address.  */
	return build_fold_addr_expr (t);
    }
}

/* Cached ubsan_get_type_descriptor_type () return value.
*/ 189 static GTY(()) tree ubsan_type_descriptor_type; 190 191 /* Build 192 struct __ubsan_type_descriptor 193 { 194 unsigned short __typekind; 195 unsigned short __typeinfo; 196 char __typename[]; 197 } 198 type. */ 199 200 static tree 201 ubsan_get_type_descriptor_type (void) 202 { 203 static const char *field_names[3] 204 = { "__typekind", "__typeinfo", "__typename" }; 205 tree fields[3], ret; 206 207 if (ubsan_type_descriptor_type) 208 return ubsan_type_descriptor_type; 209 210 tree itype = build_range_type (sizetype, size_zero_node, NULL_TREE); 211 tree flex_arr_type = build_array_type (char_type_node, itype); 212 213 ret = make_node (RECORD_TYPE); 214 for (int i = 0; i < 3; i++) 215 { 216 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, 217 get_identifier (field_names[i]), 218 (i == 2) ? flex_arr_type 219 : short_unsigned_type_node); 220 DECL_CONTEXT (fields[i]) = ret; 221 if (i) 222 DECL_CHAIN (fields[i - 1]) = fields[i]; 223 } 224 tree type_decl = build_decl (input_location, TYPE_DECL, 225 get_identifier ("__ubsan_type_descriptor"), 226 ret); 227 DECL_IGNORED_P (type_decl) = 1; 228 DECL_ARTIFICIAL (type_decl) = 1; 229 TYPE_FIELDS (ret) = fields[0]; 230 TYPE_NAME (ret) = type_decl; 231 TYPE_STUB_DECL (ret) = type_decl; 232 layout_type (ret); 233 ubsan_type_descriptor_type = ret; 234 return ret; 235 } 236 237 /* Cached ubsan_get_source_location_type () return value. */ 238 static GTY(()) tree ubsan_source_location_type; 239 240 /* Build 241 struct __ubsan_source_location 242 { 243 const char *__filename; 244 unsigned int __line; 245 unsigned int __column; 246 } 247 type. 
*/ 248 249 tree 250 ubsan_get_source_location_type (void) 251 { 252 static const char *field_names[3] 253 = { "__filename", "__line", "__column" }; 254 tree fields[3], ret; 255 if (ubsan_source_location_type) 256 return ubsan_source_location_type; 257 258 tree const_char_type = build_qualified_type (char_type_node, 259 TYPE_QUAL_CONST); 260 261 ret = make_node (RECORD_TYPE); 262 for (int i = 0; i < 3; i++) 263 { 264 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, 265 get_identifier (field_names[i]), 266 (i == 0) ? build_pointer_type (const_char_type) 267 : unsigned_type_node); 268 DECL_CONTEXT (fields[i]) = ret; 269 if (i) 270 DECL_CHAIN (fields[i - 1]) = fields[i]; 271 } 272 tree type_decl = build_decl (input_location, TYPE_DECL, 273 get_identifier ("__ubsan_source_location"), 274 ret); 275 DECL_IGNORED_P (type_decl) = 1; 276 DECL_ARTIFICIAL (type_decl) = 1; 277 TYPE_FIELDS (ret) = fields[0]; 278 TYPE_NAME (ret) = type_decl; 279 TYPE_STUB_DECL (ret) = type_decl; 280 layout_type (ret); 281 ubsan_source_location_type = ret; 282 return ret; 283 } 284 285 /* Helper routine that returns a CONSTRUCTOR of __ubsan_source_location 286 type with its fields filled from a location_t LOC. */ 287 288 static tree 289 ubsan_source_location (location_t loc) 290 { 291 expanded_location xloc; 292 tree type = ubsan_get_source_location_type (); 293 294 xloc = expand_location (loc); 295 tree str; 296 if (xloc.file == NULL) 297 { 298 str = build_int_cst (ptr_type_node, 0); 299 xloc.line = 0; 300 xloc.column = 0; 301 } 302 else 303 { 304 /* Fill in the values from LOC. 
*/ 305 size_t len = strlen (xloc.file) + 1; 306 str = build_string (len, xloc.file); 307 TREE_TYPE (str) = build_array_type_nelts (char_type_node, len); 308 TREE_READONLY (str) = 1; 309 TREE_STATIC (str) = 1; 310 str = build_fold_addr_expr (str); 311 } 312 tree ctor = build_constructor_va (type, 3, NULL_TREE, str, NULL_TREE, 313 build_int_cst (unsigned_type_node, 314 xloc.line), NULL_TREE, 315 build_int_cst (unsigned_type_node, 316 xloc.column)); 317 TREE_CONSTANT (ctor) = 1; 318 TREE_STATIC (ctor) = 1; 319 320 return ctor; 321 } 322 323 /* This routine returns a magic number for TYPE. */ 324 325 static unsigned short 326 get_ubsan_type_info_for_type (tree type) 327 { 328 if (TREE_CODE (type) == REAL_TYPE) 329 return tree_to_uhwi (TYPE_SIZE (type)); 330 else if (INTEGRAL_TYPE_P (type)) 331 { 332 int prec = exact_log2 (tree_to_uhwi (TYPE_SIZE (type))); 333 gcc_assert (prec != -1); 334 return (prec << 1) | !TYPE_UNSIGNED (type); 335 } 336 else 337 return 0; 338 } 339 340 /* Counters for internal labels. ubsan_ids[0] for Lubsan_type, 341 ubsan_ids[1] for Lubsan_data labels. */ 342 static GTY(()) unsigned int ubsan_ids[2]; 343 344 /* Helper routine that returns ADDR_EXPR of a VAR_DECL of a type 345 descriptor. It first looks into the hash table; if not found, 346 create the VAR_DECL, put it into the hash table and return the 347 ADDR_EXPR of it. TYPE describes a particular type. PSTYLE is 348 an enum controlling how we want to print the type. */ 349 350 tree 351 ubsan_type_descriptor (tree type, enum ubsan_print_style pstyle) 352 { 353 /* See through any typedefs. */ 354 type = TYPE_MAIN_VARIANT (type); 355 356 tree decl = decl_for_type_lookup (type); 357 /* It is possible that some of the earlier created DECLs were found 358 unused, in that case they weren't emitted and varpool_node::get 359 returns NULL node on them. But now we really need them. Thus, 360 renew them here. 
*/
  /* Reuse the cached descriptor only if it was actually emitted.  */
  if (decl != NULL_TREE && varpool_node::get (decl))
    return build_fold_addr_expr (decl);

  tree dtype = ubsan_get_type_descriptor_type ();
  tree type2 = type;
  const char *tname = NULL;
  pretty_printer pretty_name;
  unsigned char deref_depth = 0;
  unsigned short tkind, tinfo;

  /* Get the name of the type, or the name of the pointer type.  */
  if (pstyle == UBSAN_PRINT_POINTER)
    {
      gcc_assert (POINTER_TYPE_P (type));
      type2 = TREE_TYPE (type);

      /* Remove any '*' operators from TYPE.  */
      while (POINTER_TYPE_P (type2))
	deref_depth++, type2 = TREE_TYPE (type2);

      if (TREE_CODE (type2) == METHOD_TYPE)
	type2 = TYPE_METHOD_BASETYPE (type2);
    }

  /* If an array, get its type.  */
  type2 = strip_array_types (type2);

  if (pstyle == UBSAN_PRINT_ARRAY)
    {
      while (POINTER_TYPE_P (type2))
	deref_depth++, type2 = TREE_TYPE (type2);
    }

  /* TYPE_NAME may be an IDENTIFIER_NODE or a TYPE_DECL.  */
  if (TYPE_NAME (type2) != NULL)
    {
      if (TREE_CODE (TYPE_NAME (type2)) == IDENTIFIER_NODE)
	tname = IDENTIFIER_POINTER (TYPE_NAME (type2));
      else if (DECL_NAME (TYPE_NAME (type2)) != NULL)
	tname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type2)));
    }

  if (tname == NULL)
    /* We weren't able to determine the type name.  */
    tname = "<unknown>";

  tree eltype = type;
  if (pstyle == UBSAN_PRINT_POINTER)
    {
      /* E.g. "'const struct foo *'": qualifiers, tag keyword, name,
	 then one '*' per stripped pointer level.  */
      pp_printf (&pretty_name, "'%s%s%s%s%s%s%s",
		 TYPE_VOLATILE (type2) ? "volatile " : "",
		 TYPE_READONLY (type2) ? "const " : "",
		 TYPE_RESTRICT (type2) ? "restrict " : "",
		 TYPE_ATOMIC (type2) ? "_Atomic " : "",
		 TREE_CODE (type2) == RECORD_TYPE
		 ? "struct "
		 : TREE_CODE (type2) == UNION_TYPE
		   ? "union " : "", tname,
		 deref_depth == 0 ? "" : " ");
      while (deref_depth-- > 0)
	pp_star (&pretty_name);
      pp_quote (&pretty_name);
    }
  else if (pstyle == UBSAN_PRINT_ARRAY)
    {
      /* Pretty print the array dimensions.  */
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      tree t = type;
      pp_printf (&pretty_name, "'%s ", tname);
      while (deref_depth-- > 0)
	pp_star (&pretty_name);
      while (TREE_CODE (t) == ARRAY_TYPE)
	{
	  pp_left_bracket (&pretty_name);
	  tree dom = TYPE_DOMAIN (t);
	  if (dom != NULL_TREE
	      && TYPE_MAX_VALUE (dom) != NULL_TREE
	      && TREE_CODE (TYPE_MAX_VALUE (dom)) == INTEGER_CST)
	    {
	      /* Number of elements is max index + 1; fall back to wide-int
		 printing when that would overflow a HOST_WIDE_INT.  */
	      unsigned HOST_WIDE_INT m;
	      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (dom))
		  && (m = tree_to_uhwi (TYPE_MAX_VALUE (dom))) + 1 != 0)
		pp_unsigned_wide_integer (&pretty_name, m + 1);
	      else
		pp_wide_int (&pretty_name,
			     wi::add (wi::to_widest (TYPE_MAX_VALUE (dom)), 1),
			     TYPE_SIGN (TREE_TYPE (dom)));
	    }
	  else
	    /* ??? We can't determine the variable name; print VLA unspec.  */
	    pp_star (&pretty_name);
	  pp_right_bracket (&pretty_name);
	  t = TREE_TYPE (t);
	}
      pp_quote (&pretty_name);

      /* Save the tree with stripped types.  */
      eltype = t;
    }
  else
    pp_printf (&pretty_name, "'%s'", tname);

  /* __typekind: 0x0000 = integer, 0x0001 = float, 0xffff = unknown.  */
  switch (TREE_CODE (eltype))
    {
    case BOOLEAN_TYPE:
    case ENUMERAL_TYPE:
    case INTEGER_TYPE:
      tkind = 0x0000;
      break;
    case REAL_TYPE:
      /* FIXME: libubsan right now only supports float, double and
	 long double type formats.  */
      if (TYPE_MODE (eltype) == TYPE_MODE (float_type_node)
	  || TYPE_MODE (eltype) == TYPE_MODE (double_type_node)
	  || TYPE_MODE (eltype) == TYPE_MODE (long_double_type_node))
	tkind = 0x0001;
      else
	tkind = 0xffff;
      break;
    default:
      tkind = 0xffff;
      break;
    }
  tinfo = get_ubsan_type_info_for_type (eltype);

  /* Create a new VAR_DECL of type descriptor.  */
  const char *tmp = pp_formatted_text (&pretty_name);
  size_t len = strlen (tmp) + 1;
  tree str = build_string (len, tmp);
  TREE_TYPE (str) = build_array_type_nelts (char_type_node, len);
  TREE_READONLY (str) = 1;
  TREE_STATIC (str) = 1;

  char tmp_name[32];
  ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_type", ubsan_ids[0]++);
  decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
		     dtype);
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  /* Grow the decl past the flexible array member to cover the
     type name string stored in it.  */
  DECL_SIZE (decl)
    = size_binop (PLUS_EXPR, DECL_SIZE (decl), TYPE_SIZE (TREE_TYPE (str)));
  DECL_SIZE_UNIT (decl)
    = size_binop (PLUS_EXPR, DECL_SIZE_UNIT (decl),
		  TYPE_SIZE_UNIT (TREE_TYPE (str)));

  /* Initializer: { tkind, tinfo, "type name" }.  */
  tree ctor = build_constructor_va (dtype, 3, NULL_TREE,
				    build_int_cst (short_unsigned_type_node,
						   tkind), NULL_TREE,
				    build_int_cst (short_unsigned_type_node,
						   tinfo), NULL_TREE, str);
  TREE_CONSTANT (ctor) = 1;
  TREE_STATIC (ctor) = 1;
  DECL_INITIAL (decl) = ctor;
  varpool_node::finalize_decl (decl);

  /* Save the VAR_DECL into the hash table.  */
  decl_for_type_insert (type, decl);

  return build_fold_addr_expr (decl);
}

/* Create a structure for the ubsan library.  NAME is a name of the new
   structure.  LOCCNT is number of locations, PLOC points to array of
   locations.  The arguments in ... are of __ubsan_type_descriptor type
   and there are at most two of them, followed by NULL_TREE, followed
   by optional extra arguments and another NULL_TREE.  */

tree
ubsan_create_data (const char *name, int loccnt, const location_t *ploc, ...)
{
  va_list args;
  tree ret, t;
  tree fields[6];
  vec<tree, va_gc> *saved_args = NULL;
  size_t i = 0;
  int j;

  /* It is possible that PCH zapped table with definitions of sanitizer
     builtins.  Reinitialize them if needed.
*/
  initialize_sanitizer_builtins ();

  /* Firstly, create a pointer to type descriptor type.  */
  tree td_type = ubsan_get_type_descriptor_type ();
  td_type = build_pointer_type (td_type);

  /* Create the structure type.  Fields are accumulated in order:
     LOCCNT source-location fields, then the type-descriptor pointer
     arguments, then the extra arguments, at most 6 in total.  */
  ret = make_node (RECORD_TYPE);
  for (j = 0; j < loccnt; j++)
    {
      gcc_checking_assert (i < 2);
      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      ubsan_get_source_location_type ());
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
      i++;
    }

  /* First vararg group (up to the first NULL_TREE): type descriptor
     addresses; each becomes a __ubsan_type_descriptor * field.  */
  va_start (args, ploc);
  for (t = va_arg (args, tree); t != NULL_TREE;
       i++, t = va_arg (args, tree))
    {
      gcc_checking_assert (i < 4);
      /* Save the tree arguments for later use.  */
      vec_safe_push (saved_args, t);
      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      td_type);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }

  /* Second vararg group (up to the second NULL_TREE): extra constants;
     each field takes the argument's own type.  */
  for (t = va_arg (args, tree); t != NULL_TREE;
       i++, t = va_arg (args, tree))
    {
      gcc_checking_assert (i < 6);
      /* Save the tree arguments for later use.  */
      vec_safe_push (saved_args, t);
      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      TREE_TYPE (t));
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  va_end (args);

  tree type_decl = build_decl (input_location, TYPE_DECL,
			       get_identifier (name), ret);
  DECL_IGNORED_P (type_decl) = 1;
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = type_decl;
  TYPE_STUB_DECL (ret) = type_decl;
  layout_type (ret);

  /* Now, fill in the type.  Emit a static local Lubsan_data* variable
     holding the initializer.  */
  char tmp_name[32];
  ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_data", ubsan_ids[1]++);
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
			 ret);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_IGNORED_P (var) = 1;
  DECL_EXTERNAL (var) = 0;

  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, i);
  tree ctor = build_constructor (ret, v);

  /* If desirable, set the __ubsan_source_location element.  */
  for (j = 0; j < loccnt; j++)
    {
      location_t loc = LOCATION_LOCUS (ploc[j]);
      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, ubsan_source_location (loc));
    }

  /* Append the saved vararg values in the same order as their fields.  */
  size_t nelts = vec_safe_length (saved_args);
  for (i = 0; i < nelts; i++)
    {
      t = (*saved_args)[i];
      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
    }

  TREE_CONSTANT (ctor) = 1;
  TREE_STATIC (ctor) = 1;
  DECL_INITIAL (var) = ctor;
  varpool_node::finalize_decl (var);

  return var;
}

/* Instrument the __builtin_unreachable call.  We just call the libubsan
   routine instead.  */

bool
ubsan_instrument_unreachable (gimple_stmt_iterator *gsi)
{
  gimple *g;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  if (flag_sanitize_undefined_trap_on_error)
    /* With -fsanitize-undefined-trap-on-error just emit a trap.  */
    g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_unreachable_data", 1, &loc,
				     NULL_TREE, NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      tree fn
	= builtin_decl_explicit (BUILT_IN_UBSAN_HANDLE_BUILTIN_UNREACHABLE);
      g = gimple_build_call (fn, 1, data);
    }
  gimple_set_location (g, loc);
  gsi_replace (gsi, g, false);
  return false;
}

/* Return true if T is a call to a libubsan routine.
*/ 661 662 bool 663 is_ubsan_builtin_p (tree t) 664 { 665 return TREE_CODE (t) == FUNCTION_DECL 666 && DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL 667 && strncmp (IDENTIFIER_POINTER (DECL_NAME (t)), 668 "__builtin___ubsan_", 18) == 0; 669 } 670 671 /* Create a callgraph edge for statement STMT. */ 672 673 static void 674 ubsan_create_edge (gimple *stmt) 675 { 676 gcall *call_stmt = dyn_cast <gcall *> (stmt); 677 basic_block bb = gimple_bb (stmt); 678 cgraph_node *node = cgraph_node::get (current_function_decl); 679 tree decl = gimple_call_fndecl (call_stmt); 680 if (decl) 681 node->create_edge (cgraph_node::get_create (decl), call_stmt, bb->count); 682 } 683 684 /* Expand the UBSAN_BOUNDS special builtin function. */ 685 686 bool 687 ubsan_expand_bounds_ifn (gimple_stmt_iterator *gsi) 688 { 689 gimple *stmt = gsi_stmt (*gsi); 690 location_t loc = gimple_location (stmt); 691 gcc_assert (gimple_call_num_args (stmt) == 3); 692 693 /* Pick up the arguments of the UBSAN_BOUNDS call. */ 694 tree type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 0))); 695 tree index = gimple_call_arg (stmt, 1); 696 tree orig_index = index; 697 tree bound = gimple_call_arg (stmt, 2); 698 699 gimple_stmt_iterator gsi_orig = *gsi; 700 701 /* Create condition "if (index > bound)". */ 702 basic_block then_bb, fallthru_bb; 703 gimple_stmt_iterator cond_insert_point 704 = create_cond_insert_point (gsi, false, false, true, 705 &then_bb, &fallthru_bb); 706 index = fold_convert (TREE_TYPE (bound), index); 707 index = force_gimple_operand_gsi (&cond_insert_point, index, 708 true, NULL_TREE, 709 false, GSI_NEW_STMT); 710 gimple *g = gimple_build_cond (GT_EXPR, index, bound, NULL_TREE, NULL_TREE); 711 gimple_set_location (g, loc); 712 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT); 713 714 /* Generate __ubsan_handle_out_of_bounds call. 
*/ 715 *gsi = gsi_after_labels (then_bb); 716 if (flag_sanitize_undefined_trap_on_error) 717 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0); 718 else 719 { 720 tree data 721 = ubsan_create_data ("__ubsan_out_of_bounds_data", 1, &loc, 722 ubsan_type_descriptor (type, UBSAN_PRINT_ARRAY), 723 ubsan_type_descriptor (TREE_TYPE (orig_index)), 724 NULL_TREE, NULL_TREE); 725 data = build_fold_addr_expr_loc (loc, data); 726 enum built_in_function bcode 727 = (flag_sanitize_recover & SANITIZE_BOUNDS) 728 ? BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS 729 : BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS_ABORT; 730 tree fn = builtin_decl_explicit (bcode); 731 tree val = ubsan_encode_value (orig_index, UBSAN_ENCODE_VALUE_GIMPLE); 732 val = force_gimple_operand_gsi (gsi, val, true, NULL_TREE, true, 733 GSI_SAME_STMT); 734 g = gimple_build_call (fn, 2, data, val); 735 } 736 gimple_set_location (g, loc); 737 gsi_insert_before (gsi, g, GSI_SAME_STMT); 738 739 /* Get rid of the UBSAN_BOUNDS call from the IR. */ 740 unlink_stmt_vdef (stmt); 741 gsi_remove (&gsi_orig, true); 742 743 /* Point GSI to next logical statement. */ 744 *gsi = gsi_start_bb (fallthru_bb); 745 return true; 746 } 747 748 /* Expand UBSAN_NULL internal call. The type is kept on the ckind 749 argument which is a constant, because the middle-end treats pointer 750 conversions as useless and therefore the type of the first argument 751 could be changed to any other pointer type. 
*/

bool
ubsan_expand_null_ifn (gimple_stmt_iterator *gsip)
{
  gimple_stmt_iterator gsi = *gsip;
  gimple *stmt = gsi_stmt (gsi);
  location_t loc = gimple_location (stmt);
  gcc_assert (gimple_call_num_args (stmt) == 3);
  tree ptr = gimple_call_arg (stmt, 0);
  tree ckind = gimple_call_arg (stmt, 1);
  tree align = gimple_call_arg (stmt, 2);
  tree check_align = NULL_TREE;
  bool check_null;

  basic_block cur_bb = gsi_bb (gsi);

  gimple *g;
  /* A non-zero ALIGN requests an alignment check; skip it when the
     pointer is already known to be sufficiently aligned.  */
  if (!integer_zerop (align))
    {
      unsigned int ptralign = get_pointer_alignment (ptr) / BITS_PER_UNIT;
      if (compare_tree_int (align, ptralign) == 1)
	{
	  /* CHECK_ALIGN is the pointer converted to a pointer-sized
	     integer, tested against the alignment mask below.  */
	  check_align = make_ssa_name (pointer_sized_int_node);
	  g = gimple_build_assign (check_align, NOP_EXPR, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
	}
    }
  check_null = sanitize_flags_p (SANITIZE_NULL);

  /* Nothing to check: drop the internal call entirely.  */
  if (check_align == NULL_TREE && !check_null)
    {
      gsi_remove (gsip, true);
      /* Unlink the UBSAN_NULLs vops before replacing it.  */
      unlink_stmt_vdef (stmt);
      return true;
    }

  /* Split the original block holding the pointer dereference.  */
  edge e = split_block (cur_bb, stmt);

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  add_bb_to_loop (then_bb, cond_bb->loop_father);
  loops_state_set (LOOPS_NEED_FIXUP);

  /* Make an edge coming from the 'cond block' into the 'then block';
     this edge is unlikely taken, so set up the probability accordingly.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  e->probability = profile_probability::very_unlikely ();
  then_bb->count = e->count ();

  /* Connect 'then block' with the 'else block'.  This is needed
     as the ubsan routines we call in the 'then block' are not noreturn.
     The 'then block' only has one outcoming edge.  */
  make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->probability = profile_probability::very_likely ();

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  /* Put the ubsan builtin call into the newly created BB.  */
  if (flag_sanitize_undefined_trap_on_error)
    g = gimple_build_call (builtin_decl_implicit (BUILT_IN_TRAP), 0);
  else
    {
      /* Recover if any of the requested checks is recoverable.  */
      enum built_in_function bcode
	= (flag_sanitize_recover & ((check_align ? SANITIZE_ALIGNMENT : 0)
				    | (check_null ? SANITIZE_NULL : 0)))
	  ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1
	  : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1_ABORT;
      tree fn = builtin_decl_implicit (bcode);
      int align_log = tree_log2 (align);
      tree data
	= ubsan_create_data ("__ubsan_null_data", 1, &loc,
			     ubsan_type_descriptor (TREE_TYPE (ckind),
						    UBSAN_PRINT_POINTER),
			     NULL_TREE,
			     build_int_cst (unsigned_char_type_node,
					    MAX (align_log, 0)),
			     fold_convert (unsigned_char_type_node, ckind),
			     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      g = gimple_build_call (fn, 2, data,
			     check_align ? check_align
			     : build_zero_cst (pointer_sized_int_node));
    }
  gimple_stmt_iterator gsi2 = gsi_start_bb (then_bb);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi2, g, GSI_NEW_STMT);

  /* Unlink the UBSAN_NULLs vops before replacing it.  */
  unlink_stmt_vdef (stmt);

  if (check_null)
    {
      /* Condition "if (ptr == 0)" guarding the handler call.  */
      g = gimple_build_cond (EQ_EXPR, ptr, build_int_cst (TREE_TYPE (ptr), 0),
			     NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      /* Replace the UBSAN_NULL with a GIMPLE_COND stmt.  */
      gsi_replace (&gsi, g, false);
      stmt = g;
    }

  if (check_align)
    {
      if (check_null)
	{
	  /* Split the block with the condition again.  The alignment
	     test goes into its own block after the null test.  */
	  e = split_block (cond_bb, stmt);
	  basic_block cond1_bb = e->src;
	  basic_block cond2_bb = e->dest;

	  /* Make an edge coming from the 'cond1 block' into the 'then block';
	     this edge is unlikely taken, so set up the probability
	     accordingly.  */
	  e = make_edge (cond1_bb, then_bb, EDGE_TRUE_VALUE);
	  e->probability = profile_probability::very_unlikely ();

	  /* Set up the fallthrough basic block.  */
	  e = find_edge (cond1_bb, cond2_bb);
	  e->flags = EDGE_FALSE_VALUE;
	  e->probability = profile_probability::very_likely ();

	  /* Update dominance info.  */
	  if (dom_info_available_p (CDI_DOMINATORS))
	    {
	      set_immediate_dominator (CDI_DOMINATORS, fallthru_bb, cond1_bb);
	      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond1_bb);
	    }

	  gsi2 = gsi_start_bb (cond2_bb);
	}

      /* Misalignment test: "(uintptr) ptr & (align - 1) != 0".  */
      tree mask = build_int_cst (pointer_sized_int_node,
				 tree_to_uhwi (align) - 1);
      g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			       BIT_AND_EXPR, check_align, mask);
      gimple_set_location (g, loc);
      if (check_null)
	gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
      else
	gsi_insert_before (&gsi, g, GSI_SAME_STMT);

      g = gimple_build_cond (NE_EXPR, gimple_assign_lhs (g),
			     build_int_cst (pointer_sized_int_node, 0),
			     NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);
      if (check_null)
	gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
      else
	/* Replace the UBSAN_NULL with a GIMPLE_COND stmt.  */
	gsi_replace (&gsi, g, false);
    }
  return false;
}

/* Largest absolute offset for which the first (offset > objsize)
   comparison alone is considered sufficient.  */
#define OBJSZ_MAX_OFFSET (1024 * 16)

/* Expand UBSAN_OBJECT_SIZE internal call.  */

bool
ubsan_expand_objsize_ifn (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  gcc_assert (gimple_call_num_args (stmt) == 4);

  tree ptr = gimple_call_arg (stmt, 0);
  tree offset = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree ckind = gimple_call_arg (stmt, 3);
  gimple_stmt_iterator gsi_orig = *gsi;
  gimple *g;

  /* See if we can discard the check.  */
  if (TREE_CODE (size) != INTEGER_CST
      || integer_all_onesp (size))
    /* Yes, __builtin_object_size couldn't determine the
       object size.  */;
  else if (TREE_CODE (offset) == INTEGER_CST
	   && wi::to_widest (offset) >= -OBJSZ_MAX_OFFSET
	   && wi::to_widest (offset) <= -1)
    /* The offset is in range [-16K, -1].  */;
  else
    {
      /* if (offset > objsize) */
      basic_block then_bb, fallthru_bb;
      gimple_stmt_iterator cond_insert_point
	= create_cond_insert_point (gsi, false, false, true,
				    &then_bb, &fallthru_bb);
      g = gimple_build_cond (GT_EXPR, offset, size, NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);
      gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);

      /* If the offset is small enough, we don't need the second
	 run-time check.  */
      if (TREE_CODE (offset) == INTEGER_CST
	  && wi::to_widest (offset) >= 0
	  && wi::to_widest (offset) <= OBJSZ_MAX_OFFSET)
	*gsi = gsi_after_labels (then_bb);
      else
	{
	  /* Don't issue run-time error if (ptr > ptr + offset).  That
	     may happen when computing a POINTER_PLUS_EXPR.
	  */
	  basic_block then2_bb, fallthru2_bb;

	  /* Second guard: only report when ptr <= ptr + offset, i.e. the
	     pointer addition did not wrap.  */
	  gimple_stmt_iterator gsi2 = gsi_after_labels (then_bb);
	  cond_insert_point = create_cond_insert_point (&gsi2, false, false,
							true, &then2_bb,
							&fallthru2_bb);
	  /* Convert the pointer to an integer type.  */
	  tree p = make_ssa_name (pointer_sized_int_node);
	  g = gimple_build_assign (p, NOP_EXPR, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (&cond_insert_point, g, GSI_NEW_STMT);
	  p = gimple_assign_lhs (g);
	  /* Compute ptr + offset.  */
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   PLUS_EXPR, p, offset);
	  gimple_set_location (g, loc);
	  gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
	  /* Now build the conditional and put it into the IR.  */
	  g = gimple_build_cond (LE_EXPR, p, gimple_assign_lhs (g),
				 NULL_TREE, NULL_TREE);
	  gimple_set_location (g, loc);
	  gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
	  *gsi = gsi_after_labels (then2_bb);
	}

      /* Generate __ubsan_handle_type_mismatch call.  */
      if (flag_sanitize_undefined_trap_on_error)
	g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
      else
	{
	  tree data
	    = ubsan_create_data ("__ubsan_objsz_data", 1, &loc,
				 ubsan_type_descriptor (TREE_TYPE (ptr),
							UBSAN_PRINT_POINTER),
				 NULL_TREE,
				 build_zero_cst (unsigned_char_type_node),
				 ckind,
				 NULL_TREE);
	  data = build_fold_addr_expr_loc (loc, data);
	  /* Recovering vs. aborting handler per -fsanitize-recover.  */
	  enum built_in_function bcode
	    = (flag_sanitize_recover & SANITIZE_OBJECT_SIZE)
	      ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1
	      : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1_ABORT;
	  tree p = make_ssa_name (pointer_sized_int_node);
	  g = gimple_build_assign (p, NOP_EXPR, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_call (builtin_decl_explicit (bcode), 2, data, p);
	}
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);

      /* Point GSI to next logical statement.  */
      *gsi = gsi_start_bb (fallthru_bb);

      /* Get rid of the UBSAN_OBJECT_SIZE call from the IR.  */
      unlink_stmt_vdef (stmt);
      gsi_remove (&gsi_orig, true);
      return true;
    }

  /* Get rid of the UBSAN_OBJECT_SIZE call from the IR.  */
  unlink_stmt_vdef (stmt);
  gsi_remove (gsi, true);
  return true;
}

/* Expand UBSAN_PTR internal call.  */

bool
ubsan_expand_ptr_ifn (gimple_stmt_iterator *gsip)
{
  gimple_stmt_iterator gsi = *gsip;
  gimple *stmt = gsi_stmt (gsi);
  location_t loc = gimple_location (stmt);
  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree off = gimple_call_arg (stmt, 1);

  if (integer_zerop (off))
    {
      gsi_remove (gsip, true);
      unlink_stmt_vdef (stmt);
      return true;
    }

  basic_block cur_bb = gsi_bb (gsi);
  tree ptrplusoff = make_ssa_name (pointer_sized_int_node);
  tree ptri = make_ssa_name (pointer_sized_int_node);
  int pos_neg = get_range_pos_neg (off);

  /* Split the original block holding the pointer dereference.  */
  edge e = split_block (cur_bb, stmt);

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.
*/ 1064 basic_block cond_bb = e->src; 1065 basic_block fallthru_bb = e->dest; 1066 basic_block then_bb = create_empty_bb (cond_bb); 1067 basic_block cond_pos_bb = NULL, cond_neg_bb = NULL; 1068 add_bb_to_loop (then_bb, cond_bb->loop_father); 1069 loops_state_set (LOOPS_NEED_FIXUP); 1070 1071 /* Set up the fallthrough basic block. */ 1072 e->flags = EDGE_FALSE_VALUE; 1073 if (pos_neg != 3) 1074 { 1075 e->probability = profile_probability::very_likely (); 1076 1077 /* Connect 'then block' with the 'else block'. This is needed 1078 as the ubsan routines we call in the 'then block' are not noreturn. 1079 The 'then block' only has one outcoming edge. */ 1080 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU); 1081 1082 /* Make an edge coming from the 'cond block' into the 'then block'; 1083 this edge is unlikely taken, so set up the probability 1084 accordingly. */ 1085 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE); 1086 e->probability = profile_probability::very_unlikely (); 1087 then_bb->count = e->count (); 1088 } 1089 else 1090 { 1091 e->probability = profile_probability::even (); 1092 1093 e = split_block (fallthru_bb, (gimple *) NULL); 1094 cond_neg_bb = e->src; 1095 fallthru_bb = e->dest; 1096 e->probability = profile_probability::very_likely (); 1097 e->flags = EDGE_FALSE_VALUE; 1098 1099 e = make_edge (cond_neg_bb, then_bb, EDGE_TRUE_VALUE); 1100 e->probability = profile_probability::very_unlikely (); 1101 then_bb->count = e->count (); 1102 1103 cond_pos_bb = create_empty_bb (cond_bb); 1104 add_bb_to_loop (cond_pos_bb, cond_bb->loop_father); 1105 1106 e = make_edge (cond_bb, cond_pos_bb, EDGE_TRUE_VALUE); 1107 e->probability = profile_probability::even (); 1108 cond_pos_bb->count = e->count (); 1109 1110 e = make_edge (cond_pos_bb, then_bb, EDGE_TRUE_VALUE); 1111 e->probability = profile_probability::very_unlikely (); 1112 1113 e = make_edge (cond_pos_bb, fallthru_bb, EDGE_FALSE_VALUE); 1114 e->probability = profile_probability::very_likely (); 
1115 1116 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU); 1117 } 1118 1119 gimple *g = gimple_build_assign (ptri, NOP_EXPR, ptr); 1120 gimple_set_location (g, loc); 1121 gsi_insert_before (&gsi, g, GSI_SAME_STMT); 1122 g = gimple_build_assign (ptrplusoff, PLUS_EXPR, ptri, off); 1123 gimple_set_location (g, loc); 1124 gsi_insert_before (&gsi, g, GSI_SAME_STMT); 1125 1126 /* Update dominance info for the newly created then_bb; note that 1127 fallthru_bb's dominance info has already been updated by 1128 split_block. */ 1129 if (dom_info_available_p (CDI_DOMINATORS)) 1130 { 1131 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb); 1132 if (pos_neg == 3) 1133 { 1134 set_immediate_dominator (CDI_DOMINATORS, cond_pos_bb, cond_bb); 1135 set_immediate_dominator (CDI_DOMINATORS, fallthru_bb, cond_bb); 1136 } 1137 } 1138 1139 /* Put the ubsan builtin call into the newly created BB. */ 1140 if (flag_sanitize_undefined_trap_on_error) 1141 g = gimple_build_call (builtin_decl_implicit (BUILT_IN_TRAP), 0); 1142 else 1143 { 1144 enum built_in_function bcode 1145 = (flag_sanitize_recover & SANITIZE_POINTER_OVERFLOW) 1146 ? BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW 1147 : BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW_ABORT; 1148 tree fn = builtin_decl_implicit (bcode); 1149 tree data 1150 = ubsan_create_data ("__ubsan_ptrovf_data", 1, &loc, 1151 NULL_TREE, NULL_TREE); 1152 data = build_fold_addr_expr_loc (loc, data); 1153 g = gimple_build_call (fn, 3, data, ptr, ptrplusoff); 1154 } 1155 gimple_stmt_iterator gsi2 = gsi_start_bb (then_bb); 1156 gimple_set_location (g, loc); 1157 gsi_insert_after (&gsi2, g, GSI_NEW_STMT); 1158 1159 /* Unlink the UBSAN_PTRs vops before replacing it. */ 1160 unlink_stmt_vdef (stmt); 1161 1162 if (TREE_CODE (off) == INTEGER_CST) 1163 g = gimple_build_cond (wi::neg_p (wi::to_wide (off)) ? 
LT_EXPR : GE_EXPR, 1164 ptri, fold_build1 (NEGATE_EXPR, sizetype, off), 1165 NULL_TREE, NULL_TREE); 1166 else if (pos_neg != 3) 1167 g = gimple_build_cond (pos_neg == 1 ? LT_EXPR : GT_EXPR, 1168 ptrplusoff, ptri, NULL_TREE, NULL_TREE); 1169 else 1170 { 1171 gsi2 = gsi_start_bb (cond_pos_bb); 1172 g = gimple_build_cond (LT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE); 1173 gimple_set_location (g, loc); 1174 gsi_insert_after (&gsi2, g, GSI_NEW_STMT); 1175 1176 gsi2 = gsi_start_bb (cond_neg_bb); 1177 g = gimple_build_cond (GT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE); 1178 gimple_set_location (g, loc); 1179 gsi_insert_after (&gsi2, g, GSI_NEW_STMT); 1180 1181 gimple_seq seq = NULL; 1182 tree t = gimple_build (&seq, loc, NOP_EXPR, ssizetype, off); 1183 t = gimple_build (&seq, loc, GE_EXPR, boolean_type_node, 1184 t, ssize_int (0)); 1185 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT); 1186 g = gimple_build_cond (NE_EXPR, t, boolean_false_node, 1187 NULL_TREE, NULL_TREE); 1188 } 1189 gimple_set_location (g, loc); 1190 /* Replace the UBSAN_PTR with a GIMPLE_COND stmt. */ 1191 gsi_replace (&gsi, g, false); 1192 return false; 1193 } 1194 1195 1196 /* Cached __ubsan_vptr_type_cache decl. */ 1197 static GTY(()) tree ubsan_vptr_type_cache_decl; 1198 1199 /* Expand UBSAN_VPTR internal call. The type is kept on the ckind 1200 argument which is a constant, because the middle-end treats pointer 1201 conversions as useless and therefore the type of the first argument 1202 could be changed to any other pointer type. 
*/ 1203 1204 bool 1205 ubsan_expand_vptr_ifn (gimple_stmt_iterator *gsip) 1206 { 1207 gimple_stmt_iterator gsi = *gsip; 1208 gimple *stmt = gsi_stmt (gsi); 1209 location_t loc = gimple_location (stmt); 1210 gcc_assert (gimple_call_num_args (stmt) == 5); 1211 tree op = gimple_call_arg (stmt, 0); 1212 tree vptr = gimple_call_arg (stmt, 1); 1213 tree str_hash = gimple_call_arg (stmt, 2); 1214 tree ti_decl_addr = gimple_call_arg (stmt, 3); 1215 tree ckind_tree = gimple_call_arg (stmt, 4); 1216 ubsan_null_ckind ckind = (ubsan_null_ckind) tree_to_uhwi (ckind_tree); 1217 tree type = TREE_TYPE (TREE_TYPE (ckind_tree)); 1218 gimple *g; 1219 basic_block fallthru_bb = NULL; 1220 1221 if (ckind == UBSAN_DOWNCAST_POINTER) 1222 { 1223 /* Guard everything with if (op != NULL) { ... }. */ 1224 basic_block then_bb; 1225 gimple_stmt_iterator cond_insert_point 1226 = create_cond_insert_point (gsip, false, false, true, 1227 &then_bb, &fallthru_bb); 1228 g = gimple_build_cond (NE_EXPR, op, build_zero_cst (TREE_TYPE (op)), 1229 NULL_TREE, NULL_TREE); 1230 gimple_set_location (g, loc); 1231 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT); 1232 *gsip = gsi_after_labels (then_bb); 1233 gsi_remove (&gsi, false); 1234 gsi_insert_before (gsip, stmt, GSI_NEW_STMT); 1235 gsi = *gsip; 1236 } 1237 1238 tree htype = TREE_TYPE (str_hash); 1239 tree cst = wide_int_to_tree (htype, 1240 wi::uhwi (((uint64_t) 0x9ddfea08 << 32) 1241 | 0xeb382d69, 64)); 1242 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR, 1243 vptr, str_hash); 1244 gimple_set_location (g, loc); 1245 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1246 g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR, 1247 gimple_assign_lhs (g), cst); 1248 gimple_set_location (g, loc); 1249 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1250 tree t1 = gimple_assign_lhs (g); 1251 g = gimple_build_assign (make_ssa_name (htype), LSHIFT_EXPR, 1252 t1, build_int_cst (integer_type_node, 47)); 1253 gimple_set_location (g, loc); 1254 tree 
t2 = gimple_assign_lhs (g); 1255 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1256 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR, 1257 vptr, t1); 1258 gimple_set_location (g, loc); 1259 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1260 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR, 1261 t2, gimple_assign_lhs (g)); 1262 gimple_set_location (g, loc); 1263 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1264 g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR, 1265 gimple_assign_lhs (g), cst); 1266 gimple_set_location (g, loc); 1267 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1268 tree t3 = gimple_assign_lhs (g); 1269 g = gimple_build_assign (make_ssa_name (htype), LSHIFT_EXPR, 1270 t3, build_int_cst (integer_type_node, 47)); 1271 gimple_set_location (g, loc); 1272 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1273 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR, 1274 t3, gimple_assign_lhs (g)); 1275 gimple_set_location (g, loc); 1276 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1277 g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR, 1278 gimple_assign_lhs (g), cst); 1279 gimple_set_location (g, loc); 1280 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1281 if (!useless_type_conversion_p (pointer_sized_int_node, htype)) 1282 { 1283 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node), 1284 NOP_EXPR, gimple_assign_lhs (g)); 1285 gimple_set_location (g, loc); 1286 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1287 } 1288 tree hash = gimple_assign_lhs (g); 1289 1290 if (ubsan_vptr_type_cache_decl == NULL_TREE) 1291 { 1292 tree atype = build_array_type_nelts (pointer_sized_int_node, 128); 1293 tree array = build_decl (UNKNOWN_LOCATION, VAR_DECL, 1294 get_identifier ("__ubsan_vptr_type_cache"), 1295 atype); 1296 DECL_ARTIFICIAL (array) = 1; 1297 DECL_IGNORED_P (array) = 1; 1298 TREE_PUBLIC (array) = 1; 1299 TREE_STATIC (array) = 1; 1300 DECL_EXTERNAL (array) = 1; 1301 DECL_VISIBILITY (array) = VISIBILITY_DEFAULT; 1302 
DECL_VISIBILITY_SPECIFIED (array) = 1; 1303 varpool_node::finalize_decl (array); 1304 ubsan_vptr_type_cache_decl = array; 1305 } 1306 1307 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node), 1308 BIT_AND_EXPR, hash, 1309 build_int_cst (pointer_sized_int_node, 127)); 1310 gimple_set_location (g, loc); 1311 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1312 1313 tree c = build4_loc (loc, ARRAY_REF, pointer_sized_int_node, 1314 ubsan_vptr_type_cache_decl, gimple_assign_lhs (g), 1315 NULL_TREE, NULL_TREE); 1316 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node), 1317 ARRAY_REF, c); 1318 gimple_set_location (g, loc); 1319 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1320 1321 basic_block then_bb, fallthru2_bb; 1322 gimple_stmt_iterator cond_insert_point 1323 = create_cond_insert_point (gsip, false, false, true, 1324 &then_bb, &fallthru2_bb); 1325 g = gimple_build_cond (NE_EXPR, gimple_assign_lhs (g), hash, 1326 NULL_TREE, NULL_TREE); 1327 gimple_set_location (g, loc); 1328 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT); 1329 *gsip = gsi_after_labels (then_bb); 1330 if (fallthru_bb == NULL) 1331 fallthru_bb = fallthru2_bb; 1332 1333 tree data 1334 = ubsan_create_data ("__ubsan_vptr_data", 1, &loc, 1335 ubsan_type_descriptor (type), NULL_TREE, ti_decl_addr, 1336 build_int_cst (unsigned_char_type_node, ckind), 1337 NULL_TREE); 1338 data = build_fold_addr_expr_loc (loc, data); 1339 enum built_in_function bcode 1340 = (flag_sanitize_recover & SANITIZE_VPTR) 1341 ? BUILT_IN_UBSAN_HANDLE_DYNAMIC_TYPE_CACHE_MISS 1342 : BUILT_IN_UBSAN_HANDLE_DYNAMIC_TYPE_CACHE_MISS_ABORT; 1343 1344 g = gimple_build_call (builtin_decl_explicit (bcode), 3, data, op, hash); 1345 gimple_set_location (g, loc); 1346 gsi_insert_before (gsip, g, GSI_SAME_STMT); 1347 1348 /* Point GSI to next logical statement. */ 1349 *gsip = gsi_start_bb (fallthru_bb); 1350 1351 /* Get rid of the UBSAN_VPTR call from the IR. 
*/ 1352 unlink_stmt_vdef (stmt); 1353 gsi_remove (&gsi, true); 1354 return true; 1355 } 1356 1357 /* Instrument a memory reference. BASE is the base of MEM, IS_LHS says 1358 whether the pointer is on the left hand side of the assignment. */ 1359 1360 static void 1361 instrument_mem_ref (tree mem, tree base, gimple_stmt_iterator *iter, 1362 bool is_lhs) 1363 { 1364 enum ubsan_null_ckind ikind = is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF; 1365 unsigned int align = 0; 1366 if (sanitize_flags_p (SANITIZE_ALIGNMENT)) 1367 { 1368 align = min_align_of_type (TREE_TYPE (base)); 1369 if (align <= 1) 1370 align = 0; 1371 } 1372 if (align == 0 && !sanitize_flags_p (SANITIZE_NULL)) 1373 return; 1374 tree t = TREE_OPERAND (base, 0); 1375 if (!POINTER_TYPE_P (TREE_TYPE (t))) 1376 return; 1377 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (base)) && mem != base) 1378 ikind = UBSAN_MEMBER_ACCESS; 1379 tree kind = build_int_cst (build_pointer_type (TREE_TYPE (base)), ikind); 1380 tree alignt = build_int_cst (pointer_sized_int_node, align); 1381 gcall *g = gimple_build_call_internal (IFN_UBSAN_NULL, 3, t, kind, alignt); 1382 gimple_set_location (g, gimple_location (gsi_stmt (*iter))); 1383 gsi_insert_before (iter, g, GSI_SAME_STMT); 1384 } 1385 1386 /* Perform the pointer instrumentation. */ 1387 1388 static void 1389 instrument_null (gimple_stmt_iterator gsi, tree t, bool is_lhs) 1390 { 1391 /* Handle also e.g. &s->i. */ 1392 if (TREE_CODE (t) == ADDR_EXPR) 1393 t = TREE_OPERAND (t, 0); 1394 tree base = get_base_address (t); 1395 if (base != NULL_TREE 1396 && TREE_CODE (base) == MEM_REF 1397 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME) 1398 instrument_mem_ref (t, base, &gsi, is_lhs); 1399 } 1400 1401 /* Instrument pointer arithmetics PTR p+ OFF. 
*/

static void
instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree ptr, tree off)
{
  /* The UBSAN_PTR expansion assumes sizetype matches the pointer
     width; bail out on targets where it doesn't.  */
  if (TYPE_PRECISION (sizetype) != POINTER_SIZE)
    return;
  gcall *g = gimple_build_call_internal (IFN_UBSAN_PTR, 2, ptr, off);
  gimple_set_location (g, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
}

/* Instrument pointer arithmetics if any.  Decomposes the reference T,
   reconstructs its base address and total byte offset, and emits an
   UBSAN_PTR check for base + offset when the offset is provably
   non-trivial.  */

static void
maybe_instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree t)
{
  if (TYPE_PRECISION (sizetype) != POINTER_SIZE)
    return;

  /* Handle also e.g. &s->i.  */
  if (TREE_CODE (t) == ADDR_EXPR)
    t = TREE_OPERAND (t, 0);

  if (!handled_component_p (t) && TREE_CODE (t) != MEM_REF)
    return;

  poly_int64 bitsize, bitpos, bytepos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, reversep, unsignedp = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
				    &unsignedp, &reversep, &volatilep);
  /* MOFF holds a MEM_REF's constant offset operand, if any.  */
  tree moff = NULL_TREE;

  bool decl_p = DECL_P (inner);
  tree base;
  if (decl_p)
    {
      if (DECL_REGISTER (inner))
	return;
      base = inner;
      /* If BASE is a fixed size automatic variable or
	 global variable defined in the current TU and bitpos
	 fits, don't instrument anything.  */
      poly_int64 base_size;
      if (offset == NULL_TREE
	  && maybe_ne (bitpos, 0)
	  && (VAR_P (base)
	      || TREE_CODE (base) == PARM_DECL
	      || TREE_CODE (base) == RESULT_DECL)
	  && poly_int_tree_p (DECL_SIZE (base), &base_size)
	  && known_ge (base_size, bitpos)
	  && (!is_global_var (base) || decl_binds_to_current_def_p (base)))
	return;
    }
  else if (TREE_CODE (inner) == MEM_REF)
    {
      base = TREE_OPERAND (inner, 0);
      /* &decl dereference of a non-addressable local can't overflow.  */
      if (TREE_CODE (base) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (base, 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (base, 0))
	  && !is_global_var (TREE_OPERAND (base, 0)))
	return;
      moff = TREE_OPERAND (inner, 1);
      if (integer_zerop (moff))
	moff = NULL_TREE;
    }
  else
    return;

  if (!POINTER_TYPE_P (TREE_TYPE (base)) && !DECL_P (base))
    return;
  bytepos = bits_to_bytes_round_down (bitpos);
  /* Nothing to check when the total offset is known to be zero.  */
  if (offset == NULL_TREE && known_eq (bytepos, 0) && moff == NULL_TREE)
    return;

  tree base_addr = base;
  if (decl_p)
    base_addr = build1 (ADDR_EXPR,
			build_pointer_type (TREE_TYPE (base)), base);
  /* Accumulate variable offset, constant byte position and MEM_REF
     offset into a single offset expression T.  */
  t = offset;
  if (maybe_ne (bytepos, 0))
    {
      if (t)
	t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
			 build_int_cst (TREE_TYPE (t), bytepos));
      else
	t = size_int (bytepos);
    }
  if (moff)
    {
      if (t)
	t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
			 fold_convert (TREE_TYPE (t), moff));
      else
	t = fold_convert (sizetype, moff);
    }
  t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, true,
				GSI_SAME_STMT);
  base_addr = force_gimple_operand_gsi (gsi, base_addr, true, NULL_TREE, true,
					GSI_SAME_STMT);
  instrument_pointer_overflow (gsi, base_addr, t);
}

/* Build an ubsan builtin call for the signed-integer-overflow
   sanitization.  CODE says what kind of builtin are we building,
   LOC is a location, LHSTYPE is the type of LHS, OP0 and OP1
   are operands of the binary operation.  DATAP, if non-NULL, caches
   the ubsan data descriptor across multiple calls for the same
   statement.  */

tree
ubsan_build_overflow_builtin (tree_code code, location_t loc, tree lhstype,
			      tree op0, tree op1, tree *datap)
{
  if (flag_sanitize_undefined_trap_on_error)
    return build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);

  tree data;
  if (datap && *datap)
    data = *datap;
  else
    data = ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
			      ubsan_type_descriptor (lhstype), NULL_TREE,
			      NULL_TREE);
  if (datap)
    *datap = data;
  enum built_in_function fn_code;

  switch (code)
    {
    case PLUS_EXPR:
      fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
		? BUILT_IN_UBSAN_HANDLE_ADD_OVERFLOW
		: BUILT_IN_UBSAN_HANDLE_ADD_OVERFLOW_ABORT;
      break;
    case MINUS_EXPR:
      fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
		? BUILT_IN_UBSAN_HANDLE_SUB_OVERFLOW
		: BUILT_IN_UBSAN_HANDLE_SUB_OVERFLOW_ABORT;
      break;
    case MULT_EXPR:
      fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
		? BUILT_IN_UBSAN_HANDLE_MUL_OVERFLOW
		: BUILT_IN_UBSAN_HANDLE_MUL_OVERFLOW_ABORT;
      break;
    case NEGATE_EXPR:
      fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
		? BUILT_IN_UBSAN_HANDLE_NEGATE_OVERFLOW
		: BUILT_IN_UBSAN_HANDLE_NEGATE_OVERFLOW_ABORT;
      break;
    default:
      gcc_unreachable ();
    }
  tree fn = builtin_decl_explicit (fn_code);
  /* Negation handlers take a single operand; the others take two.  */
  return build_call_expr_loc (loc, fn, 2 + (code != NEGATE_EXPR),
			      build_fold_addr_expr_loc (loc, data),
			      ubsan_encode_value (op0, UBSAN_ENCODE_VALUE_RTL),
			      op1
			      ? ubsan_encode_value (op1,
						    UBSAN_ENCODE_VALUE_RTL)
			      : NULL_TREE);
}

/* Perform the signed integer instrumentation.  GSI is the iterator
   pointing at statement we are trying to instrument.
*/

static void
instrument_si_overflow (gimple_stmt_iterator gsi)
{
  gimple *stmt = gsi_stmt (gsi);
  tree_code code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhstype = TREE_TYPE (lhs);
  /* For vector operations check the element type.  */
  tree lhsinner = VECTOR_TYPE_P (lhstype) ? TREE_TYPE (lhstype) : lhstype;
  tree a, b;
  gimple *g;

  /* If this is not a signed operation, don't instrument anything here.
     Also punt on bit-fields.  */
  if (!INTEGRAL_TYPE_P (lhsinner)
      || TYPE_OVERFLOW_WRAPS (lhsinner)
      || maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (lhsinner)),
		   TYPE_PRECISION (lhsinner)))
    return;

  switch (code)
    {
    case MINUS_EXPR:
    case PLUS_EXPR:
    case MULT_EXPR:
      /* Transform
	 i = u {+,-,*} 5;
	 into
	 i = UBSAN_CHECK_{ADD,SUB,MUL} (u, 5);  */
      a = gimple_assign_rhs1 (stmt);
      b = gimple_assign_rhs2 (stmt);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UBSAN_CHECK_ADD
				      : code == MINUS_EXPR
				      ? IFN_UBSAN_CHECK_SUB
				      : IFN_UBSAN_CHECK_MUL, 2, a, b);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&gsi, g, true);
      break;
    case NEGATE_EXPR:
      /* Represent i = -u;
	 as
	 i = UBSAN_CHECK_SUB (0, u);  */
      a = build_zero_cst (lhstype);
      b = gimple_assign_rhs1 (stmt);
      g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&gsi, g, true);
      break;
    case ABS_EXPR:
      /* Transform i = ABS_EXPR<u>;
	 into
	 _N = UBSAN_CHECK_SUB (0, u);
	 i = ABS_EXPR<_N>;
	 The subtraction only checks that -u doesn't overflow
	 (i.e. u != INT_MIN); the original ABS_EXPR is kept.  */
      a = build_zero_cst (lhstype);
      b = gimple_assign_rhs1 (stmt);
      g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
      a = make_ssa_name (lhstype);
      gimple_call_set_lhs (g, a);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
      gimple_assign_set_rhs1 (stmt, a);
      update_stmt (stmt);
      break;
    default:
      break;
    }
}

/* Instrument loads from (non-bitfield) bool and C++ enum values
   to check if the memory value is outside of the range of the valid
   type values.
*/

static void
instrument_bool_enum_load (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree rhs = gimple_assign_rhs1 (stmt);
  tree type = TREE_TYPE (rhs);
  tree minv = NULL_TREE, maxv = NULL_TREE;

  if (TREE_CODE (type) == BOOLEAN_TYPE
      && sanitize_flags_p (SANITIZE_BOOL))
    {
      minv = boolean_false_node;
      maxv = boolean_true_node;
    }
  else if (TREE_CODE (type) == ENUMERAL_TYPE
	   && sanitize_flags_p (SANITIZE_ENUM)
	   && TREE_TYPE (type) != NULL_TREE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	   /* Only worth checking if the enum's value range is narrower
	      than its storage mode.  */
	   && (TYPE_PRECISION (TREE_TYPE (type))
	       < GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (type))))
    {
      minv = TYPE_MIN_VALUE (TREE_TYPE (type));
      maxv = TYPE_MAX_VALUE (TREE_TYPE (type));
    }
  else
    return;

  int modebitsize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, reversep, unsignedp = 0;
  tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
				   &unsignedp, &reversep, &volatilep);
  tree utype = build_nonstandard_integer_type (modebitsize, 1);

  /* Punt on hard registers, bit-fields / misaligned accesses, and
     loads whose lhs isn't an SSA name.  */
  if ((VAR_P (base) && DECL_HARD_REGISTER (base))
      || !multiple_p (bitpos, modebitsize)
      || maybe_ne (bitsize, modebitsize)
      || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (utype)) != modebitsize
      || TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
    return;

  bool ends_bb = stmt_ends_bb_p (stmt);
  location_t loc = gimple_location (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree ptype = build_pointer_type (TREE_TYPE (rhs));
  tree atype = reference_alias_ptr_type (rhs);
  /* Re-load the value through an unsigned type of the same width so the
     raw bit pattern can be range-checked.  */
  gimple *g = gimple_build_assign (make_ssa_name (ptype),
				   build_fold_addr_expr (rhs));
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  tree mem = build2 (MEM_REF, utype, gimple_assign_lhs (g),
		     build_int_cst (atype, 0));
  tree urhs = make_ssa_name (utype);
  if (ends_bb)
    {
      /* STMT ends its block; keep the load in place (rewritten to the
	 unsigned MEM_REF) and emit the conversion to LHS on the
	 fallthru edge.  */
      gimple_assign_set_lhs (stmt, urhs);
      g = gimple_build_assign (lhs, NOP_EXPR, urhs);
      gimple_set_location (g, loc);
      edge e = find_fallthru_edge (gimple_bb (stmt)->succs);
      gsi_insert_on_edge_immediate (e, g);
      gimple_assign_set_rhs_from_tree (gsi, mem);
      update_stmt (stmt);
      *gsi = gsi_for_stmt (g);
      g = stmt;
    }
  else
    {
      g = gimple_build_assign (urhs, mem);
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  minv = fold_convert (utype, minv);
  maxv = fold_convert (utype, maxv);
  /* Normalize the range check to (urhs - minv) > (maxv - minv) in
     unsigned arithmetic, a single comparison.  */
  if (!integer_zerop (minv))
    {
      g = gimple_build_assign (make_ssa_name (utype), MINUS_EXPR, urhs, minv);
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  gimple_stmt_iterator gsi2 = *gsi;
  basic_block then_bb, fallthru_bb;
  *gsi = create_cond_insert_point (gsi, true, false, true,
				   &then_bb, &fallthru_bb);
  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
			 int_const_binop (MINUS_EXPR, maxv, minv),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  if (!ends_bb)
    {
      /* Turn the original load into LHS = (type) urhs.  */
      gimple_assign_set_rhs_with_ops (&gsi2, NOP_EXPR, urhs);
      update_stmt (stmt);
    }

  gsi2 = gsi_after_labels (then_bb);
  if (flag_sanitize_undefined_trap_on_error)
    g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      tree data = ubsan_create_data ("__ubsan_invalid_value_data", 1, &loc,
				     ubsan_type_descriptor (type), NULL_TREE,
				     NULL_TREE);
      data = build_fold_addr_expr_loc (loc, data);
      enum built_in_function bcode
	= (flag_sanitize_recover & (TREE_CODE (type) == BOOLEAN_TYPE
				    ? SANITIZE_BOOL : SANITIZE_ENUM))
	  ? BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE
	  : BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE_ABORT;
      tree fn = builtin_decl_explicit (bcode);

      tree val = ubsan_encode_value (urhs, UBSAN_ENCODE_VALUE_GIMPLE);
      val = force_gimple_operand_gsi (&gsi2, val, true, NULL_TREE, true,
				      GSI_SAME_STMT);
      g = gimple_build_call (fn, 2, data, val);
    }
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
  ubsan_create_edge (g);
  *gsi = gsi_for_stmt (stmt);
}

/* Determine if we can propagate given LOCATION to ubsan_data descriptor to use
   new style handlers.  Libubsan uses heuristics to destinguish between old and
   new styles and relies on these properties for filename:

   a) Location's filename must not be NULL.
   b) Location's filename must not be equal to "".
   c) Location's filename must not be equal to "\1".
   d) First two bytes of filename must not contain '\xff' symbol.  */

static bool
ubsan_use_new_style_p (location_t loc)
{
  if (loc == UNKNOWN_LOCATION)
    return false;

  expanded_location xloc = expand_location (loc);
  /* strncmp with length 2 matches exactly the one-byte "\1" string.  */
  if (xloc.file == NULL || strncmp (xloc.file, "\1", 2) == 0
      || xloc.file[0] == '\0' || xloc.file[0] == '\xff'
      || xloc.file[1] == '\xff')
    return false;

  return true;
}

/* Instrument float point-to-integer conversion.  TYPE is an integer type of
   destination, EXPR is floating-point expression.  Returns a COND_EXPR
   that invokes the runtime handler when EXPR is out of TYPE's range,
   or NULL_TREE when no check is needed.  */

tree
ubsan_instrument_float_cast (location_t loc, tree type, tree expr)
{
  tree expr_type = TREE_TYPE (expr);
  tree t, tt, fn, min, max;
  machine_mode mode = TYPE_MODE (expr_type);
  int prec = TYPE_PRECISION (type);
  bool uns_p = TYPE_UNSIGNED (type);
  if (loc == UNKNOWN_LOCATION)
    loc = input_location;

  /* Float to integer conversion first truncates toward zero, so
     even signed char c = 127.875f; is not problematic.
     Therefore, we should complain only if EXPR is unordered or smaller
     or equal than TYPE_MIN_VALUE - 1.0 or greater or equal than
     TYPE_MAX_VALUE + 1.0.  */
  if (REAL_MODE_FORMAT (mode)->b == 2)
    {
      /* For maximum, TYPE_MAX_VALUE might not be representable
	 in EXPR_TYPE, e.g. if TYPE is 64-bit long long and
	 EXPR_TYPE is IEEE single float, but TYPE_MAX_VALUE + 1.0 is
	 either representable or infinity.  */
      REAL_VALUE_TYPE maxval = dconst1;
      SET_REAL_EXP (&maxval, REAL_EXP (&maxval) + prec - !uns_p);
      real_convert (&maxval, mode, &maxval);
      max = build_real (expr_type, maxval);

      /* For unsigned, assume -1.0 is always representable.  */
      if (uns_p)
	min = build_minus_one_cst (expr_type);
      else
	{
	  /* TYPE_MIN_VALUE is generally representable (or -inf),
	     but TYPE_MIN_VALUE - 1.0 might not be.  */
	  REAL_VALUE_TYPE minval = dconstm1, minval2;
	  SET_REAL_EXP (&minval, REAL_EXP (&minval) + prec - 1);
	  real_convert (&minval, mode, &minval);
	  real_arithmetic (&minval2, MINUS_EXPR, &minval, &dconst1);
	  real_convert (&minval2, mode, &minval2);
	  if (real_compare (EQ_EXPR, &minval, &minval2)
	      && !real_isinf (&minval))
	    {
	      /* If TYPE_MIN_VALUE - 1.0 is not representable and
		 rounds to TYPE_MIN_VALUE, we need to subtract
		 more.  As REAL_MODE_FORMAT (mode)->p is the number
		 of base digits, we want to subtract a number that
		 will be 1 << (REAL_MODE_FORMAT (mode)->p - 1)
		 times smaller than minval.  */
	      minval2 = dconst1;
	      gcc_assert (prec > REAL_MODE_FORMAT (mode)->p);
	      SET_REAL_EXP (&minval2,
			    REAL_EXP (&minval2) + prec - 1
			    - REAL_MODE_FORMAT (mode)->p + 1);
	      real_arithmetic (&minval2, MINUS_EXPR, &minval, &minval2);
	      real_convert (&minval2, mode, &minval2);
	    }
	  min = build_real (expr_type, minval2);
	}
    }
  else if (REAL_MODE_FORMAT (mode)->b == 10)
    {
      /* For _Decimal128 up to 34 decimal digits, - sign,
	 dot, e, exponent.  */
      char buf[64];
      mpfr_t m;
      int p = REAL_MODE_FORMAT (mode)->p;
      REAL_VALUE_TYPE maxval, minval;

      /* Use mpfr_snprintf rounding to compute the smallest
	 representable decimal number greater or equal than
	 1 << (prec - !uns_p).  */
      mpfr_init2 (m, prec + 2);
      mpfr_set_ui_2exp (m, 1, prec - !uns_p, GMP_RNDN);
      mpfr_snprintf (buf, sizeof buf, "%.*RUe", p - 1, m);
      decimal_real_from_string (&maxval, buf);
      max = build_real (expr_type, maxval);

      /* For unsigned, assume -1.0 is always representable.  */
      if (uns_p)
	min = build_minus_one_cst (expr_type);
      else
	{
	  /* Use mpfr_snprintf rounding to compute the largest
	     representable decimal number less or equal than
	     (-1 << (prec - 1)) - 1.  */
	  mpfr_set_si_2exp (m, -1, prec - 1, GMP_RNDN);
	  mpfr_sub_ui (m, m, 1, GMP_RNDN);
	  mpfr_snprintf (buf, sizeof buf, "%.*RDe", p - 1, m);
	  decimal_real_from_string (&minval, buf);
	  min = build_real (expr_type, minval);
	}
      mpfr_clear (m);
    }
  else
    return NULL_TREE;

  /* Unordered comparisons also catch NaN inputs.  */
  t = fold_build2 (UNLE_EXPR, boolean_type_node, expr, min);
  tt = fold_build2 (UNGE_EXPR, boolean_type_node, expr, max);
  t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, tt);
  if (integer_zerop (t))
    return NULL_TREE;

  if (flag_sanitize_undefined_trap_on_error)
    fn = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
  else
    {
      location_t *loc_ptr = NULL;
      unsigned num_locations = 0;
      /* Figure out if we can propagate location to ubsan_data and use new
	 style handlers in libubsan.  */
      if (ubsan_use_new_style_p (loc))
	{
	  loc_ptr = &loc;
	  num_locations = 1;
	}
      /* Create the __ubsan_handle_float_cast_overflow fn call.  */
      tree data = ubsan_create_data ("__ubsan_float_cast_overflow_data",
				     num_locations, loc_ptr,
				     ubsan_type_descriptor (expr_type),
				     ubsan_type_descriptor (type), NULL_TREE,
				     NULL_TREE);
      enum built_in_function bcode
	= (flag_sanitize_recover & SANITIZE_FLOAT_CAST)
	  ? BUILT_IN_UBSAN_HANDLE_FLOAT_CAST_OVERFLOW
	  : BUILT_IN_UBSAN_HANDLE_FLOAT_CAST_OVERFLOW_ABORT;
      fn = builtin_decl_explicit (bcode);
      fn = build_call_expr_loc (loc, fn, 2,
				build_fold_addr_expr_loc (loc, data),
				ubsan_encode_value (expr));
    }

  return fold_build3 (COND_EXPR, void_type_node, t, fn, integer_zero_node);
}

/* Instrument values passed to function arguments with nonnull attribute.
 */

static void
instrument_nonnull_arg (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc[2];
  /* infer_nonnull_range needs flag_delete_null_pointer_checks set,
     while for nonnull sanitization it is clear.  */
  int save_flag_delete_null_pointer_checks = flag_delete_null_pointer_checks;
  flag_delete_null_pointer_checks = 1;
  /* Two-slot location array: the handler data carries the call location
     plus an unknown secondary location.  */
  loc[0] = gimple_location (stmt);
  loc[1] = UNKNOWN_LOCATION;
  for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Only pointer arguments covered by a nonnull-style attribute
	 are instrumented.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg))
	  && infer_nonnull_range_by_attribute (stmt, arg))
	{
	  gimple *g;
	  /* Force ARG into an SSA name so it can be used in a GIMPLE
	     condition.  */
	  if (!is_gimple_val (arg))
	    {
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (arg)), arg);
	      gimple_set_location (g, loc[0]);
	      gsi_insert_before (gsi, g, GSI_SAME_STMT);
	      arg = gimple_assign_lhs (g);
	    }

	  /* Split the block before the call; THEN_BB holds the
	     diagnostic, FALLTHRU_BB the original statement.  */
	  basic_block then_bb, fallthru_bb;
	  *gsi = create_cond_insert_point (gsi, true, false, true,
					   &then_bb, &fallthru_bb);
	  g = gimple_build_cond (EQ_EXPR, arg,
				 build_zero_cst (TREE_TYPE (arg)),
				 NULL_TREE, NULL_TREE);
	  gimple_set_location (g, loc[0]);
	  gsi_insert_after (gsi, g, GSI_NEW_STMT);

	  /* Emit the diagnostic (or trap) on the ARG == 0 path.  */
	  *gsi = gsi_after_labels (then_bb);
	  if (flag_sanitize_undefined_trap_on_error)
	    g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
	  else
	    {
	      /* The handler receives the 1-based argument number (i + 1).  */
	      tree data = ubsan_create_data ("__ubsan_nonnull_arg_data",
					     2, loc, NULL_TREE,
					     build_int_cst (integer_type_node,
							    i + 1),
					     NULL_TREE);
	      data = build_fold_addr_expr_loc (loc[0], data);
	      enum built_in_function bcode
		= (flag_sanitize_recover & SANITIZE_NONNULL_ATTRIBUTE)
		  ? BUILT_IN_UBSAN_HANDLE_NONNULL_ARG
		  : BUILT_IN_UBSAN_HANDLE_NONNULL_ARG_ABORT;
	      tree fn = builtin_decl_explicit (bcode);

	      g = gimple_build_call (fn, 1, data);
	    }
	  gimple_set_location (g, loc[0]);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  ubsan_create_edge (g);
	}
      /* Re-anchor the iterator on the (possibly moved) call statement
	 before examining the next argument.  */
      *gsi = gsi_for_stmt (stmt);
    }
  flag_delete_null_pointer_checks = save_flag_delete_null_pointer_checks;
}

/* Instrument returns in functions with returns_nonnull attribute.  */

static void
instrument_nonnull_return (gimple_stmt_iterator *gsi)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  location_t loc[2];
  tree arg = gimple_return_retval (stmt);
  /* infer_nonnull_range needs flag_delete_null_pointer_checks set,
     while for nonnull return sanitization it is clear.  */
  int save_flag_delete_null_pointer_checks = flag_delete_null_pointer_checks;
  flag_delete_null_pointer_checks = 1;
  loc[0] = gimple_location (stmt);
  loc[1] = UNKNOWN_LOCATION;
  if (arg
      && POINTER_TYPE_P (TREE_TYPE (arg))
      && is_gimple_val (arg)
      && infer_nonnull_range_by_attribute (stmt, arg))
    {
      /* Split before the return; check ARG == 0 on the new edge.  */
      basic_block then_bb, fallthru_bb;
      *gsi = create_cond_insert_point (gsi, true, false, true,
				       &then_bb, &fallthru_bb);
      gimple *g = gimple_build_cond (EQ_EXPR, arg,
				     build_zero_cst (TREE_TYPE (arg)),
				     NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc[0]);
      gsi_insert_after (gsi, g, GSI_NEW_STMT);

      *gsi = gsi_after_labels (then_bb);
      if (flag_sanitize_undefined_trap_on_error)
	g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
      else
	{
	  /* The _v1 handler takes two data pointers: the first built
	     with the UNKNOWN_LOCATION slot, the second with the actual
	     return location.  */
	  tree data = ubsan_create_data ("__ubsan_nonnull_return_data",
					 1, &loc[1], NULL_TREE, NULL_TREE);
	  data = build_fold_addr_expr_loc (loc[0], data);
	  tree data2 = ubsan_create_data ("__ubsan_nonnull_return_data",
					  1, &loc[0], NULL_TREE, NULL_TREE);
	  data2 = build_fold_addr_expr_loc (loc[0], data2);
	  enum built_in_function bcode
	    = (flag_sanitize_recover & SANITIZE_RETURNS_NONNULL_ATTRIBUTE)
	      ? BUILT_IN_UBSAN_HANDLE_NONNULL_RETURN_V1
	      : BUILT_IN_UBSAN_HANDLE_NONNULL_RETURN_V1_ABORT;
	  tree fn = builtin_decl_explicit (bcode);

	  g = gimple_build_call (fn, 2, data, data2);
	}
      gimple_set_location (g, loc[0]);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      ubsan_create_edge (g);
      *gsi = gsi_for_stmt (stmt);
    }
  flag_delete_null_pointer_checks = save_flag_delete_null_pointer_checks;
}

/* Instrument memory references.  Here we check whether the pointer
   points to an out-of-bounds location.  */

static void
instrument_object_size (gimple_stmt_iterator *gsi, tree t, bool is_lhs)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree type;
  tree index = NULL_TREE;
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  if (VOID_TYPE_P (type))
    return;

  switch (TREE_CODE (t))
    {
    case COMPONENT_REF:
      /* For a bit-field access, check against the representative field
	 that covers the whole underlying storage unit instead.  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
	{
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
	  t = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (t, 0),
		      repr, TREE_OPERAND (t, 2));
	}
      break;
    case ARRAY_REF:
      /* Remember the index; it enables the masked-index shortcut
	 further below.  */
      index = TREE_OPERAND (t, 1);
      break;
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      break;
    default:
      /* Other reference kinds are not instrumented.  */
      return;
    }

  /* Bail out on incomplete or zero/variable-sized types.  */
  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, reversep, unsignedp = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
				    &unsignedp, &reversep, &volatilep);

  /* Only handle byte-aligned accesses whose width matches the type
     size exactly.  */
  if (!multiple_p (bitpos, BITS_PER_UNIT)
      || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
    return;

  bool decl_p = DECL_P (inner);
  tree base;
  if (decl_p)
    {
      /* Register variables have no address to check.  */
      if (DECL_REGISTER (inner))
	return;
      base = inner;
    }
  else if (TREE_CODE (inner) == MEM_REF)
    base = TREE_OPERAND (inner, 0);
  else
    return;
  tree ptr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);

  /* Walk SSA copies, pointer casts and POINTER_PLUS_EXPRs backwards to
     find the ultimate base the object size can be computed from.  Stop
     at names occurring in abnormal PHIs.  */
  while (TREE_CODE (base) == SSA_NAME)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (base);
      if (gimple_assign_ssa_name_copy_p (def_stmt)
	  || (gimple_assign_cast_p (def_stmt)
	      && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
	  || (is_gimple_assign (def_stmt)
	      && gimple_assign_rhs_code (def_stmt) == POINTER_PLUS_EXPR))
	{
	  tree rhs1 = gimple_assign_rhs1 (def_stmt);
	  if (TREE_CODE (rhs1) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
	    break;
	  else
	    base = rhs1;
	}
      else
	break;
    }

  if (!POINTER_TYPE_P (TREE_TYPE (base)) && !DECL_P (base))
    return;

  tree sizet;
  tree base_addr = base;
  gimple *bos_stmt = NULL;
  if (decl_p)
    base_addr = build1 (ADDR_EXPR,
			build_pointer_type (TREE_TYPE (base)), base);
  unsigned HOST_WIDE_INT size;
  /* Prefer a compile-time object size; fall back to a runtime
     __builtin_object_size call when optimizing.  */
  if (compute_builtin_object_size (base_addr, 0, &size))
    sizet = build_int_cst (sizetype, size);
  else if (optimize)
    {
      if (LOCATION_LOCUS (loc) == UNKNOWN_LOCATION)
	loc = input_location;
      /* Generate __builtin_object_size call.  */
      sizet = builtin_decl_explicit (BUILT_IN_OBJECT_SIZE);
      sizet = build_call_expr_loc (loc, sizet, 2, base_addr,
				   integer_zero_node);
      sizet = force_gimple_operand_gsi (gsi, sizet, false, NULL_TREE, true,
					GSI_SAME_STMT);
      /* If the call above didn't end up being an integer constant, go one
	 statement back and get the __builtin_object_size stmt.  Save it,
	 we might need it later.  */
      if (SSA_VAR_P (sizet))
	{
	  gsi_prev (gsi);
	  bos_stmt = gsi_stmt (*gsi);

	  /* Move on to where we were.  */
	  gsi_next (gsi);
	}
    }
  else
    return;

  /* Generate UBSAN_OBJECT_SIZE (ptr, ptr+sizeof(*ptr)-base, objsize, ckind)
     call.  */
  /* ptr + sizeof (*ptr) - base */
  t = fold_build2 (MINUS_EXPR, sizetype,
		   fold_convert (pointer_sized_int_node, ptr),
		   fold_convert (pointer_sized_int_node, base_addr));
  t = fold_build2 (PLUS_EXPR, sizetype, t, TYPE_SIZE_UNIT (type));

  /* Perhaps we can omit the check.  */
  if (TREE_CODE (t) == INTEGER_CST
      && TREE_CODE (sizet) == INTEGER_CST
      && tree_int_cst_le (t, sizet))
    return;

  /* If the index was masked with a constant provably smaller than the
     element count, the access cannot overflow the object; skip the
     check.  */
  if (index != NULL_TREE
      && TREE_CODE (index) == SSA_NAME
      && TREE_CODE (sizet) == INTEGER_CST)
    {
      gimple *def = SSA_NAME_DEF_STMT (index);
      if (is_gimple_assign (def)
	  && gimple_assign_rhs_code (def) == BIT_AND_EXPR
	  && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	{
	  tree cst = gimple_assign_rhs2 (def);
	  tree sz = fold_build2 (EXACT_DIV_EXPR, sizetype, sizet,
				 TYPE_SIZE_UNIT (type));
	  if (tree_int_cst_sgn (cst) >= 0
	      && tree_int_cst_lt (cst, sz))
	    return;
	}
    }

  if (bos_stmt && gimple_call_builtin_p (bos_stmt, BUILT_IN_OBJECT_SIZE))
    ubsan_create_edge (bos_stmt);

  /* We have to emit the check.  */
  t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, true,
				GSI_SAME_STMT);
  ptr = force_gimple_operand_gsi (gsi, ptr, true, NULL_TREE, true,
				  GSI_SAME_STMT);
  /* CKIND tells the runtime whether this was a store or a load.  */
  tree ckind = build_int_cst (unsigned_char_type_node,
			      is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF);
  gimple *g = gimple_build_call_internal (IFN_UBSAN_OBJECT_SIZE, 4,
					  ptr, t, sizet, ckind);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
}

/* Instrument values passed to builtin functions.  */

static void
instrument_builtin (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree arg;
  enum built_in_function fcode
    = DECL_FUNCTION_CODE (gimple_call_fndecl (stmt));
  /* KIND distinguishes the builtin in the handler data: 1 for CLZ,
     0 for CTZ.  */
  int kind = 0;
  switch (fcode)
    {
    CASE_INT_FN (BUILT_IN_CLZ):
      kind = 1;
      gcc_fallthrough ();
    CASE_INT_FN (BUILT_IN_CTZ):
      /* __builtin_clz/ctz with a zero argument is undefined; guard with
	 an ARG == 0 check unless the argument is known nonzero.  */
      arg = gimple_call_arg (stmt, 0);
      if (!integer_nonzerop (arg))
	{
	  gimple *g;
	  if (!is_gimple_val (arg))
	    {
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (arg)), arg);
	      gimple_set_location (g, loc);
	      gsi_insert_before (gsi, g, GSI_SAME_STMT);
	      arg = gimple_assign_lhs (g);
	    }

	  basic_block then_bb, fallthru_bb;
	  *gsi = create_cond_insert_point (gsi, true, false, true,
					   &then_bb, &fallthru_bb);
	  g = gimple_build_cond (EQ_EXPR, arg,
				 build_zero_cst (TREE_TYPE (arg)),
				 NULL_TREE, NULL_TREE);
	  gimple_set_location (g, loc);
	  gsi_insert_after (gsi, g, GSI_NEW_STMT);

	  *gsi = gsi_after_labels (then_bb);
	  if (flag_sanitize_undefined_trap_on_error)
	    g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
	  else
	    {
	      tree t = build_int_cst (unsigned_char_type_node, kind);
	      tree data = ubsan_create_data ("__ubsan_builtin_data",
					     1, &loc, NULL_TREE, t, NULL_TREE);
	      data = build_fold_addr_expr_loc (loc, data);
	      enum built_in_function bcode
		= (flag_sanitize_recover & SANITIZE_BUILTIN)
		  ? BUILT_IN_UBSAN_HANDLE_INVALID_BUILTIN
		  : BUILT_IN_UBSAN_HANDLE_INVALID_BUILTIN_ABORT;
	      tree fn = builtin_decl_explicit (bcode);

	      g = gimple_build_call (fn, 1, data);
	    }
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  ubsan_create_edge (g);
	}
      *gsi = gsi_for_stmt (stmt);
      break;
    default:
      break;
    }
}

namespace {

const pass_data pass_data_ubsan =
{
  GIMPLE_PASS, /* type */
  "ubsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_UBSAN, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

/* The ubsan GIMPLE pass: walks every statement and inserts the
   sanitizer checks selected by -fsanitize=...  */

class pass_ubsan : public gimple_opt_pass
{
public:
  pass_ubsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_ubsan, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when at least one ubsan check handled by this pass is
     enabled.  */
  virtual bool gate (function *)
  {
    return sanitize_flags_p ((SANITIZE_NULL | SANITIZE_SI_OVERFLOW
			      | SANITIZE_BOOL | SANITIZE_ENUM
			      | SANITIZE_ALIGNMENT
			      | SANITIZE_NONNULL_ATTRIBUTE
			      | SANITIZE_RETURNS_NONNULL_ATTRIBUTE
			      | SANITIZE_OBJECT_SIZE
			      | SANITIZE_POINTER_OVERFLOW
			      | SANITIZE_BUILTIN));
  }

  virtual unsigned int execute (function *);

}; // class pass_ubsan

unsigned int
pass_ubsan::execute (function *fun)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  unsigned int ret = 0;

  initialize_sanitizer_builtins ();

  FOR_EACH_BB_FN (bb, fun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* Debug statements and clobbers carry no runtime semantics;
	     skip them.  */
	  if (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  if ((sanitize_flags_p (SANITIZE_SI_OVERFLOW, fun->decl))
	      && is_gimple_assign (stmt))
	    instrument_si_overflow (gsi);

	  if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT, fun->decl))
	    {
	      /* Check stores, loads and memory call arguments for null
		 dereference / misalignment.  */
	      if (gimple_store_p (stmt))
		instrument_null (gsi, gimple_get_lhs (stmt), true);
	      if (gimple_assign_single_p (stmt))
		instrument_null (gsi, gimple_assign_rhs1 (stmt), false);
	      if (is_gimple_call (stmt))
		{
		  unsigned args_num = gimple_call_num_args (stmt);
		  for (unsigned i = 0; i < args_num; ++i)
		    {
		      tree arg = gimple_call_arg (stmt, i);
		      /* Registers and invariants are not memory
			 references.  */
		      if (is_gimple_reg (arg) || is_gimple_min_invariant (arg))
			continue;
		      instrument_null (gsi, arg, false);
		    }
		}
	    }

	  /* After the instrument_* helpers below, re-fetch BB — the
	     instrumentation may have split the block and moved STMT.  */
	  if (sanitize_flags_p (SANITIZE_BOOL | SANITIZE_ENUM, fun->decl)
	      && gimple_assign_load_p (stmt))
	    {
	      instrument_bool_enum_load (&gsi);
	      bb = gimple_bb (stmt);
	    }

	  if (sanitize_flags_p (SANITIZE_NONNULL_ATTRIBUTE, fun->decl)
	      && is_gimple_call (stmt)
	      && !gimple_call_internal_p (stmt))
	    {
	      instrument_nonnull_arg (&gsi);
	      bb = gimple_bb (stmt);
	    }

	  if (sanitize_flags_p (SANITIZE_BUILTIN, fun->decl)
	      && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	    {
	      instrument_builtin (&gsi);
	      bb = gimple_bb (stmt);
	    }

	  if (sanitize_flags_p (SANITIZE_RETURNS_NONNULL_ATTRIBUTE, fun->decl)
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    {
	      instrument_nonnull_return (&gsi);
	      bb = gimple_bb (stmt);
	    }

	  if (sanitize_flags_p (SANITIZE_OBJECT_SIZE, fun->decl))
	    {
	      if (gimple_store_p (stmt))
		instrument_object_size (&gsi, gimple_get_lhs (stmt), true);
	      if (gimple_assign_load_p (stmt))
		instrument_object_size (&gsi, gimple_assign_rhs1 (stmt),
					false);
	      if (is_gimple_call (stmt))
		{
		  unsigned args_num = gimple_call_num_args (stmt);
		  for (unsigned i = 0; i < args_num; ++i)
		    {
		      tree arg = gimple_call_arg (stmt, i);
		      if (is_gimple_reg (arg) || is_gimple_min_invariant (arg))
			continue;
		      instrument_object_size (&gsi, arg, false);
		    }
		}
	    }

	  if (sanitize_flags_p (SANITIZE_POINTER_OVERFLOW, fun->decl))
	    {
	      if (is_gimple_assign (stmt)
		  && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		instrument_pointer_overflow (&gsi,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	      if (gimple_store_p (stmt))
		maybe_instrument_pointer_overflow (&gsi,
						   gimple_get_lhs (stmt));
	      if (gimple_assign_single_p (stmt))
		maybe_instrument_pointer_overflow (&gsi,
						   gimple_assign_rhs1 (stmt));
	      if (is_gimple_call (stmt))
		{
		  unsigned args_num = gimple_call_num_args (stmt);
		  for (unsigned i = 0; i < args_num; ++i)
		    {
		      tree arg = gimple_call_arg (stmt, i);
		      if (is_gimple_reg (arg))
			continue;
		      maybe_instrument_pointer_overflow (&gsi, arg);
		    }
		}
	    }

	  gsi_next (&gsi);
	}
      /* Inserted calls may have made EH edges dead; request CFG cleanup
	 if any were purged.  */
      if (gimple_purge_dead_eh_edges (bb))
	ret = TODO_cleanup_cfg;
    }
  return ret;
}

} // anon namespace

gimple_opt_pass *
make_pass_ubsan (gcc::context *ctxt)
{
  return new pass_ubsan (ctxt);
}

#include "gt-ubsan.h"