gcc/ubsan.c
1 /* UndefinedBehaviorSanitizer, undefined behavior detector.
2 Copyright (C) 2013-2017 Free Software Foundation, Inc.
3 Contributed by Marek Polacek <polacek@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "c-family/c-common.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "cgraph.h"
34 #include "tree-pretty-print.h"
35 #include "stor-layout.h"
36 #include "cfganal.h"
37 #include "gimple-iterator.h"
38 #include "output.h"
39 #include "cfgloop.h"
40 #include "ubsan.h"
41 #include "expr.h"
42 #include "stringpool.h"
43 #include "attribs.h"
44 #include "asan.h"
45 #include "gimplify-me.h"
46 #include "dfp.h"
47 #include "builtins.h"
48 #include "tree-object-size.h"
49 #include "tree-cfg.h"
50 #include "gimple-fold.h"
51 #include "varasm.h"
52
53 /* Map from a tree to a VAR_DECL tree. */
54
55 struct GTY((for_user)) tree_type_map {
56 struct tree_map_base type;
57 tree decl;
58 };
59
60 struct tree_type_map_cache_hasher : ggc_cache_ptr_hash<tree_type_map>
61 {
62 static inline hashval_t
63 hash (tree_type_map *t)
64 {
65 return TYPE_UID (t->type.from);
66 }
67
68 static inline bool
69 equal (tree_type_map *a, tree_type_map *b)
70 {
71 return a->type.from == b->type.from;
72 }
73
74 static int
75 keep_cache_entry (tree_type_map *&m)
76 {
77 return ggc_marked_p (m->type.from);
78 }
79 };
80
81 static GTY ((cache))
82 hash_table<tree_type_map_cache_hasher> *decl_tree_for_type;
83
84 /* Look up a VAR_DECL for TYPE, and return it if we find one. */
85
86 static tree
87 decl_for_type_lookup (tree type)
88 {
89 /* If the hash table is not initialized yet, create it now. */
90 if (decl_tree_for_type == NULL)
91 {
92 decl_tree_for_type
93 = hash_table<tree_type_map_cache_hasher>::create_ggc (10);
94 /* That also means we don't have to bother with the lookup. */
95 return NULL_TREE;
96 }
97
98 struct tree_type_map *h, in;
99 in.type.from = type;
100
101 h = decl_tree_for_type->find_with_hash (&in, TYPE_UID (type));
102 return h ? h->decl : NULL_TREE;
103 }
104
105 /* Insert a mapping TYPE->DECL in the VAR_DECL for type hashtable. */
106
107 static void
108 decl_for_type_insert (tree type, tree decl)
109 {
110 struct tree_type_map *h;
111
112 h = ggc_alloc<tree_type_map> ();
113 h->type.from = type;
114 h->decl = decl;
115 *decl_tree_for_type->find_slot_with_hash (h, TYPE_UID (type), INSERT) = h;
116 }
117
118 /* Helper routine, which encodes a value in the pointer_sized_int_node.
119 Arguments with precision <= POINTER_SIZE are passed directly,
120 the rest are passed by reference. T is the value we are to encode.
121 PHASE determines when this function is called. */
122
123 tree
124 ubsan_encode_value (tree t, enum ubsan_encode_value_phase phase)
125 {
126 tree type = TREE_TYPE (t);
127 scalar_mode mode = SCALAR_TYPE_MODE (type);
128 const unsigned int bitsize = GET_MODE_BITSIZE (mode);
129 if (bitsize <= POINTER_SIZE)
130 switch (TREE_CODE (type))
131 {
132 case BOOLEAN_TYPE:
133 case ENUMERAL_TYPE:
134 case INTEGER_TYPE:
135 return fold_build1 (NOP_EXPR, pointer_sized_int_node, t);
136 case REAL_TYPE:
137 {
138 tree itype = build_nonstandard_integer_type (bitsize, true);
139 t = fold_build1 (VIEW_CONVERT_EXPR, itype, t);
140 return fold_convert (pointer_sized_int_node, t);
141 }
142 default:
143 gcc_unreachable ();
144 }
145 else
146 {
147 if (!DECL_P (t) || !TREE_ADDRESSABLE (t))
148 {
149 /* The reason for this is that we don't want to pessimize
150 code by making vars unnecessarily addressable. */
151 tree var;
152 if (phase != UBSAN_ENCODE_VALUE_GENERIC)
153 {
154 var = create_tmp_var (type);
155 mark_addressable (var);
156 }
157 else
158 {
159 var = create_tmp_var_raw (type);
160 TREE_ADDRESSABLE (var) = 1;
161 DECL_CONTEXT (var) = current_function_decl;
162 }
163 if (phase == UBSAN_ENCODE_VALUE_RTL)
164 {
165 rtx mem = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
166 type);
167 SET_DECL_RTL (var, mem);
168 expand_assignment (var, t, false);
169 return build_fold_addr_expr (var);
170 }
171 if (phase != UBSAN_ENCODE_VALUE_GENERIC)
172 {
173 tree tem = build2 (MODIFY_EXPR, void_type_node, var, t);
174 t = build_fold_addr_expr (var);
175 return build2 (COMPOUND_EXPR, TREE_TYPE (t), tem, t);
176 }
177 else
178 {
179 var = build4 (TARGET_EXPR, type, var, t, NULL_TREE, NULL_TREE);
180 return build_fold_addr_expr (var);
181 }
182 }
183 else
184 return build_fold_addr_expr (t);
185 }
186 }
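/* Illustration of the encoding above, assuming a 64-bit target where
   POINTER_SIZE == 64 and writing uintptr_t for pointer_sized_int_node:

     int i;          encoded as  (uintptr_t) i
     double d;       encoded as  (uintptr_t) VIEW_CONVERT_EXPR<uint64_t>(d)
     __int128 x;     spilled to an addressable temporary, its address passed
     long double l;  likewise passed by address (mode wider than a pointer)

   so the libubsan handlers always receive a single pointer-sized word,
   either the value itself or a pointer to it.  */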
187
188 /* Cached ubsan_get_type_descriptor_type () return value. */
189 static GTY(()) tree ubsan_type_descriptor_type;
190
191 /* Build
192 struct __ubsan_type_descriptor
193 {
194 unsigned short __typekind;
195 unsigned short __typeinfo;
196 char __typename[];
197 }
198 type. */
199
200 static tree
201 ubsan_get_type_descriptor_type (void)
202 {
203 static const char *field_names[3]
204 = { "__typekind", "__typeinfo", "__typename" };
205 tree fields[3], ret;
206
207 if (ubsan_type_descriptor_type)
208 return ubsan_type_descriptor_type;
209
210 tree itype = build_range_type (sizetype, size_zero_node, NULL_TREE);
211 tree flex_arr_type = build_array_type (char_type_node, itype);
212
213 ret = make_node (RECORD_TYPE);
214 for (int i = 0; i < 3; i++)
215 {
216 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
217 get_identifier (field_names[i]),
218 (i == 2) ? flex_arr_type
219 : short_unsigned_type_node);
220 DECL_CONTEXT (fields[i]) = ret;
221 if (i)
222 DECL_CHAIN (fields[i - 1]) = fields[i];
223 }
224 tree type_decl = build_decl (input_location, TYPE_DECL,
225 get_identifier ("__ubsan_type_descriptor"),
226 ret);
227 DECL_IGNORED_P (type_decl) = 1;
228 DECL_ARTIFICIAL (type_decl) = 1;
229 TYPE_FIELDS (ret) = fields[0];
230 TYPE_NAME (ret) = type_decl;
231 TYPE_STUB_DECL (ret) = type_decl;
232 layout_type (ret);
233 ubsan_type_descriptor_type = ret;
234 return ret;
235 }
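/* For illustration, the descriptor that ubsan_type_descriptor () below
   emits for a plain 32-bit signed 'int' corresponds roughly to

     struct __ubsan_type_descriptor d = {
       0x0000,          // __typekind: integer
       (5 << 1) | 1,    // __typeinfo: log2 (32) << 1, low bit set = signed
       "'int'"          // __typename, quoted pretty-printed name
     };

   (see get_ubsan_type_info_for_type () and the tkind switch further down;
   the exact bytes are of course target dependent).  */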
236
237 /* Cached ubsan_get_source_location_type () return value. */
238 static GTY(()) tree ubsan_source_location_type;
239
240 /* Build
241 struct __ubsan_source_location
242 {
243 const char *__filename;
244 unsigned int __line;
245 unsigned int __column;
246 }
247 type. */
248
249 tree
250 ubsan_get_source_location_type (void)
251 {
252 static const char *field_names[3]
253 = { "__filename", "__line", "__column" };
254 tree fields[3], ret;
255 if (ubsan_source_location_type)
256 return ubsan_source_location_type;
257
258 tree const_char_type = build_qualified_type (char_type_node,
259 TYPE_QUAL_CONST);
260
261 ret = make_node (RECORD_TYPE);
262 for (int i = 0; i < 3; i++)
263 {
264 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
265 get_identifier (field_names[i]),
266 (i == 0) ? build_pointer_type (const_char_type)
267 : unsigned_type_node);
268 DECL_CONTEXT (fields[i]) = ret;
269 if (i)
270 DECL_CHAIN (fields[i - 1]) = fields[i];
271 }
272 tree type_decl = build_decl (input_location, TYPE_DECL,
273 get_identifier ("__ubsan_source_location"),
274 ret);
275 DECL_IGNORED_P (type_decl) = 1;
276 DECL_ARTIFICIAL (type_decl) = 1;
277 TYPE_FIELDS (ret) = fields[0];
278 TYPE_NAME (ret) = type_decl;
279 TYPE_STUB_DECL (ret) = type_decl;
280 layout_type (ret);
281 ubsan_source_location_type = ret;
282 return ret;
283 }
284
285 /* Helper routine that returns a CONSTRUCTOR of __ubsan_source_location
286 type with its fields filled from a location_t LOC. */
287
288 static tree
289 ubsan_source_location (location_t loc)
290 {
291 expanded_location xloc;
292 tree type = ubsan_get_source_location_type ();
293
294 xloc = expand_location (loc);
295 tree str;
296 if (xloc.file == NULL)
297 {
298 str = build_int_cst (ptr_type_node, 0);
299 xloc.line = 0;
300 xloc.column = 0;
301 }
302 else
303 {
304 /* Fill in the values from LOC. */
305 size_t len = strlen (xloc.file) + 1;
306 str = build_string (len, xloc.file);
307 TREE_TYPE (str) = build_array_type_nelts (char_type_node, len);
308 TREE_READONLY (str) = 1;
309 TREE_STATIC (str) = 1;
310 str = build_fold_addr_expr (str);
311 }
312 tree ctor = build_constructor_va (type, 3, NULL_TREE, str, NULL_TREE,
313 build_int_cst (unsigned_type_node,
314 xloc.line), NULL_TREE,
315 build_int_cst (unsigned_type_node,
316 xloc.column));
317 TREE_CONSTANT (ctor) = 1;
318 TREE_STATIC (ctor) = 1;
319
320 return ctor;
321 }
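/* Example: for a location at line 42, column 7 of "foo.c" the CONSTRUCTOR
   built above corresponds to

     struct __ubsan_source_location loc = { "foo.c", 42, 7 };

   while an unknown location is encoded as { NULL, 0, 0 }.  */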
322
323 /* This routine returns a magic number for TYPE. */
324
325 static unsigned short
326 get_ubsan_type_info_for_type (tree type)
327 {
328 if (TREE_CODE (type) == REAL_TYPE)
329 return tree_to_uhwi (TYPE_SIZE (type));
330 else if (INTEGRAL_TYPE_P (type))
331 {
332 int prec = exact_log2 (tree_to_uhwi (TYPE_SIZE (type)));
333 gcc_assert (prec != -1);
334 return (prec << 1) | !TYPE_UNSIGNED (type);
335 }
336 else
337 return 0;
338 }
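/* A few worked values of the encoding above, assuming the usual LP64 type
   sizes: 'int' (32 bits, signed) -> (5 << 1) | 1 = 11; 'unsigned long'
   (64 bits) -> (6 << 1) | 0 = 12; 'float' -> 32 and 'double' -> 64, since
   for REAL_TYPE the raw bit size is returned.  */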
339
340 /* Counters for internal labels. ubsan_ids[0] for Lubsan_type,
341 ubsan_ids[1] for Lubsan_data labels. */
342 static GTY(()) unsigned int ubsan_ids[2];
343
344 /* Helper routine that returns ADDR_EXPR of a VAR_DECL of a type
345 descriptor. It first looks into the hash table; if not found,
346 create the VAR_DECL, put it into the hash table and return the
347 ADDR_EXPR of it. TYPE describes a particular type. PSTYLE is
348 an enum controlling how we want to print the type. */
349
350 tree
351 ubsan_type_descriptor (tree type, enum ubsan_print_style pstyle)
352 {
353 /* See through any typedefs. */
354 type = TYPE_MAIN_VARIANT (type);
355
356 tree decl = decl_for_type_lookup (type);
357 /* It is possible that some of the earlier created DECLs were found
358 unused; in that case they weren't emitted and varpool_node::get
359 returns a NULL node for them. But now we really need them. Thus,
360 renew them here. */
361 if (decl != NULL_TREE && varpool_node::get (decl))
362 return build_fold_addr_expr (decl);
363
364 tree dtype = ubsan_get_type_descriptor_type ();
365 tree type2 = type;
366 const char *tname = NULL;
367 pretty_printer pretty_name;
368 unsigned char deref_depth = 0;
369 unsigned short tkind, tinfo;
370
371 /* Get the name of the type, or the name of the pointer type. */
372 if (pstyle == UBSAN_PRINT_POINTER)
373 {
374 gcc_assert (POINTER_TYPE_P (type));
375 type2 = TREE_TYPE (type);
376
377 /* Remove any '*' operators from TYPE. */
378 while (POINTER_TYPE_P (type2))
379 deref_depth++, type2 = TREE_TYPE (type2);
380
381 if (TREE_CODE (type2) == METHOD_TYPE)
382 type2 = TYPE_METHOD_BASETYPE (type2);
383 }
384
385 /* If an array, get its type. */
386 type2 = strip_array_types (type2);
387
388 if (pstyle == UBSAN_PRINT_ARRAY)
389 {
390 while (POINTER_TYPE_P (type2))
391 deref_depth++, type2 = TREE_TYPE (type2);
392 }
393
394 if (TYPE_NAME (type2) != NULL)
395 {
396 if (TREE_CODE (TYPE_NAME (type2)) == IDENTIFIER_NODE)
397 tname = IDENTIFIER_POINTER (TYPE_NAME (type2));
398 else if (DECL_NAME (TYPE_NAME (type2)) != NULL)
399 tname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type2)));
400 }
401
402 if (tname == NULL)
403 /* We weren't able to determine the type name. */
404 tname = "<unknown>";
405
406 tree eltype = type;
407 if (pstyle == UBSAN_PRINT_POINTER)
408 {
409 pp_printf (&pretty_name, "'%s%s%s%s%s%s%s",
410 TYPE_VOLATILE (type2) ? "volatile " : "",
411 TYPE_READONLY (type2) ? "const " : "",
412 TYPE_RESTRICT (type2) ? "restrict " : "",
413 TYPE_ATOMIC (type2) ? "_Atomic " : "",
414 TREE_CODE (type2) == RECORD_TYPE
415 ? "struct "
416 : TREE_CODE (type2) == UNION_TYPE
417 ? "union " : "", tname,
418 deref_depth == 0 ? "" : " ");
419 while (deref_depth-- > 0)
420 pp_star (&pretty_name);
421 pp_quote (&pretty_name);
422 }
423 else if (pstyle == UBSAN_PRINT_ARRAY)
424 {
425 /* Pretty print the array dimensions. */
426 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
427 tree t = type;
428 pp_printf (&pretty_name, "'%s ", tname);
429 while (deref_depth-- > 0)
430 pp_star (&pretty_name);
431 while (TREE_CODE (t) == ARRAY_TYPE)
432 {
433 pp_left_bracket (&pretty_name);
434 tree dom = TYPE_DOMAIN (t);
435 if (dom != NULL_TREE
436 && TYPE_MAX_VALUE (dom) != NULL_TREE
437 && TREE_CODE (TYPE_MAX_VALUE (dom)) == INTEGER_CST)
438 {
439 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (dom))
440 && tree_to_uhwi (TYPE_MAX_VALUE (dom)) + 1 != 0)
441 pp_printf (&pretty_name, HOST_WIDE_INT_PRINT_DEC,
442 tree_to_uhwi (TYPE_MAX_VALUE (dom)) + 1);
443 else
444 pp_wide_int (&pretty_name,
445 wi::add (wi::to_widest (TYPE_MAX_VALUE (dom)), 1),
446 TYPE_SIGN (TREE_TYPE (dom)));
447 }
448 else
449 /* ??? We can't determine the variable name; print VLA unspec. */
450 pp_star (&pretty_name);
451 pp_right_bracket (&pretty_name);
452 t = TREE_TYPE (t);
453 }
454 pp_quote (&pretty_name);
455
456 /* Save the tree with stripped types. */
457 eltype = t;
458 }
459 else
460 pp_printf (&pretty_name, "'%s'", tname);
461
462 switch (TREE_CODE (eltype))
463 {
464 case BOOLEAN_TYPE:
465 case ENUMERAL_TYPE:
466 case INTEGER_TYPE:
467 tkind = 0x0000;
468 break;
469 case REAL_TYPE:
470 /* FIXME: libubsan right now only supports float, double and
471 long double type formats. */
472 if (TYPE_MODE (eltype) == TYPE_MODE (float_type_node)
473 || TYPE_MODE (eltype) == TYPE_MODE (double_type_node)
474 || TYPE_MODE (eltype) == TYPE_MODE (long_double_type_node))
475 tkind = 0x0001;
476 else
477 tkind = 0xffff;
478 break;
479 default:
480 tkind = 0xffff;
481 break;
482 }
483 tinfo = get_ubsan_type_info_for_type (eltype);
484
485 /* Create a new VAR_DECL of type descriptor. */
486 const char *tmp = pp_formatted_text (&pretty_name);
487 size_t len = strlen (tmp) + 1;
488 tree str = build_string (len, tmp);
489 TREE_TYPE (str) = build_array_type_nelts (char_type_node, len);
490 TREE_READONLY (str) = 1;
491 TREE_STATIC (str) = 1;
492
493 char tmp_name[32];
494 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_type", ubsan_ids[0]++);
495 decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
496 dtype);
497 TREE_STATIC (decl) = 1;
498 TREE_PUBLIC (decl) = 0;
499 DECL_ARTIFICIAL (decl) = 1;
500 DECL_IGNORED_P (decl) = 1;
501 DECL_EXTERNAL (decl) = 0;
502 DECL_SIZE (decl)
503 = size_binop (PLUS_EXPR, DECL_SIZE (decl), TYPE_SIZE (TREE_TYPE (str)));
504 DECL_SIZE_UNIT (decl)
505 = size_binop (PLUS_EXPR, DECL_SIZE_UNIT (decl),
506 TYPE_SIZE_UNIT (TREE_TYPE (str)));
507
508 tree ctor = build_constructor_va (dtype, 3, NULL_TREE,
509 build_int_cst (short_unsigned_type_node,
510 tkind), NULL_TREE,
511 build_int_cst (short_unsigned_type_node,
512 tinfo), NULL_TREE, str);
513 TREE_CONSTANT (ctor) = 1;
514 TREE_STATIC (ctor) = 1;
515 DECL_INITIAL (decl) = ctor;
516 varpool_node::finalize_decl (decl);
517
518 /* Save the VAR_DECL into the hash table. */
519 decl_for_type_insert (type, decl);
520
521 return build_fold_addr_expr (decl);
522 }
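/* Examples of the pretty-printed __typename produced above (assuming the
   default type names): the plain style gives "'int'"; UBSAN_PRINT_POINTER
   for 'const char *' gives "'const char *'" and for 'struct S **' gives
   "'struct S **'"; UBSAN_PRINT_ARRAY for 'int [10]' gives "'int [10]'",
   with '*' printed for dimensions that are not known constants (VLAs).  */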
523
524 /* Create a structure for the ubsan library. NAME is a name of the new
525 structure. LOCCNT is number of locations, PLOC points to array of
526 locations. The arguments in ... are of __ubsan_type_descriptor type
527 and there are at most two of them, followed by NULL_TREE, followed
528 by optional extra arguments and another NULL_TREE. */
529
530 tree
531 ubsan_create_data (const char *name, int loccnt, const location_t *ploc, ...)
532 {
533 va_list args;
534 tree ret, t;
535 tree fields[6];
536 vec<tree, va_gc> *saved_args = NULL;
537 size_t i = 0;
538 int j;
539
540 /* It is possible that PCH zapped the table with definitions of sanitizer
541 builtins. Reinitialize them if needed. */
542 initialize_sanitizer_builtins ();
543
544 /* Firstly, create a pointer to type descriptor type. */
545 tree td_type = ubsan_get_type_descriptor_type ();
546 td_type = build_pointer_type (td_type);
547
548 /* Create the structure type. */
549 ret = make_node (RECORD_TYPE);
550 for (j = 0; j < loccnt; j++)
551 {
552 gcc_checking_assert (i < 2);
553 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
554 ubsan_get_source_location_type ());
555 DECL_CONTEXT (fields[i]) = ret;
556 if (i)
557 DECL_CHAIN (fields[i - 1]) = fields[i];
558 i++;
559 }
560
561 va_start (args, ploc);
562 for (t = va_arg (args, tree); t != NULL_TREE;
563 i++, t = va_arg (args, tree))
564 {
565 gcc_checking_assert (i < 4);
566 /* Save the tree arguments for later use. */
567 vec_safe_push (saved_args, t);
568 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
569 td_type);
570 DECL_CONTEXT (fields[i]) = ret;
571 if (i)
572 DECL_CHAIN (fields[i - 1]) = fields[i];
573 }
574
575 for (t = va_arg (args, tree); t != NULL_TREE;
576 i++, t = va_arg (args, tree))
577 {
578 gcc_checking_assert (i < 6);
579 /* Save the tree arguments for later use. */
580 vec_safe_push (saved_args, t);
581 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
582 TREE_TYPE (t));
583 DECL_CONTEXT (fields[i]) = ret;
584 if (i)
585 DECL_CHAIN (fields[i - 1]) = fields[i];
586 }
587 va_end (args);
588
589 tree type_decl = build_decl (input_location, TYPE_DECL,
590 get_identifier (name), ret);
591 DECL_IGNORED_P (type_decl) = 1;
592 DECL_ARTIFICIAL (type_decl) = 1;
593 TYPE_FIELDS (ret) = fields[0];
594 TYPE_NAME (ret) = type_decl;
595 TYPE_STUB_DECL (ret) = type_decl;
596 layout_type (ret);
597
598 /* Now, fill in the type. */
599 char tmp_name[32];
600 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_data", ubsan_ids[1]++);
601 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
602 ret);
603 TREE_STATIC (var) = 1;
604 TREE_PUBLIC (var) = 0;
605 DECL_ARTIFICIAL (var) = 1;
606 DECL_IGNORED_P (var) = 1;
607 DECL_EXTERNAL (var) = 0;
608
609 vec<constructor_elt, va_gc> *v;
610 vec_alloc (v, i);
611 tree ctor = build_constructor (ret, v);
612
613 /* If desirable, set the __ubsan_source_location element. */
614 for (j = 0; j < loccnt; j++)
615 {
616 location_t loc = LOCATION_LOCUS (ploc[j]);
617 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, ubsan_source_location (loc));
618 }
619
620 size_t nelts = vec_safe_length (saved_args);
621 for (i = 0; i < nelts; i++)
622 {
623 t = (*saved_args)[i];
624 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
625 }
626
627 TREE_CONSTANT (ctor) = 1;
628 TREE_STATIC (ctor) = 1;
629 DECL_INITIAL (var) = ctor;
630 varpool_node::finalize_decl (var);
631
632 return var;
633 }
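/* For example, the signed-overflow instrumentation later in this file
   builds its data record as

     ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
                        ubsan_type_descriptor (lhstype), NULL_TREE,
                        NULL_TREE);

   i.e. one source location, a single type descriptor terminated by the
   first NULL_TREE, and no extra constant fields before the final
   NULL_TREE.  */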
634
635 /* Instrument the __builtin_unreachable call. We just call the libubsan
636 routine instead. */
637
638 bool
639 ubsan_instrument_unreachable (gimple_stmt_iterator *gsi)
640 {
641 gimple *g;
642 location_t loc = gimple_location (gsi_stmt (*gsi));
643
644 if (flag_sanitize_undefined_trap_on_error)
645 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
646 else
647 {
648 tree data = ubsan_create_data ("__ubsan_unreachable_data", 1, &loc,
649 NULL_TREE, NULL_TREE);
650 data = build_fold_addr_expr_loc (loc, data);
651 tree fn
652 = builtin_decl_explicit (BUILT_IN_UBSAN_HANDLE_BUILTIN_UNREACHABLE);
653 g = gimple_build_call (fn, 1, data);
654 }
655 gimple_set_location (g, loc);
656 gsi_replace (gsi, g, false);
657 return false;
658 }
659
660 /* Return true if T is a call to a libubsan routine. */
661
662 bool
663 is_ubsan_builtin_p (tree t)
664 {
665 return TREE_CODE (t) == FUNCTION_DECL
666 && DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
667 && strncmp (IDENTIFIER_POINTER (DECL_NAME (t)),
668 "__builtin___ubsan_", 18) == 0;
669 }
670
671 /* Create a callgraph edge for statement STMT. */
672
673 static void
674 ubsan_create_edge (gimple *stmt)
675 {
676 gcall *call_stmt = dyn_cast <gcall *> (stmt);
677 basic_block bb = gimple_bb (stmt);
678 int freq = compute_call_stmt_bb_frequency (current_function_decl, bb);
679 cgraph_node *node = cgraph_node::get (current_function_decl);
680 tree decl = gimple_call_fndecl (call_stmt);
681 if (decl)
682 node->create_edge (cgraph_node::get_create (decl), call_stmt, bb->count,
683 freq);
684 }
685
686 /* Expand the UBSAN_BOUNDS special builtin function. */
687
688 bool
689 ubsan_expand_bounds_ifn (gimple_stmt_iterator *gsi)
690 {
691 gimple *stmt = gsi_stmt (*gsi);
692 location_t loc = gimple_location (stmt);
693 gcc_assert (gimple_call_num_args (stmt) == 3);
694
695 /* Pick up the arguments of the UBSAN_BOUNDS call. */
696 tree type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 0)));
697 tree index = gimple_call_arg (stmt, 1);
698 tree orig_index = index;
699 tree bound = gimple_call_arg (stmt, 2);
700
701 gimple_stmt_iterator gsi_orig = *gsi;
702
703 /* Create condition "if (index > bound)". */
704 basic_block then_bb, fallthru_bb;
705 gimple_stmt_iterator cond_insert_point
706 = create_cond_insert_point (gsi, false, false, true,
707 &then_bb, &fallthru_bb);
708 index = fold_convert (TREE_TYPE (bound), index);
709 index = force_gimple_operand_gsi (&cond_insert_point, index,
710 true, NULL_TREE,
711 false, GSI_NEW_STMT);
712 gimple *g = gimple_build_cond (GT_EXPR, index, bound, NULL_TREE, NULL_TREE);
713 gimple_set_location (g, loc);
714 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
715
716 /* Generate __ubsan_handle_out_of_bounds call. */
717 *gsi = gsi_after_labels (then_bb);
718 if (flag_sanitize_undefined_trap_on_error)
719 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
720 else
721 {
722 tree data
723 = ubsan_create_data ("__ubsan_out_of_bounds_data", 1, &loc,
724 ubsan_type_descriptor (type, UBSAN_PRINT_ARRAY),
725 ubsan_type_descriptor (TREE_TYPE (orig_index)),
726 NULL_TREE, NULL_TREE);
727 data = build_fold_addr_expr_loc (loc, data);
728 enum built_in_function bcode
729 = (flag_sanitize_recover & SANITIZE_BOUNDS)
730 ? BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS
731 : BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS_ABORT;
732 tree fn = builtin_decl_explicit (bcode);
733 tree val = ubsan_encode_value (orig_index, UBSAN_ENCODE_VALUE_GIMPLE);
734 val = force_gimple_operand_gsi (gsi, val, true, NULL_TREE, true,
735 GSI_SAME_STMT);
736 g = gimple_build_call (fn, 2, data, val);
737 }
738 gimple_set_location (g, loc);
739 gsi_insert_before (gsi, g, GSI_SAME_STMT);
740
741 /* Get rid of the UBSAN_BOUNDS call from the IR. */
742 unlink_stmt_vdef (stmt);
743 gsi_remove (&gsi_orig, true);
744
745 /* Point GSI to next logical statement. */
746 *gsi = gsi_start_bb (fallthru_bb);
747 return true;
748 }
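/* Sketch of the expansion above for 'int a[10]; ... a[i]' compiled with
   -fsanitize=bounds (recovering variant), assuming the front end passed 9,
   the highest valid index, as the bound argument:

     if (i > 9)
       __ubsan_handle_out_of_bounds (&data, (uintptr_t) i);
     ... the array access continues in the fallthru block ...

   where uintptr_t again stands in for pointer_sized_int_node and &data
   points to the Lubsan_data* record built by ubsan_create_data.  */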
749
750 /* Expand UBSAN_NULL internal call. The type is kept on the ckind
751 argument which is a constant, because the middle-end treats pointer
752 conversions as useless and therefore the type of the first argument
753 could be changed to any other pointer type. */
754
755 bool
756 ubsan_expand_null_ifn (gimple_stmt_iterator *gsip)
757 {
758 gimple_stmt_iterator gsi = *gsip;
759 gimple *stmt = gsi_stmt (gsi);
760 location_t loc = gimple_location (stmt);
761 gcc_assert (gimple_call_num_args (stmt) == 3);
762 tree ptr = gimple_call_arg (stmt, 0);
763 tree ckind = gimple_call_arg (stmt, 1);
764 tree align = gimple_call_arg (stmt, 2);
765 tree check_align = NULL_TREE;
766 bool check_null;
767
768 basic_block cur_bb = gsi_bb (gsi);
769
770 gimple *g;
771 if (!integer_zerop (align))
772 {
773 unsigned int ptralign = get_pointer_alignment (ptr) / BITS_PER_UNIT;
774 if (compare_tree_int (align, ptralign) == 1)
775 {
776 check_align = make_ssa_name (pointer_sized_int_node);
777 g = gimple_build_assign (check_align, NOP_EXPR, ptr);
778 gimple_set_location (g, loc);
779 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
780 }
781 }
782 check_null = sanitize_flags_p (SANITIZE_NULL);
783
784 if (check_align == NULL_TREE && !check_null)
785 {
786 gsi_remove (gsip, true);
787 /* Unlink the UBSAN_NULLs vops before replacing it. */
788 unlink_stmt_vdef (stmt);
789 return true;
790 }
791
792 /* Split the original block holding the pointer dereference. */
793 edge e = split_block (cur_bb, stmt);
794
795 /* Get a hold on the 'condition block', the 'then block' and the
796 'else block'. */
797 basic_block cond_bb = e->src;
798 basic_block fallthru_bb = e->dest;
799 basic_block then_bb = create_empty_bb (cond_bb);
800 add_bb_to_loop (then_bb, cond_bb->loop_father);
801 loops_state_set (LOOPS_NEED_FIXUP);
802
803 /* Make an edge coming from the 'cond block' into the 'then block';
804 this edge is unlikely taken, so set up the probability accordingly. */
805 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
806 e->probability = profile_probability::very_unlikely ();
807
808 /* Connect 'then block' with the 'else block'. This is needed
809 as the ubsan routines we call in the 'then block' are not noreturn.
810 The 'then block' only has one outgoing edge. */
811 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
812
813 /* Set up the fallthrough basic block. */
814 e = find_edge (cond_bb, fallthru_bb);
815 e->flags = EDGE_FALSE_VALUE;
816 e->count = cond_bb->count;
817 e->probability = profile_probability::very_likely ();
818
819 /* Update dominance info for the newly created then_bb; note that
820 fallthru_bb's dominance info has already been updated by
821 split_block. */
822 if (dom_info_available_p (CDI_DOMINATORS))
823 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
824
825 /* Put the ubsan builtin call into the newly created BB. */
826 if (flag_sanitize_undefined_trap_on_error)
827 g = gimple_build_call (builtin_decl_implicit (BUILT_IN_TRAP), 0);
828 else
829 {
830 enum built_in_function bcode
831 = (flag_sanitize_recover & ((check_align ? SANITIZE_ALIGNMENT : 0)
832 | (check_null ? SANITIZE_NULL : 0)))
833 ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1
834 : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1_ABORT;
835 tree fn = builtin_decl_implicit (bcode);
836 int align_log = tree_log2 (align);
837 tree data
838 = ubsan_create_data ("__ubsan_null_data", 1, &loc,
839 ubsan_type_descriptor (TREE_TYPE (ckind),
840 UBSAN_PRINT_POINTER),
841 NULL_TREE,
842 build_int_cst (unsigned_char_type_node,
843 MAX (align_log, 0)),
844 fold_convert (unsigned_char_type_node, ckind),
845 NULL_TREE);
846 data = build_fold_addr_expr_loc (loc, data);
847 g = gimple_build_call (fn, 2, data,
848 check_align ? check_align
849 : build_zero_cst (pointer_sized_int_node));
850 }
851 gimple_stmt_iterator gsi2 = gsi_start_bb (then_bb);
852 gimple_set_location (g, loc);
853 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
854
855 /* Unlink the UBSAN_NULLs vops before replacing it. */
856 unlink_stmt_vdef (stmt);
857
858 if (check_null)
859 {
860 g = gimple_build_cond (EQ_EXPR, ptr, build_int_cst (TREE_TYPE (ptr), 0),
861 NULL_TREE, NULL_TREE);
862 gimple_set_location (g, loc);
863
864 /* Replace the UBSAN_NULL with a GIMPLE_COND stmt. */
865 gsi_replace (&gsi, g, false);
866 stmt = g;
867 }
868
869 if (check_align)
870 {
871 if (check_null)
872 {
873 /* Split the block with the condition again. */
874 e = split_block (cond_bb, stmt);
875 basic_block cond1_bb = e->src;
876 basic_block cond2_bb = e->dest;
877
878 /* Make an edge coming from the 'cond1 block' into the 'then block';
879 this edge is unlikely taken, so set up the probability
880 accordingly. */
881 e = make_edge (cond1_bb, then_bb, EDGE_TRUE_VALUE);
882 e->probability = profile_probability::very_unlikely ();
883
884 /* Set up the fallthrough basic block. */
885 e = find_edge (cond1_bb, cond2_bb);
886 e->flags = EDGE_FALSE_VALUE;
887 e->count = cond1_bb->count;
888 e->probability = profile_probability::very_likely ();
889
890 /* Update dominance info. */
891 if (dom_info_available_p (CDI_DOMINATORS))
892 {
893 set_immediate_dominator (CDI_DOMINATORS, fallthru_bb, cond1_bb);
894 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond1_bb);
895 }
896
897 gsi2 = gsi_start_bb (cond2_bb);
898 }
899
900 tree mask = build_int_cst (pointer_sized_int_node,
901 tree_to_uhwi (align) - 1);
902 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
903 BIT_AND_EXPR, check_align, mask);
904 gimple_set_location (g, loc);
905 if (check_null)
906 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
907 else
908 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
909
910 g = gimple_build_cond (NE_EXPR, gimple_assign_lhs (g),
911 build_int_cst (pointer_sized_int_node, 0),
912 NULL_TREE, NULL_TREE);
913 gimple_set_location (g, loc);
914 if (check_null)
915 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
916 else
917 /* Replace the UBSAN_NULL with a GIMPLE_COND stmt. */
918 gsi_replace (&gsi, g, false);
919 }
920 return false;
921 }
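/* Sketch of what the expansion above produces for a dereference of PTR
   checked for both NULL and an alignment of ALIGN (recovering variant):

     if (ptr == 0)
       goto handler;
     if (((uintptr_t) ptr & (ALIGN - 1)) != 0)
       goto handler;
     ... the dereference in the fallthru block ...
   handler:
     __ubsan_handle_type_mismatch_v1 (&data, (uintptr_t) ptr);
     // not noreturn: control falls back into the dereference block

   the handler's second argument is 0 instead of the pointer value when
   only the NULL check is emitted.  */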
922
923 #define OBJSZ_MAX_OFFSET (1024 * 16)
924
925 /* Expand UBSAN_OBJECT_SIZE internal call. */
926
927 bool
928 ubsan_expand_objsize_ifn (gimple_stmt_iterator *gsi)
929 {
930 gimple *stmt = gsi_stmt (*gsi);
931 location_t loc = gimple_location (stmt);
932 gcc_assert (gimple_call_num_args (stmt) == 4);
933
934 tree ptr = gimple_call_arg (stmt, 0);
935 tree offset = gimple_call_arg (stmt, 1);
936 tree size = gimple_call_arg (stmt, 2);
937 tree ckind = gimple_call_arg (stmt, 3);
938 gimple_stmt_iterator gsi_orig = *gsi;
939 gimple *g;
940
941 /* See if we can discard the check. */
942 if (TREE_CODE (size) != INTEGER_CST
943 || integer_all_onesp (size))
944 /* Yes, __builtin_object_size couldn't determine the
945 object size. */;
946 else if (TREE_CODE (offset) == INTEGER_CST
947 && wi::to_widest (offset) >= -OBJSZ_MAX_OFFSET
948 && wi::to_widest (offset) <= -1)
949 /* The offset is in range [-16K, -1]. */;
950 else
951 {
952 /* if (offset > objsize) */
953 basic_block then_bb, fallthru_bb;
954 gimple_stmt_iterator cond_insert_point
955 = create_cond_insert_point (gsi, false, false, true,
956 &then_bb, &fallthru_bb);
957 g = gimple_build_cond (GT_EXPR, offset, size, NULL_TREE, NULL_TREE);
958 gimple_set_location (g, loc);
959 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
960
961 /* If the offset is small enough, we don't need the second
962 run-time check. */
963 if (TREE_CODE (offset) == INTEGER_CST
964 && wi::to_widest (offset) >= 0
965 && wi::to_widest (offset) <= OBJSZ_MAX_OFFSET)
966 *gsi = gsi_after_labels (then_bb);
967 else
968 {
969 /* Don't issue run-time error if (ptr > ptr + offset). That
970 may happen when computing a POINTER_PLUS_EXPR. */
971 basic_block then2_bb, fallthru2_bb;
972
973 gimple_stmt_iterator gsi2 = gsi_after_labels (then_bb);
974 cond_insert_point = create_cond_insert_point (&gsi2, false, false,
975 true, &then2_bb,
976 &fallthru2_bb);
977 /* Convert the pointer to an integer type. */
978 tree p = make_ssa_name (pointer_sized_int_node);
979 g = gimple_build_assign (p, NOP_EXPR, ptr);
980 gimple_set_location (g, loc);
981 gsi_insert_before (&cond_insert_point, g, GSI_NEW_STMT);
982 p = gimple_assign_lhs (g);
983 /* Compute ptr + offset. */
984 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
985 PLUS_EXPR, p, offset);
986 gimple_set_location (g, loc);
987 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
988 /* Now build the conditional and put it into the IR. */
989 g = gimple_build_cond (LE_EXPR, p, gimple_assign_lhs (g),
990 NULL_TREE, NULL_TREE);
991 gimple_set_location (g, loc);
992 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
993 *gsi = gsi_after_labels (then2_bb);
994 }
995
996 /* Generate __ubsan_handle_type_mismatch call. */
997 if (flag_sanitize_undefined_trap_on_error)
998 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
999 else
1000 {
1001 tree data
1002 = ubsan_create_data ("__ubsan_objsz_data", 1, &loc,
1003 ubsan_type_descriptor (TREE_TYPE (ptr),
1004 UBSAN_PRINT_POINTER),
1005 NULL_TREE,
1006 build_zero_cst (unsigned_char_type_node),
1007 ckind,
1008 NULL_TREE);
1009 data = build_fold_addr_expr_loc (loc, data);
1010 enum built_in_function bcode
1011 = (flag_sanitize_recover & SANITIZE_OBJECT_SIZE)
1012 ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1
1013 : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1_ABORT;
1014 tree p = make_ssa_name (pointer_sized_int_node);
1015 g = gimple_build_assign (p, NOP_EXPR, ptr);
1016 gimple_set_location (g, loc);
1017 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1018 g = gimple_build_call (builtin_decl_explicit (bcode), 2, data, p);
1019 }
1020 gimple_set_location (g, loc);
1021 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1022
1023 /* Point GSI to next logical statement. */
1024 *gsi = gsi_start_bb (fallthru_bb);
1025
1026 /* Get rid of the UBSAN_OBJECT_SIZE call from the IR. */
1027 unlink_stmt_vdef (stmt);
1028 gsi_remove (&gsi_orig, true);
1029 return true;
1030 }
1031
1032 /* Get rid of the UBSAN_OBJECT_SIZE call from the IR. */
1033 unlink_stmt_vdef (stmt);
1034 gsi_remove (gsi, true);
1035 return true;
1036 }
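/* Sketch of the check emitted above, with SIZE the value computed by
   __builtin_object_size for PTR:

     if (offset > size)
       if ((uintptr_t) ptr <= (uintptr_t) ptr + offset)  // skipped for small
                                                         // constant offsets
         __ubsan_handle_type_mismatch_v1 (&data, (uintptr_t) ptr);

   and the whole check is discarded when SIZE is unknown (all ones) or
   OFFSET is a small negative constant in [-OBJSZ_MAX_OFFSET, -1].  */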
1037
1038 /* Expand UBSAN_PTR internal call. */
1039
1040 bool
1041 ubsan_expand_ptr_ifn (gimple_stmt_iterator *gsip)
1042 {
1043 gimple_stmt_iterator gsi = *gsip;
1044 gimple *stmt = gsi_stmt (gsi);
1045 location_t loc = gimple_location (stmt);
1046 gcc_assert (gimple_call_num_args (stmt) == 2);
1047 tree ptr = gimple_call_arg (stmt, 0);
1048 tree off = gimple_call_arg (stmt, 1);
1049
1050 if (integer_zerop (off))
1051 {
1052 gsi_remove (gsip, true);
1053 unlink_stmt_vdef (stmt);
1054 return true;
1055 }
1056
1057 basic_block cur_bb = gsi_bb (gsi);
1058 tree ptrplusoff = make_ssa_name (pointer_sized_int_node);
1059 tree ptri = make_ssa_name (pointer_sized_int_node);
1060 int pos_neg = get_range_pos_neg (off);
1061
1062 /* Split the original block holding the pointer dereference. */
1063 edge e = split_block (cur_bb, stmt);
1064
1065 /* Get a hold on the 'condition block', the 'then block' and the
1066 'else block'. */
1067 basic_block cond_bb = e->src;
1068 basic_block fallthru_bb = e->dest;
1069 basic_block then_bb = create_empty_bb (cond_bb);
1070 basic_block cond_pos_bb = NULL, cond_neg_bb = NULL;
1071 add_bb_to_loop (then_bb, cond_bb->loop_father);
1072 loops_state_set (LOOPS_NEED_FIXUP);
1073
1074 /* Set up the fallthrough basic block. */
1075 e->flags = EDGE_FALSE_VALUE;
1076 if (pos_neg != 3)
1077 {
1078 e->count = cond_bb->count;
1079 e->probability = profile_probability::very_likely ();
1080
1081 /* Connect 'then block' with the 'else block'. This is needed
1082 as the ubsan routines we call in the 'then block' are not noreturn.
1083 The 'then block' only has one outgoing edge. */
1084 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1085
1086 /* Make an edge coming from the 'cond block' into the 'then block';
1087 this edge is unlikely taken, so set up the probability
1088 accordingly. */
1089 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1090 e->probability = profile_probability::very_unlikely ();
1091 }
1092 else
1093 {
1094 profile_count count = cond_bb->count.apply_probability (PROB_EVEN);
1095 e->count = count;
1096 e->probability = profile_probability::even ();
1097
1098 e = split_block (fallthru_bb, (gimple *) NULL);
1099 cond_neg_bb = e->src;
1100 fallthru_bb = e->dest;
1101 e->count = count;
1102 e->probability = profile_probability::very_likely ();
1103 e->flags = EDGE_FALSE_VALUE;
1104
1105 e = make_edge (cond_neg_bb, then_bb, EDGE_TRUE_VALUE);
1106 e->probability = profile_probability::very_unlikely ();
1107
1108 cond_pos_bb = create_empty_bb (cond_bb);
1109 add_bb_to_loop (cond_pos_bb, cond_bb->loop_father);
1110
1111 e = make_edge (cond_bb, cond_pos_bb, EDGE_TRUE_VALUE);
1112 e->count = count;
1113 e->probability = profile_probability::even ();
1114
1115 e = make_edge (cond_pos_bb, then_bb, EDGE_TRUE_VALUE);
1116 e->probability = profile_probability::very_unlikely ();
1117
1118 e = make_edge (cond_pos_bb, fallthru_bb, EDGE_FALSE_VALUE);
1119 e->count = count;
1120 e->probability = profile_probability::very_likely ();
1121
1122 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1123 }
1124
1125 gimple *g = gimple_build_assign (ptri, NOP_EXPR, ptr);
1126 gimple_set_location (g, loc);
1127 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1128 g = gimple_build_assign (ptrplusoff, PLUS_EXPR, ptri, off);
1129 gimple_set_location (g, loc);
1130 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1131
1132 /* Update dominance info for the newly created then_bb; note that
1133 fallthru_bb's dominance info has already been updated by
1134 split_block. */
1135 if (dom_info_available_p (CDI_DOMINATORS))
1136 {
1137 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1138 if (pos_neg == 3)
1139 {
1140 set_immediate_dominator (CDI_DOMINATORS, cond_pos_bb, cond_bb);
1141 set_immediate_dominator (CDI_DOMINATORS, fallthru_bb, cond_bb);
1142 }
1143 }
1144
1145 /* Put the ubsan builtin call into the newly created BB. */
1146 if (flag_sanitize_undefined_trap_on_error)
1147 g = gimple_build_call (builtin_decl_implicit (BUILT_IN_TRAP), 0);
1148 else
1149 {
1150 enum built_in_function bcode
1151 = (flag_sanitize_recover & SANITIZE_POINTER_OVERFLOW)
1152 ? BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW
1153 : BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW_ABORT;
1154 tree fn = builtin_decl_implicit (bcode);
1155 tree data
1156 = ubsan_create_data ("__ubsan_ptrovf_data", 1, &loc,
1157 NULL_TREE, NULL_TREE);
1158 data = build_fold_addr_expr_loc (loc, data);
1159 g = gimple_build_call (fn, 3, data, ptr, ptrplusoff);
1160 }
1161 gimple_stmt_iterator gsi2 = gsi_start_bb (then_bb);
1162 gimple_set_location (g, loc);
1163 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
1164
1165 /* Unlink the UBSAN_PTRs vops before replacing it. */
1166 unlink_stmt_vdef (stmt);
1167
1168 if (TREE_CODE (off) == INTEGER_CST)
1169 g = gimple_build_cond (wi::neg_p (wi::to_wide (off)) ? LT_EXPR : GE_EXPR,
1170 ptri, fold_build1 (NEGATE_EXPR, sizetype, off),
1171 NULL_TREE, NULL_TREE);
1172 else if (pos_neg != 3)
1173 g = gimple_build_cond (pos_neg == 1 ? LT_EXPR : GT_EXPR,
1174 ptrplusoff, ptri, NULL_TREE, NULL_TREE);
1175 else
1176 {
1177 gsi2 = gsi_start_bb (cond_pos_bb);
1178 g = gimple_build_cond (LT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE);
1179 gimple_set_location (g, loc);
1180 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
1181
1182 gsi2 = gsi_start_bb (cond_neg_bb);
1183 g = gimple_build_cond (GT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE);
1184 gimple_set_location (g, loc);
1185 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
1186
1187 gimple_seq seq = NULL;
1188 tree t = gimple_build (&seq, loc, NOP_EXPR, ssizetype, off);
1189 t = gimple_build (&seq, loc, GE_EXPR, boolean_type_node,
1190 t, ssize_int (0));
1191 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
1192 g = gimple_build_cond (NE_EXPR, t, boolean_false_node,
1193 NULL_TREE, NULL_TREE);
1194 }
1195 gimple_set_location (g, loc);
1196 /* Replace the UBSAN_PTR with a GIMPLE_COND stmt. */
1197 gsi_replace (&gsi, g, false);
1198 return false;
1199 }
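/* Sketch: for PTR p+ OFF the code above computes Q = (uintptr_t) PTR + OFF
   and reports an overflow when the addition wraps around the address
   space, roughly

     if (off is known non-negative)   if (Q < (uintptr_t) PTR) handler;
     else if (off is known negative)  if (Q > (uintptr_t) PTR) handler;
     else both conditions are emitted, selected on the run-time sign of OFF,

   with __ubsan_handle_pointer_overflow (&data, ptr, Q) as the recovering
   handler.  */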
1200
1201
1202 /* Cached __ubsan_vptr_type_cache decl. */
1203 static GTY(()) tree ubsan_vptr_type_cache_decl;
1204
1205 /* Expand UBSAN_VPTR internal call. The type is kept on the ckind
1206 argument which is a constant, because the middle-end treats pointer
1207 conversions as useless and therefore the type of the first argument
1208 could be changed to any other pointer type. */
1209
1210 bool
1211 ubsan_expand_vptr_ifn (gimple_stmt_iterator *gsip)
1212 {
1213 gimple_stmt_iterator gsi = *gsip;
1214 gimple *stmt = gsi_stmt (gsi);
1215 location_t loc = gimple_location (stmt);
1216 gcc_assert (gimple_call_num_args (stmt) == 5);
1217 tree op = gimple_call_arg (stmt, 0);
1218 tree vptr = gimple_call_arg (stmt, 1);
1219 tree str_hash = gimple_call_arg (stmt, 2);
1220 tree ti_decl_addr = gimple_call_arg (stmt, 3);
1221 tree ckind_tree = gimple_call_arg (stmt, 4);
1222 ubsan_null_ckind ckind = (ubsan_null_ckind) tree_to_uhwi (ckind_tree);
1223 tree type = TREE_TYPE (TREE_TYPE (ckind_tree));
1224 gimple *g;
1225 basic_block fallthru_bb = NULL;
1226
1227 if (ckind == UBSAN_DOWNCAST_POINTER)
1228 {
1229 /* Guard everything with if (op != NULL) { ... }. */
1230 basic_block then_bb;
1231 gimple_stmt_iterator cond_insert_point
1232 = create_cond_insert_point (gsip, false, false, true,
1233 &then_bb, &fallthru_bb);
1234 g = gimple_build_cond (NE_EXPR, op, build_zero_cst (TREE_TYPE (op)),
1235 NULL_TREE, NULL_TREE);
1236 gimple_set_location (g, loc);
1237 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
1238 *gsip = gsi_after_labels (then_bb);
1239 gsi_remove (&gsi, false);
1240 gsi_insert_before (gsip, stmt, GSI_NEW_STMT);
1241 gsi = *gsip;
1242 }
1243
1244 tree htype = TREE_TYPE (str_hash);
1245 tree cst = wide_int_to_tree (htype,
1246 wi::uhwi (((uint64_t) 0x9ddfea08 << 32)
1247 | 0xeb382d69, 64));
1248 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
1249 vptr, str_hash);
1250 gimple_set_location (g, loc);
1251 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1252 g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR,
1253 gimple_assign_lhs (g), cst);
1254 gimple_set_location (g, loc);
1255 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1256 tree t1 = gimple_assign_lhs (g);
1257 g = gimple_build_assign (make_ssa_name (htype), LSHIFT_EXPR,
1258 t1, build_int_cst (integer_type_node, 47));
1259 gimple_set_location (g, loc);
1260 tree t2 = gimple_assign_lhs (g);
1261 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1262 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
1263 vptr, t1);
1264 gimple_set_location (g, loc);
1265 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1266 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
1267 t2, gimple_assign_lhs (g));
1268 gimple_set_location (g, loc);
1269 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1270 g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR,
1271 gimple_assign_lhs (g), cst);
1272 gimple_set_location (g, loc);
1273 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1274 tree t3 = gimple_assign_lhs (g);
1275 g = gimple_build_assign (make_ssa_name (htype), LSHIFT_EXPR,
1276 t3, build_int_cst (integer_type_node, 47));
1277 gimple_set_location (g, loc);
1278 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1279 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
1280 t3, gimple_assign_lhs (g));
1281 gimple_set_location (g, loc);
1282 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1283 g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR,
1284 gimple_assign_lhs (g), cst);
1285 gimple_set_location (g, loc);
1286 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1287 if (!useless_type_conversion_p (pointer_sized_int_node, htype))
1288 {
1289 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1290 NOP_EXPR, gimple_assign_lhs (g));
1291 gimple_set_location (g, loc);
1292 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1293 }
1294 tree hash = gimple_assign_lhs (g);
1295
1296 if (ubsan_vptr_type_cache_decl == NULL_TREE)
1297 {
1298 tree atype = build_array_type_nelts (pointer_sized_int_node, 128);
1299 tree array = build_decl (UNKNOWN_LOCATION, VAR_DECL,
1300 get_identifier ("__ubsan_vptr_type_cache"),
1301 atype);
1302 DECL_ARTIFICIAL (array) = 1;
1303 DECL_IGNORED_P (array) = 1;
1304 TREE_PUBLIC (array) = 1;
1305 TREE_STATIC (array) = 1;
1306 DECL_EXTERNAL (array) = 1;
1307 DECL_VISIBILITY (array) = VISIBILITY_DEFAULT;
1308 DECL_VISIBILITY_SPECIFIED (array) = 1;
1309 varpool_node::finalize_decl (array);
1310 ubsan_vptr_type_cache_decl = array;
1311 }
1312
1313 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1314 BIT_AND_EXPR, hash,
1315 build_int_cst (pointer_sized_int_node, 127));
1316 gimple_set_location (g, loc);
1317 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1318
1319 tree c = build4_loc (loc, ARRAY_REF, pointer_sized_int_node,
1320 ubsan_vptr_type_cache_decl, gimple_assign_lhs (g),
1321 NULL_TREE, NULL_TREE);
1322 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1323 ARRAY_REF, c);
1324 gimple_set_location (g, loc);
1325 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1326
1327 basic_block then_bb, fallthru2_bb;
1328 gimple_stmt_iterator cond_insert_point
1329 = create_cond_insert_point (gsip, false, false, true,
1330 &then_bb, &fallthru2_bb);
1331 g = gimple_build_cond (NE_EXPR, gimple_assign_lhs (g), hash,
1332 NULL_TREE, NULL_TREE);
1333 gimple_set_location (g, loc);
1334 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
1335 *gsip = gsi_after_labels (then_bb);
1336 if (fallthru_bb == NULL)
1337 fallthru_bb = fallthru2_bb;
1338
1339 tree data
1340 = ubsan_create_data ("__ubsan_vptr_data", 1, &loc,
1341 ubsan_type_descriptor (type), NULL_TREE, ti_decl_addr,
1342 build_int_cst (unsigned_char_type_node, ckind),
1343 NULL_TREE);
1344 data = build_fold_addr_expr_loc (loc, data);
1345 enum built_in_function bcode
1346 = (flag_sanitize_recover & SANITIZE_VPTR)
1347 ? BUILT_IN_UBSAN_HANDLE_DYNAMIC_TYPE_CACHE_MISS
1348 : BUILT_IN_UBSAN_HANDLE_DYNAMIC_TYPE_CACHE_MISS_ABORT;
1349
1350 g = gimple_build_call (builtin_decl_explicit (bcode), 3, data, op, hash);
1351 gimple_set_location (g, loc);
1352 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1353
1354 /* Point GSI to next logical statement. */
1355 *gsip = gsi_start_bb (fallthru_bb);
1356
1357 /* Get rid of the UBSAN_VPTR call from the IR. */
1358 unlink_stmt_vdef (stmt);
1359 gsi_remove (&gsi, true);
1360 return true;
1361 }
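/* Note on the code above: HASH is a Murmur-inspired multiply/xor/shift mix
   of the vptr and the hash of the type name (the 0x9ddfea08eb382d69
   multiplier).  Its low 7 bits index the 128-entry, libubsan-provided
   __ubsan_vptr_type_cache array; only when the cached word differs from
   HASH is __ubsan_handle_dynamic_type_cache_miss (&data, op, hash) called,
   which re-verifies the dynamic type and, on success, stores HASH in the
   cache so the fast inline check succeeds next time.  */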
1362
1363 /* Instrument a memory reference. BASE is the base of MEM, IS_LHS says
1364 whether the pointer is on the left hand side of the assignment. */
1365
1366 static void
1367 instrument_mem_ref (tree mem, tree base, gimple_stmt_iterator *iter,
1368 bool is_lhs)
1369 {
1370 enum ubsan_null_ckind ikind = is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF;
1371 unsigned int align = 0;
1372 if (sanitize_flags_p (SANITIZE_ALIGNMENT))
1373 {
1374 align = min_align_of_type (TREE_TYPE (base));
1375 if (align <= 1)
1376 align = 0;
1377 }
1378 if (align == 0 && !sanitize_flags_p (SANITIZE_NULL))
1379 return;
1380 tree t = TREE_OPERAND (base, 0);
1381 if (!POINTER_TYPE_P (TREE_TYPE (t)))
1382 return;
1383 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (base)) && mem != base)
1384 ikind = UBSAN_MEMBER_ACCESS;
1385 tree kind = build_int_cst (build_pointer_type (TREE_TYPE (base)), ikind);
1386 tree alignt = build_int_cst (pointer_sized_int_node, align);
1387 gcall *g = gimple_build_call_internal (IFN_UBSAN_NULL, 3, t, kind, alignt);
1388 gimple_set_location (g, gimple_location (gsi_stmt (*iter)));
1389 gsi_insert_before (iter, g, GSI_SAME_STMT);
1390 }
1391
1392 /* Perform the pointer instrumentation. */
1393
1394 static void
1395 instrument_null (gimple_stmt_iterator gsi, tree t, bool is_lhs)
1396 {
1397 /* Handle also e.g. &s->i. */
1398 if (TREE_CODE (t) == ADDR_EXPR)
1399 t = TREE_OPERAND (t, 0);
1400 tree base = get_base_address (t);
1401 if (base != NULL_TREE
1402 && TREE_CODE (base) == MEM_REF
1403 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1404 instrument_mem_ref (t, base, &gsi, is_lhs);
1405 }
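/* Example: for a load through an SSA pointer, e.g.

     _2 = *p_1;

   with -fsanitize=null (and possibly alignment) this inserts just before
   the load

     UBSAN_NULL (p_1, K, A);

   where K is a constant of pointer-to-accessed-type whose numeric value is
   the access kind (UBSAN_LOAD_OF here, UBSAN_STORE_OF or
   UBSAN_MEMBER_ACCESS otherwise) and A is the required alignment or 0;
   ubsan_expand_null_ifn () above later turns this into the real test.  */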
1406
1407 /* Instrument pointer arithmetic PTR p+ OFF. */
1408
1409 static void
1410 instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree ptr, tree off)
1411 {
1412 if (TYPE_PRECISION (sizetype) != POINTER_SIZE)
1413 return;
1414 gcall *g = gimple_build_call_internal (IFN_UBSAN_PTR, 2, ptr, off);
1415 gimple_set_location (g, gimple_location (gsi_stmt (*gsi)));
1416 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1417 }
1418
1419 /* Instrument pointer arithmetic if any. */
1420
1421 static void
1422 maybe_instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree t)
1423 {
1424 if (TYPE_PRECISION (sizetype) != POINTER_SIZE)
1425 return;
1426
1427 /* Handle also e.g. &s->i. */
1428 if (TREE_CODE (t) == ADDR_EXPR)
1429 t = TREE_OPERAND (t, 0);
1430
1431 if (!handled_component_p (t) && TREE_CODE (t) != MEM_REF)
1432 return;
1433
1434 HOST_WIDE_INT bitsize, bitpos, bytepos;
1435 tree offset;
1436 machine_mode mode;
1437 int volatilep = 0, reversep, unsignedp = 0;
1438 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
1439 &unsignedp, &reversep, &volatilep);
1440 tree moff = NULL_TREE;
1441
1442 bool decl_p = DECL_P (inner);
1443 tree base;
1444 if (decl_p)
1445 {
1446 if (DECL_REGISTER (inner))
1447 return;
1448 base = inner;
1449 /* If BASE is a fixed size automatic variable or
1450 global variable defined in the current TU and bitpos
1451 fits, don't instrument anything. */
1452 if (offset == NULL_TREE
1453 && bitpos > 0
1454 && (VAR_P (base)
1455 || TREE_CODE (base) == PARM_DECL
1456 || TREE_CODE (base) == RESULT_DECL)
1457 && DECL_SIZE (base)
1458 && TREE_CODE (DECL_SIZE (base)) == INTEGER_CST
1459 && compare_tree_int (DECL_SIZE (base), bitpos) >= 0
1460 && (!is_global_var (base) || decl_binds_to_current_def_p (base)))
1461 return;
1462 }
1463 else if (TREE_CODE (inner) == MEM_REF)
1464 {
1465 base = TREE_OPERAND (inner, 0);
1466 if (TREE_CODE (base) == ADDR_EXPR
1467 && DECL_P (TREE_OPERAND (base, 0))
1468 && !TREE_ADDRESSABLE (TREE_OPERAND (base, 0))
1469 && !is_global_var (TREE_OPERAND (base, 0)))
1470 return;
1471 moff = TREE_OPERAND (inner, 1);
1472 if (integer_zerop (moff))
1473 moff = NULL_TREE;
1474 }
1475 else
1476 return;
1477
1478 if (!POINTER_TYPE_P (TREE_TYPE (base)) && !DECL_P (base))
1479 return;
1480 bytepos = bitpos / BITS_PER_UNIT;
1481 if (offset == NULL_TREE && bytepos == 0 && moff == NULL_TREE)
1482 return;
1483
1484 tree base_addr = base;
1485 if (decl_p)
1486 base_addr = build1 (ADDR_EXPR,
1487 build_pointer_type (TREE_TYPE (base)), base);
1488 t = offset;
1489 if (bytepos)
1490 {
1491 if (t)
1492 t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
1493 build_int_cst (TREE_TYPE (t), bytepos));
1494 else
1495 t = size_int (bytepos);
1496 }
1497 if (moff)
1498 {
1499 if (t)
1500 t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
1501 fold_convert (TREE_TYPE (t), moff));
1502 else
1503 t = fold_convert (sizetype, moff);
1504 }
1505 t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, true,
1506 GSI_SAME_STMT);
1507 base_addr = force_gimple_operand_gsi (gsi, base_addr, true, NULL_TREE, true,
1508 GSI_SAME_STMT);
1509 instrument_pointer_overflow (gsi, base_addr, t);
1510 }
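/* Example: for an address such as &s->a[i], with 's' a pointer to a struct
   whose member 'int a[16]' sits at byte offset 8 (offsets here are
   illustrative), the code above emits before the statement

     _1 = i * 4 + 8;
     UBSAN_PTR (s, _1);

   so that ubsan_expand_ptr_ifn () can check that s p+ _1 does not wrap.  */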
1511
1512 /* Build an ubsan builtin call for the signed-integer-overflow
1513 sanitization. CODE says what kind of builtin are we building,
1514 LOC is a location, LHSTYPE is the type of LHS, OP0 and OP1
1515 are operands of the binary operation. */
1516
1517 tree
1518 ubsan_build_overflow_builtin (tree_code code, location_t loc, tree lhstype,
1519 tree op0, tree op1, tree *datap)
1520 {
1521 if (flag_sanitize_undefined_trap_on_error)
1522 return build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
1523
1524 tree data;
1525 if (datap && *datap)
1526 data = *datap;
1527 else
1528 data = ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
1529 ubsan_type_descriptor (lhstype), NULL_TREE,
1530 NULL_TREE);
1531 if (datap)
1532 *datap = data;
1533 enum built_in_function fn_code;
1534
1535 switch (code)
1536 {
1537 case PLUS_EXPR:
1538 fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
1539 ? BUILT_IN_UBSAN_HANDLE_ADD_OVERFLOW
1540 : BUILT_IN_UBSAN_HANDLE_ADD_OVERFLOW_ABORT;
1541 break;
1542 case MINUS_EXPR:
1543 fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
1544 ? BUILT_IN_UBSAN_HANDLE_SUB_OVERFLOW
1545 : BUILT_IN_UBSAN_HANDLE_SUB_OVERFLOW_ABORT;
1546 break;
1547 case MULT_EXPR:
1548 fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
1549 ? BUILT_IN_UBSAN_HANDLE_MUL_OVERFLOW
1550 : BUILT_IN_UBSAN_HANDLE_MUL_OVERFLOW_ABORT;
1551 break;
1552 case NEGATE_EXPR:
1553 fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
1554 ? BUILT_IN_UBSAN_HANDLE_NEGATE_OVERFLOW
1555 : BUILT_IN_UBSAN_HANDLE_NEGATE_OVERFLOW_ABORT;
1556 break;
1557 default:
1558 gcc_unreachable ();
1559 }
1560 tree fn = builtin_decl_explicit (fn_code);
1561 return build_call_expr_loc (loc, fn, 2 + (code != NEGATE_EXPR),
1562 build_fold_addr_expr_loc (loc, data),
1563 ubsan_encode_value (op0, UBSAN_ENCODE_VALUE_RTL),
1564 op1
1565 ? ubsan_encode_value (op1,
1566 UBSAN_ENCODE_VALUE_RTL)
1567 : NULL_TREE);
1568 }
1569
1570 /* Perform the signed integer instrumentation. GSI is the iterator
1571 pointing at statement we are trying to instrument. */
1572
1573 static void
1574 instrument_si_overflow (gimple_stmt_iterator gsi)
1575 {
1576 gimple *stmt = gsi_stmt (gsi);
1577 tree_code code = gimple_assign_rhs_code (stmt);
1578 tree lhs = gimple_assign_lhs (stmt);
1579 tree lhstype = TREE_TYPE (lhs);
1580 tree lhsinner = VECTOR_TYPE_P (lhstype) ? TREE_TYPE (lhstype) : lhstype;
1581 tree a, b;
1582 gimple *g;
1583
1584 /* If this is not a signed operation, don't instrument anything here.
1585 Also punt on bit-fields. */
1586 if (!INTEGRAL_TYPE_P (lhsinner)
1587 || TYPE_OVERFLOW_WRAPS (lhsinner)
1588 || GET_MODE_BITSIZE (TYPE_MODE (lhsinner)) != TYPE_PRECISION (lhsinner))
1589 return;
1590
1591 switch (code)
1592 {
1593 case MINUS_EXPR:
1594 case PLUS_EXPR:
1595 case MULT_EXPR:
1596 /* Transform
1597 i = u {+,-,*} 5;
1598 into
1599 i = UBSAN_CHECK_{ADD,SUB,MUL} (u, 5); */
1600 a = gimple_assign_rhs1 (stmt);
1601 b = gimple_assign_rhs2 (stmt);
1602 g = gimple_build_call_internal (code == PLUS_EXPR
1603 ? IFN_UBSAN_CHECK_ADD
1604 : code == MINUS_EXPR
1605 ? IFN_UBSAN_CHECK_SUB
1606 : IFN_UBSAN_CHECK_MUL, 2, a, b);
1607 gimple_call_set_lhs (g, lhs);
1608 gsi_replace (&gsi, g, true);
1609 break;
1610 case NEGATE_EXPR:
1611 /* Represent i = -u;
1612 as
1613 i = UBSAN_CHECK_SUB (0, u); */
1614 a = build_zero_cst (lhstype);
1615 b = gimple_assign_rhs1 (stmt);
1616 g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
1617 gimple_call_set_lhs (g, lhs);
1618 gsi_replace (&gsi, g, true);
1619 break;
1620 case ABS_EXPR:
1621 /* Transform i = ABS_EXPR<u>;
1622 into
1623 _N = UBSAN_CHECK_SUB (0, u);
1624 i = ABS_EXPR<_N>; */
1625 a = build_zero_cst (lhstype);
1626 b = gimple_assign_rhs1 (stmt);
1627 g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
1628 a = make_ssa_name (lhstype);
1629 gimple_call_set_lhs (g, a);
1630 gimple_set_location (g, gimple_location (stmt));
1631 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1632 gimple_assign_set_rhs1 (stmt, a);
1633 update_stmt (stmt);
1634 break;
1635 default:
1636 break;
1637 }
1638 }
1639
1640 /* Instrument loads from (non-bitfield) bool and C++ enum values
1641 to check if the memory value is outside the range of values valid
1642 for the type. */
1643
1644 static void
1645 instrument_bool_enum_load (gimple_stmt_iterator *gsi)
1646 {
1647 gimple *stmt = gsi_stmt (*gsi);
1648 tree rhs = gimple_assign_rhs1 (stmt);
1649 tree type = TREE_TYPE (rhs);
1650 tree minv = NULL_TREE, maxv = NULL_TREE;
1651
1652 if (TREE_CODE (type) == BOOLEAN_TYPE
1653 && sanitize_flags_p (SANITIZE_BOOL))
1654 {
1655 minv = boolean_false_node;
1656 maxv = boolean_true_node;
1657 }
1658 else if (TREE_CODE (type) == ENUMERAL_TYPE
1659 && sanitize_flags_p (SANITIZE_ENUM)
1660 && TREE_TYPE (type) != NULL_TREE
1661 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
1662 && (TYPE_PRECISION (TREE_TYPE (type))
1663 < GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (type))))
1664 {
1665 minv = TYPE_MIN_VALUE (TREE_TYPE (type));
1666 maxv = TYPE_MAX_VALUE (TREE_TYPE (type));
1667 }
1668 else
1669 return;
1670
1671 int modebitsize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
1672 HOST_WIDE_INT bitsize, bitpos;
1673 tree offset;
1674 machine_mode mode;
1675 int volatilep = 0, reversep, unsignedp = 0;
1676 tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
1677 &unsignedp, &reversep, &volatilep);
1678 tree utype = build_nonstandard_integer_type (modebitsize, 1);
1679
1680 if ((VAR_P (base) && DECL_HARD_REGISTER (base))
1681 || (bitpos % modebitsize) != 0
1682 || bitsize != modebitsize
1683 || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (utype)) != modebitsize
1684 || TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
1685 return;
1686
1687 bool ends_bb = stmt_ends_bb_p (stmt);
1688 location_t loc = gimple_location (stmt);
1689 tree lhs = gimple_assign_lhs (stmt);
1690 tree ptype = build_pointer_type (TREE_TYPE (rhs));
1691 tree atype = reference_alias_ptr_type (rhs);
1692 gimple *g = gimple_build_assign (make_ssa_name (ptype),
1693 build_fold_addr_expr (rhs));
1694 gimple_set_location (g, loc);
1695 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1696 tree mem = build2 (MEM_REF, utype, gimple_assign_lhs (g),
1697 build_int_cst (atype, 0));
1698 tree urhs = make_ssa_name (utype);
1699 if (ends_bb)
1700 {
1701 gimple_assign_set_lhs (stmt, urhs);
1702 g = gimple_build_assign (lhs, NOP_EXPR, urhs);
1703 gimple_set_location (g, loc);
1704 edge e = find_fallthru_edge (gimple_bb (stmt)->succs);
1705 gsi_insert_on_edge_immediate (e, g);
1706 gimple_assign_set_rhs_from_tree (gsi, mem);
1707 update_stmt (stmt);
1708 *gsi = gsi_for_stmt (g);
1709 g = stmt;
1710 }
1711 else
1712 {
1713 g = gimple_build_assign (urhs, mem);
1714 gimple_set_location (g, loc);
1715 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1716 }
1717 minv = fold_convert (utype, minv);
1718 maxv = fold_convert (utype, maxv);
1719 if (!integer_zerop (minv))
1720 {
1721 g = gimple_build_assign (make_ssa_name (utype), MINUS_EXPR, urhs, minv);
1722 gimple_set_location (g, loc);
1723 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1724 }
1725
1726 gimple_stmt_iterator gsi2 = *gsi;
1727 basic_block then_bb, fallthru_bb;
1728 *gsi = create_cond_insert_point (gsi, true, false, true,
1729 &then_bb, &fallthru_bb);
1730 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
1731 int_const_binop (MINUS_EXPR, maxv, minv),
1732 NULL_TREE, NULL_TREE);
1733 gimple_set_location (g, loc);
1734 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1735
1736 if (!ends_bb)
1737 {
1738 gimple_assign_set_rhs_with_ops (&gsi2, NOP_EXPR, urhs);
1739 update_stmt (stmt);
1740 }
1741
1742 gsi2 = gsi_after_labels (then_bb);
1743 if (flag_sanitize_undefined_trap_on_error)
1744 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
1745 else
1746 {
1747 tree data = ubsan_create_data ("__ubsan_invalid_value_data", 1, &loc,
1748 ubsan_type_descriptor (type), NULL_TREE,
1749 NULL_TREE);
1750 data = build_fold_addr_expr_loc (loc, data);
1751 enum built_in_function bcode
1752 = (flag_sanitize_recover & (TREE_CODE (type) == BOOLEAN_TYPE
1753 ? SANITIZE_BOOL : SANITIZE_ENUM))
1754 ? BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE
1755 : BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE_ABORT;
1756 tree fn = builtin_decl_explicit (bcode);
1757
1758 tree val = ubsan_encode_value (urhs, UBSAN_ENCODE_VALUE_GIMPLE);
1759 val = force_gimple_operand_gsi (&gsi2, val, true, NULL_TREE, true,
1760 GSI_SAME_STMT);
1761 g = gimple_build_call (fn, 2, data, val);
1762 }
1763 gimple_set_location (g, loc);
1764 gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
1765 ubsan_create_edge (g);
1766 *gsi = gsi_for_stmt (stmt);
1767 }
1768
1769 /* Determine whether we can propagate the given LOCATION to the ubsan_data
1770 descriptor and use new-style handlers. Libubsan uses heuristics to
1771 distinguish between the old and new styles and relies on these filename properties:
1772
1773 a) The location's filename must not be NULL.
1774 b) The location's filename must not be equal to "".
1775 c) The location's filename must not be equal to "\1".
1776 d) Neither of the first two bytes of the filename may be '\xff'. */
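/* Restated over a plain C string (an illustrative sketch only; the real
   check below works on an expanded_location, and strcmp would come from
   <string.h>):

     static bool new_style_filename_p (const char *file)
     {
       return file != NULL
              && file[0] != '\0'
              && strcmp (file, "\1") != 0
              && file[0] != '\xff'
              && file[1] != '\xff';
     }
*/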
1777
1778 static bool
1779 ubsan_use_new_style_p (location_t loc)
1780 {
1781 if (loc == UNKNOWN_LOCATION)
1782 return false;
1783
1784 expanded_location xloc = expand_location (loc);
1785 if (xloc.file == NULL || strncmp (xloc.file, "\1", 2) == 0
1786 || xloc.file[0] == '\0' || xloc.file[0] == '\xff'
1787 || xloc.file[1] == '\xff')
1788 return false;
1789
1790 return true;
1791 }
1792
1793 /* Instrument a floating-point to integer conversion. TYPE is the integer
1794 type of the destination, EXPR is the floating-point expression. */
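/* As a concrete illustration (the exact constants depend on the types
   involved): for TYPE signed char and EXPR of type float, prec is 8, so
   max == 128.0f and min == -129.0f, and the emitted check behaves like

     if (expr <= -129.0f || expr >= 128.0f || isnan (expr))
       __ubsan_handle_float_cast_overflow (&data, expr);

   i.e. every value that truncates into [-128, 127] is accepted. */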
1795
1796 tree
1797 ubsan_instrument_float_cast (location_t loc, tree type, tree expr)
1798 {
1799 tree expr_type = TREE_TYPE (expr);
1800 tree t, tt, fn, min, max;
1801 machine_mode mode = TYPE_MODE (expr_type);
1802 int prec = TYPE_PRECISION (type);
1803 bool uns_p = TYPE_UNSIGNED (type);
1804 if (loc == UNKNOWN_LOCATION)
1805 loc = input_location;
1806
1807 /* Float to integer conversion first truncates toward zero, so
1808 even signed char c = 127.875f; is not problematic.
1809 Therefore, we should complain only if EXPR is unordered, less than
1810 or equal to TYPE_MIN_VALUE - 1.0, or greater than or equal to
1811 TYPE_MAX_VALUE + 1.0. */
1812 if (REAL_MODE_FORMAT (mode)->b == 2)
1813 {
1814 /* For maximum, TYPE_MAX_VALUE might not be representable
1815 in EXPR_TYPE, e.g. if TYPE is 64-bit long long and
1816 EXPR_TYPE is IEEE single float, but TYPE_MAX_VALUE + 1.0 is
1817 either representable or infinity. */
1818 REAL_VALUE_TYPE maxval = dconst1;
1819 SET_REAL_EXP (&maxval, REAL_EXP (&maxval) + prec - !uns_p);
1820 real_convert (&maxval, mode, &maxval);
1821 max = build_real (expr_type, maxval);
1822
1823 /* For unsigned, assume -1.0 is always representable. */
1824 if (uns_p)
1825 min = build_minus_one_cst (expr_type);
1826 else
1827 {
1828 /* TYPE_MIN_VALUE is generally representable (or -inf),
1829 but TYPE_MIN_VALUE - 1.0 might not be. */
1830 REAL_VALUE_TYPE minval = dconstm1, minval2;
1831 SET_REAL_EXP (&minval, REAL_EXP (&minval) + prec - 1);
1832 real_convert (&minval, mode, &minval);
1833 real_arithmetic (&minval2, MINUS_EXPR, &minval, &dconst1);
1834 real_convert (&minval2, mode, &minval2);
1835 if (real_compare (EQ_EXPR, &minval, &minval2)
1836 && !real_isinf (&minval))
1837 {
1838 /* If TYPE_MIN_VALUE - 1.0 is not representable and
1839 rounds to TYPE_MIN_VALUE, we need to subtract
1840 more. As REAL_MODE_FORMAT (mode)->p is the number
1841 of base digits, we want to subtract a number that
1842 will be 1 << (REAL_MODE_FORMAT (mode)->p - 1)
1843 times smaller than minval. */
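/* Worked example: for TYPE long long (prec 64) and EXPR_TYPE IEEE
   single (p == 24), minval is -2^63 and -2^63 - 1.0 rounds back to
   -2^63, so we subtract 2^(64 - 1 - 24 + 1) == 2^40 instead and get
   min == -(2^63 + 2^40), the largest single-precision value below
   TYPE_MIN_VALUE. */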
1844 minval2 = dconst1;
1845 gcc_assert (prec > REAL_MODE_FORMAT (mode)->p);
1846 SET_REAL_EXP (&minval2,
1847 REAL_EXP (&minval2) + prec - 1
1848 - REAL_MODE_FORMAT (mode)->p + 1);
1849 real_arithmetic (&minval2, MINUS_EXPR, &minval, &minval2);
1850 real_convert (&minval2, mode, &minval2);
1851 }
1852 min = build_real (expr_type, minval2);
1853 }
1854 }
1855 else if (REAL_MODE_FORMAT (mode)->b == 10)
1856 {
1857 /* Buffer large enough for _Decimal128: up to 34 decimal digits,
1858 plus the minus sign, decimal dot, 'e' and the exponent. */
1859 char buf[64];
1860 mpfr_t m;
1861 int p = REAL_MODE_FORMAT (mode)->p;
1862 REAL_VALUE_TYPE maxval, minval;
1863
1864 /* Use mpfr_snprintf rounding to compute the smallest
1865 representable decimal number greater than or equal to
1866 1 << (prec - !uns_p). */
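/* E.g. for TYPE unsigned int (prec 32) and EXPR_TYPE _Decimal32
   (p == 7), 1 << 32 == 4294967296 is rounded up to the 7-digit
   value 4.294968e+09, which is used as the upper bound. */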
1867 mpfr_init2 (m, prec + 2);
1868 mpfr_set_ui_2exp (m, 1, prec - !uns_p, GMP_RNDN);
1869 mpfr_snprintf (buf, sizeof buf, "%.*RUe", p - 1, m);
1870 decimal_real_from_string (&maxval, buf);
1871 max = build_real (expr_type, maxval);
1872
1873 /* For unsigned, assume -1.0 is always representable. */
1874 if (uns_p)
1875 min = build_minus_one_cst (expr_type);
1876 else
1877 {
1878 /* Use mpfr_snprintf rounding to compute the largest
1879 representable decimal number less than or equal to
1880 (-1 << (prec - 1)) - 1. */
1881 mpfr_set_si_2exp (m, -1, prec - 1, GMP_RNDN);
1882 mpfr_sub_ui (m, m, 1, GMP_RNDN);
1883 mpfr_snprintf (buf, sizeof buf, "%.*RDe", p - 1, m);
1884 decimal_real_from_string (&minval, buf);
1885 min = build_real (expr_type, minval);
1886 }
1887 mpfr_clear (m);
1888 }
1889 else
1890 return NULL_TREE;
1891
1892 t = fold_build2 (UNLE_EXPR, boolean_type_node, expr, min);
1893 tt = fold_build2 (UNGE_EXPR, boolean_type_node, expr, max);
1894 t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, tt);
1895 if (integer_zerop (t))
1896 return NULL_TREE;
1897
1898 if (flag_sanitize_undefined_trap_on_error)
1899 fn = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
1900 else
1901 {
1902 location_t *loc_ptr = NULL;
1903 unsigned num_locations = 0;
1904 /* Figure out if we can propagate location to ubsan_data and use new
1905 style handlers in libubsan. */
1906 if (ubsan_use_new_style_p (loc))
1907 {
1908 loc_ptr = &loc;
1909 num_locations = 1;
1910 }
1911 /* Create the __ubsan_handle_float_cast_overflow fn call. */
1912 tree data = ubsan_create_data ("__ubsan_float_cast_overflow_data",
1913 num_locations, loc_ptr,
1914 ubsan_type_descriptor (expr_type),
1915 ubsan_type_descriptor (type), NULL_TREE,
1916 NULL_TREE);
1917 enum built_in_function bcode
1918 = (flag_sanitize_recover & SANITIZE_FLOAT_CAST)
1919 ? BUILT_IN_UBSAN_HANDLE_FLOAT_CAST_OVERFLOW
1920 : BUILT_IN_UBSAN_HANDLE_FLOAT_CAST_OVERFLOW_ABORT;
1921 fn = builtin_decl_explicit (bcode);
1922 fn = build_call_expr_loc (loc, fn, 2,
1923 build_fold_addr_expr_loc (loc, data),
1924 ubsan_encode_value (expr));
1925 }
1926
1927 return fold_build3 (COND_EXPR, void_type_node, t, fn, integer_zero_node);
1928 }
1929
1930 /* Instrument values passed to function arguments with nonnull attribute. */
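/* For illustration (an approximate sketch, not taken from the GCC
   testsuite), with -fsanitize=nonnull-attribute the call below is
   instrumented:

     extern void use (void *p) __attribute__ ((nonnull (1)));

     void f (void *q)
     {
       use (q);   // conceptually becomes:
                  //   if (q == 0)
                  //     __ubsan_handle_nonnull_arg (&data);
                  //   use (q);
     }
*/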
1931
1932 static void
1933 instrument_nonnull_arg (gimple_stmt_iterator *gsi)
1934 {
1935 gimple *stmt = gsi_stmt (*gsi);
1936 location_t loc[2];
1937 /* infer_nonnull_range needs flag_delete_null_pointer_checks set,
1938 while for nonnull sanitization the flag is cleared, so set it temporarily. */
1939 int save_flag_delete_null_pointer_checks = flag_delete_null_pointer_checks;
1940 flag_delete_null_pointer_checks = 1;
1941 loc[0] = gimple_location (stmt);
1942 loc[1] = UNKNOWN_LOCATION;
1943 for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
1944 {
1945 tree arg = gimple_call_arg (stmt, i);
1946 if (POINTER_TYPE_P (TREE_TYPE (arg))
1947 && infer_nonnull_range_by_attribute (stmt, arg))
1948 {
1949 gimple *g;
1950 if (!is_gimple_val (arg))
1951 {
1952 g = gimple_build_assign (make_ssa_name (TREE_TYPE (arg)), arg);
1953 gimple_set_location (g, loc[0]);
1954 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1955 arg = gimple_assign_lhs (g);
1956 }
1957
1958 basic_block then_bb, fallthru_bb;
1959 *gsi = create_cond_insert_point (gsi, true, false, true,
1960 &then_bb, &fallthru_bb);
1961 g = gimple_build_cond (EQ_EXPR, arg,
1962 build_zero_cst (TREE_TYPE (arg)),
1963 NULL_TREE, NULL_TREE);
1964 gimple_set_location (g, loc[0]);
1965 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1966
1967 *gsi = gsi_after_labels (then_bb);
1968 if (flag_sanitize_undefined_trap_on_error)
1969 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
1970 else
1971 {
1972 tree data = ubsan_create_data ("__ubsan_nonnull_arg_data",
1973 2, loc, NULL_TREE,
1974 build_int_cst (integer_type_node,
1975 i + 1),
1976 NULL_TREE);
1977 data = build_fold_addr_expr_loc (loc[0], data);
1978 enum built_in_function bcode
1979 = (flag_sanitize_recover & SANITIZE_NONNULL_ATTRIBUTE)
1980 ? BUILT_IN_UBSAN_HANDLE_NONNULL_ARG
1981 : BUILT_IN_UBSAN_HANDLE_NONNULL_ARG_ABORT;
1982 tree fn = builtin_decl_explicit (bcode);
1983
1984 g = gimple_build_call (fn, 1, data);
1985 }
1986 gimple_set_location (g, loc[0]);
1987 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1988 ubsan_create_edge (g);
1989 }
1990 *gsi = gsi_for_stmt (stmt);
1991 }
1992 flag_delete_null_pointer_checks = save_flag_delete_null_pointer_checks;
1993 }
1994
1995 /* Instrument returns in functions with returns_nonnull attribute. */
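/* Illustrative sketch: with -fsanitize=returns-nonnull-attribute a
   function such as

     __attribute__ ((returns_nonnull)) char *
     g (char *p)
     {
       return p;   // conceptually becomes:
                   //   if (p == 0)
                   //     __ubsan_handle_nonnull_return_v1 (&data, &data2);
                   //   return p;
     }
*/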
1996
1997 static void
1998 instrument_nonnull_return (gimple_stmt_iterator *gsi)
1999 {
2000 greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
2001 location_t loc[2];
2002 tree arg = gimple_return_retval (stmt);
2003 /* infer_nonnull_range needs flag_delete_null_pointer_checks set,
2004 while for nonnull return sanitization the flag is cleared, so set it temporarily. */
2005 int save_flag_delete_null_pointer_checks = flag_delete_null_pointer_checks;
2006 flag_delete_null_pointer_checks = 1;
2007 loc[0] = gimple_location (stmt);
2008 loc[1] = UNKNOWN_LOCATION;
2009 if (arg
2010 && POINTER_TYPE_P (TREE_TYPE (arg))
2011 && is_gimple_val (arg)
2012 && infer_nonnull_range_by_attribute (stmt, arg))
2013 {
2014 basic_block then_bb, fallthru_bb;
2015 *gsi = create_cond_insert_point (gsi, true, false, true,
2016 &then_bb, &fallthru_bb);
2017 gimple *g = gimple_build_cond (EQ_EXPR, arg,
2018 build_zero_cst (TREE_TYPE (arg)),
2019 NULL_TREE, NULL_TREE);
2020 gimple_set_location (g, loc[0]);
2021 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2022
2023 *gsi = gsi_after_labels (then_bb);
2024 if (flag_sanitize_undefined_trap_on_error)
2025 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
2026 else
2027 {
2028 tree data = ubsan_create_data ("__ubsan_nonnull_return_data",
2029 1, &loc[1], NULL_TREE, NULL_TREE);
2030 data = build_fold_addr_expr_loc (loc[0], data);
2031 tree data2 = ubsan_create_data ("__ubsan_nonnull_return_data",
2032 1, &loc[0], NULL_TREE, NULL_TREE);
2033 data2 = build_fold_addr_expr_loc (loc[0], data2);
2034 enum built_in_function bcode
2035 = (flag_sanitize_recover & SANITIZE_RETURNS_NONNULL_ATTRIBUTE)
2036 ? BUILT_IN_UBSAN_HANDLE_NONNULL_RETURN_V1
2037 : BUILT_IN_UBSAN_HANDLE_NONNULL_RETURN_V1_ABORT;
2038 tree fn = builtin_decl_explicit (bcode);
2039
2040 g = gimple_build_call (fn, 2, data, data2);
2041 }
2042 gimple_set_location (g, loc[0]);
2043 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2044 ubsan_create_edge (g);
2045 *gsi = gsi_for_stmt (stmt);
2046 }
2047 flag_delete_null_pointer_checks = save_flag_delete_null_pointer_checks;
2048 }
2049
2050 /* Instrument memory references. Here we check whether the accessed
2051 memory lies within the bounds of the referenced object. */
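/* For illustration (an approximate sketch; the pass actually emits the
   internal UBSAN_OBJECT_SIZE call built below), with
   -fsanitize=object-size

     int a[10];
     int get (int i) { return a[i]; }

   the load of a[i] is conceptually guarded by

     if ((char *) &a[i] + sizeof (int) - (char *) a
         > __builtin_object_size (a, 0))
       report an out-of-bounds load at runtime;

   i.e. the end of the accessed region must not extend past the end of
   the object. */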
2052
2053 static void
2054 instrument_object_size (gimple_stmt_iterator *gsi, tree t, bool is_lhs)
2055 {
2056 gimple *stmt = gsi_stmt (*gsi);
2057 location_t loc = gimple_location (stmt);
2058 tree type;
2059 tree index = NULL_TREE;
2060 HOST_WIDE_INT size_in_bytes;
2061
2062 type = TREE_TYPE (t);
2063 if (VOID_TYPE_P (type))
2064 return;
2065
2066 switch (TREE_CODE (t))
2067 {
2068 case COMPONENT_REF:
2069 if (TREE_CODE (t) == COMPONENT_REF
2070 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
2071 {
2072 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
2073 t = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (t, 0),
2074 repr, TREE_OPERAND (t, 2));
2075 }
2076 break;
2077 case ARRAY_REF:
2078 index = TREE_OPERAND (t, 1);
2079 break;
2080 case INDIRECT_REF:
2081 case MEM_REF:
2082 case VAR_DECL:
2083 case PARM_DECL:
2084 case RESULT_DECL:
2085 break;
2086 default:
2087 return;
2088 }
2089
2090 size_in_bytes = int_size_in_bytes (type);
2091 if (size_in_bytes <= 0)
2092 return;
2093
2094 HOST_WIDE_INT bitsize, bitpos;
2095 tree offset;
2096 machine_mode mode;
2097 int volatilep = 0, reversep, unsignedp = 0;
2098 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
2099 &unsignedp, &reversep, &volatilep);
2100
2101 if (bitpos % BITS_PER_UNIT != 0
2102 || bitsize != size_in_bytes * BITS_PER_UNIT)
2103 return;
2104
2105 bool decl_p = DECL_P (inner);
2106 tree base;
2107 if (decl_p)
2108 {
2109 if (DECL_REGISTER (inner))
2110 return;
2111 base = inner;
2112 }
2113 else if (TREE_CODE (inner) == MEM_REF)
2114 base = TREE_OPERAND (inner, 0);
2115 else
2116 return;
2117 tree ptr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
2118
2119 while (TREE_CODE (base) == SSA_NAME)
2120 {
2121 gimple *def_stmt = SSA_NAME_DEF_STMT (base);
2122 if (gimple_assign_ssa_name_copy_p (def_stmt)
2123 || (gimple_assign_cast_p (def_stmt)
2124 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
2125 || (is_gimple_assign (def_stmt)
2126 && gimple_assign_rhs_code (def_stmt) == POINTER_PLUS_EXPR))
2127 {
2128 tree rhs1 = gimple_assign_rhs1 (def_stmt);
2129 if (TREE_CODE (rhs1) == SSA_NAME
2130 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
2131 break;
2132 else
2133 base = rhs1;
2134 }
2135 else
2136 break;
2137 }
2138
2139 if (!POINTER_TYPE_P (TREE_TYPE (base)) && !DECL_P (base))
2140 return;
2141
2142 tree sizet;
2143 tree base_addr = base;
2144 gimple *bos_stmt = NULL;
2145 if (decl_p)
2146 base_addr = build1 (ADDR_EXPR,
2147 build_pointer_type (TREE_TYPE (base)), base);
2148 unsigned HOST_WIDE_INT size;
2149 if (compute_builtin_object_size (base_addr, 0, &size))
2150 sizet = build_int_cst (sizetype, size);
2151 else if (optimize)
2152 {
2153 if (LOCATION_LOCUS (loc) == UNKNOWN_LOCATION)
2154 loc = input_location;
2155 /* Generate __builtin_object_size call. */
2156 sizet = builtin_decl_explicit (BUILT_IN_OBJECT_SIZE);
2157 sizet = build_call_expr_loc (loc, sizet, 2, base_addr,
2158 integer_zero_node);
2159 sizet = force_gimple_operand_gsi (gsi, sizet, false, NULL_TREE, true,
2160 GSI_SAME_STMT);
2161 /* If the call above didn't end up being an integer constant, go one
2162 statement back and get the __builtin_object_size stmt. Save it;
2163 we might need it later. */
2164 if (SSA_VAR_P (sizet))
2165 {
2166 gsi_prev (gsi);
2167 bos_stmt = gsi_stmt (*gsi);
2168
2169 /* Move on to where we were. */
2170 gsi_next (gsi);
2171 }
2172 }
2173 else
2174 return;
2175
2176 /* Generate UBSAN_OBJECT_SIZE (ptr, ptr+sizeof(*ptr)-base, objsize, ckind)
2177 call. */
2178 /* ptr + sizeof (*ptr) - base */
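/* Worked example: for a load of a[i] from int a[10] this evaluates to
   (char *) &a[i] + 4 - (char *) a == 4 * i + 4 (assuming 4-byte int),
   which the check below compares against the object size 40. */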
2179 t = fold_build2 (MINUS_EXPR, sizetype,
2180 fold_convert (pointer_sized_int_node, ptr),
2181 fold_convert (pointer_sized_int_node, base_addr));
2182 t = fold_build2 (PLUS_EXPR, sizetype, t, TYPE_SIZE_UNIT (type));
2183
2184 /* If both values are constant and the access is within bounds, omit the check. */
2185 if (TREE_CODE (t) == INTEGER_CST
2186 && TREE_CODE (sizet) == INTEGER_CST
2187 && tree_int_cst_le (t, sizet))
2188 return;
2189
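/* If the index is known to be masked by a constant smaller than the
   number of elements that fit in the object, the access cannot be out
   of bounds, so the check can be dropped. */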
2190 if (index != NULL_TREE
2191 && TREE_CODE (index) == SSA_NAME
2192 && TREE_CODE (sizet) == INTEGER_CST)
2193 {
2194 gimple *def = SSA_NAME_DEF_STMT (index);
2195 if (is_gimple_assign (def)
2196 && gimple_assign_rhs_code (def) == BIT_AND_EXPR
2197 && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
2198 {
2199 tree cst = gimple_assign_rhs2 (def);
2200 tree sz = fold_build2 (EXACT_DIV_EXPR, sizetype, sizet,
2201 TYPE_SIZE_UNIT (type));
2202 if (tree_int_cst_sgn (cst) >= 0
2203 && tree_int_cst_lt (cst, sz))
2204 return;
2205 }
2206 }
2207
2208 if (bos_stmt && gimple_call_builtin_p (bos_stmt, BUILT_IN_OBJECT_SIZE))
2209 ubsan_create_edge (bos_stmt);
2210
2211 /* We have to emit the check. */
2212 t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, true,
2213 GSI_SAME_STMT);
2214 ptr = force_gimple_operand_gsi (gsi, ptr, true, NULL_TREE, true,
2215 GSI_SAME_STMT);
2216 tree ckind = build_int_cst (unsigned_char_type_node,
2217 is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF);
2218 gimple *g = gimple_build_call_internal (IFN_UBSAN_OBJECT_SIZE, 4,
2219 ptr, t, sizet, ckind);
2220 gimple_set_location (g, loc);
2221 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2222 }
2223
2224 /* Instrument values passed to builtin functions. */
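/* Illustrative sketch: with -fsanitize=builtin a call such as

     int f (unsigned x) { return __builtin_ctz (x); }

   is conceptually instrumented as

     if (x == 0)
       __ubsan_handle_invalid_builtin (&data);   // data encodes ctz vs. clz
     return __builtin_ctz (x);

   since __builtin_ctz (0) and __builtin_clz (0) are undefined. */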
2225
2226 static void
2227 instrument_builtin (gimple_stmt_iterator *gsi)
2228 {
2229 gimple *stmt = gsi_stmt (*gsi);
2230 location_t loc = gimple_location (stmt);
2231 tree arg;
2232 enum built_in_function fcode
2233 = DECL_FUNCTION_CODE (gimple_call_fndecl (stmt));
2234 int kind = 0;
2235 switch (fcode)
2236 {
2237 CASE_INT_FN (BUILT_IN_CLZ):
2238 kind = 1;
2239 gcc_fallthrough ();
2240 CASE_INT_FN (BUILT_IN_CTZ):
2241 arg = gimple_call_arg (stmt, 0);
2242 if (!integer_nonzerop (arg))
2243 {
2244 gimple *g;
2245 if (!is_gimple_val (arg))
2246 {
2247 g = gimple_build_assign (make_ssa_name (TREE_TYPE (arg)), arg);
2248 gimple_set_location (g, loc);
2249 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2250 arg = gimple_assign_lhs (g);
2251 }
2252
2253 basic_block then_bb, fallthru_bb;
2254 *gsi = create_cond_insert_point (gsi, true, false, true,
2255 &then_bb, &fallthru_bb);
2256 g = gimple_build_cond (EQ_EXPR, arg,
2257 build_zero_cst (TREE_TYPE (arg)),
2258 NULL_TREE, NULL_TREE);
2259 gimple_set_location (g, loc);
2260 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2261
2262 *gsi = gsi_after_labels (then_bb);
2263 if (flag_sanitize_undefined_trap_on_error)
2264 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
2265 else
2266 {
2267 tree t = build_int_cst (unsigned_char_type_node, kind);
2268 tree data = ubsan_create_data ("__ubsan_builtin_data",
2269 1, &loc, NULL_TREE, t, NULL_TREE);
2270 data = build_fold_addr_expr_loc (loc, data);
2271 enum built_in_function bcode
2272 = (flag_sanitize_recover & SANITIZE_BUILTIN)
2273 ? BUILT_IN_UBSAN_HANDLE_INVALID_BUILTIN
2274 : BUILT_IN_UBSAN_HANDLE_INVALID_BUILTIN_ABORT;
2275 tree fn = builtin_decl_explicit (bcode);
2276
2277 g = gimple_build_call (fn, 1, data);
2278 }
2279 gimple_set_location (g, loc);
2280 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2281 ubsan_create_edge (g);
2282 }
2283 *gsi = gsi_for_stmt (stmt);
2284 break;
2285 default:
2286 break;
2287 }
2288 }
2289
2290 namespace {
2291
2292 const pass_data pass_data_ubsan =
2293 {
2294 GIMPLE_PASS, /* type */
2295 "ubsan", /* name */
2296 OPTGROUP_NONE, /* optinfo_flags */
2297 TV_TREE_UBSAN, /* tv_id */
2298 ( PROP_cfg | PROP_ssa ), /* properties_required */
2299 0, /* properties_provided */
2300 0, /* properties_destroyed */
2301 0, /* todo_flags_start */
2302 TODO_update_ssa, /* todo_flags_finish */
2303 };
2304
2305 class pass_ubsan : public gimple_opt_pass
2306 {
2307 public:
2308 pass_ubsan (gcc::context *ctxt)
2309 : gimple_opt_pass (pass_data_ubsan, ctxt)
2310 {}
2311
2312 /* opt_pass methods: */
2313 virtual bool gate (function *)
2314 {
2315 return sanitize_flags_p ((SANITIZE_NULL | SANITIZE_SI_OVERFLOW
2316 | SANITIZE_BOOL | SANITIZE_ENUM
2317 | SANITIZE_ALIGNMENT
2318 | SANITIZE_NONNULL_ATTRIBUTE
2319 | SANITIZE_RETURNS_NONNULL_ATTRIBUTE
2320 | SANITIZE_OBJECT_SIZE
2321 | SANITIZE_POINTER_OVERFLOW
2322 | SANITIZE_BUILTIN));
2323 }
2324
2325 virtual unsigned int execute (function *);
2326
2327 }; // class pass_ubsan
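/* The pass is gated on the sanitizer flags listed above; e.g.
   "gcc -fsanitize=undefined foo.c" enables most of them at once, while
   individual checks can be requested with options such as
   -fsanitize=bool, -fsanitize=enum or -fsanitize=object-size. */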
2328
2329 unsigned int
2330 pass_ubsan::execute (function *fun)
2331 {
2332 basic_block bb;
2333 gimple_stmt_iterator gsi;
2334 unsigned int ret = 0;
2335
2336 initialize_sanitizer_builtins ();
2337
2338 FOR_EACH_BB_FN (bb, fun)
2339 {
2340 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2341 {
2342 gimple *stmt = gsi_stmt (gsi);
2343 if (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
2344 {
2345 gsi_next (&gsi);
2346 continue;
2347 }
2348
2349 if ((sanitize_flags_p (SANITIZE_SI_OVERFLOW, fun->decl))
2350 && is_gimple_assign (stmt))
2351 instrument_si_overflow (gsi);
2352
2353 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT, fun->decl))
2354 {
2355 if (gimple_store_p (stmt))
2356 instrument_null (gsi, gimple_get_lhs (stmt), true);
2357 if (gimple_assign_single_p (stmt))
2358 instrument_null (gsi, gimple_assign_rhs1 (stmt), false);
2359 if (is_gimple_call (stmt))
2360 {
2361 unsigned args_num = gimple_call_num_args (stmt);
2362 for (unsigned i = 0; i < args_num; ++i)
2363 {
2364 tree arg = gimple_call_arg (stmt, i);
2365 if (is_gimple_reg (arg) || is_gimple_min_invariant (arg))
2366 continue;
2367 instrument_null (gsi, arg, false);
2368 }
2369 }
2370 }
2371
2372 if (sanitize_flags_p (SANITIZE_BOOL | SANITIZE_ENUM, fun->decl)
2373 && gimple_assign_load_p (stmt))
2374 {
2375 instrument_bool_enum_load (&gsi);
2376 bb = gimple_bb (stmt);
2377 }
2378
2379 if (sanitize_flags_p (SANITIZE_NONNULL_ATTRIBUTE, fun->decl)
2380 && is_gimple_call (stmt)
2381 && !gimple_call_internal_p (stmt))
2382 {
2383 instrument_nonnull_arg (&gsi);
2384 bb = gimple_bb (stmt);
2385 }
2386
2387 if (sanitize_flags_p (SANITIZE_BUILTIN, fun->decl)
2388 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2389 {
2390 instrument_builtin (&gsi);
2391 bb = gimple_bb (stmt);
2392 }
2393
2394 if (sanitize_flags_p (SANITIZE_RETURNS_NONNULL_ATTRIBUTE, fun->decl)
2395 && gimple_code (stmt) == GIMPLE_RETURN)
2396 {
2397 instrument_nonnull_return (&gsi);
2398 bb = gimple_bb (stmt);
2399 }
2400
2401 if (sanitize_flags_p (SANITIZE_OBJECT_SIZE, fun->decl))
2402 {
2403 if (gimple_store_p (stmt))
2404 instrument_object_size (&gsi, gimple_get_lhs (stmt), true);
2405 if (gimple_assign_load_p (stmt))
2406 instrument_object_size (&gsi, gimple_assign_rhs1 (stmt),
2407 false);
2408 if (is_gimple_call (stmt))
2409 {
2410 unsigned args_num = gimple_call_num_args (stmt);
2411 for (unsigned i = 0; i < args_num; ++i)
2412 {
2413 tree arg = gimple_call_arg (stmt, i);
2414 if (is_gimple_reg (arg) || is_gimple_min_invariant (arg))
2415 continue;
2416 instrument_object_size (&gsi, arg, false);
2417 }
2418 }
2419 }
2420
2421 if (sanitize_flags_p (SANITIZE_POINTER_OVERFLOW, fun->decl))
2422 {
2423 if (is_gimple_assign (stmt)
2424 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2425 instrument_pointer_overflow (&gsi,
2426 gimple_assign_rhs1 (stmt),
2427 gimple_assign_rhs2 (stmt));
2428 if (gimple_store_p (stmt))
2429 maybe_instrument_pointer_overflow (&gsi,
2430 gimple_get_lhs (stmt));
2431 if (gimple_assign_single_p (stmt))
2432 maybe_instrument_pointer_overflow (&gsi,
2433 gimple_assign_rhs1 (stmt));
2434 if (is_gimple_call (stmt))
2435 {
2436 unsigned args_num = gimple_call_num_args (stmt);
2437 for (unsigned i = 0; i < args_num; ++i)
2438 {
2439 tree arg = gimple_call_arg (stmt, i);
2440 if (is_gimple_reg (arg))
2441 continue;
2442 maybe_instrument_pointer_overflow (&gsi, arg);
2443 }
2444 }
2445 }
2446
2447 gsi_next (&gsi);
2448 }
2449 if (gimple_purge_dead_eh_edges (bb))
2450 ret = TODO_cleanup_cfg;
2451 }
2452 return ret;
2453 }
2454
2455 } // anon namespace
2456
2457 gimple_opt_pass *
2458 make_pass_ubsan (gcc::context *ctxt)
2459 {
2460 return new pass_ubsan (ctxt);
2461 }
2462
2463 #include "gt-ubsan.h"