1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
31 #include "fold-const.h"
33 #include "internal-fn.h"
35 #include "gimple-iterator.h"
38 #include "stor-layout.h"
39 #include "tree-iterator.h"
41 #include "stringpool.h"
42 #include "tree-ssanames.h"
43 #include "tree-pass.h"
45 #include "gimple-pretty-print.h"
48 #include "insn-config.h"
55 #include "insn-codes.h"
59 #include "langhooks.h"
60 #include "alloc-pool.h"
62 #include "gimple-builder.h"
68 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
69 with <2x slowdown on average.
71 The tool consists of two parts:
72 instrumentation module (this file) and a run-time library.
73 The instrumentation module adds a run-time check before every memory insn.
74 For a 8- or 16- byte load accessing address X:
75 ShadowAddr = (X >> 3) + Offset
76 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
78 __asan_report_load8(X);
79 For a load of N bytes (N=1, 2 or 4) from address X:
80 ShadowAddr = (X >> 3) + Offset
81 ShadowValue = *(char*)ShadowAddr;
83 if ((X & 7) + N - 1 > ShadowValue)
84 __asan_report_loadN(X);
85 Stores are instrumented similarly, but using __asan_report_storeN functions.
86 A call to __asan_init_vN() is inserted to the list of module CTORs.
87 N is the version number of the AddressSanitizer API. The changes between the
88 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
90 The run-time library redefines malloc (so that redzone are inserted around
91 the allocated memory) and free (so that reuse of free-ed memory is delayed),
92 provides __asan_report* and __asan_init_vN functions.
95 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
97 The current implementation supports detection of out-of-bounds and
98 use-after-free in the heap, on the stack and for global variables.
100 [Protection of stack variables]
102 To understand how detection of out-of-bounds and use-after-free works
103 for stack variables, lets look at this example on x86_64 where the
104 stack grows downward:
118 For this function, the stack protected by asan will be organized as
119 follows, from the top of the stack to the bottom:
121 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
123 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
124 the next slot be 32 bytes aligned; this one is called Partial
125 Redzone; this 32 bytes alignment is an asan constraint]
127 Slot 3/ [24 bytes for variable 'a']
129 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
131 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
133 Slot 6/ [8 bytes for variable 'b']
135 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
138 The 32 bytes of LEFT red zone at the bottom of the stack can be
141 1/ The first 8 bytes contain a magical asan number that is always
144 2/ The following 8 bytes contains a pointer to a string (to be
145 parsed at runtime by the runtime asan library), which format is
148 "<function-name> <space> <num-of-variables-on-the-stack>
149 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
150 <length-of-var-in-bytes> ){n} "
152 where '(...){n}' means the content inside the parenthesis occurs 'n'
153 times, with 'n' being the number of variables on the stack.
155 3/ The following 8 bytes contain the PC of the current function which
156 will be used by the run-time library to print an error message.
158 4/ The following 8 bytes are reserved for internal use by the run-time.
160 The shadow memory for that stack layout is going to look like this:
162 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
163 The F1 byte pattern is a magic number called
164 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
165 the memory for that shadow byte is part of the LEFT red zone
166 intended to seat at the bottom of the variables on the stack.
168 - content of shadow memory 8 bytes for slots 6 and 5:
169 0xF4F4F400. The F4 byte pattern is a magic number
170 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
171 memory region for this shadow byte is a PARTIAL red zone
172 intended to pad a variable A, so that the slot following
173 {A,padding} is 32 bytes aligned.
175 Note that the fact that the least significant byte of this
176 shadow memory content is 00 means that 8 bytes of its
177 corresponding memory (which corresponds to the memory of
178 variable 'b') is addressable.
180 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
181 The F2 byte pattern is a magic number called
182 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
183 region for this shadow byte is a MIDDLE red zone intended to
184 seat between two 32 aligned slots of {variable,padding}.
186 - content of shadow memory 8 bytes for slot 3 and 2:
187 0xF4000000. This represents the concatenation of
188 variable 'a' and the partial red zone following it, like what we
189 had for variable 'b'. The least significant 3 bytes being 00
190 means that the 3 bytes of variable 'a' are addressable.
192 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
193 The F3 byte pattern is a magic number called
194 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
195 region for this shadow byte is a RIGHT red zone intended to seat
196 at the top of the variables of the stack.
198 Note that the real variable layout is done in expand_used_vars in
199 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
200 stack variables as well as the different red zones, emits some
201 prologue code to populate the shadow memory as to poison (mark as
202 non-accessible) the regions of the red zones and mark the regions of
203 stack variables as accessible, and emit some epilogue code to
204 un-poison (mark as accessible) the regions of red zones right before
207 [Protection of global variables]
209 The basic idea is to insert a red zone between two global variables
210 and install a constructor function that calls the asan runtime to do
211 the populating of the relevant shadow memory regions at load time.
213 So the global variables are laid out as to insert a red zone between
214 them. The size of the red zones is so that each variable starts on a
217 Then a constructor function is installed so that, for each global
218 variable, it calls the runtime asan library function
219 __asan_register_globals with an instance of this type:
223 // Address of the beginning of the global variable.
226 // Initial size of the global variable.
229 // Size of the global variable + size of the red zone. This
230 // size is 32 bytes aligned.
231 uptr __size_with_redzone;
233 // Name of the global variable.
236 // Name of the module where the global variable is declared.
237 const void *__module_name;
239 // 1 if it has dynamic initialization, 0 otherwise.
240 uptr __has_dynamic_init;
242 // A pointer to struct that contains source location, could be NULL.
243 __asan_global_source_location *__location;
246 A destructor function that calls the runtime asan library function
247 _asan_unregister_globals is also installed. */
249 static unsigned HOST_WIDE_INT asan_shadow_offset_value
;
250 static bool asan_shadow_offset_computed
;
251 static vec
<char *> sanitized_sections
;
253 /* Sets shadow offset to value in string VAL. */
256 set_asan_shadow_offset (const char *val
)
261 #ifdef HAVE_LONG_LONG
262 asan_shadow_offset_value
= strtoull (val
, &endp
, 0);
264 asan_shadow_offset_value
= strtoul (val
, &endp
, 0);
266 if (!(*val
!= '\0' && *endp
== '\0' && errno
== 0))
269 asan_shadow_offset_computed
= true;
274 /* Set list of user-defined sections that need to be sanitized. */
277 set_sanitized_sections (const char *sections
)
281 FOR_EACH_VEC_ELT (sanitized_sections
, i
, pat
)
283 sanitized_sections
.truncate (0);
285 for (const char *s
= sections
; *s
; )
288 for (end
= s
; *end
&& *end
!= ','; ++end
);
289 size_t len
= end
- s
;
290 sanitized_sections
.safe_push (xstrndup (s
, len
));
291 s
= *end
? end
+ 1 : end
;
295 /* Checks whether section SEC should be sanitized. */
298 section_sanitized_p (const char *sec
)
302 FOR_EACH_VEC_ELT (sanitized_sections
, i
, pat
)
303 if (fnmatch (pat
, sec
, FNM_PERIOD
) == 0)
308 /* Returns Asan shadow offset. */
310 static unsigned HOST_WIDE_INT
311 asan_shadow_offset ()
313 if (!asan_shadow_offset_computed
)
315 asan_shadow_offset_computed
= true;
316 asan_shadow_offset_value
= targetm
.asan_shadow_offset ();
318 return asan_shadow_offset_value
;
321 alias_set_type asan_shadow_set
= -1;
323 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
324 alias set is used for all shadow memory accesses. */
325 static GTY(()) tree shadow_ptr_types
[2];
327 /* Decl for __asan_option_detect_stack_use_after_return. */
328 static GTY(()) tree asan_detect_stack_use_after_return
;
330 /* Various flags for Asan builtins. */
331 enum asan_check_flags
333 ASAN_CHECK_STORE
= 1 << 0,
334 ASAN_CHECK_SCALAR_ACCESS
= 1 << 1,
335 ASAN_CHECK_NON_ZERO_LEN
= 1 << 2,
336 ASAN_CHECK_LAST
= 1 << 3
339 /* Hashtable support for memory references used by gimple
342 /* This type represents a reference to a memory region. */
345 /* The expression of the beginning of the memory region. */
348 /* The size of the access. */
349 HOST_WIDE_INT access_size
;
351 /* Pool allocation new operator. */
352 inline void *operator new (size_t)
354 return pool
.allocate ();
357 /* Delete operator utilizing pool allocation. */
358 inline void operator delete (void *ptr
)
360 pool
.remove ((asan_mem_ref
*) ptr
);
363 /* Memory allocation pool. */
364 static pool_allocator
<asan_mem_ref
> pool
;
367 pool_allocator
<asan_mem_ref
> asan_mem_ref::pool ("asan_mem_ref", 10);
369 /* Initializes an instance of asan_mem_ref. */
372 asan_mem_ref_init (asan_mem_ref
*ref
, tree start
, HOST_WIDE_INT access_size
)
375 ref
->access_size
= access_size
;
378 /* Allocates memory for an instance of asan_mem_ref into the memory
379 pool returned by asan_mem_ref_get_alloc_pool and initialize it.
380 START is the address of (or the expression pointing to) the
381 beginning of memory reference. ACCESS_SIZE is the size of the
382 access to the referenced memory. */
385 asan_mem_ref_new (tree start
, HOST_WIDE_INT access_size
)
387 asan_mem_ref
*ref
= new asan_mem_ref
;
389 asan_mem_ref_init (ref
, start
, access_size
);
393 /* This builds and returns a pointer to the end of the memory region
394 that starts at START and of length LEN. */
397 asan_mem_ref_get_end (tree start
, tree len
)
399 if (len
== NULL_TREE
|| integer_zerop (len
))
402 if (!ptrofftype_p (len
))
403 len
= convert_to_ptrofftype (len
);
405 return fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (start
), start
, len
);
408 /* Return a tree expression that represents the end of the referenced
409 memory region. Beware that this function can actually build a new
413 asan_mem_ref_get_end (const asan_mem_ref
*ref
, tree len
)
415 return asan_mem_ref_get_end (ref
->start
, len
);
418 struct asan_mem_ref_hasher
: nofree_ptr_hash
<asan_mem_ref
>
420 static inline hashval_t
hash (const asan_mem_ref
*);
421 static inline bool equal (const asan_mem_ref
*, const asan_mem_ref
*);
424 /* Hash a memory reference. */
427 asan_mem_ref_hasher::hash (const asan_mem_ref
*mem_ref
)
429 return iterative_hash_expr (mem_ref
->start
, 0);
432 /* Compare two memory references. We accept the length of either
433 memory references to be NULL_TREE. */
436 asan_mem_ref_hasher::equal (const asan_mem_ref
*m1
,
437 const asan_mem_ref
*m2
)
439 return operand_equal_p (m1
->start
, m2
->start
, 0);
442 static hash_table
<asan_mem_ref_hasher
> *asan_mem_ref_ht
;
444 /* Returns a reference to the hash table containing memory references.
445 This function ensures that the hash table is created. Note that
446 this hash table is updated by the function
447 update_mem_ref_hash_table. */
449 static hash_table
<asan_mem_ref_hasher
> *
450 get_mem_ref_hash_table ()
452 if (!asan_mem_ref_ht
)
453 asan_mem_ref_ht
= new hash_table
<asan_mem_ref_hasher
> (10);
455 return asan_mem_ref_ht
;
458 /* Clear all entries from the memory references hash table. */
461 empty_mem_ref_hash_table ()
464 asan_mem_ref_ht
->empty ();
467 /* Free the memory references hash table. */
470 free_mem_ref_resources ()
472 delete asan_mem_ref_ht
;
473 asan_mem_ref_ht
= NULL
;
475 asan_mem_ref::pool
.release ();
478 /* Return true iff the memory reference REF has been instrumented. */
481 has_mem_ref_been_instrumented (tree ref
, HOST_WIDE_INT access_size
)
484 asan_mem_ref_init (&r
, ref
, access_size
);
486 asan_mem_ref
*saved_ref
= get_mem_ref_hash_table ()->find (&r
);
487 return saved_ref
&& saved_ref
->access_size
>= access_size
;
490 /* Return true iff the memory reference REF has been instrumented. */
493 has_mem_ref_been_instrumented (const asan_mem_ref
*ref
)
495 return has_mem_ref_been_instrumented (ref
->start
, ref
->access_size
);
498 /* Return true iff access to memory region starting at REF and of
499 length LEN has been instrumented. */
502 has_mem_ref_been_instrumented (const asan_mem_ref
*ref
, tree len
)
504 HOST_WIDE_INT size_in_bytes
505 = tree_fits_shwi_p (len
) ? tree_to_shwi (len
) : -1;
507 return size_in_bytes
!= -1
508 && has_mem_ref_been_instrumented (ref
->start
, size_in_bytes
);
511 /* Set REF to the memory reference present in a gimple assignment
512 ASSIGNMENT. Return true upon successful completion, false
516 get_mem_ref_of_assignment (const gassign
*assignment
,
520 gcc_assert (gimple_assign_single_p (assignment
));
522 if (gimple_store_p (assignment
)
523 && !gimple_clobber_p (assignment
))
525 ref
->start
= gimple_assign_lhs (assignment
);
526 *ref_is_store
= true;
528 else if (gimple_assign_load_p (assignment
))
530 ref
->start
= gimple_assign_rhs1 (assignment
);
531 *ref_is_store
= false;
536 ref
->access_size
= int_size_in_bytes (TREE_TYPE (ref
->start
));
540 /* Return the memory references contained in a gimple statement
541 representing a builtin call that has to do with memory access. */
544 get_mem_refs_of_builtin_call (const gcall
*call
,
557 gcc_checking_assert (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
));
559 tree callee
= gimple_call_fndecl (call
);
560 tree source0
= NULL_TREE
, source1
= NULL_TREE
,
561 dest
= NULL_TREE
, len
= NULL_TREE
;
562 bool is_store
= true, got_reference_p
= false;
563 HOST_WIDE_INT access_size
= 1;
565 *intercepted_p
= asan_intercepted_p ((DECL_FUNCTION_CODE (callee
)));
567 switch (DECL_FUNCTION_CODE (callee
))
569 /* (s, s, n) style memops. */
571 case BUILT_IN_MEMCMP
:
572 source0
= gimple_call_arg (call
, 0);
573 source1
= gimple_call_arg (call
, 1);
574 len
= gimple_call_arg (call
, 2);
577 /* (src, dest, n) style memops. */
579 source0
= gimple_call_arg (call
, 0);
580 dest
= gimple_call_arg (call
, 1);
581 len
= gimple_call_arg (call
, 2);
584 /* (dest, src, n) style memops. */
585 case BUILT_IN_MEMCPY
:
586 case BUILT_IN_MEMCPY_CHK
:
587 case BUILT_IN_MEMMOVE
:
588 case BUILT_IN_MEMMOVE_CHK
:
589 case BUILT_IN_MEMPCPY
:
590 case BUILT_IN_MEMPCPY_CHK
:
591 dest
= gimple_call_arg (call
, 0);
592 source0
= gimple_call_arg (call
, 1);
593 len
= gimple_call_arg (call
, 2);
596 /* (dest, n) style memops. */
598 dest
= gimple_call_arg (call
, 0);
599 len
= gimple_call_arg (call
, 1);
602 /* (dest, x, n) style memops*/
603 case BUILT_IN_MEMSET
:
604 case BUILT_IN_MEMSET_CHK
:
605 dest
= gimple_call_arg (call
, 0);
606 len
= gimple_call_arg (call
, 2);
609 case BUILT_IN_STRLEN
:
610 source0
= gimple_call_arg (call
, 0);
611 len
= gimple_call_lhs (call
);
614 /* And now the __atomic* and __sync builtins.
615 These are handled differently from the classical memory memory
616 access builtins above. */
618 case BUILT_IN_ATOMIC_LOAD_1
:
619 case BUILT_IN_ATOMIC_LOAD_2
:
620 case BUILT_IN_ATOMIC_LOAD_4
:
621 case BUILT_IN_ATOMIC_LOAD_8
:
622 case BUILT_IN_ATOMIC_LOAD_16
:
626 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
627 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
628 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
629 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
630 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
632 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
633 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
634 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
635 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
636 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
638 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
639 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
640 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
641 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
642 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
644 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
645 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
646 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
647 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
648 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
650 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
651 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
652 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
653 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
654 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
656 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
657 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
658 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
659 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
661 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
662 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
663 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
664 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
665 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
667 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
668 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
669 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
670 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
671 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
673 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
674 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
675 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
676 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
677 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
679 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
680 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
681 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
682 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
683 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
685 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
686 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
687 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
688 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
689 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
691 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
692 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
693 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
694 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
696 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
697 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
698 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
699 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
700 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
702 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
703 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
704 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
705 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
706 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
708 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
709 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
710 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
711 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
712 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
714 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
715 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
716 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
717 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
718 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
720 case BUILT_IN_ATOMIC_EXCHANGE_1
:
721 case BUILT_IN_ATOMIC_EXCHANGE_2
:
722 case BUILT_IN_ATOMIC_EXCHANGE_4
:
723 case BUILT_IN_ATOMIC_EXCHANGE_8
:
724 case BUILT_IN_ATOMIC_EXCHANGE_16
:
726 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
727 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
728 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
729 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
730 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
732 case BUILT_IN_ATOMIC_STORE_1
:
733 case BUILT_IN_ATOMIC_STORE_2
:
734 case BUILT_IN_ATOMIC_STORE_4
:
735 case BUILT_IN_ATOMIC_STORE_8
:
736 case BUILT_IN_ATOMIC_STORE_16
:
738 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
739 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
740 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
741 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
742 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
744 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
745 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
746 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
747 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
748 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
750 case BUILT_IN_ATOMIC_AND_FETCH_1
:
751 case BUILT_IN_ATOMIC_AND_FETCH_2
:
752 case BUILT_IN_ATOMIC_AND_FETCH_4
:
753 case BUILT_IN_ATOMIC_AND_FETCH_8
:
754 case BUILT_IN_ATOMIC_AND_FETCH_16
:
756 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
757 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
758 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
759 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
760 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
762 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
763 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
764 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
765 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
766 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
768 case BUILT_IN_ATOMIC_OR_FETCH_1
:
769 case BUILT_IN_ATOMIC_OR_FETCH_2
:
770 case BUILT_IN_ATOMIC_OR_FETCH_4
:
771 case BUILT_IN_ATOMIC_OR_FETCH_8
:
772 case BUILT_IN_ATOMIC_OR_FETCH_16
:
774 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
775 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
776 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
777 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
778 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
780 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
781 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
782 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
783 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
784 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
786 case BUILT_IN_ATOMIC_FETCH_AND_1
:
787 case BUILT_IN_ATOMIC_FETCH_AND_2
:
788 case BUILT_IN_ATOMIC_FETCH_AND_4
:
789 case BUILT_IN_ATOMIC_FETCH_AND_8
:
790 case BUILT_IN_ATOMIC_FETCH_AND_16
:
792 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
793 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
794 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
795 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
796 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
798 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
799 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
800 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
801 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
802 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
804 case BUILT_IN_ATOMIC_FETCH_OR_1
:
805 case BUILT_IN_ATOMIC_FETCH_OR_2
:
806 case BUILT_IN_ATOMIC_FETCH_OR_4
:
807 case BUILT_IN_ATOMIC_FETCH_OR_8
:
808 case BUILT_IN_ATOMIC_FETCH_OR_16
:
810 dest
= gimple_call_arg (call
, 0);
811 /* DEST represents the address of a memory location.
812 instrument_derefs wants the memory location, so lets
813 dereference the address DEST before handing it to
814 instrument_derefs. */
815 if (TREE_CODE (dest
) == ADDR_EXPR
)
816 dest
= TREE_OPERAND (dest
, 0);
817 else if (TREE_CODE (dest
) == SSA_NAME
|| TREE_CODE (dest
) == INTEGER_CST
)
818 dest
= build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (dest
)),
819 dest
, build_int_cst (TREE_TYPE (dest
), 0));
823 access_size
= int_size_in_bytes (TREE_TYPE (dest
));
827 /* The other builtins memory access are not instrumented in this
828 function because they either don't have any length parameter,
829 or their length parameter is just a limit. */
833 if (len
!= NULL_TREE
)
835 if (source0
!= NULL_TREE
)
837 src0
->start
= source0
;
838 src0
->access_size
= access_size
;
840 *src0_is_store
= false;
843 if (source1
!= NULL_TREE
)
845 src1
->start
= source1
;
846 src1
->access_size
= access_size
;
848 *src1_is_store
= false;
851 if (dest
!= NULL_TREE
)
854 dst
->access_size
= access_size
;
856 *dst_is_store
= true;
859 got_reference_p
= true;
864 dst
->access_size
= access_size
;
865 *dst_len
= NULL_TREE
;
866 *dst_is_store
= is_store
;
867 *dest_is_deref
= true;
868 got_reference_p
= true;
871 return got_reference_p
;
874 /* Return true iff a given gimple statement has been instrumented.
875 Note that the statement is "defined" by the memory references it
879 has_stmt_been_instrumented_p (gimple stmt
)
881 if (gimple_assign_single_p (stmt
))
885 asan_mem_ref_init (&r
, NULL
, 1);
887 if (get_mem_ref_of_assignment (as_a
<gassign
*> (stmt
), &r
,
889 return has_mem_ref_been_instrumented (&r
);
891 else if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
893 asan_mem_ref src0
, src1
, dest
;
894 asan_mem_ref_init (&src0
, NULL
, 1);
895 asan_mem_ref_init (&src1
, NULL
, 1);
896 asan_mem_ref_init (&dest
, NULL
, 1);
898 tree src0_len
= NULL_TREE
, src1_len
= NULL_TREE
, dest_len
= NULL_TREE
;
899 bool src0_is_store
= false, src1_is_store
= false,
900 dest_is_store
= false, dest_is_deref
= false, intercepted_p
= true;
901 if (get_mem_refs_of_builtin_call (as_a
<gcall
*> (stmt
),
902 &src0
, &src0_len
, &src0_is_store
,
903 &src1
, &src1_len
, &src1_is_store
,
904 &dest
, &dest_len
, &dest_is_store
,
905 &dest_is_deref
, &intercepted_p
))
907 if (src0
.start
!= NULL_TREE
908 && !has_mem_ref_been_instrumented (&src0
, src0_len
))
911 if (src1
.start
!= NULL_TREE
912 && !has_mem_ref_been_instrumented (&src1
, src1_len
))
915 if (dest
.start
!= NULL_TREE
916 && !has_mem_ref_been_instrumented (&dest
, dest_len
))
925 /* Insert a memory reference into the hash table. */
928 update_mem_ref_hash_table (tree ref
, HOST_WIDE_INT access_size
)
930 hash_table
<asan_mem_ref_hasher
> *ht
= get_mem_ref_hash_table ();
933 asan_mem_ref_init (&r
, ref
, access_size
);
935 asan_mem_ref
**slot
= ht
->find_slot (&r
, INSERT
);
936 if (*slot
== NULL
|| (*slot
)->access_size
< access_size
)
937 *slot
= asan_mem_ref_new (ref
, access_size
);
940 /* Initialize shadow_ptr_types array. */
943 asan_init_shadow_ptr_types (void)
945 asan_shadow_set
= new_alias_set ();
946 shadow_ptr_types
[0] = build_distinct_type_copy (signed_char_type_node
);
947 TYPE_ALIAS_SET (shadow_ptr_types
[0]) = asan_shadow_set
;
948 shadow_ptr_types
[0] = build_pointer_type (shadow_ptr_types
[0]);
949 shadow_ptr_types
[1] = build_distinct_type_copy (short_integer_type_node
);
950 TYPE_ALIAS_SET (shadow_ptr_types
[1]) = asan_shadow_set
;
951 shadow_ptr_types
[1] = build_pointer_type (shadow_ptr_types
[1]);
952 initialize_sanitizer_builtins ();
955 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
958 asan_pp_string (pretty_printer
*pp
)
960 const char *buf
= pp_formatted_text (pp
);
961 size_t len
= strlen (buf
);
962 tree ret
= build_string (len
+ 1, buf
);
964 = build_array_type (TREE_TYPE (shadow_ptr_types
[0]),
965 build_index_type (size_int (len
)));
966 TREE_READONLY (ret
) = 1;
967 TREE_STATIC (ret
) = 1;
968 return build1 (ADDR_EXPR
, shadow_ptr_types
[0], ret
);
971 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
974 asan_shadow_cst (unsigned char shadow_bytes
[4])
977 unsigned HOST_WIDE_INT val
= 0;
978 gcc_assert (WORDS_BIG_ENDIAN
== BYTES_BIG_ENDIAN
);
979 for (i
= 0; i
< 4; i
++)
980 val
|= (unsigned HOST_WIDE_INT
) shadow_bytes
[BYTES_BIG_ENDIAN
? 3 - i
: i
]
981 << (BITS_PER_UNIT
* i
);
982 return gen_int_mode (val
, SImode
);
985 /* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't call a library call here
989 asan_clear_shadow (rtx shadow_mem
, HOST_WIDE_INT len
)
991 rtx_insn
*insn
, *insns
, *jump
;
992 rtx_code_label
*top_label
;
996 clear_storage (shadow_mem
, GEN_INT (len
), BLOCK_OP_NORMAL
);
997 insns
= get_insns ();
999 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1002 if (insn
== NULL_RTX
)
1008 gcc_assert ((len
& 3) == 0);
1009 top_label
= gen_label_rtx ();
1010 addr
= copy_to_mode_reg (Pmode
, XEXP (shadow_mem
, 0));
1011 shadow_mem
= adjust_automodify_address (shadow_mem
, SImode
, addr
, 0);
1012 end
= force_reg (Pmode
, plus_constant (Pmode
, addr
, len
));
1013 emit_label (top_label
);
1015 emit_move_insn (shadow_mem
, const0_rtx
);
1016 tmp
= expand_simple_binop (Pmode
, PLUS
, addr
, gen_int_mode (4, Pmode
), addr
,
1017 true, OPTAB_LIB_WIDEN
);
1019 emit_move_insn (addr
, tmp
);
1020 emit_cmp_and_jump_insns (addr
, end
, LT
, NULL_RTX
, Pmode
, true, top_label
);
1021 jump
= get_last_insn ();
1022 gcc_assert (JUMP_P (jump
));
1023 add_int_reg_note (jump
, REG_BR_PROB
, REG_BR_PROB_BASE
* 80 / 100);
1027 asan_function_start (void)
1029 section
*fnsec
= function_section (current_function_decl
);
1030 switch_to_section (fnsec
);
1031 ASM_OUTPUT_DEBUG_LABEL (asm_out_file
, "LASANPC",
1032 current_function_funcdef_no
);
1035 /* Insert code to protect stack vars. The prologue sequence should be emitted
1036 directly, epilogue sequence returned. BASE is the register holding the
1037 stack base, against which OFFSETS array offsets are relative to, OFFSETS
1038 array contains pairs of offsets in reverse order, always the end offset
1039 of some gap that needs protection followed by starting offset,
1040 and DECLS is an array of representative decls for each var partition.
1041 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1042 elements long (OFFSETS include gap before the first variable as well
1043 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1044 register which stack vars DECL_RTLs are based on. Either BASE should be
1045 assigned to PBASE, when not doing use after return protection, or
1046 corresponding address based on __asan_stack_malloc* return value. */
1049 asan_emit_stack_protection (rtx base
, rtx pbase
, unsigned int alignb
,
1050 HOST_WIDE_INT
*offsets
, tree
*decls
, int length
)
1052 rtx shadow_base
, shadow_mem
, ret
, mem
, orig_base
;
1053 rtx_code_label
*lab
;
1056 unsigned char shadow_bytes
[4];
1057 HOST_WIDE_INT base_offset
= offsets
[length
- 1];
1058 HOST_WIDE_INT base_align_bias
= 0, offset
, prev_offset
;
1059 HOST_WIDE_INT asan_frame_size
= offsets
[0] - base_offset
;
1060 HOST_WIDE_INT last_offset
, last_size
;
1062 unsigned char cur_shadow_byte
= ASAN_STACK_MAGIC_LEFT
;
1063 tree str_cst
, decl
, id
;
1064 int use_after_return_class
= -1;
1066 if (shadow_ptr_types
[0] == NULL_TREE
)
1067 asan_init_shadow_ptr_types ();
1069 /* First of all, prepare the description string. */
1070 pretty_printer asan_pp
;
1072 pp_decimal_int (&asan_pp
, length
/ 2 - 1);
1073 pp_space (&asan_pp
);
1074 for (l
= length
- 2; l
; l
-= 2)
1076 tree decl
= decls
[l
/ 2 - 1];
1077 pp_wide_integer (&asan_pp
, offsets
[l
] - base_offset
);
1078 pp_space (&asan_pp
);
1079 pp_wide_integer (&asan_pp
, offsets
[l
- 1] - offsets
[l
]);
1080 pp_space (&asan_pp
);
1081 if (DECL_P (decl
) && DECL_NAME (decl
))
1083 pp_decimal_int (&asan_pp
, IDENTIFIER_LENGTH (DECL_NAME (decl
)));
1084 pp_space (&asan_pp
);
1085 pp_tree_identifier (&asan_pp
, DECL_NAME (decl
));
1088 pp_string (&asan_pp
, "9 <unknown>");
1089 pp_space (&asan_pp
);
1091 str_cst
= asan_pp_string (&asan_pp
);
1093 /* Emit the prologue sequence. */
1094 if (asan_frame_size
> 32 && asan_frame_size
<= 65536 && pbase
1095 && ASAN_USE_AFTER_RETURN
)
1097 use_after_return_class
= floor_log2 (asan_frame_size
- 1) - 5;
1098 /* __asan_stack_malloc_N guarantees alignment
1099 N < 6 ? (64 << N) : 4096 bytes. */
1100 if (alignb
> (use_after_return_class
< 6
1101 ? (64U << use_after_return_class
) : 4096U))
1102 use_after_return_class
= -1;
1103 else if (alignb
> ASAN_RED_ZONE_SIZE
&& (asan_frame_size
& (alignb
- 1)))
1104 base_align_bias
= ((asan_frame_size
+ alignb
- 1)
1105 & ~(alignb
- HOST_WIDE_INT_1
)) - asan_frame_size
;
1107 /* Align base if target is STRICT_ALIGNMENT. */
1108 if (STRICT_ALIGNMENT
)
1109 base
= expand_binop (Pmode
, and_optab
, base
,
1110 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode
)
1111 << ASAN_SHADOW_SHIFT
)
1112 / BITS_PER_UNIT
), Pmode
), NULL_RTX
,
1115 if (use_after_return_class
== -1 && pbase
)
1116 emit_move_insn (pbase
, base
);
1118 base
= expand_binop (Pmode
, add_optab
, base
,
1119 gen_int_mode (base_offset
- base_align_bias
, Pmode
),
1120 NULL_RTX
, 1, OPTAB_DIRECT
);
1121 orig_base
= NULL_RTX
;
1122 if (use_after_return_class
!= -1)
1124 if (asan_detect_stack_use_after_return
== NULL_TREE
)
1126 id
= get_identifier ("__asan_option_detect_stack_use_after_return");
1127 decl
= build_decl (BUILTINS_LOCATION
, VAR_DECL
, id
,
1129 SET_DECL_ASSEMBLER_NAME (decl
, id
);
1130 TREE_ADDRESSABLE (decl
) = 1;
1131 DECL_ARTIFICIAL (decl
) = 1;
1132 DECL_IGNORED_P (decl
) = 1;
1133 DECL_EXTERNAL (decl
) = 1;
1134 TREE_STATIC (decl
) = 1;
1135 TREE_PUBLIC (decl
) = 1;
1136 TREE_USED (decl
) = 1;
1137 asan_detect_stack_use_after_return
= decl
;
1139 orig_base
= gen_reg_rtx (Pmode
);
1140 emit_move_insn (orig_base
, base
);
1141 ret
= expand_normal (asan_detect_stack_use_after_return
);
1142 lab
= gen_label_rtx ();
1143 int very_likely
= REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1);
1144 emit_cmp_and_jump_insns (ret
, const0_rtx
, EQ
, NULL_RTX
,
1145 VOIDmode
, 0, lab
, very_likely
);
1146 snprintf (buf
, sizeof buf
, "__asan_stack_malloc_%d",
1147 use_after_return_class
);
1148 ret
= init_one_libfunc (buf
);
1149 rtx addr
= convert_memory_address (ptr_mode
, base
);
1150 ret
= emit_library_call_value (ret
, NULL_RTX
, LCT_NORMAL
, ptr_mode
, 2,
1151 GEN_INT (asan_frame_size
1153 TYPE_MODE (pointer_sized_int_node
),
1155 ret
= convert_memory_address (Pmode
, ret
);
1156 emit_move_insn (base
, ret
);
1158 emit_move_insn (pbase
, expand_binop (Pmode
, add_optab
, base
,
1159 gen_int_mode (base_align_bias
1160 - base_offset
, Pmode
),
1161 NULL_RTX
, 1, OPTAB_DIRECT
));
1163 mem
= gen_rtx_MEM (ptr_mode
, base
);
1164 mem
= adjust_address (mem
, VOIDmode
, base_align_bias
);
1165 emit_move_insn (mem
, gen_int_mode (ASAN_STACK_FRAME_MAGIC
, ptr_mode
));
1166 mem
= adjust_address (mem
, VOIDmode
, GET_MODE_SIZE (ptr_mode
));
1167 emit_move_insn (mem
, expand_normal (str_cst
));
1168 mem
= adjust_address (mem
, VOIDmode
, GET_MODE_SIZE (ptr_mode
));
1169 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASANPC", current_function_funcdef_no
);
1170 id
= get_identifier (buf
);
1171 decl
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
1172 VAR_DECL
, id
, char_type_node
);
1173 SET_DECL_ASSEMBLER_NAME (decl
, id
);
1174 TREE_ADDRESSABLE (decl
) = 1;
1175 TREE_READONLY (decl
) = 1;
1176 DECL_ARTIFICIAL (decl
) = 1;
1177 DECL_IGNORED_P (decl
) = 1;
1178 TREE_STATIC (decl
) = 1;
1179 TREE_PUBLIC (decl
) = 0;
1180 TREE_USED (decl
) = 1;
1181 DECL_INITIAL (decl
) = decl
;
1182 TREE_ASM_WRITTEN (decl
) = 1;
1183 TREE_ASM_WRITTEN (id
) = 1;
1184 emit_move_insn (mem
, expand_normal (build_fold_addr_expr (decl
)));
1185 shadow_base
= expand_binop (Pmode
, lshr_optab
, base
,
1186 GEN_INT (ASAN_SHADOW_SHIFT
),
1187 NULL_RTX
, 1, OPTAB_DIRECT
);
1189 = plus_constant (Pmode
, shadow_base
,
1190 asan_shadow_offset ()
1191 + (base_align_bias
>> ASAN_SHADOW_SHIFT
));
1192 gcc_assert (asan_shadow_set
!= -1
1193 && (ASAN_RED_ZONE_SIZE
>> ASAN_SHADOW_SHIFT
) == 4);
1194 shadow_mem
= gen_rtx_MEM (SImode
, shadow_base
);
1195 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1196 if (STRICT_ALIGNMENT
)
1197 set_mem_align (shadow_mem
, (GET_MODE_ALIGNMENT (SImode
)));
1198 prev_offset
= base_offset
;
1199 for (l
= length
; l
; l
-= 2)
1202 cur_shadow_byte
= ASAN_STACK_MAGIC_RIGHT
;
1203 offset
= offsets
[l
- 1];
1204 if ((offset
- base_offset
) & (ASAN_RED_ZONE_SIZE
- 1))
1208 = base_offset
+ ((offset
- base_offset
)
1209 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
));
1210 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1211 (aoff
- prev_offset
)
1212 >> ASAN_SHADOW_SHIFT
);
1214 for (i
= 0; i
< 4; i
++, aoff
+= (1 << ASAN_SHADOW_SHIFT
))
1217 if (aoff
< offset
- (1 << ASAN_SHADOW_SHIFT
) + 1)
1218 shadow_bytes
[i
] = 0;
1220 shadow_bytes
[i
] = offset
- aoff
;
1223 shadow_bytes
[i
] = ASAN_STACK_MAGIC_PARTIAL
;
1224 emit_move_insn (shadow_mem
, asan_shadow_cst (shadow_bytes
));
1227 while (offset
<= offsets
[l
- 2] - ASAN_RED_ZONE_SIZE
)
1229 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1230 (offset
- prev_offset
)
1231 >> ASAN_SHADOW_SHIFT
);
1232 prev_offset
= offset
;
1233 memset (shadow_bytes
, cur_shadow_byte
, 4);
1234 emit_move_insn (shadow_mem
, asan_shadow_cst (shadow_bytes
));
1235 offset
+= ASAN_RED_ZONE_SIZE
;
1237 cur_shadow_byte
= ASAN_STACK_MAGIC_MIDDLE
;
1239 do_pending_stack_adjust ();
1241 /* Construct epilogue sequence. */
1245 if (use_after_return_class
!= -1)
1247 rtx_code_label
*lab2
= gen_label_rtx ();
1248 char c
= (char) ASAN_STACK_MAGIC_USE_AFTER_RET
;
1249 int very_likely
= REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1);
1250 emit_cmp_and_jump_insns (orig_base
, base
, EQ
, NULL_RTX
,
1251 VOIDmode
, 0, lab2
, very_likely
);
1252 shadow_mem
= gen_rtx_MEM (BLKmode
, shadow_base
);
1253 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1254 mem
= gen_rtx_MEM (ptr_mode
, base
);
1255 mem
= adjust_address (mem
, VOIDmode
, base_align_bias
);
1256 emit_move_insn (mem
, gen_int_mode (ASAN_STACK_RETIRED_MAGIC
, ptr_mode
));
1257 unsigned HOST_WIDE_INT sz
= asan_frame_size
>> ASAN_SHADOW_SHIFT
;
1258 if (use_after_return_class
< 5
1259 && can_store_by_pieces (sz
, builtin_memset_read_str
, &c
,
1260 BITS_PER_UNIT
, true))
1261 store_by_pieces (shadow_mem
, sz
, builtin_memset_read_str
, &c
,
1262 BITS_PER_UNIT
, true, 0);
1263 else if (use_after_return_class
>= 5
1264 || !set_storage_via_setmem (shadow_mem
,
1266 gen_int_mode (c
, QImode
),
1267 BITS_PER_UNIT
, BITS_PER_UNIT
,
1270 snprintf (buf
, sizeof buf
, "__asan_stack_free_%d",
1271 use_after_return_class
);
1272 ret
= init_one_libfunc (buf
);
1273 rtx addr
= convert_memory_address (ptr_mode
, base
);
1274 rtx orig_addr
= convert_memory_address (ptr_mode
, orig_base
);
1275 emit_library_call (ret
, LCT_NORMAL
, ptr_mode
, 3, addr
, ptr_mode
,
1276 GEN_INT (asan_frame_size
+ base_align_bias
),
1277 TYPE_MODE (pointer_sized_int_node
),
1278 orig_addr
, ptr_mode
);
1280 lab
= gen_label_rtx ();
1285 shadow_mem
= gen_rtx_MEM (BLKmode
, shadow_base
);
1286 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1288 if (STRICT_ALIGNMENT
)
1289 set_mem_align (shadow_mem
, (GET_MODE_ALIGNMENT (SImode
)));
1291 prev_offset
= base_offset
;
1292 last_offset
= base_offset
;
1294 for (l
= length
; l
; l
-= 2)
1296 offset
= base_offset
+ ((offsets
[l
- 1] - base_offset
)
1297 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
));
1298 if (last_offset
+ last_size
!= offset
)
1300 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1301 (last_offset
- prev_offset
)
1302 >> ASAN_SHADOW_SHIFT
);
1303 prev_offset
= last_offset
;
1304 asan_clear_shadow (shadow_mem
, last_size
>> ASAN_SHADOW_SHIFT
);
1305 last_offset
= offset
;
1308 last_size
+= base_offset
+ ((offsets
[l
- 2] - base_offset
)
1309 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
))
1314 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1315 (last_offset
- prev_offset
)
1316 >> ASAN_SHADOW_SHIFT
);
1317 asan_clear_shadow (shadow_mem
, last_size
>> ASAN_SHADOW_SHIFT
);
1320 do_pending_stack_adjust ();
1324 insns
= get_insns ();
1329 /* Return true if DECL, a global var, might be overridden and needs
1330 therefore a local alias. */
1333 asan_needs_local_alias (tree decl
)
1335 return DECL_WEAK (decl
) || !targetm
.binds_local_p (decl
);
1338 /* Return true if DECL is a VAR_DECL that should be protected
1339 by Address Sanitizer, by appending a red zone with protected
1340 shadow memory after it and aligning it to at least
1341 ASAN_RED_ZONE_SIZE bytes. */
/* NOTE(review): each disjunct below names a category of variable that
   cannot safely carry a red zone; presumably every such case rejects
   protection — confirm against the elided return statements.  */
1344 asan_protect_global (tree decl
)
1351 if (TREE_CODE (decl
) == STRING_CST
)
1353 /* Instrument all STRING_CSTs except those created
1354 by asan_pp_string here. */
1355 if (shadow_ptr_types
[0] != NULL_TREE
1356 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1357 && TREE_TYPE (TREE_TYPE (decl
)) == TREE_TYPE (shadow_ptr_types
[0]))
1361 if (TREE_CODE (decl
) != VAR_DECL
1362 /* TLS vars aren't statically protectable. */
1363 || DECL_THREAD_LOCAL_P (decl
)
1364 /* Externs will be protected elsewhere. */
1365 || DECL_EXTERNAL (decl
)
1366 || !DECL_RTL_SET_P (decl
)
1367 /* Comdat vars pose an ABI problem, we can't know if
1368 the var that is selected by the linker will have
1370 || DECL_ONE_ONLY (decl
)
1371 /* Similarly for common vars. People can use -fno-common.
1372 Note: Linux kernel is built with -fno-common, so we do instrument
1373 globals there even if it is C. */
1374 || (DECL_COMMON (decl
) && TREE_PUBLIC (decl
))
1375 /* Don't protect if using user section, often vars placed
1376 into user section from multiple TUs are then assumed
1377 to be an array of such vars, putting padding in there
1378 breaks this assumption. */
1379 || (DECL_SECTION_NAME (decl
) != NULL
1380 && !symtab_node::get (decl
)->implicit_section
1381 && !section_sanitized_p (DECL_SECTION_NAME (decl
)))
1382 || DECL_SIZE (decl
) == 0
1383 || ASAN_RED_ZONE_SIZE
* BITS_PER_UNIT
> MAX_OFILE_ALIGNMENT
1384 || !valid_constant_size_p (DECL_SIZE_UNIT (decl
))
1385 || DECL_ALIGN_UNIT (decl
) > 2 * ASAN_RED_ZONE_SIZE
1386 || TREE_TYPE (decl
) == ubsan_get_source_location_type ())
/* Symbols whose address lives in a constant pool also get no red zone.  */
1389 rtl
= DECL_RTL (decl
);
1390 if (!MEM_P (rtl
) || GET_CODE (XEXP (rtl
, 0)) != SYMBOL_REF
)
1392 symbol
= XEXP (rtl
, 0);
1394 if (CONSTANT_POOL_ADDRESS_P (symbol
)
1395 || TREE_CONSTANT_POOL_ADDRESS_P (symbol
))
1398 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
)))
1401 #ifndef ASM_OUTPUT_DEF
1402 if (asan_needs_local_alias (decl
))
1409 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1410 IS_STORE is either 1 (for a store) or 0 (for a load). */
/* The table is indexed as report[recover_p][is_store][size_log2];
   slot 5 holds the variable-length _N variant used when
   size_in_bytes == -1.  */
1413 report_error_func (bool is_store
, bool recover_p
, HOST_WIDE_INT size_in_bytes
,
1416 static enum built_in_function report
[2][2][6]
1417 = { { { BUILT_IN_ASAN_REPORT_LOAD1
, BUILT_IN_ASAN_REPORT_LOAD2
,
1418 BUILT_IN_ASAN_REPORT_LOAD4
, BUILT_IN_ASAN_REPORT_LOAD8
,
1419 BUILT_IN_ASAN_REPORT_LOAD16
, BUILT_IN_ASAN_REPORT_LOAD_N
},
1420 { BUILT_IN_ASAN_REPORT_STORE1
, BUILT_IN_ASAN_REPORT_STORE2
,
1421 BUILT_IN_ASAN_REPORT_STORE4
, BUILT_IN_ASAN_REPORT_STORE8
,
1422 BUILT_IN_ASAN_REPORT_STORE16
, BUILT_IN_ASAN_REPORT_STORE_N
} },
1423 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT
,
1424 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT
,
1425 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT
,
1426 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT
,
1427 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT
,
1428 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT
},
1429 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT
,
1430 BUILT_IN_ASAN_REPORT_STORE2_NOABORT
,
1431 BUILT_IN_ASAN_REPORT_STORE4_NOABORT
,
1432 BUILT_IN_ASAN_REPORT_STORE8_NOABORT
,
1433 BUILT_IN_ASAN_REPORT_STORE16_NOABORT
,
1434 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT
} } };
1435 if (size_in_bytes
== -1)
1438 return builtin_decl_implicit (report
[recover_p
][is_store
][5]);
1441 int size_log2
= exact_log2 (size_in_bytes
);
1442 return builtin_decl_implicit (report
[recover_p
][is_store
][size_log2
]);
1445 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1446 IS_STORE is either 1 (for a store) or 0 (for a load). */
/* Same layout as report_error_func's table: check[recover_p][is_store]
   [size_log2], with slot 5 for the variable-length _N variant.  */
1449 check_func (bool is_store
, bool recover_p
, HOST_WIDE_INT size_in_bytes
,
1452 static enum built_in_function check
[2][2][6]
1453 = { { { BUILT_IN_ASAN_LOAD1
, BUILT_IN_ASAN_LOAD2
,
1454 BUILT_IN_ASAN_LOAD4
, BUILT_IN_ASAN_LOAD8
,
1455 BUILT_IN_ASAN_LOAD16
, BUILT_IN_ASAN_LOADN
},
1456 { BUILT_IN_ASAN_STORE1
, BUILT_IN_ASAN_STORE2
,
1457 BUILT_IN_ASAN_STORE4
, BUILT_IN_ASAN_STORE8
,
1458 BUILT_IN_ASAN_STORE16
, BUILT_IN_ASAN_STOREN
} },
1459 { { BUILT_IN_ASAN_LOAD1_NOABORT
,
1460 BUILT_IN_ASAN_LOAD2_NOABORT
,
1461 BUILT_IN_ASAN_LOAD4_NOABORT
,
1462 BUILT_IN_ASAN_LOAD8_NOABORT
,
1463 BUILT_IN_ASAN_LOAD16_NOABORT
,
1464 BUILT_IN_ASAN_LOADN_NOABORT
},
1465 { BUILT_IN_ASAN_STORE1_NOABORT
,
1466 BUILT_IN_ASAN_STORE2_NOABORT
,
1467 BUILT_IN_ASAN_STORE4_NOABORT
,
1468 BUILT_IN_ASAN_STORE8_NOABORT
,
1469 BUILT_IN_ASAN_STORE16_NOABORT
,
1470 BUILT_IN_ASAN_STOREN_NOABORT
} } };
1471 if (size_in_bytes
== -1)
1474 return builtin_decl_implicit (check
[recover_p
][is_store
][5]);
1477 int size_log2
= exact_log2 (size_in_bytes
);
1478 return builtin_decl_implicit (check
[recover_p
][is_store
][size_log2
]);
1481 /* Split the current basic block and create a condition statement
1482 insertion point right before or after the statement pointed to by
1483 ITER. Return an iterator to the point at which the caller might
1484 safely insert the condition statement.
1486 THEN_BLOCK must be set to the address of an uninitialized instance
1487 of basic_block. The function will then set *THEN_BLOCK to the
1488 'then block' of the condition statement to be inserted by the
1491 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1492 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1494 Similarly, the function will set *FALLTRHOUGH_BLOCK to the 'else
1495 block' of the condition statement to be inserted by the caller.
1497 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1498 statements starting from *ITER, and *THEN_BLOCK is a new empty
1501 *ITER is adjusted to point to always point to the first statement
1502 of the basic block * FALLTHROUGH_BLOCK. That statement is the
1503 same as what ITER was pointing to prior to calling this function,
1504 if BEFORE_P is true; otherwise, it is its following statement. */
1506 gimple_stmt_iterator
1507 create_cond_insert_point (gimple_stmt_iterator
*iter
,
1509 bool then_more_likely_p
,
1510 bool create_then_fallthru_edge
,
1511 basic_block
*then_block
,
1512 basic_block
*fallthrough_block
)
1514 gimple_stmt_iterator gsi
= *iter
;
1516 if (!gsi_end_p (gsi
) && before_p
)
1519 basic_block cur_bb
= gsi_bb (*iter
);
/* Split cur_bb after GSI: everything from there on becomes fallthru_bb.  */
1521 edge e
= split_block (cur_bb
, gsi_stmt (gsi
));
1523 /* Get a hold on the 'condition block', the 'then block' and the
1525 basic_block cond_bb
= e
->src
;
1526 basic_block fallthru_bb
= e
->dest
;
1527 basic_block then_bb
= create_empty_bb (cond_bb
);
1530 add_bb_to_loop (then_bb
, cond_bb
->loop_father
);
1531 loops_state_set (LOOPS_NEED_FIXUP
);
1534 /* Set up the newly created 'then block'. */
1535 e
= make_edge (cond_bb
, then_bb
, EDGE_TRUE_VALUE
);
/* Edge probabilities below are on the 0..PROB_ALWAYS scale.  */
1536 int fallthrough_probability
1537 = then_more_likely_p
1538 ? PROB_VERY_UNLIKELY
1539 : PROB_ALWAYS
- PROB_VERY_UNLIKELY
;
1540 e
->probability
= PROB_ALWAYS
- fallthrough_probability
;
1541 if (create_then_fallthru_edge
)
1542 make_single_succ_edge (then_bb
, fallthru_bb
, EDGE_FALLTHRU
);
1544 /* Set up the fallthrough basic block. */
1545 e
= find_edge (cond_bb
, fallthru_bb
);
1546 e
->flags
= EDGE_FALSE_VALUE
;
1547 e
->count
= cond_bb
->count
;
1548 e
->probability
= fallthrough_probability
;
1550 /* Update dominance info for the newly created then_bb; note that
1551 fallthru_bb's dominance info has already been updated by
1553 if (dom_info_available_p (CDI_DOMINATORS
))
1554 set_immediate_dominator (CDI_DOMINATORS
, then_bb
, cond_bb
);
1556 *then_block
= then_bb
;
1557 *fallthrough_block
= fallthru_bb
;
1558 *iter
= gsi_start_bb (fallthru_bb
);
1560 return gsi_last_bb (cond_bb
);
1563 /* Insert an if condition followed by a 'then block' right before the
1564 statement pointed to by ITER. The fallthrough block -- which is the
1565 else block of the condition as well as the destination of the
1566 outcoming edge of the 'then block' -- starts with the statement
1569 COND is the condition of the if.
1571 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1572 'then block' is higher than the probability of the edge to the
1575 Upon completion of the function, *THEN_BB is set to the newly
1576 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1579 *ITER is adjusted to still point to the same statement it was
1580 pointing to initially. */
/* Thin wrapper: split via create_cond_insert_point, then insert COND
   at the returned insertion point.  */
1583 insert_if_then_before_iter (gcond
*cond
,
1584 gimple_stmt_iterator
*iter
,
1585 bool then_more_likely_p
,
1586 basic_block
*then_bb
,
1587 basic_block
*fallthrough_bb
)
1589 gimple_stmt_iterator cond_insert_point
=
1590 create_cond_insert_point (iter
,
1593 /*create_then_fallthru_edge=*/true,
1596 gsi_insert_after (&cond_insert_point
, cond
, GSI_NEW_STMT
);
1600 (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
/* Emits the shadow-address computation as a chain of GIMPLE assigns
   after *GSI and returns the SSA name holding the loaded shadow byte.
   Note: G is deliberately reused; each step feeds gimple_assign_lhs (g)
   of the previous one, so statement order matters.  */
1603 build_shadow_mem_access (gimple_stmt_iterator
*gsi
, location_t location
,
1604 tree base_addr
, tree shadow_ptr_type
)
1606 tree t
, uintptr_type
= TREE_TYPE (base_addr
);
1607 tree shadow_type
= TREE_TYPE (shadow_ptr_type
);
1610 t
= build_int_cst (uintptr_type
, ASAN_SHADOW_SHIFT
);
1611 g
= gimple_build_assign (make_ssa_name (uintptr_type
), RSHIFT_EXPR
,
1613 gimple_set_location (g
, location
);
1614 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
1616 t
= build_int_cst (uintptr_type
, asan_shadow_offset ());
1617 g
= gimple_build_assign (make_ssa_name (uintptr_type
), PLUS_EXPR
,
1618 gimple_assign_lhs (g
), t
);
1619 gimple_set_location (g
, location
);
1620 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
1622 g
= gimple_build_assign (make_ssa_name (shadow_ptr_type
), NOP_EXPR
,
1623 gimple_assign_lhs (g
));
1624 gimple_set_location (g
, location
);
1625 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
1627 t
= build2 (MEM_REF
, shadow_type
, gimple_assign_lhs (g
),
1628 build_int_cst (shadow_ptr_type
, 0));
1629 g
= gimple_build_assign (make_ssa_name (shadow_type
), MEM_REF
, t
);
1630 gimple_set_location (g
, location
);
1631 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
1632 return gimple_assign_lhs (g
);
1635 /* BASE can already be an SSA_NAME; in that case, do not create a
1636 new SSA_NAME for it. */
/* Otherwise wrap BASE in a fresh SSA assignment inserted before or
   after *ITER (presumably selected by the elided before_p test —
   TODO confirm) and return the new name.  */
1639 maybe_create_ssa_name (location_t loc
, tree base
, gimple_stmt_iterator
*iter
,
1642 if (TREE_CODE (base
) == SSA_NAME
)
1644 gimple g
= gimple_build_assign (make_ssa_name (TREE_TYPE (base
)),
1645 TREE_CODE (base
), base
);
1646 gimple_set_location (g
, loc
);
1648 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
1650 gsi_insert_after (iter
, g
, GSI_NEW_STMT
);
1651 return gimple_assign_lhs (g
);
1654 /* LEN can already have necessary size and precision;
1655 in that case, do not create a new variable. */
/* Otherwise emit a conversion of LEN to pointer_sized_int_node before
   or after *ITER and return the converted SSA name.  */
1658 maybe_cast_to_ptrmode (location_t loc
, tree len
, gimple_stmt_iterator
*iter
,
1661 if (ptrofftype_p (len
))
1663 gimple g
= gimple_build_assign (make_ssa_name (pointer_sized_int_node
),
1665 gimple_set_location (g
, loc
);
1667 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
1669 gsi_insert_after (iter
, g
, GSI_NEW_STMT
);
1670 return gimple_assign_lhs (g
);
1673 /* Instrument the memory access instruction BASE. Insert new
1674 statements before or after ITER.
1676 Note that the memory access represented by BASE can be either an
1677 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1678 location. IS_STORE is TRUE for a store, FALSE for a load.
1679 BEFORE_P is TRUE for inserting the instrumentation code before
1680 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1681 for a scalar memory access and FALSE for memory region access.
1682 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1683 length. ALIGN tells alignment of accessed memory object.
1685 START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
1686 memory region have already been instrumented.
1688 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1689 statement it was pointing to prior to calling this function,
1690 otherwise, it points to the statement logically following it. */
/* Emits an IFN_ASAN_CHECK internal call; the sanopt pass later expands
   it into the actual shadow-memory check — TODO confirm against the
   internal-fn definition.  */
1693 build_check_stmt (location_t loc
, tree base
, tree len
,
1694 HOST_WIDE_INT size_in_bytes
, gimple_stmt_iterator
*iter
,
1695 bool is_non_zero_len
, bool before_p
, bool is_store
,
1696 bool is_scalar_access
, unsigned int align
= 0)
1698 gimple_stmt_iterator gsi
= *iter
;
1701 gcc_assert (!(size_in_bytes
> 0 && !is_non_zero_len
));
1705 base
= unshare_expr (base
);
1706 base
= maybe_create_ssa_name (loc
, base
, &gsi
, before_p
);
1710 len
= unshare_expr (len
);
1711 len
= maybe_cast_to_ptrmode (loc
, len
, iter
, before_p
);
1715 gcc_assert (size_in_bytes
!= -1);
1716 len
= build_int_cst (pointer_sized_int_node
, size_in_bytes
);
/* Demote to a region (non-scalar) check when the size is not a small
   power of two or the access is under-aligned.  */
1719 if (size_in_bytes
> 1)
1721 if ((size_in_bytes
& (size_in_bytes
- 1)) != 0
1722 || size_in_bytes
> 16)
1723 is_scalar_access
= false;
1724 else if (align
&& align
< size_in_bytes
* BITS_PER_UNIT
)
1726 /* On non-strict alignment targets, if
1727 16-byte access is just 8-byte aligned,
1728 this will result in misaligned shadow
1729 memory 2 byte load, but otherwise can
1730 be handled using one read. */
1731 if (size_in_bytes
!= 16
1733 || align
< 8 * BITS_PER_UNIT
)
1734 is_scalar_access
= false;
1738 HOST_WIDE_INT flags
= 0;
1740 flags
|= ASAN_CHECK_STORE
;
1741 if (is_non_zero_len
)
1742 flags
|= ASAN_CHECK_NON_ZERO_LEN
;
1743 if (is_scalar_access
)
1744 flags
|= ASAN_CHECK_SCALAR_ACCESS
;
1746 g
= gimple_build_call_internal (IFN_ASAN_CHECK
, 4,
1747 build_int_cst (integer_type_node
, flags
),
1749 build_int_cst (integer_type_node
,
1750 align
/ BITS_PER_UNIT
));
1751 gimple_set_location (g
, loc
);
1753 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
1756 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
1762 /* If T represents a memory access, add instrumentation code before ITER.
1763 LOCATION is source code location.
1764 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
/* Honors -fsanitize flags via ASAN_INSTRUMENT_WRITES/READS, recurses
   on bit-field representatives, skips provably-in-bounds accesses to
   always-accessible variables, and caches the instrumented reference
   in the mem-ref hash table to avoid duplicates.  */
1767 instrument_derefs (gimple_stmt_iterator
*iter
, tree t
,
1768 location_t location
, bool is_store
)
1770 if (is_store
&& !ASAN_INSTRUMENT_WRITES
)
1772 if (!is_store
&& !ASAN_INSTRUMENT_READS
)
1776 HOST_WIDE_INT size_in_bytes
;
1778 type
= TREE_TYPE (t
);
1779 switch (TREE_CODE (t
))
1793 size_in_bytes
= int_size_in_bytes (type
);
1794 if (size_in_bytes
<= 0)
1797 HOST_WIDE_INT bitsize
, bitpos
;
1800 int volatilep
= 0, unsignedp
= 0;
1801 tree inner
= get_inner_reference (t
, &bitsize
, &bitpos
, &offset
,
1802 &mode
, &unsignedp
, &volatilep
, false);
/* Bit-field member: instrument the whole representative field instead.  */
1804 if (TREE_CODE (t
) == COMPONENT_REF
1805 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1)) != NULL_TREE
)
1807 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1));
1808 instrument_derefs (iter
, build3 (COMPONENT_REF
, TREE_TYPE (repr
),
1809 TREE_OPERAND (t
, 0), repr
,
1810 NULL_TREE
), location
, is_store
);
1814 if (bitpos
% BITS_PER_UNIT
1815 || bitsize
!= size_in_bytes
* BITS_PER_UNIT
)
1818 if (TREE_CODE (inner
) == VAR_DECL
1819 && offset
== NULL_TREE
1821 && DECL_SIZE (inner
)
1822 && tree_fits_shwi_p (DECL_SIZE (inner
))
1823 && bitpos
+ bitsize
<= tree_to_shwi (DECL_SIZE (inner
)))
1825 if (DECL_THREAD_LOCAL_P (inner
))
1827 if (!ASAN_GLOBALS
&& is_global_var (inner
))
1829 if (!TREE_STATIC (inner
))
1831 /* Automatic vars in the current function will be always
1833 if (decl_function_context (inner
) == current_function_decl
)
1836 /* Always instrument external vars, they might be dynamically
1838 else if (!DECL_EXTERNAL (inner
))
1840 /* For static vars if they are known not to be dynamically
1841 initialized, they will be always accessible. */
1842 varpool_node
*vnode
= varpool_node::get (inner
);
1843 if (vnode
&& !vnode
->dynamically_initialized
)
1848 base
= build_fold_addr_expr (t
);
1849 if (!has_mem_ref_been_instrumented (base
, size_in_bytes
))
1851 unsigned int align
= get_object_alignment (t
);
1852 build_check_stmt (location
, base
, NULL_TREE
, size_in_bytes
, iter
,
1853 /*is_non_zero_len*/size_in_bytes
> 0, /*before_p=*/true,
1854 is_store
, /*is_scalar_access*/true, align
);
1855 update_mem_ref_hash_table (base
, size_in_bytes
);
1856 update_mem_ref_hash_table (t
, size_in_bytes
);
1861 /* Insert a memory reference into the hash table if access length
1862 can be determined in compile time. */
1865 maybe_update_mem_ref_hash_table (tree base
, tree len
)
1867 if (!POINTER_TYPE_P (TREE_TYPE (base
))
1868 || !INTEGRAL_TYPE_P (TREE_TYPE (len
)))
1871 HOST_WIDE_INT size_in_bytes
= tree_fits_shwi_p (len
) ? tree_to_shwi (len
) : -1;
1873 if (size_in_bytes
!= -1)
1874 update_mem_ref_hash_table (base
, size_in_bytes
);
1877 /* Instrument an access to a contiguous memory region that starts at
1878 the address pointed to by BASE, over a length of LEN (expressed in
1879 the sizeof (*BASE) bytes). ITER points to the instruction before
1880 which the instrumentation instructions must be inserted. LOCATION
1881 is the source location that the instrumentation instructions must
1882 have. If IS_STORE is true, then the memory access is a store;
1883 otherwise, it's a load. */
/* size_in_bytes == -1 means the length is not a compile-time constant;
   the check is still emitted but cannot be deduplicated.  */
1886 instrument_mem_region_access (tree base
, tree len
,
1887 gimple_stmt_iterator
*iter
,
1888 location_t location
, bool is_store
)
1890 if (!POINTER_TYPE_P (TREE_TYPE (base
))
1891 || !INTEGRAL_TYPE_P (TREE_TYPE (len
))
1892 || integer_zerop (len
))
1895 HOST_WIDE_INT size_in_bytes
= tree_fits_shwi_p (len
) ? tree_to_shwi (len
) : -1;
1897 if ((size_in_bytes
== -1)
1898 || !has_mem_ref_been_instrumented (base
, size_in_bytes
))
1900 build_check_stmt (location
, base
, len
, size_in_bytes
, iter
,
1901 /*is_non_zero_len*/size_in_bytes
> 0, /*before_p*/true,
1902 is_store
, /*is_scalar_access*/false, /*align*/0);
1905 maybe_update_mem_ref_hash_table (base
, len
);
/* Re-acquire the iterator; the insertions above may have invalidated it.  */
1906 *iter
= gsi_for_stmt (gsi_stmt (*iter
));
1909 /* Instrument the call to a built-in memory access function that is
1910 pointed to by the iterator ITER.
1912 Upon completion, return TRUE iff *ITER has been advanced to the
1913 statement following the one it was originally pointing to. */
/* get_mem_refs_of_builtin_call decomposes the builtin into up to two
   source regions and one destination region (each with an optional
   length); intercepted builtins are checked by the runtime library
   instead, so only their refs are cached here.  */
1916 instrument_builtin_call (gimple_stmt_iterator
*iter
)
1918 if (!ASAN_MEMINTRIN
)
1921 bool iter_advanced_p
= false;
1922 gcall
*call
= as_a
<gcall
*> (gsi_stmt (*iter
));
1924 gcc_checking_assert (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
));
1926 location_t loc
= gimple_location (call
);
1928 asan_mem_ref src0
, src1
, dest
;
1929 asan_mem_ref_init (&src0
, NULL
, 1);
1930 asan_mem_ref_init (&src1
, NULL
, 1);
1931 asan_mem_ref_init (&dest
, NULL
, 1);
1933 tree src0_len
= NULL_TREE
, src1_len
= NULL_TREE
, dest_len
= NULL_TREE
;
1934 bool src0_is_store
= false, src1_is_store
= false, dest_is_store
= false,
1935 dest_is_deref
= false, intercepted_p
= true;
1937 if (get_mem_refs_of_builtin_call (call
,
1938 &src0
, &src0_len
, &src0_is_store
,
1939 &src1
, &src1_len
, &src1_is_store
,
1940 &dest
, &dest_len
, &dest_is_store
,
1941 &dest_is_deref
, &intercepted_p
))
1945 instrument_derefs (iter
, dest
.start
, loc
, dest_is_store
)
;
1947 iter_advanced_p
= true;
1949 else if (!intercepted_p
1950 && (src0_len
|| src1_len
|| dest_len
))
1952 if (src0
.start
!= NULL_TREE
)
1953 instrument_mem_region_access (src0
.start
, src0_len
,
1954 iter
, loc
, /*is_store=*/false);
1955 if (src1
.start
!= NULL_TREE
)
1956 instrument_mem_region_access (src1
.start
, src1_len
,
1957 iter
, loc
, /*is_store=*/false);
1958 if (dest
.start
!= NULL_TREE
)
1959 instrument_mem_region_access (dest
.start
, dest_len
,
1960 iter
, loc
, /*is_store=*/true);
1962 *iter
= gsi_for_stmt (call
);
1964 iter_advanced_p
= true;
1968 if (src0
.start
!= NULL_TREE
)
1969 maybe_update_mem_ref_hash_table (src0
.start
, src0_len
);
1970 if (src1
.start
!= NULL_TREE
)
1971 maybe_update_mem_ref_hash_table (src1
.start
, src1_len
);
1972 if (dest
.start
!= NULL_TREE
)
1973 maybe_update_mem_ref_hash_table (dest
.start
, dest_len
);
1976 return iter_advanced_p
;
1979 /* Instrument the assignment statement ITER if it is subject to
1980 instrumentation. Return TRUE iff instrumentation actually
1981 happened. In that case, the iterator ITER is advanced to the next
1982 logical expression following the one initially pointed to by ITER,
1983 and the relevant memory reference that which access has been
1984 instrumented is added to the memory references hash table. */
/* A single assignment can be both a load and a store (memory-to-memory
   copy), so both branches below may fire for one statement.  */
1987 maybe_instrument_assignment (gimple_stmt_iterator
*iter
)
1989 gimple s
= gsi_stmt (*iter
);
1991 gcc_assert (gimple_assign_single_p (s
));
1993 tree ref_expr
= NULL_TREE
;
1994 bool is_store
, is_instrumented
= false;
1996 if (gimple_store_p (s
))
1998 ref_expr
= gimple_assign_lhs (s
);
2000 instrument_derefs (iter
, ref_expr
,
2001 gimple_location (s
),
2003 is_instrumented
= true;
2006 if (gimple_assign_load_p (s
))
2008 ref_expr
= gimple_assign_rhs1 (s
);
2010 instrument_derefs (iter
, ref_expr
,
2011 gimple_location (s
),
2013 is_instrumented
= true;
2016 if (is_instrumented
)
2019 return is_instrumented
;
2022 /* Instrument the function call pointed to by the iterator ITER, if it
2023 is subject to instrumentation. At the moment, the only function
2024 calls that are instrumented are some built-in functions that access
2025 memory. Look at instrument_builtin_call to learn more.
2027 Upon completion return TRUE iff *ITER was advanced to the statement
2028 following the one it was originally pointing to. */
/* Noreturn calls additionally get a preceding __asan_handle_no_return
   call so the runtime can unpoison the stack that will never be
   unwound normally.  */
2031 maybe_instrument_call (gimple_stmt_iterator
*iter
)
2033 gimple stmt
= gsi_stmt (*iter
);
2034 bool is_builtin
= gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
);
2036 if (is_builtin
&& instrument_builtin_call (iter
))
2039 if (gimple_call_noreturn_p (stmt
))
2043 tree callee
= gimple_call_fndecl (stmt
);
2044 switch (DECL_FUNCTION_CODE (callee
))
2046 case BUILT_IN_UNREACHABLE
:
2048 /* Don't instrument these. */
2054 tree decl
= builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN
);
2055 gimple g
= gimple_build_call (decl
, 0);
2056 gimple_set_location (g
, gimple_location (stmt
));
2057 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
2062 /* Walk each instruction of all basic block and instrument those that
2063 represent memory references: loads, stores, or function calls.
2064 In a given basic block, this function avoids instrumenting memory
2065 references that have already been instrumented. */
/* Blocks with index >= saved_last_basic_block were created by this
   pass itself and are skipped.  */
2068 transform_statements (void)
2070 basic_block bb
, last_bb
= NULL
;
2071 gimple_stmt_iterator i
;
2072 int saved_last_basic_block
= last_basic_block_for_fn (cfun
);
2074 FOR_EACH_BB_FN (bb
, cfun
)
2076 basic_block prev_bb
= bb
;
2078 if (bb
->index
>= saved_last_basic_block
) continue;
2080 /* Flush the mem ref hash table, if current bb doesn't have
2081 exactly one predecessor, or if that predecessor (skipping
2082 over asan created basic blocks) isn't the last processed
2083 basic block. Thus we effectively flush on extended basic
2084 block boundaries. */
2085 while (single_pred_p (prev_bb
))
2087 prev_bb
= single_pred (prev_bb
);
2088 if (prev_bb
->index
< saved_last_basic_block
)
2091 if (prev_bb
!= last_bb
)
2092 empty_mem_ref_hash_table ();
2095 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
);)
2097 gimple s
= gsi_stmt (i
);
2099 if (has_stmt_been_instrumented_p (s
))
2101 else if (gimple_assign_single_p (s
)
2102 && !gimple_clobber_p (s
)
2103 && maybe_instrument_assignment (&i
))
2104 /* Nothing to do as maybe_instrument_assignment advanced
2106 else if (is_gimple_call (s
) && maybe_instrument_call (&i
))
2107 /* Nothing to do as maybe_instrument_call
2108 advanced the iterator I. */;
2111 /* No instrumentation happened.
2113 If the current instruction is a function call that
2114 might free something, let's forget about the memory
2115 references that got instrumented. Otherwise we might
2116 miss some instrumentation opportunities. */
2117 if (is_gimple_call (s
) && !nonfreeing_call_p (s
))
2118 empty_mem_ref_hash_table ();
2124 free_mem_ref_resources ();
2128 __asan_before_dynamic_init (module_name)
2130 __asan_after_dynamic_init ()
2134 asan_dynamic_init_call (bool after_p
)
/* Builds a call expression to the before/after dynamic-init runtime
   hook; only the "before" variant takes the module-name argument.  */
2136 tree fn
= builtin_decl_implicit (after_p
2137 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2138 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT
);
2139 tree module_name_cst
= NULL_TREE
;
2142 pretty_printer module_name_pp
;
2143 pp_string (&module_name_pp
, main_input_filename
);
2145 if (shadow_ptr_types
[0] == NULL_TREE
)
2146 asan_init_shadow_ptr_types ();
2147 module_name_cst
= asan_pp_string (&module_name_pp
);
2148 module_name_cst
= fold_convert (const_ptr_type_node
,
2152 return build_call_expr (fn
, after_p
? 0 : 1, module_name_cst
);
2156 struct __asan_global
2160 uptr __size_with_redzone;
2162 const void *__module_name;
2163 uptr __has_dynamic_init;
2164 __asan_global_source_location *__location;
2168 asan_global_struct (void)
/* Builds a RECORD_TYPE mirroring the runtime's __asan_global layout:
   fields 0 and 3 (__beg, __name) are const void *, the rest are
   pointer-sized integers.  */
2170 static const char *field_names
[7]
2171 = { "__beg", "__size", "__size_with_redzone",
2172 "__name", "__module_name", "__has_dynamic_init", "__location"};
2173 tree fields
[7], ret
;
2176 ret
= make_node (RECORD_TYPE
);
2177 for (i
= 0; i
< 7; i
++)
2180 = build_decl (UNKNOWN_LOCATION
, FIELD_DECL
,
2181 get_identifier (field_names
[i
]),
2182 (i
== 0 || i
== 3) ? const_ptr_type_node
2183 : pointer_sized_int_node
);
2184 DECL_CONTEXT (fields
[i
]) = ret
;
2186 DECL_CHAIN (fields
[i
- 1]) = fields
[i
];
2188 tree type_decl
= build_decl (input_location
, TYPE_DECL
,
2189 get_identifier ("__asan_global"), ret
);
2190 DECL_IGNORED_P (type_decl
) = 1;
2191 DECL_ARTIFICIAL (type_decl
) = 1;
2192 TYPE_FIELDS (ret
) = fields
[0];
2193 TYPE_NAME (ret
) = type_decl
;
2194 TYPE_STUB_DECL (ret
) = type_decl
;
2199 /* Append description of a single global DECL into vector V.
2200 TYPE is __asan_global struct type as returned by asan_global_struct. */
2203 asan_add_global (tree decl
, tree type
, vec
<constructor_elt
, va_gc
> *v
)
2205 tree init
, uptr
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
2206 unsigned HOST_WIDE_INT size
;
2207 tree str_cst
, module_name_cst
, refdecl
= decl
;
2208 vec
<constructor_elt
, va_gc
> *vinner
= NULL
;
2210 pretty_printer asan_pp
, module_name_pp
;
2212 if (DECL_NAME (decl
))
2213 pp_tree_identifier (&asan_pp
, DECL_NAME (decl
));
2215 pp_string (&asan_pp
, "<unknown>");
2216 str_cst
= asan_pp_string (&asan_pp
);
2218 pp_string (&module_name_pp
, main_input_filename
);
2219 module_name_cst
= asan_pp_string (&module_name_pp
);
2221 if (asan_needs_local_alias (decl
))
2224 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASAN", vec_safe_length (v
) + 1);
2225 refdecl
= build_decl (DECL_SOURCE_LOCATION (decl
),
2226 VAR_DECL
, get_identifier (buf
), TREE_TYPE (decl
));
2227 TREE_ADDRESSABLE (refdecl
) = TREE_ADDRESSABLE (decl
);
2228 TREE_READONLY (refdecl
) = TREE_READONLY (decl
);
2229 TREE_THIS_VOLATILE (refdecl
) = TREE_THIS_VOLATILE (decl
);
2230 DECL_GIMPLE_REG_P (refdecl
) = DECL_GIMPLE_REG_P (decl
);
2231 DECL_ARTIFICIAL (refdecl
) = DECL_ARTIFICIAL (decl
);
2232 DECL_IGNORED_P (refdecl
) = DECL_IGNORED_P (decl
);
2233 TREE_STATIC (refdecl
) = 1;
2234 TREE_PUBLIC (refdecl
) = 0;
2235 TREE_USED (refdecl
) = 1;
2236 assemble_alias (refdecl
, DECL_ASSEMBLER_NAME (decl
));
2239 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2240 fold_convert (const_ptr_type_node
,
2241 build_fold_addr_expr (refdecl
)));
2242 size
= tree_to_uhwi (DECL_SIZE_UNIT (decl
));
2243 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
, build_int_cst (uptr
, size
));
2244 size
+= asan_red_zone_size (size
);
2245 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
, build_int_cst (uptr
, size
));
2246 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2247 fold_convert (const_ptr_type_node
, str_cst
));
2248 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2249 fold_convert (const_ptr_type_node
, module_name_cst
));
2250 varpool_node
*vnode
= varpool_node::get (decl
);
2251 int has_dynamic_init
= vnode
? vnode
->dynamically_initialized
: 0;
2252 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2253 build_int_cst (uptr
, has_dynamic_init
));
2254 tree locptr
= NULL_TREE
;
2255 location_t loc
= DECL_SOURCE_LOCATION (decl
);
2256 expanded_location xloc
= expand_location (loc
);
2257 if (xloc
.file
!= NULL
)
2259 static int lasanloccnt
= 0;
2261 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASANLOC", ++lasanloccnt
);
2262 tree var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
, get_identifier (buf
),
2263 ubsan_get_source_location_type ());
2264 TREE_STATIC (var
) = 1;
2265 TREE_PUBLIC (var
) = 0;
2266 DECL_ARTIFICIAL (var
) = 1;
2267 DECL_IGNORED_P (var
) = 1;
2268 pretty_printer filename_pp
;
2269 pp_string (&filename_pp
, xloc
.file
);
2270 tree str
= asan_pp_string (&filename_pp
);
2271 tree ctor
= build_constructor_va (TREE_TYPE (var
), 3,
2272 NULL_TREE
, str
, NULL_TREE
,
2273 build_int_cst (unsigned_type_node
,
2274 xloc
.line
), NULL_TREE
,
2275 build_int_cst (unsigned_type_node
,
2277 TREE_CONSTANT (ctor
) = 1;
2278 TREE_STATIC (ctor
) = 1;
2279 DECL_INITIAL (var
) = ctor
;
2280 varpool_node::finalize_decl (var
);
2281 locptr
= fold_convert (uptr
, build_fold_addr_expr (var
));
2284 locptr
= build_int_cst (uptr
, 0);
2285 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
, locptr
);
2286 init
= build_constructor (type
, vinner
);
2287 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, init
);
2290 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2292 initialize_sanitizer_builtins (void)
2296 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT
))
2299 tree BT_FN_VOID
= build_function_type_list (void_type_node
, NULL_TREE
);
2301 = build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
2302 tree BT_FN_VOID_CONST_PTR
2303 = build_function_type_list (void_type_node
, const_ptr_type_node
, NULL_TREE
);
2304 tree BT_FN_VOID_PTR_PTR
2305 = build_function_type_list (void_type_node
, ptr_type_node
,
2306 ptr_type_node
, NULL_TREE
);
2307 tree BT_FN_VOID_PTR_PTR_PTR
2308 = build_function_type_list (void_type_node
, ptr_type_node
,
2309 ptr_type_node
, ptr_type_node
, NULL_TREE
);
2310 tree BT_FN_VOID_PTR_PTRMODE
2311 = build_function_type_list (void_type_node
, ptr_type_node
,
2312 pointer_sized_int_node
, NULL_TREE
);
2314 = build_function_type_list (void_type_node
, integer_type_node
, NULL_TREE
);
2315 tree BT_FN_SIZE_CONST_PTR_INT
2316 = build_function_type_list (size_type_node
, const_ptr_type_node
,
2317 integer_type_node
, NULL_TREE
);
2318 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT
[5];
2319 tree BT_FN_IX_CONST_VPTR_INT
[5];
2320 tree BT_FN_IX_VPTR_IX_INT
[5];
2321 tree BT_FN_VOID_VPTR_IX_INT
[5];
2323 = build_pointer_type (build_qualified_type (void_type_node
,
2324 TYPE_QUAL_VOLATILE
));
2326 = build_pointer_type (build_qualified_type (void_type_node
,
2330 = lang_hooks
.types
.type_for_size (BOOL_TYPE_SIZE
, 1);
2332 for (i
= 0; i
< 5; i
++)
2334 tree ix
= build_nonstandard_integer_type (BITS_PER_UNIT
* (1 << i
), 1);
2335 BT_FN_BOOL_VPTR_PTR_IX_INT_INT
[i
]
2336 = build_function_type_list (boolt
, vptr
, ptr_type_node
, ix
,
2337 integer_type_node
, integer_type_node
,
2339 BT_FN_IX_CONST_VPTR_INT
[i
]
2340 = build_function_type_list (ix
, cvptr
, integer_type_node
, NULL_TREE
);
2341 BT_FN_IX_VPTR_IX_INT
[i
]
2342 = build_function_type_list (ix
, vptr
, ix
, integer_type_node
,
2344 BT_FN_VOID_VPTR_IX_INT
[i
]
2345 = build_function_type_list (void_type_node
, vptr
, ix
,
2346 integer_type_node
, NULL_TREE
);
2348 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2349 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2350 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2351 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2352 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2353 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2354 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2355 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2356 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2357 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2358 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2359 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2360 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2361 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2362 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2363 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2364 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2365 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2366 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2367 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2368 #undef ATTR_NOTHROW_LEAF_LIST
2369 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2370 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2371 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2372 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2373 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2374 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2375 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2376 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2377 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2378 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2379 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2380 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2381 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2382 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2383 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2384 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2385 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2386 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2387 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2388 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2389 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2390 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2391 #undef DEF_SANITIZER_BUILTIN
2392 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2393 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2394 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2395 set_call_expr_flags (decl, ATTRS); \
2396 set_builtin_decl (ENUM, decl, true);
2398 #include "sanitizer.def"
2400 /* -fsanitize=object-size uses __builtin_object_size, but that might
2401 not be available for e.g. Fortran at this point. We use
2402 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2403 if ((flag_sanitize
& SANITIZE_OBJECT_SIZE
)
2404 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE
))
2405 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE
, "object_size",
2406 BT_FN_SIZE_CONST_PTR_INT
,
2407 ATTR_PURE_NOTHROW_LEAF_LIST
)
2409 #undef DEF_SANITIZER_BUILTIN
2412 /* Called via htab_traverse. Count number of emitted
2413 STRING_CSTs in the constant hash table. */
2416 count_string_csts (constant_descriptor_tree
**slot
,
2417 unsigned HOST_WIDE_INT
*data
)
2419 struct constant_descriptor_tree
*desc
= *slot
;
2420 if (TREE_CODE (desc
->value
) == STRING_CST
2421 && TREE_ASM_WRITTEN (desc
->value
)
2422 && asan_protect_global (desc
->value
))
2427 /* Helper structure to pass two parameters to
2430 struct asan_add_string_csts_data
2433 vec
<constructor_elt
, va_gc
> *v
;
2436 /* Called via hash_table::traverse. Call asan_add_global
2437 on emitted STRING_CSTs from the constant hash table. */
2440 add_string_csts (constant_descriptor_tree
**slot
,
2441 asan_add_string_csts_data
*aascd
)
2443 struct constant_descriptor_tree
*desc
= *slot
;
2444 if (TREE_CODE (desc
->value
) == STRING_CST
2445 && TREE_ASM_WRITTEN (desc
->value
)
2446 && asan_protect_global (desc
->value
))
2448 asan_add_global (SYMBOL_REF_DECL (XEXP (desc
->rtl
, 0)),
2449 aascd
->type
, aascd
->v
);
2454 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2455 invoke ggc_collect. */
2456 static GTY(()) tree asan_ctor_statements
;
2458 /* Module-level instrumentation.
2459 - Insert __asan_init_vN() into the list of CTORs.
2460 - TODO: insert redzones around globals.
2464 asan_finish_file (void)
2466 varpool_node
*vnode
;
2467 unsigned HOST_WIDE_INT gcount
= 0;
2469 if (shadow_ptr_types
[0] == NULL_TREE
)
2470 asan_init_shadow_ptr_types ();
2471 /* Avoid instrumenting code in the asan ctors/dtors.
2472 We don't need to insert padding after the description strings,
2473 nor after .LASAN* array. */
2474 flag_sanitize
&= ~SANITIZE_ADDRESS
;
2476 /* For user-space we want asan constructors to run first.
2477 Linux kernel does not support priorities other than default, and the only
2478 other user of constructors is coverage. So we run with the default
2480 int priority
= flag_sanitize
& SANITIZE_USER_ADDRESS
2481 ? MAX_RESERVED_INIT_PRIORITY
- 1 : DEFAULT_INIT_PRIORITY
;
2483 if (flag_sanitize
& SANITIZE_USER_ADDRESS
)
2485 tree fn
= builtin_decl_implicit (BUILT_IN_ASAN_INIT
);
2486 append_to_statement_list (build_call_expr (fn
, 0), &asan_ctor_statements
);
2488 FOR_EACH_DEFINED_VARIABLE (vnode
)
2489 if (TREE_ASM_WRITTEN (vnode
->decl
)
2490 && asan_protect_global (vnode
->decl
))
2492 hash_table
<tree_descriptor_hasher
> *const_desc_htab
= constant_pool_htab ();
2493 const_desc_htab
->traverse
<unsigned HOST_WIDE_INT
*, count_string_csts
>
2497 tree type
= asan_global_struct (), var
, ctor
;
2498 tree dtor_statements
= NULL_TREE
;
2499 vec
<constructor_elt
, va_gc
> *v
;
2502 type
= build_array_type_nelts (type
, gcount
);
2503 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASAN", 0);
2504 var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
, get_identifier (buf
),
2506 TREE_STATIC (var
) = 1;
2507 TREE_PUBLIC (var
) = 0;
2508 DECL_ARTIFICIAL (var
) = 1;
2509 DECL_IGNORED_P (var
) = 1;
2510 vec_alloc (v
, gcount
);
2511 FOR_EACH_DEFINED_VARIABLE (vnode
)
2512 if (TREE_ASM_WRITTEN (vnode
->decl
)
2513 && asan_protect_global (vnode
->decl
))
2514 asan_add_global (vnode
->decl
, TREE_TYPE (type
), v
);
2515 struct asan_add_string_csts_data aascd
;
2516 aascd
.type
= TREE_TYPE (type
);
2518 const_desc_htab
->traverse
<asan_add_string_csts_data
*, add_string_csts
>
2520 ctor
= build_constructor (type
, v
);
2521 TREE_CONSTANT (ctor
) = 1;
2522 TREE_STATIC (ctor
) = 1;
2523 DECL_INITIAL (var
) = ctor
;
2524 varpool_node::finalize_decl (var
);
2526 tree fn
= builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS
);
2527 tree gcount_tree
= build_int_cst (pointer_sized_int_node
, gcount
);
2528 append_to_statement_list (build_call_expr (fn
, 2,
2529 build_fold_addr_expr (var
),
2531 &asan_ctor_statements
);
2533 fn
= builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS
);
2534 append_to_statement_list (build_call_expr (fn
, 2,
2535 build_fold_addr_expr (var
),
2538 cgraph_build_static_cdtor ('D', dtor_statements
, priority
);
2540 if (asan_ctor_statements
)
2541 cgraph_build_static_cdtor ('I', asan_ctor_statements
, priority
);
2542 flag_sanitize
|= SANITIZE_ADDRESS
;
2545 /* Expand the ASAN_{LOAD,STORE} builtins. */
2548 asan_expand_check_ifn (gimple_stmt_iterator
*iter
, bool use_calls
)
2550 gimple g
= gsi_stmt (*iter
);
2551 location_t loc
= gimple_location (g
);
2554 = (flag_sanitize
& flag_sanitize_recover
& SANITIZE_KERNEL_ADDRESS
) != 0;
2556 HOST_WIDE_INT flags
= tree_to_shwi (gimple_call_arg (g
, 0));
2557 gcc_assert (flags
< ASAN_CHECK_LAST
);
2558 bool is_scalar_access
= (flags
& ASAN_CHECK_SCALAR_ACCESS
) != 0;
2559 bool is_store
= (flags
& ASAN_CHECK_STORE
) != 0;
2560 bool is_non_zero_len
= (flags
& ASAN_CHECK_NON_ZERO_LEN
) != 0;
2562 tree base
= gimple_call_arg (g
, 1);
2563 tree len
= gimple_call_arg (g
, 2);
2564 HOST_WIDE_INT align
= tree_to_shwi (gimple_call_arg (g
, 3));
2566 HOST_WIDE_INT size_in_bytes
2567 = is_scalar_access
&& tree_fits_shwi_p (len
) ? tree_to_shwi (len
) : -1;
2571 /* Instrument using callbacks. */
2572 gimple g
= gimple_build_assign (make_ssa_name (pointer_sized_int_node
),
2574 gimple_set_location (g
, loc
);
2575 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
2576 tree base_addr
= gimple_assign_lhs (g
);
2579 tree fun
= check_func (is_store
, recover_p
, size_in_bytes
, &nargs
);
2581 g
= gimple_build_call (fun
, 1, base_addr
);
2584 gcc_assert (nargs
== 2);
2585 g
= gimple_build_assign (make_ssa_name (pointer_sized_int_node
),
2587 gimple_set_location (g
, loc
);
2588 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
2589 tree sz_arg
= gimple_assign_lhs (g
);
2590 g
= gimple_build_call (fun
, nargs
, base_addr
, sz_arg
);
2592 gimple_set_location (g
, loc
);
2593 gsi_replace (iter
, g
, false);
2597 HOST_WIDE_INT real_size_in_bytes
= size_in_bytes
== -1 ? 1 : size_in_bytes
;
2599 tree shadow_ptr_type
= shadow_ptr_types
[real_size_in_bytes
== 16 ? 1 : 0];
2600 tree shadow_type
= TREE_TYPE (shadow_ptr_type
);
2602 gimple_stmt_iterator gsi
= *iter
;
2604 if (!is_non_zero_len
)
2606 /* So, the length of the memory area to asan-protect is
2607 non-constant. Let's guard the generated instrumentation code
2612 //asan instrumentation code goes here.
2614 // falltrough instructions, starting with *ITER. */
2616 g
= gimple_build_cond (NE_EXPR
,
2618 build_int_cst (TREE_TYPE (len
), 0),
2619 NULL_TREE
, NULL_TREE
);
2620 gimple_set_location (g
, loc
);
2622 basic_block then_bb
, fallthrough_bb
;
2623 insert_if_then_before_iter (as_a
<gcond
*> (g
), iter
,
2624 /*then_more_likely_p=*/true,
2625 &then_bb
, &fallthrough_bb
);
2626 /* Note that fallthrough_bb starts with the statement that was
2627 pointed to by ITER. */
2629 /* The 'then block' of the 'if (len != 0) condition is where
2630 we'll generate the asan instrumentation code now. */
2631 gsi
= gsi_last_bb (then_bb
);
2634 /* Get an iterator on the point where we can add the condition
2635 statement for the instrumentation. */
2636 basic_block then_bb
, else_bb
;
2637 gsi
= create_cond_insert_point (&gsi
, /*before_p*/false,
2638 /*then_more_likely_p=*/false,
2639 /*create_then_fallthru_edge*/recover_p
,
2643 g
= gimple_build_assign (make_ssa_name (pointer_sized_int_node
),
2645 gimple_set_location (g
, loc
);
2646 gsi_insert_before (&gsi
, g
, GSI_NEW_STMT
);
2647 tree base_addr
= gimple_assign_lhs (g
);
2650 if (real_size_in_bytes
>= 8)
2652 tree shadow
= build_shadow_mem_access (&gsi
, loc
, base_addr
,
2658 /* Slow path for 1, 2 and 4 byte accesses. */
2659 /* Test (shadow != 0)
2660 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
2661 tree shadow
= build_shadow_mem_access (&gsi
, loc
, base_addr
,
2663 gimple shadow_test
= build_assign (NE_EXPR
, shadow
, 0);
2664 gimple_seq seq
= NULL
;
2665 gimple_seq_add_stmt (&seq
, shadow_test
);
2666 /* Aligned (>= 8 bytes) can test just
2667 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2671 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
,
2673 gimple_seq_add_stmt (&seq
,
2674 build_type_cast (shadow_type
,
2675 gimple_seq_last (seq
)));
2676 if (real_size_in_bytes
> 1)
2677 gimple_seq_add_stmt (&seq
,
2678 build_assign (PLUS_EXPR
,
2679 gimple_seq_last (seq
),
2680 real_size_in_bytes
- 1));
2681 t
= gimple_assign_lhs (gimple_seq_last_stmt (seq
));
2684 t
= build_int_cst (shadow_type
, real_size_in_bytes
- 1);
2685 gimple_seq_add_stmt (&seq
, build_assign (GE_EXPR
, t
, shadow
));
2686 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
, shadow_test
,
2687 gimple_seq_last (seq
)));
2688 t
= gimple_assign_lhs (gimple_seq_last (seq
));
2689 gimple_seq_set_location (seq
, loc
);
2690 gsi_insert_seq_after (&gsi
, seq
, GSI_CONTINUE_LINKING
);
2692 /* For non-constant, misaligned or otherwise weird access sizes,
2693 check first and last byte. */
2694 if (size_in_bytes
== -1)
2696 g
= gimple_build_assign (make_ssa_name (pointer_sized_int_node
),
2698 build_int_cst (pointer_sized_int_node
, 1));
2699 gimple_set_location (g
, loc
);
2700 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
2701 tree last
= gimple_assign_lhs (g
);
2702 g
= gimple_build_assign (make_ssa_name (pointer_sized_int_node
),
2703 PLUS_EXPR
, base_addr
, last
);
2704 gimple_set_location (g
, loc
);
2705 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
2706 tree base_end_addr
= gimple_assign_lhs (g
);
2708 tree shadow
= build_shadow_mem_access (&gsi
, loc
, base_end_addr
,
2710 gimple shadow_test
= build_assign (NE_EXPR
, shadow
, 0);
2711 gimple_seq seq
= NULL
;
2712 gimple_seq_add_stmt (&seq
, shadow_test
);
2713 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
,
2715 gimple_seq_add_stmt (&seq
, build_type_cast (shadow_type
,
2716 gimple_seq_last (seq
)));
2717 gimple_seq_add_stmt (&seq
, build_assign (GE_EXPR
,
2718 gimple_seq_last (seq
),
2720 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
, shadow_test
,
2721 gimple_seq_last (seq
)));
2722 gimple_seq_add_stmt (&seq
, build_assign (BIT_IOR_EXPR
, t
,
2723 gimple_seq_last (seq
)));
2724 t
= gimple_assign_lhs (gimple_seq_last (seq
));
2725 gimple_seq_set_location (seq
, loc
);
2726 gsi_insert_seq_after (&gsi
, seq
, GSI_CONTINUE_LINKING
);
2730 g
= gimple_build_cond (NE_EXPR
, t
, build_int_cst (TREE_TYPE (t
), 0),
2731 NULL_TREE
, NULL_TREE
);
2732 gimple_set_location (g
, loc
);
2733 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
2735 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2736 gsi
= gsi_start_bb (then_bb
);
2738 tree fun
= report_error_func (is_store
, recover_p
, size_in_bytes
, &nargs
);
2739 g
= gimple_build_call (fun
, nargs
, base_addr
, len
);
2740 gimple_set_location (g
, loc
);
2741 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
2743 gsi_remove (iter
, true);
2744 *iter
= gsi_start_bb (else_bb
);
2749 /* Instrument the current function. */
2752 asan_instrument (void)
2754 if (shadow_ptr_types
[0] == NULL_TREE
)
2755 asan_init_shadow_ptr_types ();
2756 transform_statements ();
2763 return (flag_sanitize
& SANITIZE_ADDRESS
) != 0
2764 && !lookup_attribute ("no_sanitize_address",
2765 DECL_ATTRIBUTES (current_function_decl
));
2770 const pass_data pass_data_asan
=
2772 GIMPLE_PASS
, /* type */
2774 OPTGROUP_NONE
, /* optinfo_flags */
2775 TV_NONE
, /* tv_id */
2776 ( PROP_ssa
| PROP_cfg
| PROP_gimple_leh
), /* properties_required */
2777 0, /* properties_provided */
2778 0, /* properties_destroyed */
2779 0, /* todo_flags_start */
2780 TODO_update_ssa
, /* todo_flags_finish */
2783 class pass_asan
: public gimple_opt_pass
2786 pass_asan (gcc::context
*ctxt
)
2787 : gimple_opt_pass (pass_data_asan
, ctxt
)
2790 /* opt_pass methods: */
2791 opt_pass
* clone () { return new pass_asan (m_ctxt
); }
2792 virtual bool gate (function
*) { return gate_asan (); }
2793 virtual unsigned int execute (function
*) { return asan_instrument (); }
2795 }; // class pass_asan
2800 make_pass_asan (gcc::context
*ctxt
)
2802 return new pass_asan (ctxt
);
2807 const pass_data pass_data_asan_O0
=
2809 GIMPLE_PASS
, /* type */
2811 OPTGROUP_NONE
, /* optinfo_flags */
2812 TV_NONE
, /* tv_id */
2813 ( PROP_ssa
| PROP_cfg
| PROP_gimple_leh
), /* properties_required */
2814 0, /* properties_provided */
2815 0, /* properties_destroyed */
2816 0, /* todo_flags_start */
2817 TODO_update_ssa
, /* todo_flags_finish */
2820 class pass_asan_O0
: public gimple_opt_pass
2823 pass_asan_O0 (gcc::context
*ctxt
)
2824 : gimple_opt_pass (pass_data_asan_O0
, ctxt
)
2827 /* opt_pass methods: */
2828 virtual bool gate (function
*) { return !optimize
&& gate_asan (); }
2829 virtual unsigned int execute (function
*) { return asan_instrument (); }
2831 }; // class pass_asan_O0
2836 make_pass_asan_O0 (gcc::context
*ctxt
)
2838 return new pass_asan_O0 (ctxt
);
2841 #include "gt-asan.h"