]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/asan.c
[multiple changes]
[thirdparty/gcc.git] / gcc / asan.c
CommitLineData
37d6f666 1/* AddressSanitizer, a fast memory error detector.
cbe34bb5 2 Copyright (C) 2012-2017 Free Software Foundation, Inc.
37d6f666
WM
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
c7131fb2 25#include "backend.h"
957060b5
AM
26#include "target.h"
27#include "rtl.h"
4d648807 28#include "tree.h"
c7131fb2 29#include "gimple.h"
957060b5
AM
30#include "cfghooks.h"
31#include "alloc-pool.h"
32#include "tree-pass.h"
4d0cdd0c 33#include "memmodel.h"
957060b5 34#include "tm_p.h"
c7775327 35#include "ssa.h"
957060b5
AM
36#include "stringpool.h"
37#include "tree-ssanames.h"
957060b5
AM
38#include "optabs.h"
39#include "emit-rtl.h"
40#include "cgraph.h"
41#include "gimple-pretty-print.h"
42#include "alias.h"
40e23961 43#include "fold-const.h"
60393bbc 44#include "cfganal.h"
45b0be94 45#include "gimplify.h"
5be5c238 46#include "gimple-iterator.h"
d8a2d370
DN
47#include "varasm.h"
48#include "stor-layout.h"
37d6f666 49#include "tree-iterator.h"
37d6f666 50#include "asan.h"
36566b39
PK
51#include "dojump.h"
52#include "explow.h"
f3ddd692 53#include "expr.h"
8240018b 54#include "output.h"
0e668eaf 55#include "langhooks.h"
a9e0d843 56#include "cfgloop.h"
ff2a63a7 57#include "gimple-builder.h"
b9a55b13 58#include "ubsan.h"
b5ebc991 59#include "params.h"
9b2b7279 60#include "builtins.h"
860503d8 61#include "fnmatch.h"
c7775327 62#include "tree-inline.h"
37d6f666 63
497a1c66
JJ
64/* AddressSanitizer finds out-of-bounds and use-after-free bugs
65 with <2x slowdown on average.
66
67 The tool consists of two parts:
68 instrumentation module (this file) and a run-time library.
69 The instrumentation module adds a run-time check before every memory insn.
70 For a 8- or 16- byte load accessing address X:
71 ShadowAddr = (X >> 3) + Offset
72 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
73 if (ShadowValue)
74 __asan_report_load8(X);
75 For a load of N bytes (N=1, 2 or 4) from address X:
76 ShadowAddr = (X >> 3) + Offset
77 ShadowValue = *(char*)ShadowAddr;
78 if (ShadowValue)
79 if ((X & 7) + N - 1 > ShadowValue)
80 __asan_report_loadN(X);
81 Stores are instrumented similarly, but using __asan_report_storeN functions.
ef1b3fda
KS
82 A call to __asan_init_vN() is inserted to the list of module CTORs.
83 N is the version number of the AddressSanitizer API. The changes between the
84 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
497a1c66
JJ
85
86 The run-time library redefines malloc (so that redzones are inserted around
87 the allocated memory) and free (so that reuse of free-ed memory is delayed),
ef1b3fda 88 provides __asan_report* and __asan_init_vN functions.
497a1c66
JJ
89
90 Read more:
91 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
92
93 The current implementation supports detection of out-of-bounds and
94 use-after-free in the heap, on the stack and for global variables.
95
96 [Protection of stack variables]
97
98 To understand how detection of out-of-bounds and use-after-free works
99 for stack variables, lets look at this example on x86_64 where the
100 stack grows downward:
f3ddd692
JJ
101
102 int
103 foo ()
104 {
105 char a[23] = {0};
106 int b[2] = {0};
107
108 a[5] = 1;
109 b[1] = 2;
110
111 return a[5] + b[1];
112 }
113
497a1c66
JJ
114 For this function, the stack protected by asan will be organized as
115 follows, from the top of the stack to the bottom:
f3ddd692 116
497a1c66 117 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
f3ddd692 118
497a1c66
JJ
119 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
120 the next slot be 32 bytes aligned; this one is called Partial
121 Redzone; this 32 bytes alignment is an asan constraint]
f3ddd692 122
497a1c66 123 Slot 3/ [24 bytes for variable 'a']
f3ddd692 124
497a1c66 125 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
f3ddd692 126
497a1c66 127 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
f3ddd692 128
497a1c66 129 Slot 6/ [8 bytes for variable 'b']
f3ddd692 130
497a1c66
JJ
131 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
132 'LEFT RedZone']
f3ddd692 133
497a1c66
JJ
134 The 32 bytes of LEFT red zone at the bottom of the stack can be
135 decomposed as such:
f3ddd692
JJ
136
137 1/ The first 8 bytes contain a magical asan number that is always
138 0x41B58AB3.
139
140 2/ The following 8 bytes contains a pointer to a string (to be
141 parsed at runtime by the runtime asan library), which format is
142 the following:
143
144 "<function-name> <space> <num-of-variables-on-the-stack>
145 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
146 <length-of-var-in-bytes> ){n} "
147
148 where '(...){n}' means the content inside the parenthesis occurs 'n'
149 times, with 'n' being the number of variables on the stack.
c1f5ce48 150
ef1b3fda
KS
151 3/ The following 8 bytes contain the PC of the current function which
152 will be used by the run-time library to print an error message.
f3ddd692 153
ef1b3fda 154 4/ The following 8 bytes are reserved for internal use by the run-time.
f3ddd692 155
497a1c66 156 The shadow memory for that stack layout is going to look like this:
f3ddd692
JJ
157
158 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
159 The F1 byte pattern is a magic number called
160 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
161 the memory for that shadow byte is part of a the LEFT red zone
162 intended to seat at the bottom of the variables on the stack.
163
164 - content of shadow memory 8 bytes for slots 6 and 5:
165 0xF4F4F400. The F4 byte pattern is a magic number
166 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
167 memory region for this shadow byte is a PARTIAL red zone
168 intended to pad a variable A, so that the slot following
169 {A,padding} is 32 bytes aligned.
170
171 Note that the fact that the least significant byte of this
172 shadow memory content is 00 means that 8 bytes of its
173 corresponding memory (which corresponds to the memory of
174 variable 'b') is addressable.
175
176 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
177 The F2 byte pattern is a magic number called
178 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
179 region for this shadow byte is a MIDDLE red zone intended to
180 seat between two 32 aligned slots of {variable,padding}.
181
182 - content of shadow memory 8 bytes for slot 3 and 2:
497a1c66 183 0xF4000000. This represents is the concatenation of
f3ddd692
JJ
184 variable 'a' and the partial red zone following it, like what we
185 had for variable 'b'. The least significant 3 bytes being 00
186 means that the 3 bytes of variable 'a' are addressable.
187
497a1c66 188 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
f3ddd692
JJ
189 The F3 byte pattern is a magic number called
190 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
191 region for this shadow byte is a RIGHT red zone intended to seat
192 at the top of the variables of the stack.
193
497a1c66
JJ
194 Note that the real variable layout is done in expand_used_vars in
195 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
196 stack variables as well as the different red zones, emits some
197 prologue code to populate the shadow memory as to poison (mark as
198 non-accessible) the regions of the red zones and mark the regions of
199 stack variables as accessible, and emit some epilogue code to
200 un-poison (mark as accessible) the regions of red zones right before
201 the function exits.
8240018b 202
497a1c66 203 [Protection of global variables]
8240018b 204
497a1c66
JJ
205 The basic idea is to insert a red zone between two global variables
206 and install a constructor function that calls the asan runtime to do
207 the populating of the relevant shadow memory regions at load time.
8240018b 208
497a1c66
JJ
209 So the global variables are laid out as to insert a red zone between
210 them. The size of the red zones is so that each variable starts on a
211 32 bytes boundary.
8240018b 212
497a1c66
JJ
213 Then a constructor function is installed so that, for each global
214 variable, it calls the runtime asan library function
215 __asan_register_globals with an instance of this type:
8240018b
JJ
216
217 struct __asan_global
218 {
219 // Address of the beginning of the global variable.
220 const void *__beg;
221
222 // Initial size of the global variable.
223 uptr __size;
224
225 // Size of the global variable + size of the red zone. This
226 // size is 32 bytes aligned.
227 uptr __size_with_redzone;
228
229 // Name of the global variable.
230 const void *__name;
231
ef1b3fda
KS
232 // Name of the module where the global variable is declared.
233 const void *__module_name;
234
59b36ecf 235 // 1 if it has dynamic initialization, 0 otherwise.
8240018b 236 uptr __has_dynamic_init;
866e32ad
KS
237
238 // A pointer to struct that contains source location, could be NULL.
239 __asan_global_source_location *__location;
8240018b
JJ
240 }
241
497a1c66
JJ
242 A destructor function that calls the runtime asan library function
243 __asan_unregister_globals is also installed. */
f3ddd692 244
fd960af2
YG
/* Shadow memory offset; either supplied explicitly through
   set_asan_shadow_offset or computed lazily from the target hook in
   asan_shadow_offset.  */
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
/* True once asan_shadow_offset_value holds a valid value.  */
static bool asan_shadow_offset_computed;
/* User-supplied glob patterns of section names to sanitize.  The
   strings are owned by this vector (xstrndup'ed and freed in
   set_sanitized_sections).  */
static vec<char *> sanitized_sections;
fd960af2 248
6dc4a604
ML
/* Set of variable declarations that are going to be guarded by
   use-after-scope sanitizer.  */

static hash_set<tree> *asan_handled_variables = NULL;

/* NOTE(review): exported set of labels; only the declaration is visible
   in this chunk -- the code that populates and consumes it lives
   elsewhere, so confirm its exact contract at the use sites.  */
hash_set <tree> *asan_used_labels = NULL;
255
fd960af2
YG
256/* Sets shadow offset to value in string VAL. */
257
258bool
259set_asan_shadow_offset (const char *val)
260{
261 char *endp;
c1f5ce48 262
fd960af2
YG
263 errno = 0;
264#ifdef HAVE_LONG_LONG
265 asan_shadow_offset_value = strtoull (val, &endp, 0);
266#else
267 asan_shadow_offset_value = strtoul (val, &endp, 0);
268#endif
269 if (!(*val != '\0' && *endp == '\0' && errno == 0))
270 return false;
271
272 asan_shadow_offset_computed = true;
273
274 return true;
275}
276
18af8d16
YG
277/* Set list of user-defined sections that need to be sanitized. */
278
279void
860503d8 280set_sanitized_sections (const char *sections)
18af8d16 281{
860503d8
YG
282 char *pat;
283 unsigned i;
284 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
285 free (pat);
286 sanitized_sections.truncate (0);
287
288 for (const char *s = sections; *s; )
289 {
290 const char *end;
291 for (end = s; *end && *end != ','; ++end);
292 size_t len = end - s;
293 sanitized_sections.safe_push (xstrndup (s, len));
294 s = *end ? end + 1 : end;
295 }
18af8d16
YG
296}
297
56b7aede
ML
298bool
299asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
300{
301 return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
302 && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
303}
304
6dc4a604
ML
305bool
306asan_sanitize_stack_p (void)
307{
308 return ((flag_sanitize & SANITIZE_ADDRESS)
309 && ASAN_STACK
310 && !asan_no_sanitize_address_p ());
311}
312
18af8d16
YG
313/* Checks whether section SEC should be sanitized. */
314
315static bool
316section_sanitized_p (const char *sec)
317{
860503d8
YG
318 char *pat;
319 unsigned i;
320 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
321 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
322 return true;
18af8d16
YG
323 return false;
324}
325
fd960af2
YG
326/* Returns Asan shadow offset. */
327
328static unsigned HOST_WIDE_INT
329asan_shadow_offset ()
330{
331 if (!asan_shadow_offset_computed)
332 {
333 asan_shadow_offset_computed = true;
334 asan_shadow_offset_value = targetm.asan_shadow_offset ();
335 }
336 return asan_shadow_offset_value;
337}
338
f3ddd692 339alias_set_type asan_shadow_set = -1;
37d6f666 340
6dc4a604 341/* Pointer types to 1, 2 or 4 byte integers in shadow memory. A separate
f6d98484 342 alias set is used for all shadow memory accesses. */
6dc4a604 343static GTY(()) tree shadow_ptr_types[3];
f6d98484 344
e361382f
JJ
345/* Decl for __asan_option_detect_stack_use_after_return. */
346static GTY(()) tree asan_detect_stack_use_after_return;
347
bdcbe80c
DS
348/* Hashtable support for memory references used by gimple
349 statements. */
350
351/* This type represents a reference to a memory region. */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access, in bytes (set from int_size_in_bytes of
     the accessed type by the callers).  */
  HOST_WIDE_INT access_size;
};
360
fcb87c50 361object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
bdcbe80c
DS
362
/* Initialize *REF to describe an access of ACCESS_SIZE bytes starting
   at the expression START.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}
371
/* Allocate an asan_mem_ref from the dedicated object pool and
   initialize it.  START is the address of (or the expression pointing
   to) the beginning of the memory reference.  ACCESS_SIZE is the size
   in bytes of the access to the referenced memory.  The returned
   object is owned by asan_mem_ref_pool (see free_mem_ref_resources).  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref = asan_mem_ref_pool.allocate ();

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
386
/* Build and return a tree for the end of the memory region that
   starts at START and has length LEN.  A NULL_TREE or zero LEN means
   the region is empty and START itself is returned.  Note that this
   may build a new tree expression.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  /* POINTER_PLUS_EXPR requires its offset in ptrofftype.  */
  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}
401
/* Return a tree expression that represents the end of the referenced
   memory region REF of length LEN.  Beware that this function can
   actually build a new tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
411
/* Hash-table traits for asan_mem_ref.  Entries are hashed and compared
   by their START expression only (see the hash/equal definitions
   below); the table does not own the entries (nofree).  */
struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};
417
/* Hash a memory reference by its start expression; the access size is
   deliberately not part of the hash (see equal below).  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}
425
/* Compare two memory references: they are equal when their start
   expressions are operand-equal.  The access size is intentionally
   ignored so one entry per start expression exists; callers compare
   access_size themselves (see has_mem_ref_been_instrumented and
   update_mem_ref_hash_table).  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}
435
c203e8a7 436static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
bdcbe80c
DS
437
/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  /* Lazily created on first use; destroyed by free_mem_ref_resources.  */
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}
451
/* Clear all entries from the memory references hash table.  Safe to
   call when the table was never created.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}
460
/* Free the memory references hash table and the object pool backing
   its entries (the table itself does not own them).  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  /* Releasing the pool frees every asan_mem_ref in one go.  */
  asan_mem_ref_pool.release ();
}
471
/* Return true iff the memory reference REF of ACCESS_SIZE bytes has
   been instrumented.  A recorded instrumentation of a wider access at
   the same start expression covers this one.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}
483
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}
491
492/* Return true iff access to memory region starting at REF and of
493 length LEN has been instrumented. */
494
495static bool
496has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
497{
bdea98ca
MO
498 HOST_WIDE_INT size_in_bytes
499 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
bdcbe80c 500
bdea98ca
MO
501 return size_in_bytes != -1
502 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
bdcbe80c
DS
503}
504
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  On success, *REF_IS_STORE says whether the memory is
   written (the reference is the LHS) or read (it is the RHS).  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  /* A clobber only marks end of life of a variable; it is not a real
     store, so it must not be treated as one.  */
  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
533
534/* Return the memory references contained in a gimple statement
535 representing a builtin call that has to do with memory access. */
536
537static bool
538dd0b7 538get_mem_refs_of_builtin_call (const gcall *call,
bdcbe80c
DS
539 asan_mem_ref *src0,
540 tree *src0_len,
541 bool *src0_is_store,
542 asan_mem_ref *src1,
543 tree *src1_len,
544 bool *src1_is_store,
545 asan_mem_ref *dst,
546 tree *dst_len,
547 bool *dst_is_store,
bdea98ca
MO
548 bool *dest_is_deref,
549 bool *intercepted_p)
bdcbe80c
DS
550{
551 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
552
553 tree callee = gimple_call_fndecl (call);
554 tree source0 = NULL_TREE, source1 = NULL_TREE,
555 dest = NULL_TREE, len = NULL_TREE;
556 bool is_store = true, got_reference_p = false;
40f9f6bb 557 HOST_WIDE_INT access_size = 1;
bdcbe80c 558
bdea98ca
MO
559 *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
560
bdcbe80c
DS
561 switch (DECL_FUNCTION_CODE (callee))
562 {
563 /* (s, s, n) style memops. */
564 case BUILT_IN_BCMP:
565 case BUILT_IN_MEMCMP:
566 source0 = gimple_call_arg (call, 0);
567 source1 = gimple_call_arg (call, 1);
568 len = gimple_call_arg (call, 2);
569 break;
570
571 /* (src, dest, n) style memops. */
572 case BUILT_IN_BCOPY:
573 source0 = gimple_call_arg (call, 0);
574 dest = gimple_call_arg (call, 1);
575 len = gimple_call_arg (call, 2);
576 break;
577
578 /* (dest, src, n) style memops. */
579 case BUILT_IN_MEMCPY:
580 case BUILT_IN_MEMCPY_CHK:
581 case BUILT_IN_MEMMOVE:
582 case BUILT_IN_MEMMOVE_CHK:
583 case BUILT_IN_MEMPCPY:
584 case BUILT_IN_MEMPCPY_CHK:
585 dest = gimple_call_arg (call, 0);
586 source0 = gimple_call_arg (call, 1);
587 len = gimple_call_arg (call, 2);
588 break;
589
590 /* (dest, n) style memops. */
591 case BUILT_IN_BZERO:
592 dest = gimple_call_arg (call, 0);
593 len = gimple_call_arg (call, 1);
594 break;
595
596 /* (dest, x, n) style memops*/
597 case BUILT_IN_MEMSET:
598 case BUILT_IN_MEMSET_CHK:
599 dest = gimple_call_arg (call, 0);
600 len = gimple_call_arg (call, 2);
601 break;
602
603 case BUILT_IN_STRLEN:
604 source0 = gimple_call_arg (call, 0);
605 len = gimple_call_lhs (call);
606 break ;
607
608 /* And now the __atomic* and __sync builtins.
609 These are handled differently from the classical memory memory
610 access builtins above. */
611
612 case BUILT_IN_ATOMIC_LOAD_1:
613 case BUILT_IN_ATOMIC_LOAD_2:
614 case BUILT_IN_ATOMIC_LOAD_4:
615 case BUILT_IN_ATOMIC_LOAD_8:
616 case BUILT_IN_ATOMIC_LOAD_16:
617 is_store = false;
618 /* fall through. */
619
620 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
621 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
622 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
623 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
624 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
625
626 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
627 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
628 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
629 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
630 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
631
632 case BUILT_IN_SYNC_FETCH_AND_OR_1:
633 case BUILT_IN_SYNC_FETCH_AND_OR_2:
634 case BUILT_IN_SYNC_FETCH_AND_OR_4:
635 case BUILT_IN_SYNC_FETCH_AND_OR_8:
636 case BUILT_IN_SYNC_FETCH_AND_OR_16:
637
638 case BUILT_IN_SYNC_FETCH_AND_AND_1:
639 case BUILT_IN_SYNC_FETCH_AND_AND_2:
640 case BUILT_IN_SYNC_FETCH_AND_AND_4:
641 case BUILT_IN_SYNC_FETCH_AND_AND_8:
642 case BUILT_IN_SYNC_FETCH_AND_AND_16:
643
644 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
645 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
646 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
647 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
648 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
649
650 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
651 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
652 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
653 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
654
655 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
656 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
657 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
658 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
659 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
660
661 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
662 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
663 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
664 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
665 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
666
667 case BUILT_IN_SYNC_OR_AND_FETCH_1:
668 case BUILT_IN_SYNC_OR_AND_FETCH_2:
669 case BUILT_IN_SYNC_OR_AND_FETCH_4:
670 case BUILT_IN_SYNC_OR_AND_FETCH_8:
671 case BUILT_IN_SYNC_OR_AND_FETCH_16:
672
673 case BUILT_IN_SYNC_AND_AND_FETCH_1:
674 case BUILT_IN_SYNC_AND_AND_FETCH_2:
675 case BUILT_IN_SYNC_AND_AND_FETCH_4:
676 case BUILT_IN_SYNC_AND_AND_FETCH_8:
677 case BUILT_IN_SYNC_AND_AND_FETCH_16:
678
679 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
680 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
681 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
682 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
683 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
684
685 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
686 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
687 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
688 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
689
690 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
691 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
692 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
693 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
694 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
695
696 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
697 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
698 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
699 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
700 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
701
702 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
703 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
704 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
705 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
706 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
707
708 case BUILT_IN_SYNC_LOCK_RELEASE_1:
709 case BUILT_IN_SYNC_LOCK_RELEASE_2:
710 case BUILT_IN_SYNC_LOCK_RELEASE_4:
711 case BUILT_IN_SYNC_LOCK_RELEASE_8:
712 case BUILT_IN_SYNC_LOCK_RELEASE_16:
713
714 case BUILT_IN_ATOMIC_EXCHANGE_1:
715 case BUILT_IN_ATOMIC_EXCHANGE_2:
716 case BUILT_IN_ATOMIC_EXCHANGE_4:
717 case BUILT_IN_ATOMIC_EXCHANGE_8:
718 case BUILT_IN_ATOMIC_EXCHANGE_16:
719
720 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
721 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
722 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
723 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
724 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
725
726 case BUILT_IN_ATOMIC_STORE_1:
727 case BUILT_IN_ATOMIC_STORE_2:
728 case BUILT_IN_ATOMIC_STORE_4:
729 case BUILT_IN_ATOMIC_STORE_8:
730 case BUILT_IN_ATOMIC_STORE_16:
731
732 case BUILT_IN_ATOMIC_ADD_FETCH_1:
733 case BUILT_IN_ATOMIC_ADD_FETCH_2:
734 case BUILT_IN_ATOMIC_ADD_FETCH_4:
735 case BUILT_IN_ATOMIC_ADD_FETCH_8:
736 case BUILT_IN_ATOMIC_ADD_FETCH_16:
737
738 case BUILT_IN_ATOMIC_SUB_FETCH_1:
739 case BUILT_IN_ATOMIC_SUB_FETCH_2:
740 case BUILT_IN_ATOMIC_SUB_FETCH_4:
741 case BUILT_IN_ATOMIC_SUB_FETCH_8:
742 case BUILT_IN_ATOMIC_SUB_FETCH_16:
743
744 case BUILT_IN_ATOMIC_AND_FETCH_1:
745 case BUILT_IN_ATOMIC_AND_FETCH_2:
746 case BUILT_IN_ATOMIC_AND_FETCH_4:
747 case BUILT_IN_ATOMIC_AND_FETCH_8:
748 case BUILT_IN_ATOMIC_AND_FETCH_16:
749
750 case BUILT_IN_ATOMIC_NAND_FETCH_1:
751 case BUILT_IN_ATOMIC_NAND_FETCH_2:
752 case BUILT_IN_ATOMIC_NAND_FETCH_4:
753 case BUILT_IN_ATOMIC_NAND_FETCH_8:
754 case BUILT_IN_ATOMIC_NAND_FETCH_16:
755
756 case BUILT_IN_ATOMIC_XOR_FETCH_1:
757 case BUILT_IN_ATOMIC_XOR_FETCH_2:
758 case BUILT_IN_ATOMIC_XOR_FETCH_4:
759 case BUILT_IN_ATOMIC_XOR_FETCH_8:
760 case BUILT_IN_ATOMIC_XOR_FETCH_16:
761
762 case BUILT_IN_ATOMIC_OR_FETCH_1:
763 case BUILT_IN_ATOMIC_OR_FETCH_2:
764 case BUILT_IN_ATOMIC_OR_FETCH_4:
765 case BUILT_IN_ATOMIC_OR_FETCH_8:
766 case BUILT_IN_ATOMIC_OR_FETCH_16:
767
768 case BUILT_IN_ATOMIC_FETCH_ADD_1:
769 case BUILT_IN_ATOMIC_FETCH_ADD_2:
770 case BUILT_IN_ATOMIC_FETCH_ADD_4:
771 case BUILT_IN_ATOMIC_FETCH_ADD_8:
772 case BUILT_IN_ATOMIC_FETCH_ADD_16:
773
774 case BUILT_IN_ATOMIC_FETCH_SUB_1:
775 case BUILT_IN_ATOMIC_FETCH_SUB_2:
776 case BUILT_IN_ATOMIC_FETCH_SUB_4:
777 case BUILT_IN_ATOMIC_FETCH_SUB_8:
778 case BUILT_IN_ATOMIC_FETCH_SUB_16:
779
780 case BUILT_IN_ATOMIC_FETCH_AND_1:
781 case BUILT_IN_ATOMIC_FETCH_AND_2:
782 case BUILT_IN_ATOMIC_FETCH_AND_4:
783 case BUILT_IN_ATOMIC_FETCH_AND_8:
784 case BUILT_IN_ATOMIC_FETCH_AND_16:
785
786 case BUILT_IN_ATOMIC_FETCH_NAND_1:
787 case BUILT_IN_ATOMIC_FETCH_NAND_2:
788 case BUILT_IN_ATOMIC_FETCH_NAND_4:
789 case BUILT_IN_ATOMIC_FETCH_NAND_8:
790 case BUILT_IN_ATOMIC_FETCH_NAND_16:
791
792 case BUILT_IN_ATOMIC_FETCH_XOR_1:
793 case BUILT_IN_ATOMIC_FETCH_XOR_2:
794 case BUILT_IN_ATOMIC_FETCH_XOR_4:
795 case BUILT_IN_ATOMIC_FETCH_XOR_8:
796 case BUILT_IN_ATOMIC_FETCH_XOR_16:
797
798 case BUILT_IN_ATOMIC_FETCH_OR_1:
799 case BUILT_IN_ATOMIC_FETCH_OR_2:
800 case BUILT_IN_ATOMIC_FETCH_OR_4:
801 case BUILT_IN_ATOMIC_FETCH_OR_8:
802 case BUILT_IN_ATOMIC_FETCH_OR_16:
803 {
804 dest = gimple_call_arg (call, 0);
805 /* DEST represents the address of a memory location.
806 instrument_derefs wants the memory location, so lets
807 dereference the address DEST before handing it to
808 instrument_derefs. */
809 if (TREE_CODE (dest) == ADDR_EXPR)
810 dest = TREE_OPERAND (dest, 0);
77e83307 811 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
bdcbe80c
DS
812 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
813 dest, build_int_cst (TREE_TYPE (dest), 0));
814 else
815 gcc_unreachable ();
816
817 access_size = int_size_in_bytes (TREE_TYPE (dest));
818 }
819
820 default:
821 /* The other builtins memory access are not instrumented in this
822 function because they either don't have any length parameter,
823 or their length parameter is just a limit. */
824 break;
825 }
826
827 if (len != NULL_TREE)
828 {
829 if (source0 != NULL_TREE)
830 {
831 src0->start = source0;
832 src0->access_size = access_size;
833 *src0_len = len;
834 *src0_is_store = false;
835 }
836
837 if (source1 != NULL_TREE)
838 {
839 src1->start = source1;
840 src1->access_size = access_size;
841 *src1_len = len;
842 *src1_is_store = false;
843 }
844
845 if (dest != NULL_TREE)
846 {
847 dst->start = dest;
848 dst->access_size = access_size;
849 *dst_len = len;
850 *dst_is_store = true;
851 }
852
853 got_reference_p = true;
854 }
b41288b3
JJ
855 else if (dest)
856 {
857 dst->start = dest;
858 dst->access_size = access_size;
859 *dst_len = NULL_TREE;
860 *dst_is_store = is_store;
861 *dest_is_deref = true;
862 got_reference_p = true;
863 }
bdcbe80c 864
b41288b3 865 return got_reference_p;
bdcbe80c
DS
866}
867
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains: an assignment, a memory builtin call, or a call whose LHS
   is stored to.  */

static bool
has_stmt_been_instrumented_p (gimple *stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  /* Every memory reference the builtin touches must already
	     have been instrumented for the whole statement to count
	     as instrumented.  */
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  else if (is_gimple_call (stmt) && gimple_store_p (stmt))
    {
      /* A call whose LHS is a memory reference stores into it.  */
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      r.start = gimple_call_lhs (stmt);
      r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
      return has_mem_ref_been_instrumented (&r);
    }

  return false;
}
928
929/* Insert a memory reference into the hash table. */
930
931static void
40f9f6bb 932update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
bdcbe80c 933{
c203e8a7 934 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
bdcbe80c
DS
935
936 asan_mem_ref r;
937 asan_mem_ref_init (&r, ref, access_size);
938
c203e8a7 939 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
bdea98ca 940 if (*slot == NULL || (*slot)->access_size < access_size)
bdcbe80c
DS
941 *slot = asan_mem_ref_new (ref, access_size);
942}
943
94fce891
JJ
944/* Initialize shadow_ptr_types array. */
945
946static void
947asan_init_shadow_ptr_types (void)
948{
949 asan_shadow_set = new_alias_set ();
6dc4a604
ML
950 tree types[3] = { signed_char_type_node, short_integer_type_node,
951 integer_type_node };
952
953 for (unsigned i = 0; i < 3; i++)
954 {
955 shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
956 TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
957 shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
958 }
959
94fce891
JJ
960 initialize_sanitizer_builtins ();
961}
962
11a877b3 963/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
8240018b
JJ
964
965static tree
11a877b3 966asan_pp_string (pretty_printer *pp)
8240018b 967{
11a877b3 968 const char *buf = pp_formatted_text (pp);
8240018b
JJ
969 size_t len = strlen (buf);
970 tree ret = build_string (len + 1, buf);
971 TREE_TYPE (ret)
94fce891
JJ
972 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
973 build_index_type (size_int (len)));
8240018b
JJ
974 TREE_READONLY (ret) = 1;
975 TREE_STATIC (ret) = 1;
94fce891 976 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
8240018b
JJ
977}
978
f3ddd692
JJ
979/* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
980
981static rtx
982asan_shadow_cst (unsigned char shadow_bytes[4])
983{
984 int i;
985 unsigned HOST_WIDE_INT val = 0;
986 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
987 for (i = 0; i < 4; i++)
988 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
989 << (BITS_PER_UNIT * i);
dcad1dd3 990 return gen_int_mode (val, SImode);
f3ddd692
JJ
991}
992
aeb7e7c1
JJ
993/* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't call a library call here
994 though. */
995
static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  /* First try letting clear_storage expand the clearing inline.  */
  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  /* Scan the sequence for a library call (e.g. memset).  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      /* No call emitted: the inline expansion is usable as-is.  */
      emit_insn (insns);
      return;
    }

  /* clear_storage would have called a library function, which we must
     avoid here (see the function comment); discard that sequence and
     emit an explicit word-store loop instead.  */
  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  /* Store 4 zero bytes per iteration and advance ADDR by 4 until it
     reaches END.  */
  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  /* Mark the loop back edge as taken 80% of the time.  */
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
1033
ef1b3fda
KS
1034void
1035asan_function_start (void)
1036{
1037 section *fnsec = function_section (current_function_decl);
1038 switch_to_section (fnsec);
1039 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
c62ccb9a 1040 current_function_funcdef_no);
ef1b3fda
KS
1041}
1042
6dc4a604
ML
1043/* Return number of shadow bytes that are occupied by a local variable
1044 of SIZE bytes. */
1045
1046static unsigned HOST_WIDE_INT
1047shadow_mem_size (unsigned HOST_WIDE_INT size)
1048{
1049 return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
1050}
1051
f3ddd692
JJ
1052/* Insert code to protect stack vars. The prologue sequence should be emitted
1053 directly, epilogue sequence returned. BASE is the register holding the
1054 stack base, against which OFFSETS array offsets are relative to, OFFSETS
1055 array contains pairs of offsets in reverse order, always the end offset
1056 of some gap that needs protection followed by starting offset,
1057 and DECLS is an array of representative decls for each var partition.
1058 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1059 elements long (OFFSETS include gap before the first variable as well
e361382f
JJ
1060 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1061 register which stack vars DECL_RTLs are based on. Either BASE should be
1062 assigned to PBASE, when not doing use after return protection, or
1063 corresponding address based on __asan_stack_malloc* return value. */
f3ddd692 1064
rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[32];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string: the number of
     variables followed by "<offset> <size> <name-length> <name>" for
     each partition, all space separated.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);

  /* Emit the prologue sequence.  Use-after-return detection (fake
     stack) is only attempted for frames in (32, 65536] bytes.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      /* Lazily create the external flag the runtime sets when
	 use-after-return detection is enabled.  */
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      /* Skip the fake-stack allocation when the runtime flag is 0.  */
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 1,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node));
      /* __asan_stack_malloc_[n] returns a pointer to fake stack if succeeded
	 and NULL otherwise.  Check RET value is NULL here and jump over the
	 BASE reassignment in this case.  Otherwise, reassign BASE to RET.  */
      int very_unlikely = REG_BR_PROB_BASE / 2000 - 1;
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_unlikely);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  /* Store the three frame description words: the frame magic, the
     address of the description string, and the address of the LASANPC
     label (emitted by asan_function_start).  */
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  /* Compute the shadow address of the frame:
     (base >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().  */
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  /* Poison the red zones, 4 shadow bytes (one SImode store) at a
     time: LEFT magic before the first var, MIDDLE between vars,
     RIGHT after the last one; partially-addressable granules get the
     count of addressable bytes.  */
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  /* Red-zone start is not ASAN_RED_ZONE_SIZE aligned: emit a
	     mixed shadow word covering the tail of the variable.  */
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += ASAN_SHADOW_GRANULARITY)
	    if (aoff < offset)
	      {
		if (aoff < offset - (HOST_WIDE_INT)ASAN_SHADOW_GRANULARITY + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_MIDDLE;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      /* If BASE was redirected to the fake stack (orig_base != base),
	 mark the frame retired and poison its whole shadow with the
	 use-after-return magic instead of unpoisoning it.  */
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  /* Fall back to the runtime, which also frees the fake-stack
	     frame.  */
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  /* Unpoison shadow memory of a stack at the very end of a function.
     As we're poisoning stack variables at the end of their scope,
     shadow memory must be properly unpoisoned here.  The easiest approach
     would be to collect all variables that should not be unpoisoned and
     we unpoison shadow memory of the whole stack except ranges
     occupied by these variables.  */
  last_offset = base_offset;
  HOST_WIDE_INT current_offset = last_offset;
  if (length)
    {
      HOST_WIDE_INT var_end_offset = 0;
      HOST_WIDE_INT stack_start = offsets[length - 1];
      gcc_assert (last_offset == stack_start);

      for (int l = length - 2; l > 0; l -= 2)
	{
	  HOST_WIDE_INT var_offset = offsets[l];
	  current_offset = var_offset;
	  var_end_offset = offsets[l - 1];
	  HOST_WIDE_INT rounded_size = ROUND_UP (var_end_offset - var_offset,
						 BITS_PER_UNIT);

	  /* Should we unpoison the variable?
	     NOTE(review): at this point DECL still holds the LASANPC
	     label decl built in the prologue above, not the partition's
	     representative decl from DECLS — confirm whether this should
	     be decls[l / 2 - 1] instead.  */
	  if (asan_handled_variables != NULL
	      && asan_handled_variables->contains (decl))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  const char *n = (DECL_NAME (decl)
				   ? IDENTIFIER_POINTER (DECL_NAME (decl))
				   : "<unknown>");
		  fprintf (dump_file, "Unpoisoning shadow stack for variable: "
			   "%s (%" PRId64 "B)\n", n,
			   var_end_offset - var_offset);
		}

	      /* Clear shadow up to the start of the variable, then skip
		 over the variable's own (already poisoned) range.  */
	      unsigned HOST_WIDE_INT s
		= shadow_mem_size (current_offset - last_offset);
	      asan_clear_shadow (shadow_mem, s);
	      HOST_WIDE_INT shift
		= shadow_mem_size (current_offset - last_offset + rounded_size);
	      shadow_mem = adjust_address (shadow_mem, VOIDmode, shift);
	      last_offset = var_offset + rounded_size;
	      current_offset = last_offset;
	    }

	}

      /* Handle last redzone.  */
      current_offset = offsets[0];
      asan_clear_shadow (shadow_mem,
			 shadow_mem_size (current_offset - last_offset));
    }

  /* Clean-up set with instrumented stack variables.  */
  delete asan_handled_variables;
  asan_handled_variables = NULL;
  delete asan_used_labels;
  asan_used_labels = NULL;

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}
1380
8240018b
JJ
1381/* Return true if DECL, a global var, might be overridden and needs
1382 therefore a local alias. */
1383
1384static bool
1385asan_needs_local_alias (tree decl)
1386{
1387 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1388}
1389
84b0769e
MO
1390/* Return true if DECL, a global var, is an artificial ODR indicator symbol
1391 therefore doesn't need protection. */
1392
1393static bool
1394is_odr_indicator (tree decl)
1395{
1396 return (DECL_ARTIFICIAL (decl)
1397 && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
1398}
1399
8240018b
JJ
1400/* Return true if DECL is a VAR_DECL that should be protected
1401 by Address Sanitizer, by appending a red zone with protected
1402 shadow memory after it and aligning it to at least
1403 ASAN_RED_ZONE_SIZE bytes. */
1404
bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  Those are recognized by their
	 array-of-shadow-element type.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (!VAR_P (decl)
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: Linux kernel is built with -fno-common, so we do instrument
	 globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section
	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      /* No size, or not a compile-time constant size.  */
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      /* Presumably red-zone padding could not preserve an alignment
	 larger than twice the red zone — TODO confirm rationale.  */
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      /* Skip ubsan's internal source-location records and asan's own
	 ODR indicator symbols.  */
      || TREE_TYPE (decl) == ubsan_get_source_location_type ()
      || is_odr_indicator (decl))
    return false;

  /* Only MEMs whose address is a plain SYMBOL_REF can be protected.  */
  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  /* Without .set support we cannot emit the local alias the
     instrumentation would need.  */
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}
1471
40f9f6bb
JJ
1472/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1473 IS_STORE is either 1 (for a store) or 0 (for a load). */
37d6f666
WM
1474
static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
		   int *nargs)
{
  /* Indexed as report[recover_p][is_store][log2 (size), or 5 for the
     variable-length _N variant].  Entry order must match that
     indexing exactly.  */
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  /* SIZE_IN_BYTES == -1 selects the _N variant, which takes two
     arguments (presumably address and length — see the builtin's
     declaration).  */
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  /* Fixed-size variants take just the address.  */
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}
1507
8946c29e
YG
1508/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1509 IS_STORE is either 1 (for a store) or 0 (for a load). */
1510
static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
	    int *nargs)
{
  /* Indexed as check[recover_p][is_store][log2 (size), or 5 for the
     variable-length N variant] — parallel to report_error_func's
     table; entry order must match the indexing exactly.  */
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
	    BUILT_IN_ASAN_LOAD2_NOABORT,
	    BUILT_IN_ASAN_LOAD4_NOABORT,
	    BUILT_IN_ASAN_LOAD8_NOABORT,
	    BUILT_IN_ASAN_LOAD16_NOABORT,
	    BUILT_IN_ASAN_LOADN_NOABORT },
	  { BUILT_IN_ASAN_STORE1_NOABORT,
	    BUILT_IN_ASAN_STORE2_NOABORT,
	    BUILT_IN_ASAN_STORE4_NOABORT,
	    BUILT_IN_ASAN_STORE8_NOABORT,
	    BUILT_IN_ASAN_STORE16_NOABORT,
	    BUILT_IN_ASAN_STOREN_NOABORT } } };
  /* SIZE_IN_BYTES == -1 selects the N variant taking two arguments;
     fixed-size variants take one.  */
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}
1543
01452015 1544/* Split the current basic block and create a condition statement
25ae5027
DS
1545 insertion point right before or after the statement pointed to by
1546 ITER. Return an iterator to the point at which the caller might
1547 safely insert the condition statement.
01452015
DS
1548
1549 THEN_BLOCK must be set to the address of an uninitialized instance
1550 of basic_block. The function will then set *THEN_BLOCK to the
1551 'then block' of the condition statement to be inserted by the
1552 caller.
1553
c4bfe8bf
JJ
1554 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1555 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1556
01452015
DS
1557 Similarly, the function will set *FALLTRHOUGH_BLOCK to the 'else
1558 block' of the condition statement to be inserted by the caller.
1559
1560 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1561 statements starting from *ITER, and *THEN_BLOCK is a new empty
1562 block.
1563
25ae5027
DS
1564 *ITER is adjusted to point to always point to the first statement
1565 of the basic block * FALLTHROUGH_BLOCK. That statement is the
1566 same as what ITER was pointing to prior to calling this function,
1567 if BEFORE_P is true; otherwise, it is its following statement. */
01452015 1568
gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  /* When inserting before *ITER, step back one statement so the split
     below leaves *ITER's statement in the fallthrough block.  */
  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  /* Keep the loop structure consistent; request a fixup pass since we
     changed the CFG inside a loop body.  */
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  /* The caller inserts its condition at the end of the condition
     block.  */
  return gsi_last_bb (cond_bb);
}
1625
25ae5027
DS
1626/* Insert an if condition followed by a 'then block' right before the
1627 statement pointed to by ITER. The fallthrough block -- which is the
1628 else block of the condition as well as the destination of the
1629 outcoming edge of the 'then block' -- starts with the statement
1630 pointed to by ITER.
1631
497a1c66 1632 COND is the condition of the if.
25ae5027
DS
1633
1634 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1635 'then block' is higher than the probability of the edge to the
1636 fallthrough block.
1637
1638 Upon completion of the function, *THEN_BB is set to the newly
1639 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1640 fallthrough block.
1641
1642 *ITER is adjusted to still point to the same statement it was
1643 pointing to initially. */
1644
1645static void
538dd0b7 1646insert_if_then_before_iter (gcond *cond,
25ae5027
DS
1647 gimple_stmt_iterator *iter,
1648 bool then_more_likely_p,
1649 basic_block *then_bb,
1650 basic_block *fallthrough_bb)
1651{
1652 gimple_stmt_iterator cond_insert_point =
1653 create_cond_insert_point (iter,
1654 /*before_p=*/true,
1655 then_more_likely_p,
c4bfe8bf 1656 /*create_then_fallthru_edge=*/true,
25ae5027
DS
1657 then_bb,
1658 fallthrough_bb);
1659 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1660}
1661
6dc4a604
ML
1662/* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
1663 If RETURN_ADDRESS is set to true, return memory location instread
1664 of a value in the shadow memory. */
40f9f6bb
JJ
1665
static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type,
			 bool return_address = false)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple *g;

  /* tmp = base_addr >> ASAN_SHADOW_SHIFT;  */
  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
			   base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  /* tmp += asan_shadow_offset ();  */
  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
			   gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  /* Convert the computed address to the shadow pointer type.  */
  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
			   gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  /* Unless only the address is wanted, dereference it to load the
     shadow value itself.  */
  if (!return_address)
    {
      t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
		  build_int_cst (shadow_ptr_type, 0));
      g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
      gimple_set_location (g, location);
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }

  /* Return the SSA name of the last statement emitted: either the
     shadow value or the shadow address.  */
  return gimple_assign_lhs (g);
}
1703
8946c29e
YG
1704/* BASE can already be an SSA_NAME; in that case, do not create a
1705 new SSA_NAME for it. */
1706
1707static tree
1708maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1709 bool before_p)
1710{
1711 if (TREE_CODE (base) == SSA_NAME)
1712 return base;
355fe088 1713 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
0d0e4a03 1714 TREE_CODE (base), base);
8946c29e
YG
1715 gimple_set_location (g, loc);
1716 if (before_p)
1717 gsi_insert_before (iter, g, GSI_SAME_STMT);
1718 else
1719 gsi_insert_after (iter, g, GSI_NEW_STMT);
1720 return gimple_assign_lhs (g);
1721}
1722
a2f581e1
YG
1723/* LEN can already have necessary size and precision;
1724 in that case, do not create a new variable. */
1725
1726tree
1727maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1728 bool before_p)
1729{
1730 if (ptrofftype_p (len))
1731 return len;
355fe088 1732 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
0d0e4a03 1733 NOP_EXPR, len);
a2f581e1
YG
1734 gimple_set_location (g, loc);
1735 if (before_p)
1736 gsi_insert_before (iter, g, GSI_SAME_STMT);
1737 else
1738 gsi_insert_after (iter, g, GSI_NEW_STMT);
1739 return gimple_assign_lhs (g);
1740}
1741
dc29bf1e 1742/* Instrument the memory access instruction BASE. Insert new
25ae5027 1743 statements before or after ITER.
dc29bf1e
DS
1744
1745 Note that the memory access represented by BASE can be either an
1746 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1747 location. IS_STORE is TRUE for a store, FALSE for a load.
25ae5027 1748 BEFORE_P is TRUE for inserting the instrumentation code before
8946c29e
YG
1749 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1750 for a scalar memory access and FALSE for memory region access.
1751 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1752 length. ALIGN tells alignment of accessed memory object.
1753
1754 START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
1755 memory region have already been instrumented.
25ae5027
DS
1756
1757 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1758 statement it was pointing to prior to calling this function,
1759 otherwise, it points to the statement logically following it. */
37d6f666
WM
1760
static void
build_check_stmt (location_t loc, tree base, tree len,
		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
		  bool is_non_zero_len, bool before_p, bool is_store,
		  bool is_scalar_access, unsigned int align = 0)
{
  gimple_stmt_iterator gsi = *iter;
  gimple *g;

  /* A known positive size must come with IS_NON_ZERO_LEN set.  */
  gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));

  /* NOTE(review): redundant — GSI was already initialized from *ITER
     above.  */
  gsi = *iter;

  /* Make sure the accessed address is available as an SSA name.  */
  base = unshare_expr (base);
  base = maybe_create_ssa_name (loc, base, &gsi, before_p);

  if (len)
    {
      len = unshare_expr (len);
      len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
    }
  else
    {
      /* No length expression: the caller must have given a fixed
	 size.  */
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }

  /* Downgrade to a memory-region check when the access cannot be
     handled as a single scalar shadow check.  */
  if (size_in_bytes > 1)
    {
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
	  || size_in_bytes > 16)
	is_scalar_access = false;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
	{
	  /* On non-strict alignment targets, if
	     16-byte access is just 8-byte aligned,
	     this will result in misaligned shadow
	     memory 2 byte load, but otherwise can
	     be handled using one read.  */
	  if (size_in_bytes != 16
	      || STRICT_ALIGNMENT
	      || align < 8 * BITS_PER_UNIT)
	    is_scalar_access = false;
	}
    }

  /* Encode the access properties into the IFN_ASAN_CHECK flags
     argument.  */
  HOST_WIDE_INT flags = 0;
  if (is_store)
    flags |= ASAN_CHECK_STORE;
  if (is_non_zero_len)
    flags |= ASAN_CHECK_NON_ZERO_LEN;
  if (is_scalar_access)
    flags |= ASAN_CHECK_SCALAR_ACCESS;

  /* Emit ASAN_CHECK (flags, base, len, align-in-bytes); it is expanded
     into a real check later.  */
  g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
				  build_int_cst (integer_type_node, flags),
				  base, len,
				  build_int_cst (integer_type_node,
						 align / BITS_PER_UNIT));
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  else
    {
      /* Advance *ITER past the inserted check so the caller continues
	 after it.  */
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      gsi_next (&gsi);
      *iter = gsi;
    }
}
1830
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is the source code location of the access; when it is
   UNKNOWN_LOCATION the expression's own location is used instead.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
                   location_t location, bool is_store)
{
  /* Respect the knobs that disable instrumentation of writes or reads
     entirely.  */
  if (is_store && !ASAN_INSTRUMENT_WRITES)
    return;
  if (!is_store && !ASAN_INSTRUMENT_READS)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;
  if (location == UNKNOWN_LOCATION)
    location = EXPR_LOCATION (t);

  type = TREE_TYPE (t);
  /* Only these expression forms denote memory accesses this pass
     instruments; anything else is left alone.  */
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case BIT_FIELD_REF:
      break;
    default:
      return;
    }

  /* Accesses whose size is unknown or non-positive can't be checked.  */
  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
                                    &unsignedp, &reversep, &volatilep);

  /* For a bit-field access, instrument the representative field that
     covers the whole underlying storage unit instead, recursively.  */
  if (TREE_CODE (t) == COMPONENT_REF
      && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
    {
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
      instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
                                       TREE_OPERAND (t, 0), repr,
                                       NULL_TREE), location, is_store);
      return;
    }

  /* Give up on accesses that are not byte-aligned, or whose bit size
     disagrees with the size of the accessed type.  */
  if (bitpos % BITS_PER_UNIT
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    return;

  /* Accesses to a variable that provably stay within its bounds may be
     skipped in some cases, handled below.  */
  if (VAR_P (inner)
      && offset == NULL_TREE
      && bitpos >= 0
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
        return;
      if (!ASAN_GLOBALS && is_global_var (inner))
        return;
      if (!TREE_STATIC (inner))
        {
          /* Automatic vars in the current function will be always
             accessible.  */
          if (decl_function_context (inner) == current_function_decl
              && (!asan_sanitize_use_after_scope ()
                  || !TREE_ADDRESSABLE (inner)))
            return;
        }
      /* Always instrument external vars, they might be dynamically
         initialized.  */
      else if (!DECL_EXTERNAL (inner))
        {
          /* For static vars if they are known not to be dynamically
             initialized, they will be always accessible.  */
          varpool_node *vnode = varpool_node::get (inner);
          if (vnode && !vnode->dynamically_initialized)
            return;
        }
    }

  /* Emit the check unless an identical access has already been
     instrumented in the current extended basic block, then record it
     in the hash table of instrumented references.  */
  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
                        /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
                        is_store, /*is_scalar_access*/true, align);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}
1933
bdea98ca
MO
1934/* Insert a memory reference into the hash table if access length
1935 can be determined in compile time. */
1936
1937static void
1938maybe_update_mem_ref_hash_table (tree base, tree len)
1939{
1940 if (!POINTER_TYPE_P (TREE_TYPE (base))
1941 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1942 return;
1943
1944 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1945
1946 if (size_in_bytes != -1)
1947 update_mem_ref_hash_table (base, size_in_bytes);
1948}
1949
/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   the sizeof (*BASE) bytes).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

static void
instrument_mem_region_access (tree base, tree len,
                              gimple_stmt_iterator *iter,
                              location_t location, bool is_store)
{
  /* Nothing to do for ill-typed operands or statically empty regions.  */
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  /* -1 means the region length is not a compile-time constant.  */
  HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  /* Skip regions of known size that were already instrumented;
     variable-sized regions are always checked.  */
  if ((size_in_bytes == -1)
      || !has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      build_check_stmt (location, base, len, size_in_bytes, iter,
                        /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
                        is_store, /*is_scalar_access*/false, /*align*/0);
    }

  maybe_update_mem_ref_hash_table (base, len);
  /* The insertion above may have invalidated ITER; re-acquire a fresh
     iterator for the statement it refers to.  */
  *iter = gsi_for_stmt (gsi_stmt (*iter));
}
25ae5027 1981
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  /* Instrumentation of memory intrinsics can be disabled wholesale.  */
  if (!ASAN_MEMINTRIN)
    return false;

  bool iter_advanced_p = false;
  gcall *call = as_a <gcall *> (gsi_stmt (*iter));

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  location_t loc = gimple_location (call);

  /* A builtin can reference up to two source regions and one
     destination region.  */
  asan_mem_ref src0, src1, dest;
  asan_mem_ref_init (&src0, NULL, 1);
  asan_mem_ref_init (&src1, NULL, 1);
  asan_mem_ref_init (&dest, NULL, 1);

  tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
  bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
    dest_is_deref = false, intercepted_p = true;

  if (get_mem_refs_of_builtin_call (call,
                                    &src0, &src0_len, &src0_is_store,
                                    &src1, &src1_len, &src1_is_store,
                                    &dest, &dest_len, &dest_is_store,
                                    &dest_is_deref, &intercepted_p))
    {
      if (dest_is_deref)
        {
          /* The destination is accessed via a plain dereference:
             instrument it as a scalar access.  */
          instrument_derefs (iter, dest.start, loc, dest_is_store);
          gsi_next (iter);
          iter_advanced_p = true;
        }
      else if (!intercepted_p
               && (src0_len || src1_len || dest_len))
        {
          /* The builtin is not intercepted by the ASan runtime, so
             instrument each region access explicitly.  */
          if (src0.start != NULL_TREE)
            instrument_mem_region_access (src0.start, src0_len,
                                          iter, loc, /*is_store=*/false);
          if (src1.start != NULL_TREE)
            instrument_mem_region_access (src1.start, src1_len,
                                          iter, loc, /*is_store=*/false);
          if (dest.start != NULL_TREE)
            instrument_mem_region_access (dest.start, dest_len,
                                          iter, loc, /*is_store=*/true);

          /* Re-acquire the iterator at the call and step past it.  */
          *iter = gsi_for_stmt (call);
          gsi_next (iter);
          iter_advanced_p = true;
        }
      else
        {
          /* Intercepted builtin: the runtime performs the checking,
             but still record the regions so that redundant checks in
             this extended basic block can be elided.  */
          if (src0.start != NULL_TREE)
            maybe_update_mem_ref_hash_table (src0.start, src0_len);
          if (src1.start != NULL_TREE)
            maybe_update_mem_ref_hash_table (src1.start, src1_len);
          if (dest.start != NULL_TREE)
            maybe_update_mem_ref_hash_table (dest.start, dest_len);
        }
    }
  return iter_advanced_p;
}
2051
/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the relevant memory reference that was accessed is added to the
   memory references hash table (by instrument_derefs).  */

static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple *s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  /* An assignment can be a store (through its LHS) ...  */
  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      is_store = true;
      instrument_derefs (iter, ref_expr,
                         gimple_location (s),
                         is_store);
      is_instrumented = true;
    }

  /* ... and/or a load (through its RHS); both sides are handled.  */
  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      is_store = false;
      instrument_derefs (iter, ref_expr,
                         gimple_location (s),
                         is_store);
      is_instrumented = true;
    }

  if (is_instrumented)
    gsi_next (iter);

  return is_instrumented;
}
2094
/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple *stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  /* For noreturn calls, notify the runtime via
     __asan_handle_no_return before the call executes.  */
  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
        {
          tree callee = gimple_call_fndecl (stmt);
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_UNREACHABLE:
            case BUILT_IN_TRAP:
              /* Don't instrument these.  */
              return false;
            default:
              break;
            }
        }
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple *g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }

  bool instrumented = false;
  /* A call writing its result into memory is a store through the LHS.  */
  if (gimple_store_p (stmt))
    {
      tree ref_expr = gimple_call_lhs (stmt);
      instrument_derefs (iter, ref_expr,
                         gimple_location (stmt),
                         /*is_store=*/true);

      instrumented = true;
    }

  /* Walk through gimple_call arguments and check them if needed.  */
  unsigned args_num = gimple_call_num_args (stmt);
  for (unsigned i = 0; i < args_num; ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* If ARG is not a non-aggregate register variable, compiler in general
         creates temporary for it and pass it as argument to gimple call.
         But in some cases, e.g. when we pass by value a small structure that
         fits to register, compiler can avoid extra overhead by pulling out
         these temporaries.  In this case, we should check the argument.  */
      if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
        {
          instrument_derefs (iter, arg,
                             gimple_location (stmt),
                             /*is_store=*/false);
          instrumented = true;
        }
    }
  if (instrumented)
    gsi_next (iter);
  return instrumented;
}
2166
/* Walk each instruction of all basic block and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  /* Remember how many blocks existed before instrumentation, so that
     blocks created by the instrumentation itself can be skipped.  */
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
         exactly one predecessor, or if that predecessor (skipping
         over asan created basic blocks) isn't the last processed
         basic block.  Thus we effectively flush on extended basic
         block boundaries.  */
      while (single_pred_p (prev_bb))
        {
          prev_bb = single_pred (prev_bb);
          if (prev_bb->index < saved_last_basic_block)
            break;
        }
      if (prev_bb != last_bb)
        empty_mem_ref_hash_table ();
      last_bb = bb;

      /* The instrumentation helpers advance I themselves when they
         fire; otherwise advance it here.  */
      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
        {
          gimple *s = gsi_stmt (i);

          if (has_stmt_been_instrumented_p (s))
            gsi_next (&i);
          else if (gimple_assign_single_p (s)
                   && !gimple_clobber_p (s)
                   && maybe_instrument_assignment (&i))
            /* Nothing to do as maybe_instrument_assignment advanced
               the iterator I.  */;
          else if (is_gimple_call (s) && maybe_instrument_call (&i))
            /* Nothing to do as maybe_instrument_call
               advanced the iterator I.  */;
          else
            {
              /* No instrumentation happened.

                 If the current instruction is a function call that
                 might free something, let's forget about the memory
                 references that got instrumented.  Otherwise we might
                 miss some instrumentation opportunities.  Do the same
                 for a ASAN_MARK poisoning internal function.  */
              if (is_gimple_call (s)
                  && (!nonfreeing_call_p (s)
                      || asan_mark_p (s, ASAN_MARK_POISON)))
                empty_mem_ref_hash_table ();

              gsi_next (&i);
            }
        }
    }
  free_mem_ref_resources ();
}
2234
/* Build
     __asan_before_dynamic_init (module_name)
   or
     __asan_after_dynamic_init ()
   call.  AFTER_P selects which of the two calls is built.  */

tree
asan_dynamic_init_call (bool after_p)
{
  /* Make sure the shadow pointer types have been set up before any
     asan constructs are built.  */
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  tree fn = builtin_decl_implicit (after_p
                                   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
                                   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
  tree module_name_cst = NULL_TREE;
  /* Only the "before" variant takes the module name argument.  */
  if (!after_p)
    {
      pretty_printer module_name_pp;
      pp_string (&module_name_pp, main_input_filename);

      module_name_cst = asan_pp_string (&module_name_pp);
      module_name_cst = fold_convert (const_ptr_type_node,
                                      module_name_cst);
    }

  return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
}
2263
/* Build
   struct __asan_global
   {
     const void *__beg;
     uptr __size;
     uptr __size_with_redzone;
     const void *__name;
     const void *__module_name;
     uptr __has_dynamic_init;
     __asan_global_source_location *__location;
     char *__odr_indicator;
   } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[]
    = { "__beg", "__size", "__size_with_redzone",
        "__name", "__module_name", "__has_dynamic_init", "__location",
        "__odr_indicator" };
  tree fields[ARRAY_SIZE (field_names)], ret;
  unsigned i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < ARRAY_SIZE (field_names); i++)
    {
      /* Fields 0 and 3 (__beg, __name) are emitted as const pointers;
         all remaining fields use the pointer-sized integer type, which
         is layout-compatible with the pointer fields the runtime
         declares.  */
      fields[i]
        = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                      get_identifier (field_names[i]),
                      (i == 0 || i == 3) ? const_ptr_type_node
                      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
        DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  /* Give the record a proper TYPE_DECL so debug info looks sane, but
     keep it out of user-visible output.  */
  tree type_decl = build_decl (input_location, TYPE_DECL,
                               get_identifier ("__asan_global"), ret);
  DECL_IGNORED_P (type_decl) = 1;
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = type_decl;
  TYPE_STUB_DECL (ret) = type_decl;
  layout_type (ret);
  return ret;
}
2309
/* Create and return odr indicator symbol for DECL.
   TYPE is __asan_global struct type as returned by asan_global_struct.
   The indicator is a public volatile char global named after DECL;
   its address (converted to the uptr field type) is returned.  */

static tree
create_odr_indicator (tree decl, tree type)
{
  char *name;
  /* The second field of __asan_global has the uptr type we need.  */
  tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  tree decl_name
    = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
       : DECL_NAME (decl));
  /* DECL_NAME theoretically might be NULL.  Bail out with 0 in this case.  */
  if (decl_name == NULL_TREE)
    return build_int_cst (uptr, 0);
  size_t len = strlen (IDENTIFIER_POINTER (decl_name)) + sizeof ("__odr_asan_");
  name = XALLOCAVEC (char, len);
  snprintf (name, len, "__odr_asan_%s", IDENTIFIER_POINTER (decl_name));
  /* Where the target allows, separate the prefix from the symbol name
     with a character users cannot produce, to avoid clashes.  */
#ifndef NO_DOT_IN_LABEL
  name[sizeof ("__odr_asan") - 1] = '.';
#elif !defined(NO_DOLLAR_IN_LABEL)
  name[sizeof ("__odr_asan") - 1] = '$';
#endif
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
                         char_type_node);
  TREE_ADDRESSABLE (var) = 1;
  TREE_READONLY (var) = 0;
  /* Volatile so accesses to the indicator are never optimized away.  */
  TREE_THIS_VOLATILE (var) = 1;
  DECL_GIMPLE_REG_P (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_IGNORED_P (var) = 1;
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;
  /* Mirror DECL's visibility so the indicator is exported the same way.  */
  DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
  DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);

  TREE_USED (var) = 1;
  /* Zero-initialize the indicator.  */
  tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
                                    build_int_cst (unsigned_type_node, 0));
  TREE_CONSTANT (ctor) = 1;
  TREE_STATIC (ctor) = 1;
  DECL_INITIAL (var) = ctor;
  DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
                                     NULL, DECL_ATTRIBUTES (var));
  make_decl_rtl (var);
  varpool_node::finalize_decl (var);
  return fold_convert (uptr, build_fold_addr_expr (var));
}
2357
2358/* Return true if DECL, a global var, might be overridden and needs
2359 an additional odr indicator symbol. */
2360
2361static bool
2362asan_needs_odr_indicator_p (tree decl)
2363{
0acd830b
MO
2364 /* Don't emit ODR indicators for kernel because:
2365 a) Kernel is written in C thus doesn't need ODR indicators.
2366 b) Some kernel code may have assumptions about symbols containing specific
2367 patterns in their names. Since ODR indicators contain original names
2368 of symbols they are emitted for, these assumptions would be broken for
2369 ODR indicator symbols. */
2370 return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
2371 && !DECL_ARTIFICIAL (decl)
2372 && !DECL_WEAK (decl)
2373 && TREE_PUBLIC (decl));
84b0769e
MO
2374}
2375
/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  /* The second field of __asan_global has the uptr type.  */
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  /* Build the string constants holding the global's name and the name
     of the module (main input file) it comes from.  */
  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  /* When the global cannot be referenced directly, emit an internal
     .LASAN<n> alias and reference that instead.  */
  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
                            VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  tree odr_indicator_ptr
    = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
       : build_int_cst (uptr, 0));
  /* Now append the fields in __asan_global declaration order:
     __beg, __size, __size_with_redzone, __name, __module_name,
     __has_dynamic_init, __location, __odr_indicator.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node,
                                        build_fold_addr_expr (refdecl)));
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node, module_name_cst));
  varpool_node *vnode = varpool_node::get (decl);
  int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          build_int_cst (uptr, has_dynamic_init));
  /* If the source location is known, materialize a static
     __asan_global_source_location record (.LASANLOC<n>) and point the
     __location field at it; otherwise store a null value.  */
  tree locptr = NULL_TREE;
  location_t loc = DECL_SOURCE_LOCATION (decl);
  expanded_location xloc = expand_location (loc);
  if (xloc.file != NULL)
    {
      static int lasanloccnt = 0;
      char buf[25];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
      tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
                             ubsan_get_source_location_type ());
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      pretty_printer filename_pp;
      pp_string (&filename_pp, xloc.file);
      tree str = asan_pp_string (&filename_pp);
      /* The location record holds { filename, line, column }.  */
      tree ctor = build_constructor_va (TREE_TYPE (var), 3,
                                        NULL_TREE, str, NULL_TREE,
                                        build_int_cst (unsigned_type_node,
                                                       xloc.line), NULL_TREE,
                                        build_int_cst (unsigned_type_node,
                                                       xloc.column));
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      locptr = fold_convert (uptr, build_fold_addr_expr (var));
    }
  else
    locptr = build_int_cst (uptr, 0);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
2470
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.
   Builds the function types referenced by sanitizer.def, then declares
   every sanitizer builtin via the DEF_SANITIZER_BUILTIN expansion.  */
void
initialize_sanitizer_builtins (void)
{
  tree decl;

  /* If the asan init builtin already exists, the front end has done
     this work; don't declare everything twice.  */
  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  /* Function type nodes used by sanitizer.def entries.  The names
     encode return type and parameter types.  */
  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
                                ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
                                ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
                                pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_SIZE_CONST_PTR_INT
    = build_function_type_list (size_type_node, const_ptr_type_node,
                                integer_type_node, NULL_TREE);
  /* Families of types indexed by access size: 1, 2, 4, 8, 16 bytes.  */
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_VOLATILE
                                                |TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
        = build_function_type_list (boolt, vptr, ptr_type_node, ix,
                                    integer_type_node, integer_type_node,
                                    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
        = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
        = build_function_type_list (ix, vptr, ix, integer_type_node,
                                    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
        = build_function_type_list (void_type_node, vptr, ix,
                                    integer_type_node, NULL_TREE);
    }
/* Map the fixed-size names used by sanitizer.def onto the arrays
   built above.  */
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
/* Attribute lists used by sanitizer.def, expressed as ECF_* flags.  */
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
#undef DEF_BUILTIN_STUB
#define DEF_BUILTIN_STUB(ENUM, NAME)
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
                               BUILT_IN_NORMAL, NAME, NULL_TREE); \
  set_call_expr_flags (decl, ATTRS); \
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_object_size, but that might
     not be available for e.g. Fortran at this point.  We use
     DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
  if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
      && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
    DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
                           BT_FN_SIZE_CONST_PTR_INT,
                           ATTR_PURE_NOTHROW_LEAF_LIST)

#undef DEF_SANITIZER_BUILTIN
#undef DEF_BUILTIN_STUB
}
2595
94fce891
JJ
2596/* Called via htab_traverse. Count number of emitted
2597 STRING_CSTs in the constant hash table. */
2598
2a22f99c
TS
2599int
2600count_string_csts (constant_descriptor_tree **slot,
2601 unsigned HOST_WIDE_INT *data)
94fce891 2602{
2a22f99c 2603 struct constant_descriptor_tree *desc = *slot;
94fce891
JJ
2604 if (TREE_CODE (desc->value) == STRING_CST
2605 && TREE_ASM_WRITTEN (desc->value)
2606 && asan_protect_global (desc->value))
2a22f99c 2607 ++*data;
94fce891
JJ
2608 return 1;
2609}
2610
/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  /* The __asan_global record type (see asan_global_struct).  */
  tree type;
  /* Constructor-element vector that global descriptions are
     appended to.  */
  vec<constructor_elt, va_gc> *v;
};
2619
2a22f99c 2620/* Called via hash_table::traverse. Call asan_add_global
94fce891
JJ
2621 on emitted STRING_CSTs from the constant hash table. */
2622
2a22f99c
TS
2623int
2624add_string_csts (constant_descriptor_tree **slot,
2625 asan_add_string_csts_data *aascd)
94fce891 2626{
2a22f99c 2627 struct constant_descriptor_tree *desc = *slot;
94fce891
JJ
2628 if (TREE_CODE (desc->value) == STRING_CST
2629 && TREE_ASM_WRITTEN (desc->value)
2630 && asan_protect_global (desc->value))
2631 {
94fce891
JJ
2632 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2633 aascd->type, aascd->v);
2634 }
2635 return 1;
2636}
2637
8240018b
JJ
/* Statement list accumulating the calls that form the body of the
   module-level ASan constructor (built in asan_finish_file).
   Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;
2641
/* Module-level instrumentation.
   - Insert __asan_init_vN () into the list of CTORs.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  /* For user-space we want asan constructors to run first.
     Linux kernel does not support priorities other than default, and the only
     other user of constructors is coverage. So we run with the default
     priority.  */
  int priority = flag_sanitize & SANITIZE_USER_ADDRESS
		 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;

  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    {
      /* Runtime init and a check that compiler and runtime ABI match.  */
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
      fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
    }
  /* Count the emitted globals ASan protects; string constants from the
     constant pool are counted separately via the hash table walk.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
  const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
    (&gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      /* Build a static .LASAN0 array of __asan_global descriptors,
	 one element per protected global.  */
      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
	(&aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);

      /* Register the array with the runtime in the ctor ...  */
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      /* ... and unregister it in a matching static dtor.  */
      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements, priority);
    }
  if (asan_ctor_statements)
    cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
  /* Restore the flag cleared at the top of this function.  */
  flag_sanitize |= SANITIZE_ADDRESS;
}
2730
6dc4a604
ML
2731/* Poison or unpoison (depending on IS_CLOBBER variable) shadow memory based
2732 on SHADOW address. Newly added statements will be added to ITER with
2733 given location LOC. We mark SIZE bytes in shadow memory, where
2734 LAST_CHUNK_SIZE is greater than zero in situation where we are at the
2735 end of a variable. */
2736
2737static void
2738asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
2739 tree shadow,
2740 unsigned HOST_WIDE_INT base_addr_offset,
2741 bool is_clobber, unsigned size,
2742 unsigned last_chunk_size)
2743{
2744 tree shadow_ptr_type;
2745
2746 switch (size)
2747 {
2748 case 1:
2749 shadow_ptr_type = shadow_ptr_types[0];
2750 break;
2751 case 2:
2752 shadow_ptr_type = shadow_ptr_types[1];
2753 break;
2754 case 4:
2755 shadow_ptr_type = shadow_ptr_types[2];
2756 break;
2757 default:
2758 gcc_unreachable ();
2759 }
2760
2761 unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
2762 unsigned HOST_WIDE_INT val = 0;
2763 for (unsigned i = 0; i < size; ++i)
2764 {
2765 unsigned char shadow_c = c;
2766 if (i == size - 1 && last_chunk_size && !is_clobber)
2767 shadow_c = last_chunk_size;
2768 val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
2769 }
2770
2771 /* Handle last chunk in unpoisoning. */
2772 tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
2773
2774 tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
2775 build_int_cst (shadow_ptr_type, base_addr_offset));
2776
2777 gimple *g = gimple_build_assign (dest, magic);
2778 gimple_set_location (g, loc);
2779 gsi_insert_after (iter, g, GSI_NEW_STMT);
2780}
2781
/* Expand the ASAN_MARK builtins.  Replaces the internal call at *ITER
   with either direct shadow-memory stores (for small regions) or a call
   to the __asan_{,un}poison_stack_memory runtime helpers.  Always
   returns false.  */

bool
asan_expand_mark_ifn (gimple_stmt_iterator *iter)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  /* Argument 0 carries the poison/unpoison selector.  */
  HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
  bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;

  /* Argument 1 is the address of the variable being marked.  */
  tree base = gimple_call_arg (g, 1);
  gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
  tree decl = TREE_OPERAND (base, 0);

  /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
  if (TREE_CODE (decl) == COMPONENT_REF
      && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
    decl = TREE_OPERAND (decl, 0);

  gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
  /* Record the variable so later phases know it was handled here.  */
  if (asan_handled_variables == NULL)
    asan_handled_variables = new hash_set<tree> (16);
  asan_handled_variables->add (decl);
  /* Argument 2 is the region length in bytes; must be a known,
     non-zero constant.  */
  tree len = gimple_call_arg (g, 2);

  gcc_assert (tree_fits_shwi_p (len));
  unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
  gcc_assert (size_in_bytes);

  /* Replace the ASAN_MARK call itself with BASE cast to a
     pointer-sized integer; further statements are emitted around it.  */
  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_replace (iter, g, false);
  tree base_addr = gimple_assign_lhs (g);

  /* Generate direct emission if size_in_bytes is small.  */
  if (size_in_bytes <= ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD)
    {
      unsigned HOST_WIDE_INT shadow_size = shadow_mem_size (size_in_bytes);

      tree shadow = build_shadow_mem_access (iter, loc, base_addr,
					     shadow_ptr_types[0], true);

      /* Store the shadow pattern in the widest chunks available
	 (4, 2, then 1 byte at a time).  */
      for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
	{
	  unsigned size = 1;
	  if (shadow_size - offset >= 4)
	    size = 4;
	  else if (shadow_size - offset >= 2)
	    size = 2;

	  /* If this chunk covers the end of the variable, compute how
	     many bytes of the final granule are addressable.  */
	  unsigned HOST_WIDE_INT last_chunk_size = 0;
	  unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
	  if (s > size_in_bytes)
	    last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);

	  asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
				   size, last_chunk_size);
	  offset += size;
	}
    }
  else
    {
      /* Large region: call the runtime helper instead.  */
      g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			       NOP_EXPR, len);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree sz_arg = gimple_assign_lhs (g);

      tree fun
	= builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
				 : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
      g = gimple_build_call (fun, 2, base_addr, sz_arg);
      gimple_set_location (g, loc);
      gsi_insert_after (iter, g, GSI_NEW_STMT);
    }

  return false;
}
2861
c62ccb9a
YG
/* Expand the ASAN_{LOAD,STORE} builtins at *ITER.  If USE_CALLS, emit a
   call to the __asan_{load,store}N callback; otherwise emit an inline
   shadow-memory check with an error-reporting call on the slow path.
   Returns true iff the statement at *ITER was removed and *ITER now
   points into a new basic block.  */

bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  /* Whether the runtime should recover (continue) after a report
     depends on which of the user/kernel sanitizers is active.  */
  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;

  /* Decode the ASAN_CHECK_* flag bits from argument 0.  */
  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);
  HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));

  /* -1 means the access size is not a compile-time constant usable for
     the sized check/report entry points.  */
  HOST_WIDE_INT size_in_bytes
    = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (use_calls)
    {
      /* Instrument using callbacks.  */
      gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				       NOP_EXPR, base);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree base_addr = gimple_assign_lhs (g);

      int nargs;
      tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
      if (nargs == 1)
	g = gimple_build_call (fun, 1, base_addr);
      else
	{
	  gcc_assert (nargs == 2);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   NOP_EXPR, len);
	  gimple_set_location (g, loc);
	  gsi_insert_before (iter, g, GSI_SAME_STMT);
	  tree sz_arg = gimple_assign_lhs (g);
	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
	}
      gimple_set_location (g, loc);
      gsi_replace (iter, g, false);
      return false;
    }

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     //asan instrumentation code goes here.
	   }
	 // falltrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
			     len,
			     build_int_cst (TREE_TYPE (len), 0),
			     NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
				  /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0) condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, /*before_p*/false,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge*/recover_p,
				  &then_bb,
				  &else_bb);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  /* T ends up holding the "is this access bad?" predicate value.  */
  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      /* 8- and 16-byte accesses: any non-zero shadow byte is a fault.  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1, 2 and 4 byte accesses.  */
      /* Test (shadow != 0)
	 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow).  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq seq = NULL;
      gimple_seq_add_stmt (&seq, shadow_test);
      /* Aligned (>= 8 bytes) can test just
	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
	 to be 0.  */
      if (align < 8)
	{
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_addr, 7));
	  gimple_seq_add_stmt (&seq,
			       build_type_cast (shadow_type,
						gimple_seq_last (seq)));
	  if (real_size_in_bytes > 1)
	    gimple_seq_add_stmt (&seq,
				 build_assign (PLUS_EXPR,
					       gimple_seq_last (seq),
					       real_size_in_bytes - 1));
	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
	}
      else
	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
					       gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, loc);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

      /* For non-constant, misaligned or otherwise weird access sizes,
	 check first and last byte.  */
      if (size_in_bytes == -1)
	{
	  /* Compute base_end_addr = base_addr + len - 1 and run the
	     same shadow test on it, OR-ing the result into T.  */
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   MINUS_EXPR, len,
				   build_int_cst (pointer_sized_int_node, 1));
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree last = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   PLUS_EXPR, base_addr, last);
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree base_end_addr = gimple_assign_lhs (g);

	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
						 shadow_ptr_type);
	  gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq seq = NULL;
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_end_addr, 7));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   shadow));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
						   gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, loc);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	}
    }

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}
3067
c7775327
ML
3068/* Create ASAN shadow variable for a VAR_DECL which has been rewritten
3069 into SSA. Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING. */
3070
3071static tree
3072create_asan_shadow_var (tree var_decl,
3073 hash_map<tree, tree> &shadow_vars_mapping)
3074{
3075 tree *slot = shadow_vars_mapping.get (var_decl);
3076 if (slot == NULL)
3077 {
3078 tree shadow_var = copy_node (var_decl);
3079
3080 copy_body_data id;
3081 memset (&id, 0, sizeof (copy_body_data));
3082 id.src_fn = id.dst_fn = current_function_decl;
3083 copy_decl_for_dup_finish (&id, var_decl, shadow_var);
3084
3085 DECL_ARTIFICIAL (shadow_var) = 1;
3086 DECL_IGNORED_P (shadow_var) = 1;
3087 DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
3088 gimple_add_tmp_var (shadow_var);
3089
3090 shadow_vars_mapping.put (var_decl, shadow_var);
3091 return shadow_var;
3092 }
3093 else
3094 return *slot;
3095}
3096
/* Expand the ASAN_POISON internal call at *ITER: give the poisoned SSA
   name a shadow variable, poison that variable's shadow memory, and
   make every real (non-debug) use of the SSA name report an error.
   Edge insertions may be queued; *NEED_COMMIT_EDGE_INSERT is set when
   the caller must commit them.  Always returns true.  */

bool
asan_expand_poison_ifn (gimple_stmt_iterator *iter,
			bool *need_commit_edge_insert,
			hash_map<tree, tree> &shadow_vars_mapping)
{
  gimple *g = gsi_stmt (*iter);
  tree poisoned_var = gimple_call_lhs (g);
  /* No LHS means nothing ever reads the poisoned value; just drop it.  */
  if (!poisoned_var)
    {
      gsi_remove (iter, true);
      return true;
    }

  tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
					    shadow_vars_mapping);

  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
  tree size = DECL_SIZE_UNIT (shadow_var);
  /* This ASAN_MARK will replace the original statement below.  */
  gimple *poison_call
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node,
						 ASAN_MARK_POISON),
				  build_fold_addr_expr (shadow_var), size);

  /* Precede every non-debug use of POISONED_VAR with a report call.  */
  use_operand_p use_p;
  imm_use_iterator imm_iter;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, poisoned_var)
    {
      gimple *use = USE_STMT (use_p);
      if (is_gimple_debug (use))
	continue;

      int nargs;
      tree fun = report_error_func (false, recover_p, tree_to_uhwi (size),
				    &nargs);

      gcall *call = gimple_build_call (fun, 1,
				       build_fold_addr_expr (shadow_var));
      gimple_set_location (call, gimple_location (use));
      gimple *call_to_insert = call;

      /* The USE can be a gimple PHI node.  If so, insert the call on
	 all edges leading to the PHI node.  */
      if (is_a <gphi *> (use))
	{
	  gphi *phi = dyn_cast<gphi *> (use);
	  for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	    if (gimple_phi_arg_def (phi, i) == poisoned_var)
	      {
		edge e = gimple_phi_arg_edge (phi, i);

		/* Each edge needs its own copy of the call.  */
		if (call_to_insert == NULL)
		  call_to_insert = gimple_copy (call);

		gsi_insert_seq_on_edge (e, call_to_insert);
		*need_commit_edge_insert = true;
		call_to_insert = NULL;
	      }
	}
      else
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use);
	  gsi_insert_before (&gsi, call, GSI_NEW_STMT);
	}
    }

  /* Detach POISONED_VAR from the removed statement and substitute the
     poisoning ASAN_MARK in its place.  */
  SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
  SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
  gsi_replace (iter, poison_call, false);

  return true;
}
3173
37d6f666
WM
3174/* Instrument the current function. */
3175
3176static unsigned int
3177asan_instrument (void)
3178{
f6d98484 3179 if (shadow_ptr_types[0] == NULL_TREE)
94fce891 3180 asan_init_shadow_ptr_types ();
37d6f666 3181 transform_statements ();
37d6f666
WM
3182 return 0;
3183}
3184
3185static bool
3186gate_asan (void)
3187{
de5a5fa1 3188 return (flag_sanitize & SANITIZE_ADDRESS) != 0
e664c61c 3189 && !lookup_attribute ("no_sanitize_address",
77bc5132 3190 DECL_ATTRIBUTES (current_function_decl));
37d6f666
WM
3191}
3192
27a4cd48
DM
namespace {

/* Descriptor for the main ASan instrumentation pass.  Requires SSA
   form and a CFG; SSA is updated afterwards.  */

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

/* The ASan pass run as part of the normal pipeline (see pass_asan_O0
   for the variant that runs only without optimization).  */

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace
3223
3224gimple_opt_pass *
3225make_pass_asan (gcc::context *ctxt)
3226{
3227 return new pass_asan (ctxt);
3228}
3229
27a4cd48
DM
namespace {

/* Descriptor for the -O0 variant of the ASan pass; identical to
   pass_data_asan except for the name.  */

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

/* Same instrumentation as pass_asan, but gated to run only when
   optimization is disabled.  */

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace
3259
3260gimple_opt_pass *
3261make_pass_asan_O0 (gcc::context *ctxt)
3262{
3263 return new pass_asan_O0 (ctxt);
3264}
3265
f6d98484 3266#include "gt-asan.h"