]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/asan.c
[Patch Doc] Update documentation for __fp16 type
[thirdparty/gcc.git] / gcc / asan.c
CommitLineData
37d6f666 1/* AddressSanitizer, a fast memory error detector.
818ab71a 2 Copyright (C) 2012-2016 Free Software Foundation, Inc.
37d6f666
WM
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
c7131fb2 25#include "backend.h"
957060b5
AM
26#include "target.h"
27#include "rtl.h"
4d648807 28#include "tree.h"
c7131fb2 29#include "gimple.h"
957060b5
AM
30#include "cfghooks.h"
31#include "alloc-pool.h"
32#include "tree-pass.h"
4d0cdd0c 33#include "memmodel.h"
957060b5
AM
34#include "tm_p.h"
35#include "stringpool.h"
f90aa46c 36#include "tree-vrp.h"
957060b5 37#include "tree-ssanames.h"
957060b5
AM
38#include "optabs.h"
39#include "emit-rtl.h"
40#include "cgraph.h"
41#include "gimple-pretty-print.h"
42#include "alias.h"
40e23961 43#include "fold-const.h"
60393bbc 44#include "cfganal.h"
45b0be94 45#include "gimplify.h"
5be5c238 46#include "gimple-iterator.h"
d8a2d370
DN
47#include "varasm.h"
48#include "stor-layout.h"
37d6f666 49#include "tree-iterator.h"
37d6f666 50#include "asan.h"
36566b39
PK
51#include "dojump.h"
52#include "explow.h"
f3ddd692 53#include "expr.h"
8240018b 54#include "output.h"
0e668eaf 55#include "langhooks.h"
a9e0d843 56#include "cfgloop.h"
ff2a63a7 57#include "gimple-builder.h"
b9a55b13 58#include "ubsan.h"
b5ebc991 59#include "params.h"
9b2b7279 60#include "builtins.h"
860503d8 61#include "fnmatch.h"
37d6f666 62
497a1c66
JJ
63/* AddressSanitizer finds out-of-bounds and use-after-free bugs
64 with <2x slowdown on average.
65
66 The tool consists of two parts:
67 instrumentation module (this file) and a run-time library.
68 The instrumentation module adds a run-time check before every memory insn.
69 For a 8- or 16- byte load accessing address X:
70 ShadowAddr = (X >> 3) + Offset
71 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
72 if (ShadowValue)
73 __asan_report_load8(X);
74 For a load of N bytes (N=1, 2 or 4) from address X:
75 ShadowAddr = (X >> 3) + Offset
76 ShadowValue = *(char*)ShadowAddr;
77 if (ShadowValue)
78 if ((X & 7) + N - 1 > ShadowValue)
79 __asan_report_loadN(X);
80 Stores are instrumented similarly, but using __asan_report_storeN functions.
ef1b3fda
KS
 81 A call to __asan_init_vN() is inserted to the list of module CTORs.
82 N is the version number of the AddressSanitizer API. The changes between the
83 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
497a1c66
JJ
84
 85 The run-time library redefines malloc (so that redzones are inserted around
86 the allocated memory) and free (so that reuse of free-ed memory is delayed),
ef1b3fda 87 provides __asan_report* and __asan_init_vN functions.
497a1c66
JJ
88
89 Read more:
90 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
91
92 The current implementation supports detection of out-of-bounds and
93 use-after-free in the heap, on the stack and for global variables.
94
95 [Protection of stack variables]
96
97 To understand how detection of out-of-bounds and use-after-free works
98 for stack variables, lets look at this example on x86_64 where the
99 stack grows downward:
f3ddd692
JJ
100
101 int
102 foo ()
103 {
104 char a[23] = {0};
105 int b[2] = {0};
106
107 a[5] = 1;
108 b[1] = 2;
109
110 return a[5] + b[1];
111 }
112
497a1c66
JJ
113 For this function, the stack protected by asan will be organized as
114 follows, from the top of the stack to the bottom:
f3ddd692 115
497a1c66 116 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
f3ddd692 117
497a1c66
JJ
118 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
119 the next slot be 32 bytes aligned; this one is called Partial
120 Redzone; this 32 bytes alignment is an asan constraint]
f3ddd692 121
497a1c66 122 Slot 3/ [24 bytes for variable 'a']
f3ddd692 123
497a1c66 124 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
f3ddd692 125
497a1c66 126 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
f3ddd692 127
497a1c66 128 Slot 6/ [8 bytes for variable 'b']
f3ddd692 129
497a1c66
JJ
130 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
131 'LEFT RedZone']
f3ddd692 132
497a1c66
JJ
133 The 32 bytes of LEFT red zone at the bottom of the stack can be
134 decomposed as such:
f3ddd692
JJ
135
136 1/ The first 8 bytes contain a magical asan number that is always
137 0x41B58AB3.
138
139 2/ The following 8 bytes contains a pointer to a string (to be
140 parsed at runtime by the runtime asan library), which format is
141 the following:
142
143 "<function-name> <space> <num-of-variables-on-the-stack>
144 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
145 <length-of-var-in-bytes> ){n} "
146
147 where '(...){n}' means the content inside the parenthesis occurs 'n'
148 times, with 'n' being the number of variables on the stack.
c1f5ce48 149
ef1b3fda
KS
150 3/ The following 8 bytes contain the PC of the current function which
151 will be used by the run-time library to print an error message.
f3ddd692 152
ef1b3fda 153 4/ The following 8 bytes are reserved for internal use by the run-time.
f3ddd692 154
497a1c66 155 The shadow memory for that stack layout is going to look like this:
f3ddd692
JJ
156
157 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
158 The F1 byte pattern is a magic number called
159 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
 160 the memory for that shadow byte is part of the LEFT red zone
161 intended to seat at the bottom of the variables on the stack.
162
163 - content of shadow memory 8 bytes for slots 6 and 5:
164 0xF4F4F400. The F4 byte pattern is a magic number
165 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
166 memory region for this shadow byte is a PARTIAL red zone
167 intended to pad a variable A, so that the slot following
168 {A,padding} is 32 bytes aligned.
169
170 Note that the fact that the least significant byte of this
171 shadow memory content is 00 means that 8 bytes of its
172 corresponding memory (which corresponds to the memory of
173 variable 'b') is addressable.
174
175 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
176 The F2 byte pattern is a magic number called
177 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
178 region for this shadow byte is a MIDDLE red zone intended to
179 seat between two 32 aligned slots of {variable,padding}.
180
181 - content of shadow memory 8 bytes for slot 3 and 2:
497a1c66 182 0xF4000000. This represents the concatenation of
f3ddd692
JJ
183 variable 'a' and the partial red zone following it, like what we
184 had for variable 'b'. The least significant 3 bytes being 00
185 means that the 3 bytes of variable 'a' are addressable.
186
497a1c66 187 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
f3ddd692
JJ
188 The F3 byte pattern is a magic number called
189 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
190 region for this shadow byte is a RIGHT red zone intended to seat
191 at the top of the variables of the stack.
192
497a1c66
JJ
193 Note that the real variable layout is done in expand_used_vars in
194 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
195 stack variables as well as the different red zones, emits some
196 prologue code to populate the shadow memory as to poison (mark as
197 non-accessible) the regions of the red zones and mark the regions of
198 stack variables as accessible, and emit some epilogue code to
199 un-poison (mark as accessible) the regions of red zones right before
200 the function exits.
8240018b 201
497a1c66 202 [Protection of global variables]
8240018b 203
497a1c66
JJ
204 The basic idea is to insert a red zone between two global variables
205 and install a constructor function that calls the asan runtime to do
206 the populating of the relevant shadow memory regions at load time.
8240018b 207
497a1c66
JJ
208 So the global variables are laid out as to insert a red zone between
209 them. The size of the red zones is so that each variable starts on a
210 32 bytes boundary.
8240018b 211
497a1c66
JJ
212 Then a constructor function is installed so that, for each global
213 variable, it calls the runtime asan library function
 214 __asan_register_globals with an instance of this type:
8240018b
JJ
215
216 struct __asan_global
217 {
218 // Address of the beginning of the global variable.
219 const void *__beg;
220
221 // Initial size of the global variable.
222 uptr __size;
223
224 // Size of the global variable + size of the red zone. This
225 // size is 32 bytes aligned.
226 uptr __size_with_redzone;
227
228 // Name of the global variable.
229 const void *__name;
230
ef1b3fda
KS
231 // Name of the module where the global variable is declared.
232 const void *__module_name;
233
59b36ecf 234 // 1 if it has dynamic initialization, 0 otherwise.
8240018b 235 uptr __has_dynamic_init;
866e32ad
KS
236
237 // A pointer to struct that contains source location, could be NULL.
238 __asan_global_source_location *__location;
8240018b
JJ
239 }
240
497a1c66
JJ
241 A destructor function that calls the runtime asan library function
 242 __asan_unregister_globals is also installed. */
f3ddd692 243
fd960af2
YG
244static unsigned HOST_WIDE_INT asan_shadow_offset_value;
245static bool asan_shadow_offset_computed;
860503d8 246static vec<char *> sanitized_sections;
fd960af2 247
6dc4a604
ML
248/* Set of variable declarations that are going to be guarded by
249 use-after-scope sanitizer. */
250
251static hash_set<tree> *asan_handled_variables = NULL;
252
253hash_set <tree> *asan_used_labels = NULL;
254
fd960af2
YG
255/* Sets shadow offset to value in string VAL. */
256
257bool
258set_asan_shadow_offset (const char *val)
259{
260 char *endp;
c1f5ce48 261
fd960af2
YG
262 errno = 0;
263#ifdef HAVE_LONG_LONG
264 asan_shadow_offset_value = strtoull (val, &endp, 0);
265#else
266 asan_shadow_offset_value = strtoul (val, &endp, 0);
267#endif
268 if (!(*val != '\0' && *endp == '\0' && errno == 0))
269 return false;
270
271 asan_shadow_offset_computed = true;
272
273 return true;
274}
275
18af8d16
YG
276/* Set list of user-defined sections that need to be sanitized. */
277
278void
860503d8 279set_sanitized_sections (const char *sections)
18af8d16 280{
860503d8
YG
281 char *pat;
282 unsigned i;
283 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
284 free (pat);
285 sanitized_sections.truncate (0);
286
287 for (const char *s = sections; *s; )
288 {
289 const char *end;
290 for (end = s; *end && *end != ','; ++end);
291 size_t len = end - s;
292 sanitized_sections.safe_push (xstrndup (s, len));
293 s = *end ? end + 1 : end;
294 }
18af8d16
YG
295}
296
56b7aede
ML
297bool
298asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
299{
300 return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
301 && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
302}
303
6dc4a604
ML
304bool
305asan_sanitize_stack_p (void)
306{
307 return ((flag_sanitize & SANITIZE_ADDRESS)
308 && ASAN_STACK
309 && !asan_no_sanitize_address_p ());
310}
311
18af8d16
YG
312/* Checks whether section SEC should be sanitized. */
313
314static bool
315section_sanitized_p (const char *sec)
316{
860503d8
YG
317 char *pat;
318 unsigned i;
319 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
320 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
321 return true;
18af8d16
YG
322 return false;
323}
324
fd960af2
YG
325/* Returns Asan shadow offset. */
326
327static unsigned HOST_WIDE_INT
328asan_shadow_offset ()
329{
330 if (!asan_shadow_offset_computed)
331 {
332 asan_shadow_offset_computed = true;
333 asan_shadow_offset_value = targetm.asan_shadow_offset ();
334 }
335 return asan_shadow_offset_value;
336}
337
f3ddd692 338alias_set_type asan_shadow_set = -1;
37d6f666 339
6dc4a604 340/* Pointer types to 1, 2 or 4 byte integers in shadow memory. A separate
f6d98484 341 alias set is used for all shadow memory accesses. */
6dc4a604 342static GTY(()) tree shadow_ptr_types[3];
f6d98484 343
e361382f
JJ
344/* Decl for __asan_option_detect_stack_use_after_return. */
345static GTY(()) tree asan_detect_stack_use_after_return;
346
bdcbe80c
DS
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  It is the
   element type of the hash table maintained by
   update_mem_ref_hash_table below.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access, in bytes.  */
  HOST_WIDE_INT access_size;
};
359
fcb87c50 360object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
bdcbe80c
DS
361
362/* Initializes an instance of asan_mem_ref. */
363
364static void
40f9f6bb 365asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
bdcbe80c
DS
366{
367 ref->start = start;
368 ref->access_size = access_size;
369}
370
371/* Allocates memory for an instance of asan_mem_ref into the memory
372 pool returned by asan_mem_ref_get_alloc_pool and initialize it.
373 START is the address of (or the expression pointing to) the
374 beginning of memory reference. ACCESS_SIZE is the size of the
375 access to the referenced memory. */
376
377static asan_mem_ref*
40f9f6bb 378asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
bdcbe80c 379{
fb0b2914 380 asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
bdcbe80c
DS
381
382 asan_mem_ref_init (ref, start, access_size);
383 return ref;
384}
385
386/* This builds and returns a pointer to the end of the memory region
387 that starts at START and of length LEN. */
388
389tree
390asan_mem_ref_get_end (tree start, tree len)
391{
392 if (len == NULL_TREE || integer_zerop (len))
393 return start;
394
a2f581e1
YG
395 if (!ptrofftype_p (len))
396 len = convert_to_ptrofftype (len);
397
bdcbe80c
DS
398 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
399}
400
401/* Return a tree expression that represents the end of the referenced
402 memory region. Beware that this function can actually build a new
403 tree expression. */
404
405tree
406asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
407{
408 return asan_mem_ref_get_end (ref->start, len);
409}
410
/* Hash traits for asan_mem_ref pointers kept in the hash table below;
   nofree_ptr_hash means the table does not own (free) the entries,
   which live in asan_mem_ref_pool.  */
struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};
416
417/* Hash a memory reference. */
418
419inline hashval_t
420asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
421{
bdea98ca 422 return iterative_hash_expr (mem_ref->start, 0);
bdcbe80c
DS
423}
424
425/* Compare two memory references. We accept the length of either
426 memory references to be NULL_TREE. */
427
428inline bool
429asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
430 const asan_mem_ref *m2)
431{
bdea98ca 432 return operand_equal_p (m1->start, m2->start, 0);
bdcbe80c
DS
433}
434
c203e8a7 435static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
bdcbe80c
DS
436
437/* Returns a reference to the hash table containing memory references.
438 This function ensures that the hash table is created. Note that
439 this hash table is updated by the function
440 update_mem_ref_hash_table. */
441
c203e8a7 442static hash_table<asan_mem_ref_hasher> *
bdcbe80c
DS
443get_mem_ref_hash_table ()
444{
c203e8a7
TS
445 if (!asan_mem_ref_ht)
446 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
bdcbe80c
DS
447
448 return asan_mem_ref_ht;
449}
450
451/* Clear all entries from the memory references hash table. */
452
453static void
454empty_mem_ref_hash_table ()
455{
c203e8a7
TS
456 if (asan_mem_ref_ht)
457 asan_mem_ref_ht->empty ();
bdcbe80c
DS
458}
459
460/* Free the memory references hash table. */
461
462static void
463free_mem_ref_resources ()
464{
c203e8a7
TS
465 delete asan_mem_ref_ht;
466 asan_mem_ref_ht = NULL;
bdcbe80c 467
fb0b2914 468 asan_mem_ref_pool.release ();
bdcbe80c
DS
469}
470
471/* Return true iff the memory reference REF has been instrumented. */
472
473static bool
40f9f6bb 474has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
bdcbe80c
DS
475{
476 asan_mem_ref r;
477 asan_mem_ref_init (&r, ref, access_size);
478
bdea98ca
MO
479 asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
480 return saved_ref && saved_ref->access_size >= access_size;
bdcbe80c
DS
481}
482
483/* Return true iff the memory reference REF has been instrumented. */
484
485static bool
486has_mem_ref_been_instrumented (const asan_mem_ref *ref)
487{
488 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
489}
490
491/* Return true iff access to memory region starting at REF and of
492 length LEN has been instrumented. */
493
494static bool
495has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
496{
bdea98ca
MO
497 HOST_WIDE_INT size_in_bytes
498 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
bdcbe80c 499
bdea98ca
MO
500 return size_in_bytes != -1
501 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
bdcbe80c
DS
502}
503
504/* Set REF to the memory reference present in a gimple assignment
505 ASSIGNMENT. Return true upon successful completion, false
506 otherwise. */
507
508static bool
538dd0b7 509get_mem_ref_of_assignment (const gassign *assignment,
bdcbe80c
DS
510 asan_mem_ref *ref,
511 bool *ref_is_store)
512{
513 gcc_assert (gimple_assign_single_p (assignment));
514
5d751b0c
JJ
515 if (gimple_store_p (assignment)
516 && !gimple_clobber_p (assignment))
bdcbe80c
DS
517 {
518 ref->start = gimple_assign_lhs (assignment);
519 *ref_is_store = true;
520 }
521 else if (gimple_assign_load_p (assignment))
522 {
523 ref->start = gimple_assign_rhs1 (assignment);
524 *ref_is_store = false;
525 }
526 else
527 return false;
528
529 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
530 return true;
531}
532
533/* Return the memory references contained in a gimple statement
534 representing a builtin call that has to do with memory access. */
535
536static bool
538dd0b7 537get_mem_refs_of_builtin_call (const gcall *call,
bdcbe80c
DS
538 asan_mem_ref *src0,
539 tree *src0_len,
540 bool *src0_is_store,
541 asan_mem_ref *src1,
542 tree *src1_len,
543 bool *src1_is_store,
544 asan_mem_ref *dst,
545 tree *dst_len,
546 bool *dst_is_store,
bdea98ca
MO
547 bool *dest_is_deref,
548 bool *intercepted_p)
bdcbe80c
DS
549{
550 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
551
552 tree callee = gimple_call_fndecl (call);
553 tree source0 = NULL_TREE, source1 = NULL_TREE,
554 dest = NULL_TREE, len = NULL_TREE;
555 bool is_store = true, got_reference_p = false;
40f9f6bb 556 HOST_WIDE_INT access_size = 1;
bdcbe80c 557
bdea98ca
MO
558 *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
559
bdcbe80c
DS
560 switch (DECL_FUNCTION_CODE (callee))
561 {
562 /* (s, s, n) style memops. */
563 case BUILT_IN_BCMP:
564 case BUILT_IN_MEMCMP:
565 source0 = gimple_call_arg (call, 0);
566 source1 = gimple_call_arg (call, 1);
567 len = gimple_call_arg (call, 2);
568 break;
569
570 /* (src, dest, n) style memops. */
571 case BUILT_IN_BCOPY:
572 source0 = gimple_call_arg (call, 0);
573 dest = gimple_call_arg (call, 1);
574 len = gimple_call_arg (call, 2);
575 break;
576
577 /* (dest, src, n) style memops. */
578 case BUILT_IN_MEMCPY:
579 case BUILT_IN_MEMCPY_CHK:
580 case BUILT_IN_MEMMOVE:
581 case BUILT_IN_MEMMOVE_CHK:
582 case BUILT_IN_MEMPCPY:
583 case BUILT_IN_MEMPCPY_CHK:
584 dest = gimple_call_arg (call, 0);
585 source0 = gimple_call_arg (call, 1);
586 len = gimple_call_arg (call, 2);
587 break;
588
589 /* (dest, n) style memops. */
590 case BUILT_IN_BZERO:
591 dest = gimple_call_arg (call, 0);
592 len = gimple_call_arg (call, 1);
593 break;
594
595 /* (dest, x, n) style memops*/
596 case BUILT_IN_MEMSET:
597 case BUILT_IN_MEMSET_CHK:
598 dest = gimple_call_arg (call, 0);
599 len = gimple_call_arg (call, 2);
600 break;
601
602 case BUILT_IN_STRLEN:
603 source0 = gimple_call_arg (call, 0);
604 len = gimple_call_lhs (call);
605 break ;
606
607 /* And now the __atomic* and __sync builtins.
608 These are handled differently from the classical memory memory
609 access builtins above. */
610
611 case BUILT_IN_ATOMIC_LOAD_1:
612 case BUILT_IN_ATOMIC_LOAD_2:
613 case BUILT_IN_ATOMIC_LOAD_4:
614 case BUILT_IN_ATOMIC_LOAD_8:
615 case BUILT_IN_ATOMIC_LOAD_16:
616 is_store = false;
617 /* fall through. */
618
619 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
620 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
621 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
622 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
623 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
624
625 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
626 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
627 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
628 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
629 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
630
631 case BUILT_IN_SYNC_FETCH_AND_OR_1:
632 case BUILT_IN_SYNC_FETCH_AND_OR_2:
633 case BUILT_IN_SYNC_FETCH_AND_OR_4:
634 case BUILT_IN_SYNC_FETCH_AND_OR_8:
635 case BUILT_IN_SYNC_FETCH_AND_OR_16:
636
637 case BUILT_IN_SYNC_FETCH_AND_AND_1:
638 case BUILT_IN_SYNC_FETCH_AND_AND_2:
639 case BUILT_IN_SYNC_FETCH_AND_AND_4:
640 case BUILT_IN_SYNC_FETCH_AND_AND_8:
641 case BUILT_IN_SYNC_FETCH_AND_AND_16:
642
643 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
644 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
645 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
646 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
647 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
648
649 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
650 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
651 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
652 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
653
654 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
655 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
656 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
657 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
658 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
659
660 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
661 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
662 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
663 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
664 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
665
666 case BUILT_IN_SYNC_OR_AND_FETCH_1:
667 case BUILT_IN_SYNC_OR_AND_FETCH_2:
668 case BUILT_IN_SYNC_OR_AND_FETCH_4:
669 case BUILT_IN_SYNC_OR_AND_FETCH_8:
670 case BUILT_IN_SYNC_OR_AND_FETCH_16:
671
672 case BUILT_IN_SYNC_AND_AND_FETCH_1:
673 case BUILT_IN_SYNC_AND_AND_FETCH_2:
674 case BUILT_IN_SYNC_AND_AND_FETCH_4:
675 case BUILT_IN_SYNC_AND_AND_FETCH_8:
676 case BUILT_IN_SYNC_AND_AND_FETCH_16:
677
678 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
679 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
680 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
681 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
682 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
683
684 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
685 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
686 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
687 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
688
689 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
690 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
691 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
692 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
693 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
694
695 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
696 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
697 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
698 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
699 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
700
701 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
702 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
703 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
704 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
705 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
706
707 case BUILT_IN_SYNC_LOCK_RELEASE_1:
708 case BUILT_IN_SYNC_LOCK_RELEASE_2:
709 case BUILT_IN_SYNC_LOCK_RELEASE_4:
710 case BUILT_IN_SYNC_LOCK_RELEASE_8:
711 case BUILT_IN_SYNC_LOCK_RELEASE_16:
712
713 case BUILT_IN_ATOMIC_EXCHANGE_1:
714 case BUILT_IN_ATOMIC_EXCHANGE_2:
715 case BUILT_IN_ATOMIC_EXCHANGE_4:
716 case BUILT_IN_ATOMIC_EXCHANGE_8:
717 case BUILT_IN_ATOMIC_EXCHANGE_16:
718
719 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
720 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
721 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
722 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
723 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
724
725 case BUILT_IN_ATOMIC_STORE_1:
726 case BUILT_IN_ATOMIC_STORE_2:
727 case BUILT_IN_ATOMIC_STORE_4:
728 case BUILT_IN_ATOMIC_STORE_8:
729 case BUILT_IN_ATOMIC_STORE_16:
730
731 case BUILT_IN_ATOMIC_ADD_FETCH_1:
732 case BUILT_IN_ATOMIC_ADD_FETCH_2:
733 case BUILT_IN_ATOMIC_ADD_FETCH_4:
734 case BUILT_IN_ATOMIC_ADD_FETCH_8:
735 case BUILT_IN_ATOMIC_ADD_FETCH_16:
736
737 case BUILT_IN_ATOMIC_SUB_FETCH_1:
738 case BUILT_IN_ATOMIC_SUB_FETCH_2:
739 case BUILT_IN_ATOMIC_SUB_FETCH_4:
740 case BUILT_IN_ATOMIC_SUB_FETCH_8:
741 case BUILT_IN_ATOMIC_SUB_FETCH_16:
742
743 case BUILT_IN_ATOMIC_AND_FETCH_1:
744 case BUILT_IN_ATOMIC_AND_FETCH_2:
745 case BUILT_IN_ATOMIC_AND_FETCH_4:
746 case BUILT_IN_ATOMIC_AND_FETCH_8:
747 case BUILT_IN_ATOMIC_AND_FETCH_16:
748
749 case BUILT_IN_ATOMIC_NAND_FETCH_1:
750 case BUILT_IN_ATOMIC_NAND_FETCH_2:
751 case BUILT_IN_ATOMIC_NAND_FETCH_4:
752 case BUILT_IN_ATOMIC_NAND_FETCH_8:
753 case BUILT_IN_ATOMIC_NAND_FETCH_16:
754
755 case BUILT_IN_ATOMIC_XOR_FETCH_1:
756 case BUILT_IN_ATOMIC_XOR_FETCH_2:
757 case BUILT_IN_ATOMIC_XOR_FETCH_4:
758 case BUILT_IN_ATOMIC_XOR_FETCH_8:
759 case BUILT_IN_ATOMIC_XOR_FETCH_16:
760
761 case BUILT_IN_ATOMIC_OR_FETCH_1:
762 case BUILT_IN_ATOMIC_OR_FETCH_2:
763 case BUILT_IN_ATOMIC_OR_FETCH_4:
764 case BUILT_IN_ATOMIC_OR_FETCH_8:
765 case BUILT_IN_ATOMIC_OR_FETCH_16:
766
767 case BUILT_IN_ATOMIC_FETCH_ADD_1:
768 case BUILT_IN_ATOMIC_FETCH_ADD_2:
769 case BUILT_IN_ATOMIC_FETCH_ADD_4:
770 case BUILT_IN_ATOMIC_FETCH_ADD_8:
771 case BUILT_IN_ATOMIC_FETCH_ADD_16:
772
773 case BUILT_IN_ATOMIC_FETCH_SUB_1:
774 case BUILT_IN_ATOMIC_FETCH_SUB_2:
775 case BUILT_IN_ATOMIC_FETCH_SUB_4:
776 case BUILT_IN_ATOMIC_FETCH_SUB_8:
777 case BUILT_IN_ATOMIC_FETCH_SUB_16:
778
779 case BUILT_IN_ATOMIC_FETCH_AND_1:
780 case BUILT_IN_ATOMIC_FETCH_AND_2:
781 case BUILT_IN_ATOMIC_FETCH_AND_4:
782 case BUILT_IN_ATOMIC_FETCH_AND_8:
783 case BUILT_IN_ATOMIC_FETCH_AND_16:
784
785 case BUILT_IN_ATOMIC_FETCH_NAND_1:
786 case BUILT_IN_ATOMIC_FETCH_NAND_2:
787 case BUILT_IN_ATOMIC_FETCH_NAND_4:
788 case BUILT_IN_ATOMIC_FETCH_NAND_8:
789 case BUILT_IN_ATOMIC_FETCH_NAND_16:
790
791 case BUILT_IN_ATOMIC_FETCH_XOR_1:
792 case BUILT_IN_ATOMIC_FETCH_XOR_2:
793 case BUILT_IN_ATOMIC_FETCH_XOR_4:
794 case BUILT_IN_ATOMIC_FETCH_XOR_8:
795 case BUILT_IN_ATOMIC_FETCH_XOR_16:
796
797 case BUILT_IN_ATOMIC_FETCH_OR_1:
798 case BUILT_IN_ATOMIC_FETCH_OR_2:
799 case BUILT_IN_ATOMIC_FETCH_OR_4:
800 case BUILT_IN_ATOMIC_FETCH_OR_8:
801 case BUILT_IN_ATOMIC_FETCH_OR_16:
802 {
803 dest = gimple_call_arg (call, 0);
804 /* DEST represents the address of a memory location.
805 instrument_derefs wants the memory location, so lets
806 dereference the address DEST before handing it to
807 instrument_derefs. */
808 if (TREE_CODE (dest) == ADDR_EXPR)
809 dest = TREE_OPERAND (dest, 0);
77e83307 810 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
bdcbe80c
DS
811 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
812 dest, build_int_cst (TREE_TYPE (dest), 0));
813 else
814 gcc_unreachable ();
815
816 access_size = int_size_in_bytes (TREE_TYPE (dest));
817 }
818
819 default:
820 /* The other builtins memory access are not instrumented in this
821 function because they either don't have any length parameter,
822 or their length parameter is just a limit. */
823 break;
824 }
825
826 if (len != NULL_TREE)
827 {
828 if (source0 != NULL_TREE)
829 {
830 src0->start = source0;
831 src0->access_size = access_size;
832 *src0_len = len;
833 *src0_is_store = false;
834 }
835
836 if (source1 != NULL_TREE)
837 {
838 src1->start = source1;
839 src1->access_size = access_size;
840 *src1_len = len;
841 *src1_is_store = false;
842 }
843
844 if (dest != NULL_TREE)
845 {
846 dst->start = dest;
847 dst->access_size = access_size;
848 *dst_len = len;
849 *dst_is_store = true;
850 }
851
852 got_reference_p = true;
853 }
b41288b3
JJ
854 else if (dest)
855 {
856 dst->start = dest;
857 dst->access_size = access_size;
858 *dst_len = NULL_TREE;
859 *dst_is_store = is_store;
860 *dest_is_deref = true;
861 got_reference_p = true;
862 }
bdcbe80c 863
b41288b3 864 return got_reference_p;
bdcbe80c
DS
865}
866
867/* Return true iff a given gimple statement has been instrumented.
868 Note that the statement is "defined" by the memory references it
869 contains. */
870
871static bool
355fe088 872has_stmt_been_instrumented_p (gimple *stmt)
bdcbe80c
DS
873{
874 if (gimple_assign_single_p (stmt))
875 {
876 bool r_is_store;
877 asan_mem_ref r;
878 asan_mem_ref_init (&r, NULL, 1);
879
538dd0b7
DM
880 if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
881 &r_is_store))
bdcbe80c
DS
882 return has_mem_ref_been_instrumented (&r);
883 }
884 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
885 {
886 asan_mem_ref src0, src1, dest;
887 asan_mem_ref_init (&src0, NULL, 1);
888 asan_mem_ref_init (&src1, NULL, 1);
889 asan_mem_ref_init (&dest, NULL, 1);
890
891 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
892 bool src0_is_store = false, src1_is_store = false,
bdea98ca 893 dest_is_store = false, dest_is_deref = false, intercepted_p = true;
538dd0b7 894 if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
bdcbe80c
DS
895 &src0, &src0_len, &src0_is_store,
896 &src1, &src1_len, &src1_is_store,
897 &dest, &dest_len, &dest_is_store,
bdea98ca 898 &dest_is_deref, &intercepted_p))
bdcbe80c
DS
899 {
900 if (src0.start != NULL_TREE
901 && !has_mem_ref_been_instrumented (&src0, src0_len))
902 return false;
903
904 if (src1.start != NULL_TREE
905 && !has_mem_ref_been_instrumented (&src1, src1_len))
906 return false;
907
908 if (dest.start != NULL_TREE
909 && !has_mem_ref_been_instrumented (&dest, dest_len))
910 return false;
911
912 return true;
913 }
914 }
7db337c2
ML
915 else if (is_gimple_call (stmt) && gimple_store_p (stmt))
916 {
917 asan_mem_ref r;
918 asan_mem_ref_init (&r, NULL, 1);
919
920 r.start = gimple_call_lhs (stmt);
921 r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
922 return has_mem_ref_been_instrumented (&r);
923 }
924
bdcbe80c
DS
925 return false;
926}
927
928/* Insert a memory reference into the hash table. */
929
930static void
40f9f6bb 931update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
bdcbe80c 932{
c203e8a7 933 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
bdcbe80c
DS
934
935 asan_mem_ref r;
936 asan_mem_ref_init (&r, ref, access_size);
937
c203e8a7 938 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
bdea98ca 939 if (*slot == NULL || (*slot)->access_size < access_size)
bdcbe80c
DS
940 *slot = asan_mem_ref_new (ref, access_size);
941}
942
94fce891
JJ
943/* Initialize shadow_ptr_types array. */
944
945static void
946asan_init_shadow_ptr_types (void)
947{
948 asan_shadow_set = new_alias_set ();
6dc4a604
ML
949 tree types[3] = { signed_char_type_node, short_integer_type_node,
950 integer_type_node };
951
952 for (unsigned i = 0; i < 3; i++)
953 {
954 shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
955 TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
956 shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
957 }
958
94fce891
JJ
959 initialize_sanitizer_builtins ();
960}
961
11a877b3 962/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
8240018b
JJ
963
964static tree
11a877b3 965asan_pp_string (pretty_printer *pp)
8240018b 966{
11a877b3 967 const char *buf = pp_formatted_text (pp);
8240018b
JJ
968 size_t len = strlen (buf);
969 tree ret = build_string (len + 1, buf);
970 TREE_TYPE (ret)
94fce891
JJ
971 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
972 build_index_type (size_int (len)));
8240018b
JJ
973 TREE_READONLY (ret) = 1;
974 TREE_STATIC (ret) = 1;
94fce891 975 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
8240018b
JJ
976}
977
f3ddd692
JJ
978/* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
979
980static rtx
981asan_shadow_cst (unsigned char shadow_bytes[4])
982{
983 int i;
984 unsigned HOST_WIDE_INT val = 0;
985 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
986 for (i = 0; i < 4; i++)
987 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
988 << (BITS_PER_UNIT * i);
dcad1dd3 989 return gen_int_mode (val, SImode);
f3ddd692
JJ
990}
991
aeb7e7c1
JJ
992/* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't call a library call here
993 though. */
994
995static void
996asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
997{
3a965f61
DM
998 rtx_insn *insn, *insns, *jump;
999 rtx_code_label *top_label;
1000 rtx end, addr, tmp;
aeb7e7c1
JJ
1001
1002 start_sequence ();
1003 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
1004 insns = get_insns ();
1005 end_sequence ();
1006 for (insn = insns; insn; insn = NEXT_INSN (insn))
1007 if (CALL_P (insn))
1008 break;
1009 if (insn == NULL_RTX)
1010 {
1011 emit_insn (insns);
1012 return;
1013 }
1014
1015 gcc_assert ((len & 3) == 0);
1016 top_label = gen_label_rtx ();
57d4d653 1017 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
aeb7e7c1
JJ
1018 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1019 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1020 emit_label (top_label);
1021
1022 emit_move_insn (shadow_mem, const0_rtx);
2f1cd2eb 1023 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
c62ccb9a 1024 true, OPTAB_LIB_WIDEN);
aeb7e7c1
JJ
1025 if (tmp != addr)
1026 emit_move_insn (addr, tmp);
1027 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1028 jump = get_last_insn ();
1029 gcc_assert (JUMP_P (jump));
e5af9ddd 1030 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
aeb7e7c1
JJ
1031}
1032
ef1b3fda
KS
1033void
1034asan_function_start (void)
1035{
1036 section *fnsec = function_section (current_function_decl);
1037 switch_to_section (fnsec);
1038 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
c62ccb9a 1039 current_function_funcdef_no);
ef1b3fda
KS
1040}
1041
6dc4a604
ML
1042/* Return number of shadow bytes that are occupied by a local variable
1043 of SIZE bytes. */
1044
1045static unsigned HOST_WIDE_INT
1046shadow_mem_size (unsigned HOST_WIDE_INT size)
1047{
1048 return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
1049}
1050
f3ddd692
JJ
1051/* Insert code to protect stack vars. The prologue sequence should be emitted
1052 directly, epilogue sequence returned. BASE is the register holding the
1053 stack base, against which OFFSETS array offsets are relative to, OFFSETS
1054 array contains pairs of offsets in reverse order, always the end offset
1055 of some gap that needs protection followed by starting offset,
1056 and DECLS is an array of representative decls for each var partition.
1057 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1058 elements long (OFFSETS include gap before the first variable as well
e361382f
JJ
1059 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1060 register which stack vars DECL_RTLs are based on. Either BASE should be
1061 assigned to PBASE, when not doing use after return protection, or
1062 corresponding address based on __asan_stack_malloc* return value. */
f3ddd692 1063
3a4abd2f 1064rtx_insn *
e361382f
JJ
1065asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1066 HOST_WIDE_INT *offsets, tree *decls, int length)
f3ddd692 1067{
19f8b229
TS
1068 rtx shadow_base, shadow_mem, ret, mem, orig_base;
1069 rtx_code_label *lab;
3a4abd2f 1070 rtx_insn *insns;
ef1b3fda 1071 char buf[30];
f3ddd692 1072 unsigned char shadow_bytes[4];
e361382f
JJ
1073 HOST_WIDE_INT base_offset = offsets[length - 1];
1074 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1075 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
6dc4a604 1076 HOST_WIDE_INT last_offset;
f3ddd692
JJ
1077 int l;
1078 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
ef1b3fda 1079 tree str_cst, decl, id;
e361382f 1080 int use_after_return_class = -1;
f3ddd692 1081
94fce891
JJ
1082 if (shadow_ptr_types[0] == NULL_TREE)
1083 asan_init_shadow_ptr_types ();
1084
f3ddd692 1085 /* First of all, prepare the description string. */
11a877b3 1086 pretty_printer asan_pp;
da6ca2b5 1087
8240018b
JJ
1088 pp_decimal_int (&asan_pp, length / 2 - 1);
1089 pp_space (&asan_pp);
f3ddd692
JJ
1090 for (l = length - 2; l; l -= 2)
1091 {
1092 tree decl = decls[l / 2 - 1];
8240018b
JJ
1093 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1094 pp_space (&asan_pp);
1095 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1096 pp_space (&asan_pp);
f3ddd692
JJ
1097 if (DECL_P (decl) && DECL_NAME (decl))
1098 {
8240018b
JJ
1099 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1100 pp_space (&asan_pp);
b066401f 1101 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
f3ddd692
JJ
1102 }
1103 else
8240018b
JJ
1104 pp_string (&asan_pp, "9 <unknown>");
1105 pp_space (&asan_pp);
f3ddd692 1106 }
11a877b3 1107 str_cst = asan_pp_string (&asan_pp);
f3ddd692
JJ
1108
1109 /* Emit the prologue sequence. */
b5ebc991
MO
1110 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1111 && ASAN_USE_AFTER_RETURN)
e361382f
JJ
1112 {
1113 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1114 /* __asan_stack_malloc_N guarantees alignment
c62ccb9a 1115 N < 6 ? (64 << N) : 4096 bytes. */
e361382f
JJ
1116 if (alignb > (use_after_return_class < 6
1117 ? (64U << use_after_return_class) : 4096U))
1118 use_after_return_class = -1;
1119 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1120 base_align_bias = ((asan_frame_size + alignb - 1)
1121 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1122 }
e5dcd695
LZ
1123 /* Align base if target is STRICT_ALIGNMENT. */
1124 if (STRICT_ALIGNMENT)
1125 base = expand_binop (Pmode, and_optab, base,
1126 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1127 << ASAN_SHADOW_SHIFT)
1128 / BITS_PER_UNIT), Pmode), NULL_RTX,
1129 1, OPTAB_DIRECT);
1130
e361382f
JJ
1131 if (use_after_return_class == -1 && pbase)
1132 emit_move_insn (pbase, base);
e5dcd695 1133
2f1cd2eb 1134 base = expand_binop (Pmode, add_optab, base,
e361382f 1135 gen_int_mode (base_offset - base_align_bias, Pmode),
f3ddd692 1136 NULL_RTX, 1, OPTAB_DIRECT);
e361382f
JJ
1137 orig_base = NULL_RTX;
1138 if (use_after_return_class != -1)
1139 {
1140 if (asan_detect_stack_use_after_return == NULL_TREE)
1141 {
1142 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1143 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1144 integer_type_node);
1145 SET_DECL_ASSEMBLER_NAME (decl, id);
1146 TREE_ADDRESSABLE (decl) = 1;
1147 DECL_ARTIFICIAL (decl) = 1;
1148 DECL_IGNORED_P (decl) = 1;
1149 DECL_EXTERNAL (decl) = 1;
1150 TREE_STATIC (decl) = 1;
1151 TREE_PUBLIC (decl) = 1;
1152 TREE_USED (decl) = 1;
1153 asan_detect_stack_use_after_return = decl;
1154 }
1155 orig_base = gen_reg_rtx (Pmode);
1156 emit_move_insn (orig_base, base);
1157 ret = expand_normal (asan_detect_stack_use_after_return);
1158 lab = gen_label_rtx ();
1159 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1160 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1161 VOIDmode, 0, lab, very_likely);
1162 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1163 use_after_return_class);
1164 ret = init_one_libfunc (buf);
89e302b8 1165 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 1,
e361382f
JJ
1166 GEN_INT (asan_frame_size
1167 + base_align_bias),
89e302b8
MO
1168 TYPE_MODE (pointer_sized_int_node));
1169 /* __asan_stack_malloc_[n] returns a pointer to fake stack if succeeded
1170 and NULL otherwise. Check RET value is NULL here and jump over the
1171 BASE reassignment in this case. Otherwise, reassign BASE to RET. */
1172 int very_unlikely = REG_BR_PROB_BASE / 2000 - 1;
1173 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1174 VOIDmode, 0, lab, very_unlikely);
e361382f
JJ
1175 ret = convert_memory_address (Pmode, ret);
1176 emit_move_insn (base, ret);
1177 emit_label (lab);
1178 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1179 gen_int_mode (base_align_bias
1180 - base_offset, Pmode),
1181 NULL_RTX, 1, OPTAB_DIRECT));
1182 }
f3ddd692 1183 mem = gen_rtx_MEM (ptr_mode, base);
e361382f 1184 mem = adjust_address (mem, VOIDmode, base_align_bias);
69db2d57 1185 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
f3ddd692
JJ
1186 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1187 emit_move_insn (mem, expand_normal (str_cst));
ef1b3fda
KS
1188 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1189 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1190 id = get_identifier (buf);
1191 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
c62ccb9a 1192 VAR_DECL, id, char_type_node);
ef1b3fda
KS
1193 SET_DECL_ASSEMBLER_NAME (decl, id);
1194 TREE_ADDRESSABLE (decl) = 1;
1195 TREE_READONLY (decl) = 1;
1196 DECL_ARTIFICIAL (decl) = 1;
1197 DECL_IGNORED_P (decl) = 1;
1198 TREE_STATIC (decl) = 1;
1199 TREE_PUBLIC (decl) = 0;
1200 TREE_USED (decl) = 1;
8c8b21e4
JJ
1201 DECL_INITIAL (decl) = decl;
1202 TREE_ASM_WRITTEN (decl) = 1;
1203 TREE_ASM_WRITTEN (id) = 1;
ef1b3fda 1204 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
f3ddd692
JJ
1205 shadow_base = expand_binop (Pmode, lshr_optab, base,
1206 GEN_INT (ASAN_SHADOW_SHIFT),
1207 NULL_RTX, 1, OPTAB_DIRECT);
e361382f
JJ
1208 shadow_base
1209 = plus_constant (Pmode, shadow_base,
fd960af2 1210 asan_shadow_offset ()
e361382f 1211 + (base_align_bias >> ASAN_SHADOW_SHIFT));
f3ddd692
JJ
1212 gcc_assert (asan_shadow_set != -1
1213 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1214 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1215 set_mem_alias_set (shadow_mem, asan_shadow_set);
e5dcd695
LZ
1216 if (STRICT_ALIGNMENT)
1217 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
f3ddd692
JJ
1218 prev_offset = base_offset;
1219 for (l = length; l; l -= 2)
1220 {
1221 if (l == 2)
1222 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1223 offset = offsets[l - 1];
1224 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1225 {
1226 int i;
1227 HOST_WIDE_INT aoff
1228 = base_offset + ((offset - base_offset)
1229 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1230 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1231 (aoff - prev_offset)
1232 >> ASAN_SHADOW_SHIFT);
1233 prev_offset = aoff;
6dc4a604 1234 for (i = 0; i < 4; i++, aoff += ASAN_SHADOW_GRANULARITY)
f3ddd692
JJ
1235 if (aoff < offset)
1236 {
6dc4a604 1237 if (aoff < offset - (HOST_WIDE_INT)ASAN_SHADOW_GRANULARITY + 1)
f3ddd692
JJ
1238 shadow_bytes[i] = 0;
1239 else
1240 shadow_bytes[i] = offset - aoff;
1241 }
1242 else
fbdb92eb 1243 shadow_bytes[i] = ASAN_STACK_MAGIC_MIDDLE;
f3ddd692
JJ
1244 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1245 offset = aoff;
1246 }
1247 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1248 {
1249 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1250 (offset - prev_offset)
1251 >> ASAN_SHADOW_SHIFT);
1252 prev_offset = offset;
1253 memset (shadow_bytes, cur_shadow_byte, 4);
1254 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1255 offset += ASAN_RED_ZONE_SIZE;
1256 }
1257 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1258 }
1259 do_pending_stack_adjust ();
1260
1261 /* Construct epilogue sequence. */
1262 start_sequence ();
1263
19f8b229 1264 lab = NULL;
e361382f
JJ
1265 if (use_after_return_class != -1)
1266 {
19f8b229 1267 rtx_code_label *lab2 = gen_label_rtx ();
e361382f
JJ
1268 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1269 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1270 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1271 VOIDmode, 0, lab2, very_likely);
1272 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1273 set_mem_alias_set (shadow_mem, asan_shadow_set);
1274 mem = gen_rtx_MEM (ptr_mode, base);
1275 mem = adjust_address (mem, VOIDmode, base_align_bias);
1276 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1277 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1278 if (use_after_return_class < 5
1279 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1280 BITS_PER_UNIT, true))
1281 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1282 BITS_PER_UNIT, true, 0);
1283 else if (use_after_return_class >= 5
1284 || !set_storage_via_setmem (shadow_mem,
1285 GEN_INT (sz),
1286 gen_int_mode (c, QImode),
1287 BITS_PER_UNIT, BITS_PER_UNIT,
1288 -1, sz, sz, sz))
1289 {
1290 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1291 use_after_return_class);
1292 ret = init_one_libfunc (buf);
1293 rtx addr = convert_memory_address (ptr_mode, base);
1294 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1295 emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1296 GEN_INT (asan_frame_size + base_align_bias),
1297 TYPE_MODE (pointer_sized_int_node),
1298 orig_addr, ptr_mode);
1299 }
1300 lab = gen_label_rtx ();
1301 emit_jump (lab);
1302 emit_label (lab2);
1303 }
1304
f3ddd692
JJ
1305 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1306 set_mem_alias_set (shadow_mem, asan_shadow_set);
e5dcd695
LZ
1307
1308 if (STRICT_ALIGNMENT)
1309 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1310
6dc4a604
ML
1311 /* Unpoison shadow memory of a stack at the very end of a function.
1312 As we're poisoning stack variables at the end of their scope,
1313 shadow memory must be properly unpoisoned here. The easiest approach
1314 would be to collect all variables that should not be unpoisoned and
1315 we unpoison shadow memory of the whole stack except ranges
1316 occupied by these variables. */
f3ddd692 1317 last_offset = base_offset;
6dc4a604
ML
1318 HOST_WIDE_INT current_offset = last_offset;
1319 if (length)
f3ddd692 1320 {
6dc4a604
ML
1321 HOST_WIDE_INT var_end_offset = 0;
1322 HOST_WIDE_INT stack_start = offsets[length - 1];
1323 gcc_assert (last_offset == stack_start);
1324
1325 for (int l = length - 2; l > 0; l -= 2)
f3ddd692 1326 {
6dc4a604
ML
1327 HOST_WIDE_INT var_offset = offsets[l];
1328 current_offset = var_offset;
1329 var_end_offset = offsets[l - 1];
1330 HOST_WIDE_INT rounded_size = ROUND_UP (var_end_offset - var_offset,
1331 BITS_PER_UNIT);
1332
1333 /* Should we unpoison the variable? */
1334 if (asan_handled_variables != NULL
1335 && asan_handled_variables->contains (decl))
1336 {
1337 if (dump_file && (dump_flags & TDF_DETAILS))
1338 {
1339 const char *n = (DECL_NAME (decl)
1340 ? IDENTIFIER_POINTER (DECL_NAME (decl))
1341 : "<unknown>");
1342 fprintf (dump_file, "Unpoisoning shadow stack for variable: "
1343 "%s (%" PRId64 "B)\n", n,
1344 var_end_offset - var_offset);
1345 }
1346
1347 unsigned HOST_WIDE_INT s
1348 = shadow_mem_size (current_offset - last_offset);
1349 asan_clear_shadow (shadow_mem, s);
1350 HOST_WIDE_INT shift
1351 = shadow_mem_size (current_offset - last_offset + rounded_size);
1352 shadow_mem = adjust_address (shadow_mem, VOIDmode, shift);
1353 last_offset = var_offset + rounded_size;
1354 current_offset = last_offset;
1355 }
1356
f3ddd692 1357 }
6dc4a604
ML
1358
1359 /* Handle last redzone. */
1360 current_offset = offsets[0];
1361 asan_clear_shadow (shadow_mem,
1362 shadow_mem_size (current_offset - last_offset));
f3ddd692
JJ
1363 }
1364
6dc4a604
ML
1365 /* Clean-up set with instrumented stack variables. */
1366 delete asan_handled_variables;
1367 asan_handled_variables = NULL;
1368 delete asan_used_labels;
1369 asan_used_labels = NULL;
1370
f3ddd692 1371 do_pending_stack_adjust ();
e361382f
JJ
1372 if (lab)
1373 emit_label (lab);
f3ddd692 1374
3a4abd2f 1375 insns = get_insns ();
f3ddd692 1376 end_sequence ();
3a4abd2f 1377 return insns;
f3ddd692
JJ
1378}
1379
8240018b
JJ
1380/* Return true if DECL, a global var, might be overridden and needs
1381 therefore a local alias. */
1382
1383static bool
1384asan_needs_local_alias (tree decl)
1385{
1386 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1387}
1388
84b0769e
MO
1389/* Return true if DECL, a global var, is an artificial ODR indicator symbol
1390 therefore doesn't need protection. */
1391
1392static bool
1393is_odr_indicator (tree decl)
1394{
1395 return (DECL_ARTIFICIAL (decl)
1396 && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
1397}
1398
8240018b
JJ
1399/* Return true if DECL is a VAR_DECL that should be protected
1400 by Address Sanitizer, by appending a red zone with protected
1401 shadow memory after it and aligning it to at least
1402 ASAN_RED_ZONE_SIZE bytes. */
1403
1404bool
1405asan_protect_global (tree decl)
1406{
b5ebc991
MO
1407 if (!ASAN_GLOBALS)
1408 return false;
1409
8240018b 1410 rtx rtl, symbol;
8240018b 1411
94fce891
JJ
1412 if (TREE_CODE (decl) == STRING_CST)
1413 {
1414 /* Instrument all STRING_CSTs except those created
1415 by asan_pp_string here. */
1416 if (shadow_ptr_types[0] != NULL_TREE
1417 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1418 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1419 return false;
1420 return true;
1421 }
8813a647 1422 if (!VAR_P (decl)
8240018b
JJ
1423 /* TLS vars aren't statically protectable. */
1424 || DECL_THREAD_LOCAL_P (decl)
1425 /* Externs will be protected elsewhere. */
1426 || DECL_EXTERNAL (decl)
8240018b
JJ
1427 || !DECL_RTL_SET_P (decl)
1428 /* Comdat vars pose an ABI problem, we can't know if
1429 the var that is selected by the linker will have
1430 padding or not. */
1431 || DECL_ONE_ONLY (decl)
f1d15bb9
DV
1432 /* Similarly for common vars. People can use -fno-common.
1433 Note: Linux kernel is built with -fno-common, so we do instrument
1434 globals there even if it is C. */
a8a6fd74 1435 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
8240018b
JJ
1436 /* Don't protect if using user section, often vars placed
1437 into user section from multiple TUs are then assumed
1438 to be an array of such vars, putting padding in there
1439 breaks this assumption. */
f961457f 1440 || (DECL_SECTION_NAME (decl) != NULL
18af8d16
YG
1441 && !symtab_node::get (decl)->implicit_section
1442 && !section_sanitized_p (DECL_SECTION_NAME (decl)))
8240018b
JJ
1443 || DECL_SIZE (decl) == 0
1444 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1445 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
21a82048 1446 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
84b0769e
MO
1447 || TREE_TYPE (decl) == ubsan_get_source_location_type ()
1448 || is_odr_indicator (decl))
8240018b
JJ
1449 return false;
1450
1451 rtl = DECL_RTL (decl);
1452 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1453 return false;
1454 symbol = XEXP (rtl, 0);
1455
1456 if (CONSTANT_POOL_ADDRESS_P (symbol)
1457 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1458 return false;
1459
8240018b
JJ
1460 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1461 return false;
1462
1463#ifndef ASM_OUTPUT_DEF
1464 if (asan_needs_local_alias (decl))
1465 return false;
1466#endif
1467
497a1c66 1468 return true;
8240018b
JJ
1469}
1470
40f9f6bb
JJ
1471/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1472 IS_STORE is either 1 (for a store) or 0 (for a load). */
37d6f666
WM
1473
1474static tree
fed4de37
YG
1475report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1476 int *nargs)
37d6f666 1477{
fed4de37
YG
1478 static enum built_in_function report[2][2][6]
1479 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1480 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1481 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1482 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1483 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1484 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1485 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1486 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1487 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1488 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1489 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1490 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1491 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1492 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1493 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1494 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1495 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1496 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
8946c29e
YG
1497 if (size_in_bytes == -1)
1498 {
1499 *nargs = 2;
fed4de37 1500 return builtin_decl_implicit (report[recover_p][is_store][5]);
8946c29e
YG
1501 }
1502 *nargs = 1;
fed4de37
YG
1503 int size_log2 = exact_log2 (size_in_bytes);
1504 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
37d6f666
WM
1505}
1506
8946c29e
YG
1507/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1508 IS_STORE is either 1 (for a store) or 0 (for a load). */
1509
1510static tree
fed4de37
YG
1511check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1512 int *nargs)
8946c29e 1513{
fed4de37
YG
1514 static enum built_in_function check[2][2][6]
1515 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1516 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1517 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1518 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1519 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1520 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1521 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1522 BUILT_IN_ASAN_LOAD2_NOABORT,
1523 BUILT_IN_ASAN_LOAD4_NOABORT,
1524 BUILT_IN_ASAN_LOAD8_NOABORT,
1525 BUILT_IN_ASAN_LOAD16_NOABORT,
1526 BUILT_IN_ASAN_LOADN_NOABORT },
1527 { BUILT_IN_ASAN_STORE1_NOABORT,
1528 BUILT_IN_ASAN_STORE2_NOABORT,
1529 BUILT_IN_ASAN_STORE4_NOABORT,
1530 BUILT_IN_ASAN_STORE8_NOABORT,
1531 BUILT_IN_ASAN_STORE16_NOABORT,
1532 BUILT_IN_ASAN_STOREN_NOABORT } } };
8946c29e
YG
1533 if (size_in_bytes == -1)
1534 {
1535 *nargs = 2;
fed4de37 1536 return builtin_decl_implicit (check[recover_p][is_store][5]);
8946c29e
YG
1537 }
1538 *nargs = 1;
fed4de37
YG
1539 int size_log2 = exact_log2 (size_in_bytes);
1540 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
8946c29e
YG
1541}
1542
01452015 1543/* Split the current basic block and create a condition statement
25ae5027
DS
1544 insertion point right before or after the statement pointed to by
1545 ITER. Return an iterator to the point at which the caller might
1546 safely insert the condition statement.
01452015
DS
1547
1548 THEN_BLOCK must be set to the address of an uninitialized instance
1549 of basic_block. The function will then set *THEN_BLOCK to the
1550 'then block' of the condition statement to be inserted by the
1551 caller.
1552
c4bfe8bf
JJ
1553 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1554 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1555
01452015
DS
1556 Similarly, the function will set *FALLTRHOUGH_BLOCK to the 'else
1557 block' of the condition statement to be inserted by the caller.
1558
1559 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1560 statements starting from *ITER, and *THEN_BLOCK is a new empty
1561 block.
1562
25ae5027
DS
1563 *ITER is adjusted to point to always point to the first statement
1564 of the basic block * FALLTHROUGH_BLOCK. That statement is the
1565 same as what ITER was pointing to prior to calling this function,
1566 if BEFORE_P is true; otherwise, it is its following statement. */
01452015 1567
ac0ff9f2 1568gimple_stmt_iterator
25ae5027
DS
1569create_cond_insert_point (gimple_stmt_iterator *iter,
1570 bool before_p,
1571 bool then_more_likely_p,
c4bfe8bf 1572 bool create_then_fallthru_edge,
25ae5027
DS
1573 basic_block *then_block,
1574 basic_block *fallthrough_block)
01452015
DS
1575{
1576 gimple_stmt_iterator gsi = *iter;
1577
25ae5027 1578 if (!gsi_end_p (gsi) && before_p)
01452015
DS
1579 gsi_prev (&gsi);
1580
1581 basic_block cur_bb = gsi_bb (*iter);
1582
1583 edge e = split_block (cur_bb, gsi_stmt (gsi));
1584
1585 /* Get a hold on the 'condition block', the 'then block' and the
1586 'else block'. */
1587 basic_block cond_bb = e->src;
1588 basic_block fallthru_bb = e->dest;
1589 basic_block then_bb = create_empty_bb (cond_bb);
a9e0d843
RB
1590 if (current_loops)
1591 {
1592 add_bb_to_loop (then_bb, cond_bb->loop_father);
1593 loops_state_set (LOOPS_NEED_FIXUP);
1594 }
01452015
DS
1595
1596 /* Set up the newly created 'then block'. */
1597 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1598 int fallthrough_probability
1599 = then_more_likely_p
1600 ? PROB_VERY_UNLIKELY
1601 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1602 e->probability = PROB_ALWAYS - fallthrough_probability;
c4bfe8bf
JJ
1603 if (create_then_fallthru_edge)
1604 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
01452015
DS
1605
1606 /* Set up the fallthrough basic block. */
1607 e = find_edge (cond_bb, fallthru_bb);
1608 e->flags = EDGE_FALSE_VALUE;
1609 e->count = cond_bb->count;
1610 e->probability = fallthrough_probability;
1611
1612 /* Update dominance info for the newly created then_bb; note that
1613 fallthru_bb's dominance info has already been updated by
1614 split_bock. */
1615 if (dom_info_available_p (CDI_DOMINATORS))
1616 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1617
1618 *then_block = then_bb;
1619 *fallthrough_block = fallthru_bb;
1620 *iter = gsi_start_bb (fallthru_bb);
1621
1622 return gsi_last_bb (cond_bb);
1623}
1624
25ae5027
DS
1625/* Insert an if condition followed by a 'then block' right before the
1626 statement pointed to by ITER. The fallthrough block -- which is the
1627 else block of the condition as well as the destination of the
1628 outcoming edge of the 'then block' -- starts with the statement
1629 pointed to by ITER.
1630
497a1c66 1631 COND is the condition of the if.
25ae5027
DS
1632
1633 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1634 'then block' is higher than the probability of the edge to the
1635 fallthrough block.
1636
1637 Upon completion of the function, *THEN_BB is set to the newly
1638 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1639 fallthrough block.
1640
1641 *ITER is adjusted to still point to the same statement it was
1642 pointing to initially. */
1643
1644static void
538dd0b7 1645insert_if_then_before_iter (gcond *cond,
25ae5027
DS
1646 gimple_stmt_iterator *iter,
1647 bool then_more_likely_p,
1648 basic_block *then_bb,
1649 basic_block *fallthrough_bb)
1650{
1651 gimple_stmt_iterator cond_insert_point =
1652 create_cond_insert_point (iter,
1653 /*before_p=*/true,
1654 then_more_likely_p,
c4bfe8bf 1655 /*create_then_fallthru_edge=*/true,
25ae5027
DS
1656 then_bb,
1657 fallthrough_bb);
1658 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1659}
1660
6dc4a604
ML
1661/* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
1662 If RETURN_ADDRESS is set to true, return memory location instread
1663 of a value in the shadow memory. */
40f9f6bb
JJ
1664
1665static tree
1666build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
6dc4a604
ML
1667 tree base_addr, tree shadow_ptr_type,
1668 bool return_address = false)
40f9f6bb
JJ
1669{
1670 tree t, uintptr_type = TREE_TYPE (base_addr);
1671 tree shadow_type = TREE_TYPE (shadow_ptr_type);
355fe088 1672 gimple *g;
40f9f6bb
JJ
1673
1674 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
0d0e4a03
JJ
1675 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1676 base_addr, t);
40f9f6bb
JJ
1677 gimple_set_location (g, location);
1678 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1679
fd960af2 1680 t = build_int_cst (uintptr_type, asan_shadow_offset ());
0d0e4a03
JJ
1681 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1682 gimple_assign_lhs (g), t);
40f9f6bb
JJ
1683 gimple_set_location (g, location);
1684 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1685
0d0e4a03
JJ
1686 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1687 gimple_assign_lhs (g));
40f9f6bb
JJ
1688 gimple_set_location (g, location);
1689 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1690
6dc4a604
ML
1691 if (!return_address)
1692 {
1693 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1694 build_int_cst (shadow_ptr_type, 0));
1695 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
1696 gimple_set_location (g, location);
1697 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1698 }
1699
40f9f6bb
JJ
1700 return gimple_assign_lhs (g);
1701}
1702
8946c29e
YG
1703/* BASE can already be an SSA_NAME; in that case, do not create a
1704 new SSA_NAME for it. */
1705
1706static tree
1707maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1708 bool before_p)
1709{
1710 if (TREE_CODE (base) == SSA_NAME)
1711 return base;
355fe088 1712 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
0d0e4a03 1713 TREE_CODE (base), base);
8946c29e
YG
1714 gimple_set_location (g, loc);
1715 if (before_p)
1716 gsi_insert_before (iter, g, GSI_SAME_STMT);
1717 else
1718 gsi_insert_after (iter, g, GSI_NEW_STMT);
1719 return gimple_assign_lhs (g);
1720}
1721
a2f581e1
YG
1722/* LEN can already have necessary size and precision;
1723 in that case, do not create a new variable. */
1724
1725tree
1726maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1727 bool before_p)
1728{
1729 if (ptrofftype_p (len))
1730 return len;
355fe088 1731 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
0d0e4a03 1732 NOP_EXPR, len);
a2f581e1
YG
1733 gimple_set_location (g, loc);
1734 if (before_p)
1735 gsi_insert_before (iter, g, GSI_SAME_STMT);
1736 else
1737 gsi_insert_after (iter, g, GSI_NEW_STMT);
1738 return gimple_assign_lhs (g);
1739}
1740
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for memory region access.
   NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
   length.  ALIGN tells alignment of accessed memory object.

   START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
   memory region have already been instrumented.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */

static void
build_check_stmt (location_t loc, tree base, tree len,
		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
		  bool is_non_zero_len, bool before_p, bool is_store,
		  bool is_scalar_access, unsigned int align = 0)
{
  gimple_stmt_iterator gsi = *iter;
  gimple *g;

  /* A positive constant size implies the length is known non-zero;
     callers must keep the two flags consistent.  */
  gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));

  gsi = *iter;

  /* BASE may be shared between trees; force a private SSA copy so the
     inserted statements do not perturb other uses.  */
  base = unshare_expr (base);
  base = maybe_create_ssa_name (loc, base, &gsi, before_p);

  if (len)
    {
      len = unshare_expr (len);
      len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
    }
  else
    {
      /* No length tree: the constant byte size must be known.  */
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }

  if (size_in_bytes > 1)
    {
      /* Only power-of-two sizes up to 16 bytes can be checked with a
	 single scalar shadow-memory probe; everything else is treated
	 as a region access.  */
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
	  || size_in_bytes > 16)
	is_scalar_access = false;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
	{
	  /* On non-strict alignment targets, if
	     16-byte access is just 8-byte aligned,
	     this will result in misaligned shadow
	     memory 2 byte load, but otherwise can
	     be handled using one read.  */
	  if (size_in_bytes != 16
	      || STRICT_ALIGNMENT
	      || align < 8 * BITS_PER_UNIT)
	    is_scalar_access = false;
	}
    }

  /* Encode the access properties into the flags operand of the
     IFN_ASAN_CHECK internal call; expansion decodes them later.  */
  HOST_WIDE_INT flags = 0;
  if (is_store)
    flags |= ASAN_CHECK_STORE;
  if (is_non_zero_len)
    flags |= ASAN_CHECK_NON_ZERO_LEN;
  if (is_scalar_access)
    flags |= ASAN_CHECK_SCALAR_ACCESS;

  g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
				  build_int_cst (integer_type_node, flags),
				  base, len,
				  build_int_cst (integer_type_node,
						 align / BITS_PER_UNIT));
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  else
    {
      /* Advance past the new call so *ITER denotes the statement
	 logically following the original one.  */
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      gsi_next (&gsi);
      *iter = gsi;
    }
}
1829
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  /* Respect -fsanitize options that disable one direction.  */
  if (is_store && !ASAN_INSTRUMENT_WRITES)
    return;
  if (!is_store && !ASAN_INSTRUMENT_READS)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;
  if (location == UNKNOWN_LOCATION)
    location = EXPR_LOCATION (t);

  type = TREE_TYPE (t);
  /* Only these tree codes denote instrumentable memory references.  */
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case BIT_FIELD_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
				    &unsignedp, &reversep, &volatilep);

  /* A bit-field access is widened to its representative field so the
     checked span covers whole bytes; recurse on that and stop.  */
  if (TREE_CODE (t) == COMPONENT_REF
      && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
    {
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
      instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
				       TREE_OPERAND (t, 0), repr,
				       NULL_TREE), location, is_store);
      return;
    }

  /* Give up on accesses that are not byte-aligned or whose bit extent
     disagrees with the type size.  */
  if (bitpos % BITS_PER_UNIT
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    return;

  /* Accesses provably inside a known variable may be skippable.  */
  if (VAR_P (inner)
      && offset == NULL_TREE
      && bitpos >= 0
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
	return;
      if (!ASAN_GLOBALS && is_global_var (inner))
	return;
      if (!TREE_STATIC (inner))
	{
	  /* Automatic vars in the current function will be always
	     accessible.  */
	  if (decl_function_context (inner) == current_function_decl
	      && (!asan_sanitize_use_after_scope ()
		  || !TREE_ADDRESSABLE (inner)))
	    return;
	}
      /* Always instrument external vars, they might be dynamically
	 initialized.  */
      else if (!DECL_EXTERNAL (inner))
	{
	  /* For static vars if they are known not to be dynamically
	     initialized, they will be always accessible.  */
	  varpool_node *vnode = varpool_node::get (inner);
	  if (vnode && !vnode->dynamically_initialized)
	    return;
	}
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
			/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
			is_store, /*is_scalar_access*/true, align);
      /* Record both the address form and the reference itself so later
	 duplicate accesses in this extended basic block are skipped.  */
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}
1932
bdea98ca
MO
1933/* Insert a memory reference into the hash table if access length
1934 can be determined in compile time. */
1935
1936static void
1937maybe_update_mem_ref_hash_table (tree base, tree len)
1938{
1939 if (!POINTER_TYPE_P (TREE_TYPE (base))
1940 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1941 return;
1942
1943 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1944
1945 if (size_in_bytes != -1)
1946 update_mem_ref_hash_table (base, size_in_bytes);
1947}
1948
25ae5027
DS
1949/* Instrument an access to a contiguous memory region that starts at
1950 the address pointed to by BASE, over a length of LEN (expressed in
1951 the sizeof (*BASE) bytes). ITER points to the instruction before
1952 which the instrumentation instructions must be inserted. LOCATION
1953 is the source location that the instrumentation instructions must
1954 have. If IS_STORE is true, then the memory access is a store;
1955 otherwise, it's a load. */
1956
1957static void
1958instrument_mem_region_access (tree base, tree len,
1959 gimple_stmt_iterator *iter,
1960 location_t location, bool is_store)
1961{
c63d3b96
JJ
1962 if (!POINTER_TYPE_P (TREE_TYPE (base))
1963 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1964 || integer_zerop (len))
25ae5027
DS
1965 return;
1966
8946c29e 1967 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
bdcbe80c 1968
bdea98ca
MO
1969 if ((size_in_bytes == -1)
1970 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1971 {
1972 build_check_stmt (location, base, len, size_in_bytes, iter,
1973 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1974 is_store, /*is_scalar_access*/false, /*align*/0);
1975 }
b41288b3 1976
bdea98ca 1977 maybe_update_mem_ref_hash_table (base, len);
b41288b3 1978 *iter = gsi_for_stmt (gsi_stmt (*iter));
bdcbe80c 1979}
25ae5027 1980
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  /* Instrumentation of memory intrinsics can be disabled entirely.  */
  if (!ASAN_MEMINTRIN)
    return false;

  bool iter_advanced_p = false;
  gcall *call = as_a <gcall *> (gsi_stmt (*iter));

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  location_t loc = gimple_location (call);

  /* Up to three memory references can be involved: two sources
     (e.g. memcmp) and a destination (e.g. memcpy).  */
  asan_mem_ref src0, src1, dest;
  asan_mem_ref_init (&src0, NULL, 1);
  asan_mem_ref_init (&src1, NULL, 1);
  asan_mem_ref_init (&dest, NULL, 1);

  tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
  bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
    dest_is_deref = false, intercepted_p = true;

  /* Decompose the builtin into its memory references; returns false
     for builtins that touch no memory we care about.  */
  if (get_mem_refs_of_builtin_call (call,
				    &src0, &src0_len, &src0_is_store,
				    &src1, &src1_len, &src1_is_store,
				    &dest, &dest_len, &dest_is_store,
				    &dest_is_deref, &intercepted_p))
    {
      if (dest_is_deref)
	{
	  /* Single scalar dereference (e.g. atomic builtins).  */
	  instrument_derefs (iter, dest.start, loc, dest_is_store);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else if (!intercepted_p
	       && (src0_len || src1_len || dest_len))
	{
	  /* Not intercepted by the runtime library: emit explicit
	     region checks for each present reference.  */
	  if (src0.start != NULL_TREE)
	    instrument_mem_region_access (src0.start, src0_len,
					  iter, loc, /*is_store=*/false);
	  if (src1.start != NULL_TREE)
	    instrument_mem_region_access (src1.start, src1_len,
					  iter, loc, /*is_store=*/false);
	  if (dest.start != NULL_TREE)
	    instrument_mem_region_access (dest.start, dest_len,
					  iter, loc, /*is_store=*/true);

	  /* Reposition on the original call, then step past it.  */
	  *iter = gsi_for_stmt (call);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else
	{
	  /* Intercepted call: the runtime checks it, but remember the
	     references so duplicate explicit checks are avoided.  */
	  if (src0.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
	  if (src1.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
	  if (dest.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
	}
    }
  return iter_advanced_p;
}
2050
2051/* Instrument the assignment statement ITER if it is subject to
bdcbe80c
DS
2052 instrumentation. Return TRUE iff instrumentation actually
2053 happened. In that case, the iterator ITER is advanced to the next
2054 logical expression following the one initially pointed to by ITER,
2055 and the relevant memory reference that which access has been
2056 instrumented is added to the memory references hash table. */
25ae5027 2057
bdcbe80c
DS
2058static bool
2059maybe_instrument_assignment (gimple_stmt_iterator *iter)
25ae5027 2060{
355fe088 2061 gimple *s = gsi_stmt (*iter);
25ae5027
DS
2062
2063 gcc_assert (gimple_assign_single_p (s));
2064
bdcbe80c
DS
2065 tree ref_expr = NULL_TREE;
2066 bool is_store, is_instrumented = false;
2067
52f2e7e1 2068 if (gimple_store_p (s))
bdcbe80c
DS
2069 {
2070 ref_expr = gimple_assign_lhs (s);
2071 is_store = true;
2072 instrument_derefs (iter, ref_expr,
2073 gimple_location (s),
2074 is_store);
2075 is_instrumented = true;
2076 }
c1f5ce48 2077
52f2e7e1 2078 if (gimple_assign_load_p (s))
bdcbe80c
DS
2079 {
2080 ref_expr = gimple_assign_rhs1 (s);
2081 is_store = false;
2082 instrument_derefs (iter, ref_expr,
2083 gimple_location (s),
2084 is_store);
2085 is_instrumented = true;
2086 }
2087
2088 if (is_instrumented)
2089 gsi_next (iter);
2090
2091 return is_instrumented;
25ae5027
DS
2092}
2093
/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple *stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  /* Before a noreturn call the stack frame may be abandoned; tell the
     runtime so it can unpoison the frame's shadow memory.  */
  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
	{
	  tree callee = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_UNREACHABLE:
	    case BUILT_IN_TRAP:
	      /* Don't instrument these.  */
	      return false;
	    default:
	      break;
	    }
	}
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple *g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }

  bool instrumented = false;
  /* A call whose LHS is a memory reference is a store into it.  */
  if (gimple_store_p (stmt))
    {
      tree ref_expr = gimple_call_lhs (stmt);
      instrument_derefs (iter, ref_expr,
			 gimple_location (stmt),
			 /*is_store=*/true);

      instrumented = true;
    }

  /* Walk through gimple_call arguments and check them if needed.  */
  unsigned args_num = gimple_call_num_args (stmt);
  for (unsigned i = 0; i < args_num; ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* If ARG is not a non-aggregate register variable, compiler in general
	 creates temporary for it and pass it as argument to gimple call.
	 But in some cases, e.g. when we pass by value a small structure that
	 fits to register, compiler can avoid extra overhead by pulling out
	 these temporaries.  In this case, we should check the argument.  */
      if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
	{
	  instrument_derefs (iter, arg,
			     gimple_location (stmt),
			     /*is_store=*/false);
	  instrumented = true;
	}
    }
  if (instrumented)
    gsi_next (iter);
  return instrumented;
}
2165
/* Walk each instruction of all basic block and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  /* Blocks created by the instrumentation itself get indices at or
     above this; they are skipped to avoid re-instrumenting.  */
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}
      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      /* The iterator is advanced either here or inside the helpers,
	 depending on whether they inserted statements.  */
      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple *s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && !gimple_clobber_p (s)
		   && maybe_instrument_assignment (&i))
	    /* Nothing to do as maybe_instrument_assignment advanced
	       the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /* Nothing to do as maybe_instrument_call
	       advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  Do the same
		 for a ASAN_MARK poisoning internal function.  */
	      if (is_gimple_call (s)
		  && (!nonfreeing_call_p (s)
		      || asan_mark_p (s, ASAN_MARK_POISON)))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}
2233
59b36ecf
JJ
2234/* Build
2235 __asan_before_dynamic_init (module_name)
2236 or
2237 __asan_after_dynamic_init ()
2238 call. */
2239
2240tree
2241asan_dynamic_init_call (bool after_p)
2242{
185faecb
JJ
2243 if (shadow_ptr_types[0] == NULL_TREE)
2244 asan_init_shadow_ptr_types ();
2245
59b36ecf
JJ
2246 tree fn = builtin_decl_implicit (after_p
2247 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2248 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2249 tree module_name_cst = NULL_TREE;
2250 if (!after_p)
2251 {
2252 pretty_printer module_name_pp;
2253 pp_string (&module_name_pp, main_input_filename);
2254
59b36ecf
JJ
2255 module_name_cst = asan_pp_string (&module_name_pp);
2256 module_name_cst = fold_convert (const_ptr_type_node,
2257 module_name_cst);
2258 }
2259
2260 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2261}
2262
/* Build
     struct __asan_global
     {
       const void *__beg;
       uptr __size;
       uptr __size_with_redzone;
       const void *__name;
       const void *__module_name;
       uptr __has_dynamic_init;
       __asan_global_source_location *__location;
       char *__odr_indicator;
     } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[]
    = { "__beg", "__size", "__size_with_redzone",
	"__name", "__module_name", "__has_dynamic_init", "__location",
	"__odr_indicator" };
  tree fields[ARRAY_SIZE (field_names)], ret;
  unsigned i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < ARRAY_SIZE (field_names); i++)
    {
      /* Only __beg (0) and __name (3) are declared const void *; all
	 remaining fields use the pointer-sized integer type, which has
	 the same size and layout as a pointer.  */
      fields[i]
	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
		      get_identifier (field_names[i]),
		      (i == 0 || i == 3) ? const_ptr_type_node
		      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  /* Name the record "__asan_global" for debug info, but keep it out
     of user-visible diagnostics.  */
  tree type_decl = build_decl (input_location, TYPE_DECL,
			       get_identifier ("__asan_global"), ret);
  DECL_IGNORED_P (type_decl) = 1;
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = type_decl;
  TYPE_STUB_DECL (ret) = type_decl;
  layout_type (ret);
  return ret;
}
2308
/* Create and return odr indicator symbol for DECL.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static tree
create_odr_indicator (tree decl, tree type)
{
  char *name;
  /* The uptr type is taken from the second field of __asan_global.  */
  tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  tree decl_name
    = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
					: DECL_NAME (decl));
  /* DECL_NAME theoretically might be NULL.  Bail out with 0 in this case.  */
  if (decl_name == NULL_TREE)
    return build_int_cst (uptr, 0);
  size_t len = strlen (IDENTIFIER_POINTER (decl_name)) + sizeof ("__odr_asan_");
  name = XALLOCAVEC (char, len);
  snprintf (name, len, "__odr_asan_%s", IDENTIFIER_POINTER (decl_name));
  /* Prefer "__odr_asan.<name>" or "__odr_asan$<name>" when the target
     allows those separator characters in labels; otherwise keep '_'.  */
#ifndef NO_DOT_IN_LABEL
  name[sizeof ("__odr_asan") - 1] = '.';
#elif !defined(NO_DOLLAR_IN_LABEL)
  name[sizeof ("__odr_asan") - 1] = '$';
#endif
  /* A single public char variable; the runtime uses its address to
     detect one-definition-rule violations across modules.  */
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
			 char_type_node);
  TREE_ADDRESSABLE (var) = 1;
  TREE_READONLY (var) = 0;
  /* Volatile keeps the optimizers from touching accesses to it.  */
  TREE_THIS_VOLATILE (var) = 1;
  DECL_GIMPLE_REG_P (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_IGNORED_P (var) = 1;
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;
  /* Mirror the visibility of the guarded declaration.  */
  DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
  DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);

  TREE_USED (var) = 1;
  tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
				    build_int_cst (unsigned_type_node, 0));
  TREE_CONSTANT (ctor) = 1;
  TREE_STATIC (ctor) = 1;
  DECL_INITIAL (var) = ctor;
  /* Tag it so other parts of the compiler can recognize it.  */
  DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
				     NULL, DECL_ATTRIBUTES (var));
  make_decl_rtl (var);
  varpool_node::finalize_decl (var);
  return fold_convert (uptr, build_fold_addr_expr (var));
}
2356
2357/* Return true if DECL, a global var, might be overridden and needs
2358 an additional odr indicator symbol. */
2359
2360static bool
2361asan_needs_odr_indicator_p (tree decl)
2362{
2363 return !DECL_ARTIFICIAL (decl) && !DECL_WEAK (decl) && TREE_PUBLIC (decl);
2364}
2365
/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.
   The constructor elements appended here must match the field order
   of that struct exactly.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  /* __name: the source name of the global, or "<unknown>".  */
  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  /* __module_name: the main input file name.  */
  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  /* Some globals are referenced through a local .LASAN alias instead
     of directly; build that alias here.  */
  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  /* __odr_indicator: address of the indicator symbol, or 0.  */
  tree odr_indicator_ptr
    = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
					 : build_int_cst (uptr, 0));
  /* __beg.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
					build_fold_addr_expr (refdecl)));
  /* __size and __size_with_redzone.  */
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  /* __name and __module_name.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, module_name_cst));
  /* __has_dynamic_init.  */
  varpool_node *vnode = varpool_node::get (decl);
  int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  build_int_cst (uptr, has_dynamic_init));
  /* __location: a static __asan_global_source_location record, or 0
     when no source file is known.  */
  tree locptr = NULL_TREE;
  location_t loc = DECL_SOURCE_LOCATION (decl);
  expanded_location xloc = expand_location (loc);
  if (xloc.file != NULL)
    {
      static int lasanloccnt = 0;
      char buf[25];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
      tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			     ubsan_get_source_location_type ());
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      pretty_printer filename_pp;
      pp_string (&filename_pp, xloc.file);
      tree str = asan_pp_string (&filename_pp);
      tree ctor = build_constructor_va (TREE_TYPE (var), 3,
					NULL_TREE, str, NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.line), NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.column));
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      locptr = fold_convert (uptr, build_fold_addr_expr (var));
    }
  else
    locptr = build_int_cst (uptr, 0);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
  /* Wrap the field values into one __asan_global record and append.  */
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
2460
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */
void
initialize_sanitizer_builtins (void)
{
  tree decl;

  /* If the first asan builtin is already registered, all of them are.  */
  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  /* Function types referenced by sanitizer.def entries.  The names
     encode return and parameter types (PTRMODE = pointer-sized int).  */
  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_SIZE_CONST_PTR_INT
    = build_function_type_list (size_type_node, const_ptr_type_node,
				integer_type_node, NULL_TREE);
  /* Families indexed by log2 of the access size: 1, 2, 4, 8, 16 bytes.  */
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						|TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
/* Size-specific aliases so sanitizer.def can name each variant.  */
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
/* Attribute lists used by sanitizer.def, expressed as ECF_* flags.  */
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
#undef DEF_BUILTIN_STUB
#define DEF_BUILTIN_STUB(ENUM, NAME)
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
			       BUILT_IN_NORMAL, NAME, NULL_TREE);	\
  set_call_expr_flags (decl, ATTRS);					\
  set_builtin_decl (ENUM, decl, true);

/* Registers every builtin listed in sanitizer.def via the macro above.  */
#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_object_size, but that might
     not be available for e.g. Fortran at this point.  We use
     DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
  if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
      && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
    DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
			   BT_FN_SIZE_CONST_PTR_INT,
			   ATTR_PURE_NOTHROW_LEAF_LIST)

#undef DEF_SANITIZER_BUILTIN
#undef DEF_BUILTIN_STUB
}
2585
94fce891
JJ
2586/* Called via htab_traverse. Count number of emitted
2587 STRING_CSTs in the constant hash table. */
2588
2a22f99c
TS
2589int
2590count_string_csts (constant_descriptor_tree **slot,
2591 unsigned HOST_WIDE_INT *data)
94fce891 2592{
2a22f99c 2593 struct constant_descriptor_tree *desc = *slot;
94fce891
JJ
2594 if (TREE_CODE (desc->value) == STRING_CST
2595 && TREE_ASM_WRITTEN (desc->value)
2596 && asan_protect_global (desc->value))
2a22f99c 2597 ++*data;
94fce891
JJ
2598 return 1;
2599}
2600
/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;                       /* __asan_global struct type.  */
  vec<constructor_elt, va_gc> *v;  /* Output vector of descriptors.  */
};
2609
2a22f99c 2610/* Called via hash_table::traverse. Call asan_add_global
94fce891
JJ
2611 on emitted STRING_CSTs from the constant hash table. */
2612
2a22f99c
TS
2613int
2614add_string_csts (constant_descriptor_tree **slot,
2615 asan_add_string_csts_data *aascd)
94fce891 2616{
2a22f99c 2617 struct constant_descriptor_tree *desc = *slot;
94fce891
JJ
2618 if (TREE_CODE (desc->value) == STRING_CST
2619 && TREE_ASM_WRITTEN (desc->value)
2620 && asan_protect_global (desc->value))
2621 {
94fce891
JJ
2622 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2623 aascd->type, aascd->v);
2624 }
2625 return 1;
2626}
2627
8240018b
JJ
/* Statement list accumulating the calls (e.g. __asan_init,
   __asan_register_globals) that asan_finish_file emits into the
   module's 'I' static constructor.
   Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;
2631
/* Module-level instrumentation.
   - Insert __asan_init_vN() into the list of CTORs.
   - Build the .LASAN0 array of global descriptors for every protected
     defined variable and emitted string constant, and register /
     unregister it through module CTOR/DTOR calls.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  /* For user-space we want asan constructors to run first.
     Linux kernel does not support priorities other than default, and the only
     other user of constructors is coverage. So we run with the default
     priority.  */
  int priority = flag_sanitize & SANITIZE_USER_ADDRESS
		 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;

  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    {
      /* Queue __asan_init and the version-mismatch check at the head of
	 the module constructor statements.  */
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
      fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
    }
  /* First pass: count protected globals (defined variables plus emitted
     string constants) so the descriptor array can be sized exactly.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
  const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
    (&gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      /* Build the static .LASAN0 array holding one descriptor per
	 protected global.  */
      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      /* Second pass: fill the constructor-element vector in the same
	 order as the counting pass above.  */
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
	(&aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);

      /* CTOR: __asan_register_globals (&.LASAN0, gcount).  */
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      /* DTOR: __asan_unregister_globals (&.LASAN0, gcount).  */
      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements, priority);
    }
  if (asan_ctor_statements)
    cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
  /* Re-enable address sanitization for subsequently processed code.  */
  flag_sanitize |= SANITIZE_ADDRESS;
}
2720
6dc4a604
ML
2721/* Poison or unpoison (depending on IS_CLOBBER variable) shadow memory based
2722 on SHADOW address. Newly added statements will be added to ITER with
2723 given location LOC. We mark SIZE bytes in shadow memory, where
2724 LAST_CHUNK_SIZE is greater than zero in situation where we are at the
2725 end of a variable. */
2726
2727static void
2728asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
2729 tree shadow,
2730 unsigned HOST_WIDE_INT base_addr_offset,
2731 bool is_clobber, unsigned size,
2732 unsigned last_chunk_size)
2733{
2734 tree shadow_ptr_type;
2735
2736 switch (size)
2737 {
2738 case 1:
2739 shadow_ptr_type = shadow_ptr_types[0];
2740 break;
2741 case 2:
2742 shadow_ptr_type = shadow_ptr_types[1];
2743 break;
2744 case 4:
2745 shadow_ptr_type = shadow_ptr_types[2];
2746 break;
2747 default:
2748 gcc_unreachable ();
2749 }
2750
2751 unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
2752 unsigned HOST_WIDE_INT val = 0;
2753 for (unsigned i = 0; i < size; ++i)
2754 {
2755 unsigned char shadow_c = c;
2756 if (i == size - 1 && last_chunk_size && !is_clobber)
2757 shadow_c = last_chunk_size;
2758 val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
2759 }
2760
2761 /* Handle last chunk in unpoisoning. */
2762 tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
2763
2764 tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
2765 build_int_cst (shadow_ptr_type, base_addr_offset));
2766
2767 gimple *g = gimple_build_assign (dest, magic);
2768 gimple_set_location (g, loc);
2769 gsi_insert_after (iter, g, GSI_NEW_STMT);
2770}
2771
/* Expand the ASAN_MARK builtins.  Lower an ASAN_MARK (FLAG, &DECL, LEN)
   internal call at ITER into shadow-memory stores (small sizes) or a
   call to __asan_poison/__asan_unpoison runtime helpers (large sizes).
   Returns false, i.e. the caller should not re-visit the replaced
   statement.  */

bool
asan_expand_mark_ifn (gimple_stmt_iterator *iter)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  /* Arg 0 is the poison/unpoison flag.  */
  HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
  bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;

  /* Arg 1 is the address of the variable being marked.  */
  tree base = gimple_call_arg (g, 1);
  gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
  tree decl = TREE_OPERAND (base, 0);

  /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
  if (TREE_CODE (decl) == COMPONENT_REF
      && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
    decl = TREE_OPERAND (decl, 0);

  gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
  /* Record the variable so later phases know it was use-after-scope
     instrumented here.  */
  if (asan_handled_variables == NULL)
    asan_handled_variables = new hash_set<tree> (16);
  asan_handled_variables->add (decl);
  /* Arg 2 is the constant byte length of the marked region.  */
  tree len = gimple_call_arg (g, 2);

  gcc_assert (tree_fits_shwi_p (len));
  unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
  gcc_assert (size_in_bytes);

  /* Replace the ASAN_MARK call itself with base_addr = (uintptr) base.  */
  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_replace (iter, g, false);
  tree base_addr = gimple_assign_lhs (g);

  /* Generate direct emission if size_in_bytes is small.  */
  if (size_in_bytes <= ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD)
    {
      unsigned HOST_WIDE_INT shadow_size = shadow_mem_size (size_in_bytes);

      tree shadow = build_shadow_mem_access (iter, loc, base_addr,
					     shadow_ptr_types[0], true);

      /* Emit the widest available stores (4, then 2, then 1 byte) to
	 cover the whole shadow region.  */
      for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
	{
	  unsigned size = 1;
	  if (shadow_size - offset >= 4)
	    size = 4;
	  else if (shadow_size - offset >= 2)
	    size = 2;

	  /* If this store covers the end of the variable and the variable
	     does not fill its last shadow granule, compute how many bytes
	     of that granule are addressable.  */
	  unsigned HOST_WIDE_INT last_chunk_size = 0;
	  unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
	  if (s > size_in_bytes)
	    last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);

	  asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
				   size, last_chunk_size);
	  offset += size;
	}
    }
  else
    {
      /* Large region: call the N-byte runtime helper instead of emitting
	 inline shadow stores.  */
      g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			       NOP_EXPR, len);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree sz_arg = gimple_assign_lhs (g);

      tree fun = builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_CLOBBER_N
					: BUILT_IN_ASAN_UNCLOBBER_N);
      g = gimple_build_call (fun, 2, base_addr, sz_arg);
      gimple_set_location (g, loc);
      gsi_insert_after (iter, g, GSI_NEW_STMT);
    }

  return false;
}
2850
c62ccb9a
YG
/* Expand the ASAN_{LOAD,STORE} builtins.  Lower an internal
   ASAN_CHECK (FLAGS, BASE, LEN, ALIGN) call at ITER either into an
   outlined callback (USE_CALLS) or into an inline shadow-memory test
   that branches to a __asan_report_* call on failure.  Returns true
   iff the CFG was split and *ITER was repositioned to the start of the
   fallthrough block.  */

bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  /* Whether execution may continue after reporting; forwarded to
     check_func / report_error_func to select the runtime entry point.  */
  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;

  /* Decode the ASAN_CHECK_* flag bits from arg 0.  */
  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);
  HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));

  /* -1 means the access size is not a compile-time constant scalar
     size; the slow first/last byte checks below handle that case.  */
  HOST_WIDE_INT size_in_bytes
    = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (use_calls)
    {
      /* Instrument using callbacks.  */
      gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				       NOP_EXPR, base);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree base_addr = gimple_assign_lhs (g);

      int nargs;
      tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
      if (nargs == 1)
	g = gimple_build_call (fun, 1, base_addr);
      else
	{
	  /* N-byte variant takes (addr, size).  */
	  gcc_assert (nargs == 2);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   NOP_EXPR, len);
	  gimple_set_location (g, loc);
	  gsi_insert_before (iter, g, GSI_SAME_STMT);
	  tree sz_arg = gimple_assign_lhs (g);
	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
	}
      gimple_set_location (g, loc);
      gsi_replace (iter, g, false);
      return false;
    }

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     //asan instrumentation code goes here.
	   }
	 // falltrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
			     len,
			     build_int_cst (TREE_TYPE (len), 0),
			     NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
				  /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0) condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, /*before_p*/false,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge*/recover_p,
				  &then_bb,
				  &else_bb);

  /* base_addr = (uintptr_t) base;  */
  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  /* T accumulates the boolean "report an error" condition.  */
  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      /* 8- and 16-byte accesses: any nonzero shadow byte is a fault.  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1, 2 and 4 byte accesses.  */
      /* Test (shadow != 0)
	 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow).  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq seq = NULL;
      gimple_seq_add_stmt (&seq, shadow_test);
      /* Aligned (>= 8 bytes) can test just
	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
	 to be 0.  */
      if (align < 8)
	{
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_addr, 7));
	  gimple_seq_add_stmt (&seq,
			       build_type_cast (shadow_type,
						gimple_seq_last (seq)));
	  if (real_size_in_bytes > 1)
	    gimple_seq_add_stmt (&seq,
				 build_assign (PLUS_EXPR,
					       gimple_seq_last (seq),
					       real_size_in_bytes - 1));
	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
	}
      else
	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
					       gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, loc);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

      /* For non-constant, misaligned or otherwise weird access sizes,
	 check first and last byte.  */
      if (size_in_bytes == -1)
	{
	  /* base_end_addr = base_addr + len - 1; then run the same
	     shadow test on the last byte and OR it into T.  */
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   MINUS_EXPR, len,
				   build_int_cst (pointer_sized_int_node, 1));
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree last = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   PLUS_EXPR, base_addr, last);
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree base_end_addr = gimple_assign_lhs (g);

	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
						 shadow_ptr_type);
	  gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq seq = NULL;
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_end_addr, 7));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   shadow));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
						   gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, loc);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	}
    }

  /* if (t != 0) goto then_bb (report); else goto else_bb.  */
  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Drop the original ASAN_CHECK call and resume scanning in the
     no-error block.  */
  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}
3056
37d6f666
WM
3057/* Instrument the current function. */
3058
3059static unsigned int
3060asan_instrument (void)
3061{
f6d98484 3062 if (shadow_ptr_types[0] == NULL_TREE)
94fce891 3063 asan_init_shadow_ptr_types ();
37d6f666 3064 transform_statements ();
37d6f666
WM
3065 return 0;
3066}
3067
3068static bool
3069gate_asan (void)
3070{
de5a5fa1 3071 return (flag_sanitize & SANITIZE_ADDRESS) != 0
e664c61c 3072 && !lookup_attribute ("no_sanitize_address",
77bc5132 3073 DECL_ATTRIBUTES (current_function_decl));
37d6f666
WM
3074}
3075
27a4cd48
DM
namespace {

/* Pass descriptor for the main asan pass, run with optimization
   enabled (see pass_asan_O0 below for the -O0 variant).  */
const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

/* GIMPLE pass wrapper: gates on gate_asan and runs asan_instrument
   over each function.  Cloneable so it can appear multiple times in
   the pass pipeline.  */
class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace
3106
3107gimple_opt_pass *
3108make_pass_asan (gcc::context *ctxt)
3109{
3110 return new pass_asan (ctxt);
3111}
3112
27a4cd48
DM
namespace {

/* Pass descriptor for the -O0 variant of the asan pass.  */
const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

/* Same instrumentation as pass_asan, but gated to run only when
   optimization is off (!optimize), so exactly one of the two passes
   fires for any function.  */
class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace
3142
3143gimple_opt_pass *
3144make_pass_asan_O0 (gcc::context *ctxt)
3145{
3146 return new pass_asan_O0 (ctxt);
3147}
3148
f6d98484 3149#include "gt-asan.h"