]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/asan.c
libsanitizer merge from upstream r250806.
[thirdparty/gcc.git] / gcc / asan.c
CommitLineData
37d6f666 1/* AddressSanitizer, a fast memory error detector.
5624e564 2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
37d6f666
WM
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
40e23961 25#include "alias.h"
c7131fb2 26#include "backend.h"
9fdcd34e 27#include "cfghooks.h"
4d648807 28#include "tree.h"
c7131fb2
AM
29#include "gimple.h"
30#include "rtl.h"
31#include "options.h"
40e23961 32#include "fold-const.h"
60393bbc 33#include "cfganal.h"
2fb9a547 34#include "internal-fn.h"
45b0be94 35#include "gimplify.h"
5be5c238 36#include "gimple-iterator.h"
d8a2d370
DN
37#include "calls.h"
38#include "varasm.h"
39#include "stor-layout.h"
37d6f666 40#include "tree-iterator.h"
442b4905 41#include "cgraph.h"
d8a2d370 42#include "stringpool.h"
442b4905 43#include "tree-ssanames.h"
37d6f666 44#include "tree-pass.h"
37d6f666
WM
45#include "asan.h"
46#include "gimple-pretty-print.h"
dfe06d3e 47#include "target.h"
36566b39 48#include "flags.h"
36566b39
PK
49#include "insn-config.h"
50#include "expmed.h"
51#include "dojump.h"
52#include "explow.h"
53#include "emit-rtl.h"
54#include "stmt.h"
f3ddd692 55#include "expr.h"
b0710fe1 56#include "insn-codes.h"
f3ddd692 57#include "optabs.h"
8240018b 58#include "output.h"
7f71fad9 59#include "tm_p.h"
0e668eaf 60#include "langhooks.h"
bdcbe80c 61#include "alloc-pool.h"
a9e0d843 62#include "cfgloop.h"
ff2a63a7 63#include "gimple-builder.h"
b9a55b13 64#include "ubsan.h"
b5ebc991 65#include "params.h"
9b2b7279 66#include "builtins.h"
860503d8 67#include "fnmatch.h"
37d6f666 68
497a1c66
JJ
69/* AddressSanitizer finds out-of-bounds and use-after-free bugs
70 with <2x slowdown on average.
71
72 The tool consists of two parts:
73 instrumentation module (this file) and a run-time library.
74 The instrumentation module adds a run-time check before every memory insn.
75 For a 8- or 16- byte load accessing address X:
76 ShadowAddr = (X >> 3) + Offset
77 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
78 if (ShadowValue)
79 __asan_report_load8(X);
80 For a load of N bytes (N=1, 2 or 4) from address X:
81 ShadowAddr = (X >> 3) + Offset
82 ShadowValue = *(char*)ShadowAddr;
83 if (ShadowValue)
84 if ((X & 7) + N - 1 > ShadowValue)
85 __asan_report_loadN(X);
86 Stores are instrumented similarly, but using __asan_report_storeN functions.
ef1b3fda
KS
 87 A call to __asan_init_vN() is inserted to the list of module CTORs.
88 N is the version number of the AddressSanitizer API. The changes between the
89 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
497a1c66
JJ
90
91 The run-time library redefines malloc (so that redzone are inserted around
92 the allocated memory) and free (so that reuse of free-ed memory is delayed),
ef1b3fda 93 provides __asan_report* and __asan_init_vN functions.
497a1c66
JJ
94
95 Read more:
96 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
97
98 The current implementation supports detection of out-of-bounds and
99 use-after-free in the heap, on the stack and for global variables.
100
101 [Protection of stack variables]
102
103 To understand how detection of out-of-bounds and use-after-free works
104 for stack variables, lets look at this example on x86_64 where the
105 stack grows downward:
f3ddd692
JJ
106
107 int
108 foo ()
109 {
110 char a[23] = {0};
111 int b[2] = {0};
112
113 a[5] = 1;
114 b[1] = 2;
115
116 return a[5] + b[1];
117 }
118
497a1c66
JJ
119 For this function, the stack protected by asan will be organized as
120 follows, from the top of the stack to the bottom:
f3ddd692 121
497a1c66 122 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
f3ddd692 123
497a1c66
JJ
124 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
125 the next slot be 32 bytes aligned; this one is called Partial
126 Redzone; this 32 bytes alignment is an asan constraint]
f3ddd692 127
497a1c66 128 Slot 3/ [24 bytes for variable 'a']
f3ddd692 129
497a1c66 130 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
f3ddd692 131
497a1c66 132 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
f3ddd692 133
497a1c66 134 Slot 6/ [8 bytes for variable 'b']
f3ddd692 135
497a1c66
JJ
136 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
137 'LEFT RedZone']
f3ddd692 138
497a1c66
JJ
139 The 32 bytes of LEFT red zone at the bottom of the stack can be
140 decomposed as such:
f3ddd692
JJ
141
142 1/ The first 8 bytes contain a magical asan number that is always
143 0x41B58AB3.
144
145 2/ The following 8 bytes contains a pointer to a string (to be
146 parsed at runtime by the runtime asan library), which format is
147 the following:
148
149 "<function-name> <space> <num-of-variables-on-the-stack>
150 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
151 <length-of-var-in-bytes> ){n} "
152
153 where '(...){n}' means the content inside the parenthesis occurs 'n'
154 times, with 'n' being the number of variables on the stack.
c1f5ce48 155
ef1b3fda
KS
156 3/ The following 8 bytes contain the PC of the current function which
157 will be used by the run-time library to print an error message.
f3ddd692 158
ef1b3fda 159 4/ The following 8 bytes are reserved for internal use by the run-time.
f3ddd692 160
497a1c66 161 The shadow memory for that stack layout is going to look like this:
f3ddd692
JJ
162
163 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
164 The F1 byte pattern is a magic number called
165 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
166 the memory for that shadow byte is part of a the LEFT red zone
167 intended to seat at the bottom of the variables on the stack.
168
169 - content of shadow memory 8 bytes for slots 6 and 5:
170 0xF4F4F400. The F4 byte pattern is a magic number
171 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
172 memory region for this shadow byte is a PARTIAL red zone
173 intended to pad a variable A, so that the slot following
174 {A,padding} is 32 bytes aligned.
175
176 Note that the fact that the least significant byte of this
177 shadow memory content is 00 means that 8 bytes of its
178 corresponding memory (which corresponds to the memory of
179 variable 'b') is addressable.
180
181 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
182 The F2 byte pattern is a magic number called
183 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
184 region for this shadow byte is a MIDDLE red zone intended to
185 seat between two 32 aligned slots of {variable,padding}.
186
187 - content of shadow memory 8 bytes for slot 3 and 2:
497a1c66 188 0xF4000000. This represents is the concatenation of
f3ddd692
JJ
189 variable 'a' and the partial red zone following it, like what we
190 had for variable 'b'. The least significant 3 bytes being 00
191 means that the 3 bytes of variable 'a' are addressable.
192
497a1c66 193 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
f3ddd692
JJ
194 The F3 byte pattern is a magic number called
195 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
196 region for this shadow byte is a RIGHT red zone intended to seat
197 at the top of the variables of the stack.
198
497a1c66
JJ
199 Note that the real variable layout is done in expand_used_vars in
200 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
201 stack variables as well as the different red zones, emits some
202 prologue code to populate the shadow memory as to poison (mark as
203 non-accessible) the regions of the red zones and mark the regions of
204 stack variables as accessible, and emit some epilogue code to
205 un-poison (mark as accessible) the regions of red zones right before
206 the function exits.
8240018b 207
497a1c66 208 [Protection of global variables]
8240018b 209
497a1c66
JJ
210 The basic idea is to insert a red zone between two global variables
211 and install a constructor function that calls the asan runtime to do
212 the populating of the relevant shadow memory regions at load time.
8240018b 213
497a1c66
JJ
214 So the global variables are laid out as to insert a red zone between
215 them. The size of the red zones is so that each variable starts on a
216 32 bytes boundary.
8240018b 217
497a1c66
JJ
218 Then a constructor function is installed so that, for each global
219 variable, it calls the runtime asan library function
 220 __asan_register_globals with an instance of this type:
8240018b
JJ
221
222 struct __asan_global
223 {
224 // Address of the beginning of the global variable.
225 const void *__beg;
226
227 // Initial size of the global variable.
228 uptr __size;
229
230 // Size of the global variable + size of the red zone. This
231 // size is 32 bytes aligned.
232 uptr __size_with_redzone;
233
234 // Name of the global variable.
235 const void *__name;
236
ef1b3fda
KS
237 // Name of the module where the global variable is declared.
238 const void *__module_name;
239
59b36ecf 240 // 1 if it has dynamic initialization, 0 otherwise.
8240018b 241 uptr __has_dynamic_init;
866e32ad
KS
242
243 // A pointer to struct that contains source location, could be NULL.
244 __asan_global_source_location *__location;
8240018b
JJ
245 }
246
497a1c66
JJ
247 A destructor function that calls the runtime asan library function
 248 __asan_unregister_globals is also installed. */
f3ddd692 249
fd960af2
YG
250static unsigned HOST_WIDE_INT asan_shadow_offset_value;
251static bool asan_shadow_offset_computed;
860503d8 252static vec<char *> sanitized_sections;
fd960af2
YG
253
254/* Sets shadow offset to value in string VAL. */
255
256bool
257set_asan_shadow_offset (const char *val)
258{
259 char *endp;
c1f5ce48 260
fd960af2
YG
261 errno = 0;
262#ifdef HAVE_LONG_LONG
263 asan_shadow_offset_value = strtoull (val, &endp, 0);
264#else
265 asan_shadow_offset_value = strtoul (val, &endp, 0);
266#endif
267 if (!(*val != '\0' && *endp == '\0' && errno == 0))
268 return false;
269
270 asan_shadow_offset_computed = true;
271
272 return true;
273}
274
18af8d16
YG
275/* Set list of user-defined sections that need to be sanitized. */
276
277void
860503d8 278set_sanitized_sections (const char *sections)
18af8d16 279{
860503d8
YG
280 char *pat;
281 unsigned i;
282 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
283 free (pat);
284 sanitized_sections.truncate (0);
285
286 for (const char *s = sections; *s; )
287 {
288 const char *end;
289 for (end = s; *end && *end != ','; ++end);
290 size_t len = end - s;
291 sanitized_sections.safe_push (xstrndup (s, len));
292 s = *end ? end + 1 : end;
293 }
18af8d16
YG
294}
295
296/* Checks whether section SEC should be sanitized. */
297
298static bool
299section_sanitized_p (const char *sec)
300{
860503d8
YG
301 char *pat;
302 unsigned i;
303 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
304 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
305 return true;
18af8d16
YG
306 return false;
307}
308
fd960af2
YG
309/* Returns Asan shadow offset. */
310
311static unsigned HOST_WIDE_INT
312asan_shadow_offset ()
313{
314 if (!asan_shadow_offset_computed)
315 {
316 asan_shadow_offset_computed = true;
317 asan_shadow_offset_value = targetm.asan_shadow_offset ();
318 }
319 return asan_shadow_offset_value;
320}
321
f3ddd692 322alias_set_type asan_shadow_set = -1;
37d6f666 323
f6d98484
JJ
324/* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
325 alias set is used for all shadow memory accesses. */
326static GTY(()) tree shadow_ptr_types[2];
327
e361382f
JJ
328/* Decl for __asan_option_detect_stack_use_after_return. */
329static GTY(()) tree asan_detect_stack_use_after_return;
330
c62ccb9a
YG
331/* Various flags for Asan builtins. */
332enum asan_check_flags
8946c29e 333{
c62ccb9a
YG
334 ASAN_CHECK_STORE = 1 << 0,
335 ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
336 ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
bdea98ca 337 ASAN_CHECK_LAST = 1 << 3
c62ccb9a 338};
8946c29e 339
bdcbe80c
DS
340/* Hashtable support for memory references used by gimple
341 statements. */
342
343/* This type represents a reference to a memory region. */
344struct asan_mem_ref
345{
688010ba 346 /* The expression of the beginning of the memory region. */
bdcbe80c
DS
347 tree start;
348
40f9f6bb
JJ
349 /* The size of the access. */
350 HOST_WIDE_INT access_size;
c1f5ce48
ML
351};
352
fcb87c50 353object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
bdcbe80c
DS
354
355/* Initializes an instance of asan_mem_ref. */
356
357static void
40f9f6bb 358asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
bdcbe80c
DS
359{
360 ref->start = start;
361 ref->access_size = access_size;
362}
363
364/* Allocates memory for an instance of asan_mem_ref into the memory
365 pool returned by asan_mem_ref_get_alloc_pool and initialize it.
366 START is the address of (or the expression pointing to) the
367 beginning of memory reference. ACCESS_SIZE is the size of the
368 access to the referenced memory. */
369
370static asan_mem_ref*
40f9f6bb 371asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
bdcbe80c 372{
fb0b2914 373 asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
bdcbe80c
DS
374
375 asan_mem_ref_init (ref, start, access_size);
376 return ref;
377}
378
379/* This builds and returns a pointer to the end of the memory region
380 that starts at START and of length LEN. */
381
382tree
383asan_mem_ref_get_end (tree start, tree len)
384{
385 if (len == NULL_TREE || integer_zerop (len))
386 return start;
387
a2f581e1
YG
388 if (!ptrofftype_p (len))
389 len = convert_to_ptrofftype (len);
390
bdcbe80c
DS
391 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
392}
393
394/* Return a tree expression that represents the end of the referenced
395 memory region. Beware that this function can actually build a new
396 tree expression. */
397
398tree
399asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
400{
401 return asan_mem_ref_get_end (ref->start, len);
402}
403
8d67ee55 404struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
bdcbe80c 405{
67f58944
TS
406 static inline hashval_t hash (const asan_mem_ref *);
407 static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
bdcbe80c
DS
408};
409
410/* Hash a memory reference. */
411
412inline hashval_t
413asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
414{
bdea98ca 415 return iterative_hash_expr (mem_ref->start, 0);
bdcbe80c
DS
416}
417
418/* Compare two memory references. We accept the length of either
419 memory references to be NULL_TREE. */
420
421inline bool
422asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
423 const asan_mem_ref *m2)
424{
bdea98ca 425 return operand_equal_p (m1->start, m2->start, 0);
bdcbe80c
DS
426}
427
c203e8a7 428static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
bdcbe80c
DS
429
430/* Returns a reference to the hash table containing memory references.
431 This function ensures that the hash table is created. Note that
432 this hash table is updated by the function
433 update_mem_ref_hash_table. */
434
c203e8a7 435static hash_table<asan_mem_ref_hasher> *
bdcbe80c
DS
436get_mem_ref_hash_table ()
437{
c203e8a7
TS
438 if (!asan_mem_ref_ht)
439 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
bdcbe80c
DS
440
441 return asan_mem_ref_ht;
442}
443
444/* Clear all entries from the memory references hash table. */
445
446static void
447empty_mem_ref_hash_table ()
448{
c203e8a7
TS
449 if (asan_mem_ref_ht)
450 asan_mem_ref_ht->empty ();
bdcbe80c
DS
451}
452
453/* Free the memory references hash table. */
454
455static void
456free_mem_ref_resources ()
457{
c203e8a7
TS
458 delete asan_mem_ref_ht;
459 asan_mem_ref_ht = NULL;
bdcbe80c 460
fb0b2914 461 asan_mem_ref_pool.release ();
bdcbe80c
DS
462}
463
464/* Return true iff the memory reference REF has been instrumented. */
465
466static bool
40f9f6bb 467has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
bdcbe80c
DS
468{
469 asan_mem_ref r;
470 asan_mem_ref_init (&r, ref, access_size);
471
bdea98ca
MO
472 asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
473 return saved_ref && saved_ref->access_size >= access_size;
bdcbe80c
DS
474}
475
476/* Return true iff the memory reference REF has been instrumented. */
477
478static bool
479has_mem_ref_been_instrumented (const asan_mem_ref *ref)
480{
481 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
482}
483
484/* Return true iff access to memory region starting at REF and of
485 length LEN has been instrumented. */
486
487static bool
488has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
489{
bdea98ca
MO
490 HOST_WIDE_INT size_in_bytes
491 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
bdcbe80c 492
bdea98ca
MO
493 return size_in_bytes != -1
494 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
bdcbe80c
DS
495}
496
497/* Set REF to the memory reference present in a gimple assignment
498 ASSIGNMENT. Return true upon successful completion, false
499 otherwise. */
500
501static bool
538dd0b7 502get_mem_ref_of_assignment (const gassign *assignment,
bdcbe80c
DS
503 asan_mem_ref *ref,
504 bool *ref_is_store)
505{
506 gcc_assert (gimple_assign_single_p (assignment));
507
5d751b0c
JJ
508 if (gimple_store_p (assignment)
509 && !gimple_clobber_p (assignment))
bdcbe80c
DS
510 {
511 ref->start = gimple_assign_lhs (assignment);
512 *ref_is_store = true;
513 }
514 else if (gimple_assign_load_p (assignment))
515 {
516 ref->start = gimple_assign_rhs1 (assignment);
517 *ref_is_store = false;
518 }
519 else
520 return false;
521
522 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
523 return true;
524}
525
526/* Return the memory references contained in a gimple statement
527 representing a builtin call that has to do with memory access. */
528
529static bool
538dd0b7 530get_mem_refs_of_builtin_call (const gcall *call,
bdcbe80c
DS
531 asan_mem_ref *src0,
532 tree *src0_len,
533 bool *src0_is_store,
534 asan_mem_ref *src1,
535 tree *src1_len,
536 bool *src1_is_store,
537 asan_mem_ref *dst,
538 tree *dst_len,
539 bool *dst_is_store,
bdea98ca
MO
540 bool *dest_is_deref,
541 bool *intercepted_p)
bdcbe80c
DS
542{
543 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
544
545 tree callee = gimple_call_fndecl (call);
546 tree source0 = NULL_TREE, source1 = NULL_TREE,
547 dest = NULL_TREE, len = NULL_TREE;
548 bool is_store = true, got_reference_p = false;
40f9f6bb 549 HOST_WIDE_INT access_size = 1;
bdcbe80c 550
bdea98ca
MO
551 *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
552
bdcbe80c
DS
553 switch (DECL_FUNCTION_CODE (callee))
554 {
555 /* (s, s, n) style memops. */
556 case BUILT_IN_BCMP:
557 case BUILT_IN_MEMCMP:
558 source0 = gimple_call_arg (call, 0);
559 source1 = gimple_call_arg (call, 1);
560 len = gimple_call_arg (call, 2);
561 break;
562
563 /* (src, dest, n) style memops. */
564 case BUILT_IN_BCOPY:
565 source0 = gimple_call_arg (call, 0);
566 dest = gimple_call_arg (call, 1);
567 len = gimple_call_arg (call, 2);
568 break;
569
570 /* (dest, src, n) style memops. */
571 case BUILT_IN_MEMCPY:
572 case BUILT_IN_MEMCPY_CHK:
573 case BUILT_IN_MEMMOVE:
574 case BUILT_IN_MEMMOVE_CHK:
575 case BUILT_IN_MEMPCPY:
576 case BUILT_IN_MEMPCPY_CHK:
577 dest = gimple_call_arg (call, 0);
578 source0 = gimple_call_arg (call, 1);
579 len = gimple_call_arg (call, 2);
580 break;
581
582 /* (dest, n) style memops. */
583 case BUILT_IN_BZERO:
584 dest = gimple_call_arg (call, 0);
585 len = gimple_call_arg (call, 1);
586 break;
587
588 /* (dest, x, n) style memops*/
589 case BUILT_IN_MEMSET:
590 case BUILT_IN_MEMSET_CHK:
591 dest = gimple_call_arg (call, 0);
592 len = gimple_call_arg (call, 2);
593 break;
594
595 case BUILT_IN_STRLEN:
596 source0 = gimple_call_arg (call, 0);
597 len = gimple_call_lhs (call);
598 break ;
599
600 /* And now the __atomic* and __sync builtins.
601 These are handled differently from the classical memory memory
602 access builtins above. */
603
604 case BUILT_IN_ATOMIC_LOAD_1:
605 case BUILT_IN_ATOMIC_LOAD_2:
606 case BUILT_IN_ATOMIC_LOAD_4:
607 case BUILT_IN_ATOMIC_LOAD_8:
608 case BUILT_IN_ATOMIC_LOAD_16:
609 is_store = false;
610 /* fall through. */
611
612 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
613 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
614 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
615 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
616 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
617
618 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
619 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
620 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
621 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
622 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
623
624 case BUILT_IN_SYNC_FETCH_AND_OR_1:
625 case BUILT_IN_SYNC_FETCH_AND_OR_2:
626 case BUILT_IN_SYNC_FETCH_AND_OR_4:
627 case BUILT_IN_SYNC_FETCH_AND_OR_8:
628 case BUILT_IN_SYNC_FETCH_AND_OR_16:
629
630 case BUILT_IN_SYNC_FETCH_AND_AND_1:
631 case BUILT_IN_SYNC_FETCH_AND_AND_2:
632 case BUILT_IN_SYNC_FETCH_AND_AND_4:
633 case BUILT_IN_SYNC_FETCH_AND_AND_8:
634 case BUILT_IN_SYNC_FETCH_AND_AND_16:
635
636 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
637 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
638 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
639 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
640 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
641
642 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
643 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
644 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
645 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
646
647 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
648 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
649 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
650 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
651 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
652
653 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
654 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
655 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
656 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
657 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
658
659 case BUILT_IN_SYNC_OR_AND_FETCH_1:
660 case BUILT_IN_SYNC_OR_AND_FETCH_2:
661 case BUILT_IN_SYNC_OR_AND_FETCH_4:
662 case BUILT_IN_SYNC_OR_AND_FETCH_8:
663 case BUILT_IN_SYNC_OR_AND_FETCH_16:
664
665 case BUILT_IN_SYNC_AND_AND_FETCH_1:
666 case BUILT_IN_SYNC_AND_AND_FETCH_2:
667 case BUILT_IN_SYNC_AND_AND_FETCH_4:
668 case BUILT_IN_SYNC_AND_AND_FETCH_8:
669 case BUILT_IN_SYNC_AND_AND_FETCH_16:
670
671 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
672 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
673 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
674 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
675 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
676
677 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
678 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
679 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
680 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
681
682 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
683 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
684 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
685 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
686 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
687
688 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
689 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
690 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
691 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
692 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
693
694 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
695 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
696 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
697 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
698 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
699
700 case BUILT_IN_SYNC_LOCK_RELEASE_1:
701 case BUILT_IN_SYNC_LOCK_RELEASE_2:
702 case BUILT_IN_SYNC_LOCK_RELEASE_4:
703 case BUILT_IN_SYNC_LOCK_RELEASE_8:
704 case BUILT_IN_SYNC_LOCK_RELEASE_16:
705
706 case BUILT_IN_ATOMIC_EXCHANGE_1:
707 case BUILT_IN_ATOMIC_EXCHANGE_2:
708 case BUILT_IN_ATOMIC_EXCHANGE_4:
709 case BUILT_IN_ATOMIC_EXCHANGE_8:
710 case BUILT_IN_ATOMIC_EXCHANGE_16:
711
712 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
713 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
714 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
715 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
716 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
717
718 case BUILT_IN_ATOMIC_STORE_1:
719 case BUILT_IN_ATOMIC_STORE_2:
720 case BUILT_IN_ATOMIC_STORE_4:
721 case BUILT_IN_ATOMIC_STORE_8:
722 case BUILT_IN_ATOMIC_STORE_16:
723
724 case BUILT_IN_ATOMIC_ADD_FETCH_1:
725 case BUILT_IN_ATOMIC_ADD_FETCH_2:
726 case BUILT_IN_ATOMIC_ADD_FETCH_4:
727 case BUILT_IN_ATOMIC_ADD_FETCH_8:
728 case BUILT_IN_ATOMIC_ADD_FETCH_16:
729
730 case BUILT_IN_ATOMIC_SUB_FETCH_1:
731 case BUILT_IN_ATOMIC_SUB_FETCH_2:
732 case BUILT_IN_ATOMIC_SUB_FETCH_4:
733 case BUILT_IN_ATOMIC_SUB_FETCH_8:
734 case BUILT_IN_ATOMIC_SUB_FETCH_16:
735
736 case BUILT_IN_ATOMIC_AND_FETCH_1:
737 case BUILT_IN_ATOMIC_AND_FETCH_2:
738 case BUILT_IN_ATOMIC_AND_FETCH_4:
739 case BUILT_IN_ATOMIC_AND_FETCH_8:
740 case BUILT_IN_ATOMIC_AND_FETCH_16:
741
742 case BUILT_IN_ATOMIC_NAND_FETCH_1:
743 case BUILT_IN_ATOMIC_NAND_FETCH_2:
744 case BUILT_IN_ATOMIC_NAND_FETCH_4:
745 case BUILT_IN_ATOMIC_NAND_FETCH_8:
746 case BUILT_IN_ATOMIC_NAND_FETCH_16:
747
748 case BUILT_IN_ATOMIC_XOR_FETCH_1:
749 case BUILT_IN_ATOMIC_XOR_FETCH_2:
750 case BUILT_IN_ATOMIC_XOR_FETCH_4:
751 case BUILT_IN_ATOMIC_XOR_FETCH_8:
752 case BUILT_IN_ATOMIC_XOR_FETCH_16:
753
754 case BUILT_IN_ATOMIC_OR_FETCH_1:
755 case BUILT_IN_ATOMIC_OR_FETCH_2:
756 case BUILT_IN_ATOMIC_OR_FETCH_4:
757 case BUILT_IN_ATOMIC_OR_FETCH_8:
758 case BUILT_IN_ATOMIC_OR_FETCH_16:
759
760 case BUILT_IN_ATOMIC_FETCH_ADD_1:
761 case BUILT_IN_ATOMIC_FETCH_ADD_2:
762 case BUILT_IN_ATOMIC_FETCH_ADD_4:
763 case BUILT_IN_ATOMIC_FETCH_ADD_8:
764 case BUILT_IN_ATOMIC_FETCH_ADD_16:
765
766 case BUILT_IN_ATOMIC_FETCH_SUB_1:
767 case BUILT_IN_ATOMIC_FETCH_SUB_2:
768 case BUILT_IN_ATOMIC_FETCH_SUB_4:
769 case BUILT_IN_ATOMIC_FETCH_SUB_8:
770 case BUILT_IN_ATOMIC_FETCH_SUB_16:
771
772 case BUILT_IN_ATOMIC_FETCH_AND_1:
773 case BUILT_IN_ATOMIC_FETCH_AND_2:
774 case BUILT_IN_ATOMIC_FETCH_AND_4:
775 case BUILT_IN_ATOMIC_FETCH_AND_8:
776 case BUILT_IN_ATOMIC_FETCH_AND_16:
777
778 case BUILT_IN_ATOMIC_FETCH_NAND_1:
779 case BUILT_IN_ATOMIC_FETCH_NAND_2:
780 case BUILT_IN_ATOMIC_FETCH_NAND_4:
781 case BUILT_IN_ATOMIC_FETCH_NAND_8:
782 case BUILT_IN_ATOMIC_FETCH_NAND_16:
783
784 case BUILT_IN_ATOMIC_FETCH_XOR_1:
785 case BUILT_IN_ATOMIC_FETCH_XOR_2:
786 case BUILT_IN_ATOMIC_FETCH_XOR_4:
787 case BUILT_IN_ATOMIC_FETCH_XOR_8:
788 case BUILT_IN_ATOMIC_FETCH_XOR_16:
789
790 case BUILT_IN_ATOMIC_FETCH_OR_1:
791 case BUILT_IN_ATOMIC_FETCH_OR_2:
792 case BUILT_IN_ATOMIC_FETCH_OR_4:
793 case BUILT_IN_ATOMIC_FETCH_OR_8:
794 case BUILT_IN_ATOMIC_FETCH_OR_16:
795 {
796 dest = gimple_call_arg (call, 0);
797 /* DEST represents the address of a memory location.
798 instrument_derefs wants the memory location, so lets
799 dereference the address DEST before handing it to
800 instrument_derefs. */
801 if (TREE_CODE (dest) == ADDR_EXPR)
802 dest = TREE_OPERAND (dest, 0);
77e83307 803 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
bdcbe80c
DS
804 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
805 dest, build_int_cst (TREE_TYPE (dest), 0));
806 else
807 gcc_unreachable ();
808
809 access_size = int_size_in_bytes (TREE_TYPE (dest));
810 }
811
812 default:
813 /* The other builtins memory access are not instrumented in this
814 function because they either don't have any length parameter,
815 or their length parameter is just a limit. */
816 break;
817 }
818
819 if (len != NULL_TREE)
820 {
821 if (source0 != NULL_TREE)
822 {
823 src0->start = source0;
824 src0->access_size = access_size;
825 *src0_len = len;
826 *src0_is_store = false;
827 }
828
829 if (source1 != NULL_TREE)
830 {
831 src1->start = source1;
832 src1->access_size = access_size;
833 *src1_len = len;
834 *src1_is_store = false;
835 }
836
837 if (dest != NULL_TREE)
838 {
839 dst->start = dest;
840 dst->access_size = access_size;
841 *dst_len = len;
842 *dst_is_store = true;
843 }
844
845 got_reference_p = true;
846 }
b41288b3
JJ
847 else if (dest)
848 {
849 dst->start = dest;
850 dst->access_size = access_size;
851 *dst_len = NULL_TREE;
852 *dst_is_store = is_store;
853 *dest_is_deref = true;
854 got_reference_p = true;
855 }
bdcbe80c 856
b41288b3 857 return got_reference_p;
bdcbe80c
DS
858}
859
860/* Return true iff a given gimple statement has been instrumented.
861 Note that the statement is "defined" by the memory references it
862 contains. */
863
864static bool
355fe088 865has_stmt_been_instrumented_p (gimple *stmt)
bdcbe80c
DS
866{
867 if (gimple_assign_single_p (stmt))
868 {
869 bool r_is_store;
870 asan_mem_ref r;
871 asan_mem_ref_init (&r, NULL, 1);
872
538dd0b7
DM
873 if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
874 &r_is_store))
bdcbe80c
DS
875 return has_mem_ref_been_instrumented (&r);
876 }
877 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
878 {
879 asan_mem_ref src0, src1, dest;
880 asan_mem_ref_init (&src0, NULL, 1);
881 asan_mem_ref_init (&src1, NULL, 1);
882 asan_mem_ref_init (&dest, NULL, 1);
883
884 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
885 bool src0_is_store = false, src1_is_store = false,
bdea98ca 886 dest_is_store = false, dest_is_deref = false, intercepted_p = true;
538dd0b7 887 if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
bdcbe80c
DS
888 &src0, &src0_len, &src0_is_store,
889 &src1, &src1_len, &src1_is_store,
890 &dest, &dest_len, &dest_is_store,
bdea98ca 891 &dest_is_deref, &intercepted_p))
bdcbe80c
DS
892 {
893 if (src0.start != NULL_TREE
894 && !has_mem_ref_been_instrumented (&src0, src0_len))
895 return false;
896
897 if (src1.start != NULL_TREE
898 && !has_mem_ref_been_instrumented (&src1, src1_len))
899 return false;
900
901 if (dest.start != NULL_TREE
902 && !has_mem_ref_been_instrumented (&dest, dest_len))
903 return false;
904
905 return true;
906 }
907 }
908 return false;
909}
910
911/* Insert a memory reference into the hash table. */
912
913static void
40f9f6bb 914update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
bdcbe80c 915{
c203e8a7 916 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
bdcbe80c
DS
917
918 asan_mem_ref r;
919 asan_mem_ref_init (&r, ref, access_size);
920
c203e8a7 921 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
bdea98ca 922 if (*slot == NULL || (*slot)->access_size < access_size)
bdcbe80c
DS
923 *slot = asan_mem_ref_new (ref, access_size);
924}
925
94fce891
JJ
926/* Initialize shadow_ptr_types array. */
927
928static void
929asan_init_shadow_ptr_types (void)
930{
931 asan_shadow_set = new_alias_set ();
932 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
933 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
934 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
935 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
936 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
937 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
938 initialize_sanitizer_builtins ();
939}
940
11a877b3 941/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
8240018b
JJ
942
943static tree
11a877b3 944asan_pp_string (pretty_printer *pp)
8240018b 945{
11a877b3 946 const char *buf = pp_formatted_text (pp);
8240018b
JJ
947 size_t len = strlen (buf);
948 tree ret = build_string (len + 1, buf);
949 TREE_TYPE (ret)
94fce891
JJ
950 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
951 build_index_type (size_int (len)));
8240018b
JJ
952 TREE_READONLY (ret) = 1;
953 TREE_STATIC (ret) = 1;
94fce891 954 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
8240018b
JJ
955}
956
f3ddd692
JJ
957/* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
958
959static rtx
960asan_shadow_cst (unsigned char shadow_bytes[4])
961{
962 int i;
963 unsigned HOST_WIDE_INT val = 0;
964 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
965 for (i = 0; i < 4; i++)
966 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
967 << (BITS_PER_UNIT * i);
dcad1dd3 968 return gen_int_mode (val, SImode);
f3ddd692
JJ
969}
970
aeb7e7c1
JJ
971/* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't call a library call here
972 though. */
973
974static void
975asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
976{
3a965f61
DM
977 rtx_insn *insn, *insns, *jump;
978 rtx_code_label *top_label;
979 rtx end, addr, tmp;
aeb7e7c1
JJ
980
981 start_sequence ();
982 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
983 insns = get_insns ();
984 end_sequence ();
985 for (insn = insns; insn; insn = NEXT_INSN (insn))
986 if (CALL_P (insn))
987 break;
988 if (insn == NULL_RTX)
989 {
990 emit_insn (insns);
991 return;
992 }
993
994 gcc_assert ((len & 3) == 0);
995 top_label = gen_label_rtx ();
57d4d653 996 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
aeb7e7c1
JJ
997 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
998 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
999 emit_label (top_label);
1000
1001 emit_move_insn (shadow_mem, const0_rtx);
2f1cd2eb 1002 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
c62ccb9a 1003 true, OPTAB_LIB_WIDEN);
aeb7e7c1
JJ
1004 if (tmp != addr)
1005 emit_move_insn (addr, tmp);
1006 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1007 jump = get_last_insn ();
1008 gcc_assert (JUMP_P (jump));
e5af9ddd 1009 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
aeb7e7c1
JJ
1010}
1011
ef1b3fda
KS
1012void
1013asan_function_start (void)
1014{
1015 section *fnsec = function_section (current_function_decl);
1016 switch_to_section (fnsec);
1017 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
c62ccb9a 1018 current_function_funcdef_no);
ef1b3fda
KS
1019}
1020
f3ddd692
JJ
1021/* Insert code to protect stack vars. The prologue sequence should be emitted
1022 directly, epilogue sequence returned. BASE is the register holding the
1023 stack base, against which OFFSETS array offsets are relative to, OFFSETS
1024 array contains pairs of offsets in reverse order, always the end offset
1025 of some gap that needs protection followed by starting offset,
1026 and DECLS is an array of representative decls for each var partition.
1027 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1028 elements long (OFFSETS include gap before the first variable as well
e361382f
JJ
1029 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1030 register which stack vars DECL_RTLs are based on. Either BASE should be
1031 assigned to PBASE, when not doing use after return protection, or
1032 corresponding address based on __asan_stack_malloc* return value. */
f3ddd692 1033
3a4abd2f 1034rtx_insn *
e361382f
JJ
1035asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1036 HOST_WIDE_INT *offsets, tree *decls, int length)
f3ddd692 1037{
19f8b229
TS
1038 rtx shadow_base, shadow_mem, ret, mem, orig_base;
1039 rtx_code_label *lab;
3a4abd2f 1040 rtx_insn *insns;
ef1b3fda 1041 char buf[30];
f3ddd692 1042 unsigned char shadow_bytes[4];
e361382f
JJ
1043 HOST_WIDE_INT base_offset = offsets[length - 1];
1044 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1045 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
f3ddd692
JJ
1046 HOST_WIDE_INT last_offset, last_size;
1047 int l;
1048 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
ef1b3fda 1049 tree str_cst, decl, id;
e361382f 1050 int use_after_return_class = -1;
f3ddd692 1051
94fce891
JJ
1052 if (shadow_ptr_types[0] == NULL_TREE)
1053 asan_init_shadow_ptr_types ();
1054
f3ddd692 1055 /* First of all, prepare the description string. */
11a877b3 1056 pretty_printer asan_pp;
da6ca2b5 1057
8240018b
JJ
1058 pp_decimal_int (&asan_pp, length / 2 - 1);
1059 pp_space (&asan_pp);
f3ddd692
JJ
1060 for (l = length - 2; l; l -= 2)
1061 {
1062 tree decl = decls[l / 2 - 1];
8240018b
JJ
1063 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1064 pp_space (&asan_pp);
1065 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1066 pp_space (&asan_pp);
f3ddd692
JJ
1067 if (DECL_P (decl) && DECL_NAME (decl))
1068 {
8240018b
JJ
1069 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1070 pp_space (&asan_pp);
b066401f 1071 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
f3ddd692
JJ
1072 }
1073 else
8240018b
JJ
1074 pp_string (&asan_pp, "9 <unknown>");
1075 pp_space (&asan_pp);
f3ddd692 1076 }
11a877b3 1077 str_cst = asan_pp_string (&asan_pp);
f3ddd692
JJ
1078
1079 /* Emit the prologue sequence. */
b5ebc991
MO
1080 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1081 && ASAN_USE_AFTER_RETURN)
e361382f
JJ
1082 {
1083 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1084 /* __asan_stack_malloc_N guarantees alignment
c62ccb9a 1085 N < 6 ? (64 << N) : 4096 bytes. */
e361382f
JJ
1086 if (alignb > (use_after_return_class < 6
1087 ? (64U << use_after_return_class) : 4096U))
1088 use_after_return_class = -1;
1089 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1090 base_align_bias = ((asan_frame_size + alignb - 1)
1091 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1092 }
e5dcd695
LZ
1093 /* Align base if target is STRICT_ALIGNMENT. */
1094 if (STRICT_ALIGNMENT)
1095 base = expand_binop (Pmode, and_optab, base,
1096 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1097 << ASAN_SHADOW_SHIFT)
1098 / BITS_PER_UNIT), Pmode), NULL_RTX,
1099 1, OPTAB_DIRECT);
1100
e361382f
JJ
1101 if (use_after_return_class == -1 && pbase)
1102 emit_move_insn (pbase, base);
e5dcd695 1103
2f1cd2eb 1104 base = expand_binop (Pmode, add_optab, base,
e361382f 1105 gen_int_mode (base_offset - base_align_bias, Pmode),
f3ddd692 1106 NULL_RTX, 1, OPTAB_DIRECT);
e361382f
JJ
1107 orig_base = NULL_RTX;
1108 if (use_after_return_class != -1)
1109 {
1110 if (asan_detect_stack_use_after_return == NULL_TREE)
1111 {
1112 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1113 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1114 integer_type_node);
1115 SET_DECL_ASSEMBLER_NAME (decl, id);
1116 TREE_ADDRESSABLE (decl) = 1;
1117 DECL_ARTIFICIAL (decl) = 1;
1118 DECL_IGNORED_P (decl) = 1;
1119 DECL_EXTERNAL (decl) = 1;
1120 TREE_STATIC (decl) = 1;
1121 TREE_PUBLIC (decl) = 1;
1122 TREE_USED (decl) = 1;
1123 asan_detect_stack_use_after_return = decl;
1124 }
1125 orig_base = gen_reg_rtx (Pmode);
1126 emit_move_insn (orig_base, base);
1127 ret = expand_normal (asan_detect_stack_use_after_return);
1128 lab = gen_label_rtx ();
1129 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1130 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1131 VOIDmode, 0, lab, very_likely);
1132 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1133 use_after_return_class);
1134 ret = init_one_libfunc (buf);
1135 rtx addr = convert_memory_address (ptr_mode, base);
1136 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
1137 GEN_INT (asan_frame_size
1138 + base_align_bias),
1139 TYPE_MODE (pointer_sized_int_node),
1140 addr, ptr_mode);
1141 ret = convert_memory_address (Pmode, ret);
1142 emit_move_insn (base, ret);
1143 emit_label (lab);
1144 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1145 gen_int_mode (base_align_bias
1146 - base_offset, Pmode),
1147 NULL_RTX, 1, OPTAB_DIRECT));
1148 }
f3ddd692 1149 mem = gen_rtx_MEM (ptr_mode, base);
e361382f 1150 mem = adjust_address (mem, VOIDmode, base_align_bias);
69db2d57 1151 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
f3ddd692
JJ
1152 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1153 emit_move_insn (mem, expand_normal (str_cst));
ef1b3fda
KS
1154 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1155 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1156 id = get_identifier (buf);
1157 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
c62ccb9a 1158 VAR_DECL, id, char_type_node);
ef1b3fda
KS
1159 SET_DECL_ASSEMBLER_NAME (decl, id);
1160 TREE_ADDRESSABLE (decl) = 1;
1161 TREE_READONLY (decl) = 1;
1162 DECL_ARTIFICIAL (decl) = 1;
1163 DECL_IGNORED_P (decl) = 1;
1164 TREE_STATIC (decl) = 1;
1165 TREE_PUBLIC (decl) = 0;
1166 TREE_USED (decl) = 1;
8c8b21e4
JJ
1167 DECL_INITIAL (decl) = decl;
1168 TREE_ASM_WRITTEN (decl) = 1;
1169 TREE_ASM_WRITTEN (id) = 1;
ef1b3fda 1170 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
f3ddd692
JJ
1171 shadow_base = expand_binop (Pmode, lshr_optab, base,
1172 GEN_INT (ASAN_SHADOW_SHIFT),
1173 NULL_RTX, 1, OPTAB_DIRECT);
e361382f
JJ
1174 shadow_base
1175 = plus_constant (Pmode, shadow_base,
fd960af2 1176 asan_shadow_offset ()
e361382f 1177 + (base_align_bias >> ASAN_SHADOW_SHIFT));
f3ddd692
JJ
1178 gcc_assert (asan_shadow_set != -1
1179 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1180 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1181 set_mem_alias_set (shadow_mem, asan_shadow_set);
e5dcd695
LZ
1182 if (STRICT_ALIGNMENT)
1183 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
f3ddd692
JJ
1184 prev_offset = base_offset;
1185 for (l = length; l; l -= 2)
1186 {
1187 if (l == 2)
1188 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1189 offset = offsets[l - 1];
1190 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1191 {
1192 int i;
1193 HOST_WIDE_INT aoff
1194 = base_offset + ((offset - base_offset)
1195 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1196 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1197 (aoff - prev_offset)
1198 >> ASAN_SHADOW_SHIFT);
1199 prev_offset = aoff;
1200 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1201 if (aoff < offset)
1202 {
1203 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1204 shadow_bytes[i] = 0;
1205 else
1206 shadow_bytes[i] = offset - aoff;
1207 }
1208 else
1209 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1210 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1211 offset = aoff;
1212 }
1213 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1214 {
1215 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1216 (offset - prev_offset)
1217 >> ASAN_SHADOW_SHIFT);
1218 prev_offset = offset;
1219 memset (shadow_bytes, cur_shadow_byte, 4);
1220 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1221 offset += ASAN_RED_ZONE_SIZE;
1222 }
1223 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1224 }
1225 do_pending_stack_adjust ();
1226
1227 /* Construct epilogue sequence. */
1228 start_sequence ();
1229
19f8b229 1230 lab = NULL;
e361382f
JJ
1231 if (use_after_return_class != -1)
1232 {
19f8b229 1233 rtx_code_label *lab2 = gen_label_rtx ();
e361382f
JJ
1234 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1235 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1236 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1237 VOIDmode, 0, lab2, very_likely);
1238 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1239 set_mem_alias_set (shadow_mem, asan_shadow_set);
1240 mem = gen_rtx_MEM (ptr_mode, base);
1241 mem = adjust_address (mem, VOIDmode, base_align_bias);
1242 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1243 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1244 if (use_after_return_class < 5
1245 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1246 BITS_PER_UNIT, true))
1247 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1248 BITS_PER_UNIT, true, 0);
1249 else if (use_after_return_class >= 5
1250 || !set_storage_via_setmem (shadow_mem,
1251 GEN_INT (sz),
1252 gen_int_mode (c, QImode),
1253 BITS_PER_UNIT, BITS_PER_UNIT,
1254 -1, sz, sz, sz))
1255 {
1256 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1257 use_after_return_class);
1258 ret = init_one_libfunc (buf);
1259 rtx addr = convert_memory_address (ptr_mode, base);
1260 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1261 emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1262 GEN_INT (asan_frame_size + base_align_bias),
1263 TYPE_MODE (pointer_sized_int_node),
1264 orig_addr, ptr_mode);
1265 }
1266 lab = gen_label_rtx ();
1267 emit_jump (lab);
1268 emit_label (lab2);
1269 }
1270
f3ddd692
JJ
1271 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1272 set_mem_alias_set (shadow_mem, asan_shadow_set);
e5dcd695
LZ
1273
1274 if (STRICT_ALIGNMENT)
1275 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1276
f3ddd692
JJ
1277 prev_offset = base_offset;
1278 last_offset = base_offset;
1279 last_size = 0;
1280 for (l = length; l; l -= 2)
1281 {
1282 offset = base_offset + ((offsets[l - 1] - base_offset)
1283 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1284 if (last_offset + last_size != offset)
1285 {
1286 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1287 (last_offset - prev_offset)
1288 >> ASAN_SHADOW_SHIFT);
1289 prev_offset = last_offset;
aeb7e7c1 1290 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
f3ddd692
JJ
1291 last_offset = offset;
1292 last_size = 0;
1293 }
1294 last_size += base_offset + ((offsets[l - 2] - base_offset)
1295 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1296 - offset;
1297 }
1298 if (last_size)
1299 {
1300 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1301 (last_offset - prev_offset)
1302 >> ASAN_SHADOW_SHIFT);
aeb7e7c1 1303 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
f3ddd692
JJ
1304 }
1305
1306 do_pending_stack_adjust ();
e361382f
JJ
1307 if (lab)
1308 emit_label (lab);
f3ddd692 1309
3a4abd2f 1310 insns = get_insns ();
f3ddd692 1311 end_sequence ();
3a4abd2f 1312 return insns;
f3ddd692
JJ
1313}
1314
8240018b
JJ
1315/* Return true if DECL, a global var, might be overridden and needs
1316 therefore a local alias. */
1317
1318static bool
1319asan_needs_local_alias (tree decl)
1320{
1321 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1322}
1323
1324/* Return true if DECL is a VAR_DECL that should be protected
1325 by Address Sanitizer, by appending a red zone with protected
1326 shadow memory after it and aligning it to at least
1327 ASAN_RED_ZONE_SIZE bytes. */
1328
1329bool
1330asan_protect_global (tree decl)
1331{
b5ebc991
MO
1332 if (!ASAN_GLOBALS)
1333 return false;
1334
8240018b 1335 rtx rtl, symbol;
8240018b 1336
94fce891
JJ
1337 if (TREE_CODE (decl) == STRING_CST)
1338 {
1339 /* Instrument all STRING_CSTs except those created
1340 by asan_pp_string here. */
1341 if (shadow_ptr_types[0] != NULL_TREE
1342 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1343 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1344 return false;
1345 return true;
1346 }
8240018b
JJ
1347 if (TREE_CODE (decl) != VAR_DECL
1348 /* TLS vars aren't statically protectable. */
1349 || DECL_THREAD_LOCAL_P (decl)
1350 /* Externs will be protected elsewhere. */
1351 || DECL_EXTERNAL (decl)
8240018b
JJ
1352 || !DECL_RTL_SET_P (decl)
1353 /* Comdat vars pose an ABI problem, we can't know if
1354 the var that is selected by the linker will have
1355 padding or not. */
1356 || DECL_ONE_ONLY (decl)
f1d15bb9
DV
1357 /* Similarly for common vars. People can use -fno-common.
1358 Note: Linux kernel is built with -fno-common, so we do instrument
1359 globals there even if it is C. */
a8a6fd74 1360 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
8240018b
JJ
1361 /* Don't protect if using user section, often vars placed
1362 into user section from multiple TUs are then assumed
1363 to be an array of such vars, putting padding in there
1364 breaks this assumption. */
f961457f 1365 || (DECL_SECTION_NAME (decl) != NULL
18af8d16
YG
1366 && !symtab_node::get (decl)->implicit_section
1367 && !section_sanitized_p (DECL_SECTION_NAME (decl)))
8240018b
JJ
1368 || DECL_SIZE (decl) == 0
1369 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1370 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
21a82048
JJ
1371 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1372 || TREE_TYPE (decl) == ubsan_get_source_location_type ())
8240018b
JJ
1373 return false;
1374
1375 rtl = DECL_RTL (decl);
1376 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1377 return false;
1378 symbol = XEXP (rtl, 0);
1379
1380 if (CONSTANT_POOL_ADDRESS_P (symbol)
1381 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1382 return false;
1383
8240018b
JJ
1384 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1385 return false;
1386
1387#ifndef ASM_OUTPUT_DEF
1388 if (asan_needs_local_alias (decl))
1389 return false;
1390#endif
1391
497a1c66 1392 return true;
8240018b
JJ
1393}
1394
40f9f6bb
JJ
1395/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1396 IS_STORE is either 1 (for a store) or 0 (for a load). */
37d6f666
WM
1397
1398static tree
fed4de37
YG
1399report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1400 int *nargs)
37d6f666 1401{
fed4de37
YG
1402 static enum built_in_function report[2][2][6]
1403 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1404 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1405 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1406 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1407 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1408 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1409 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1410 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1411 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1412 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1413 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1414 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1415 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1416 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1417 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1418 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1419 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1420 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
8946c29e
YG
1421 if (size_in_bytes == -1)
1422 {
1423 *nargs = 2;
fed4de37 1424 return builtin_decl_implicit (report[recover_p][is_store][5]);
8946c29e
YG
1425 }
1426 *nargs = 1;
fed4de37
YG
1427 int size_log2 = exact_log2 (size_in_bytes);
1428 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
37d6f666
WM
1429}
1430
8946c29e
YG
1431/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1432 IS_STORE is either 1 (for a store) or 0 (for a load). */
1433
1434static tree
fed4de37
YG
1435check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1436 int *nargs)
8946c29e 1437{
fed4de37
YG
1438 static enum built_in_function check[2][2][6]
1439 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1440 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1441 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1442 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1443 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1444 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1445 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1446 BUILT_IN_ASAN_LOAD2_NOABORT,
1447 BUILT_IN_ASAN_LOAD4_NOABORT,
1448 BUILT_IN_ASAN_LOAD8_NOABORT,
1449 BUILT_IN_ASAN_LOAD16_NOABORT,
1450 BUILT_IN_ASAN_LOADN_NOABORT },
1451 { BUILT_IN_ASAN_STORE1_NOABORT,
1452 BUILT_IN_ASAN_STORE2_NOABORT,
1453 BUILT_IN_ASAN_STORE4_NOABORT,
1454 BUILT_IN_ASAN_STORE8_NOABORT,
1455 BUILT_IN_ASAN_STORE16_NOABORT,
1456 BUILT_IN_ASAN_STOREN_NOABORT } } };
8946c29e
YG
1457 if (size_in_bytes == -1)
1458 {
1459 *nargs = 2;
fed4de37 1460 return builtin_decl_implicit (check[recover_p][is_store][5]);
8946c29e
YG
1461 }
1462 *nargs = 1;
fed4de37
YG
1463 int size_log2 = exact_log2 (size_in_bytes);
1464 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
8946c29e
YG
1465}
1466
01452015 1467/* Split the current basic block and create a condition statement
25ae5027
DS
1468 insertion point right before or after the statement pointed to by
1469 ITER. Return an iterator to the point at which the caller might
1470 safely insert the condition statement.
01452015
DS
1471
1472 THEN_BLOCK must be set to the address of an uninitialized instance
1473 of basic_block. The function will then set *THEN_BLOCK to the
1474 'then block' of the condition statement to be inserted by the
1475 caller.
1476
c4bfe8bf
JJ
1477 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1478 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1479
01452015
DS
1480 Similarly, the function will set *FALLTRHOUGH_BLOCK to the 'else
1481 block' of the condition statement to be inserted by the caller.
1482
1483 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1484 statements starting from *ITER, and *THEN_BLOCK is a new empty
1485 block.
1486
25ae5027
DS
1487 *ITER is adjusted to point to always point to the first statement
1488 of the basic block * FALLTHROUGH_BLOCK. That statement is the
1489 same as what ITER was pointing to prior to calling this function,
1490 if BEFORE_P is true; otherwise, it is its following statement. */
01452015 1491
ac0ff9f2 1492gimple_stmt_iterator
25ae5027
DS
1493create_cond_insert_point (gimple_stmt_iterator *iter,
1494 bool before_p,
1495 bool then_more_likely_p,
c4bfe8bf 1496 bool create_then_fallthru_edge,
25ae5027
DS
1497 basic_block *then_block,
1498 basic_block *fallthrough_block)
01452015
DS
1499{
1500 gimple_stmt_iterator gsi = *iter;
1501
25ae5027 1502 if (!gsi_end_p (gsi) && before_p)
01452015
DS
1503 gsi_prev (&gsi);
1504
1505 basic_block cur_bb = gsi_bb (*iter);
1506
1507 edge e = split_block (cur_bb, gsi_stmt (gsi));
1508
1509 /* Get a hold on the 'condition block', the 'then block' and the
1510 'else block'. */
1511 basic_block cond_bb = e->src;
1512 basic_block fallthru_bb = e->dest;
1513 basic_block then_bb = create_empty_bb (cond_bb);
a9e0d843
RB
1514 if (current_loops)
1515 {
1516 add_bb_to_loop (then_bb, cond_bb->loop_father);
1517 loops_state_set (LOOPS_NEED_FIXUP);
1518 }
01452015
DS
1519
1520 /* Set up the newly created 'then block'. */
1521 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1522 int fallthrough_probability
1523 = then_more_likely_p
1524 ? PROB_VERY_UNLIKELY
1525 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1526 e->probability = PROB_ALWAYS - fallthrough_probability;
c4bfe8bf
JJ
1527 if (create_then_fallthru_edge)
1528 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
01452015
DS
1529
1530 /* Set up the fallthrough basic block. */
1531 e = find_edge (cond_bb, fallthru_bb);
1532 e->flags = EDGE_FALSE_VALUE;
1533 e->count = cond_bb->count;
1534 e->probability = fallthrough_probability;
1535
1536 /* Update dominance info for the newly created then_bb; note that
1537 fallthru_bb's dominance info has already been updated by
1538 split_bock. */
1539 if (dom_info_available_p (CDI_DOMINATORS))
1540 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1541
1542 *then_block = then_bb;
1543 *fallthrough_block = fallthru_bb;
1544 *iter = gsi_start_bb (fallthru_bb);
1545
1546 return gsi_last_bb (cond_bb);
1547}
1548
25ae5027
DS
1549/* Insert an if condition followed by a 'then block' right before the
1550 statement pointed to by ITER. The fallthrough block -- which is the
1551 else block of the condition as well as the destination of the
1552 outcoming edge of the 'then block' -- starts with the statement
1553 pointed to by ITER.
1554
497a1c66 1555 COND is the condition of the if.
25ae5027
DS
1556
1557 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1558 'then block' is higher than the probability of the edge to the
1559 fallthrough block.
1560
1561 Upon completion of the function, *THEN_BB is set to the newly
1562 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1563 fallthrough block.
1564
1565 *ITER is adjusted to still point to the same statement it was
1566 pointing to initially. */
1567
1568static void
538dd0b7 1569insert_if_then_before_iter (gcond *cond,
25ae5027
DS
1570 gimple_stmt_iterator *iter,
1571 bool then_more_likely_p,
1572 basic_block *then_bb,
1573 basic_block *fallthrough_bb)
1574{
1575 gimple_stmt_iterator cond_insert_point =
1576 create_cond_insert_point (iter,
1577 /*before_p=*/true,
1578 then_more_likely_p,
c4bfe8bf 1579 /*create_then_fallthru_edge=*/true,
25ae5027
DS
1580 then_bb,
1581 fallthrough_bb);
1582 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1583}
1584
40f9f6bb 1585/* Build
fd960af2 1586 (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
40f9f6bb
JJ
1587
1588static tree
1589build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1590 tree base_addr, tree shadow_ptr_type)
1591{
1592 tree t, uintptr_type = TREE_TYPE (base_addr);
1593 tree shadow_type = TREE_TYPE (shadow_ptr_type);
355fe088 1594 gimple *g;
40f9f6bb
JJ
1595
1596 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
0d0e4a03
JJ
1597 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1598 base_addr, t);
40f9f6bb
JJ
1599 gimple_set_location (g, location);
1600 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1601
fd960af2 1602 t = build_int_cst (uintptr_type, asan_shadow_offset ());
0d0e4a03
JJ
1603 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1604 gimple_assign_lhs (g), t);
40f9f6bb
JJ
1605 gimple_set_location (g, location);
1606 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1607
0d0e4a03
JJ
1608 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1609 gimple_assign_lhs (g));
40f9f6bb
JJ
1610 gimple_set_location (g, location);
1611 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1612
1613 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1614 build_int_cst (shadow_ptr_type, 0));
0d0e4a03 1615 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
40f9f6bb
JJ
1616 gimple_set_location (g, location);
1617 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1618 return gimple_assign_lhs (g);
1619}
1620
8946c29e
YG
1621/* BASE can already be an SSA_NAME; in that case, do not create a
1622 new SSA_NAME for it. */
1623
1624static tree
1625maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1626 bool before_p)
1627{
1628 if (TREE_CODE (base) == SSA_NAME)
1629 return base;
355fe088 1630 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
0d0e4a03 1631 TREE_CODE (base), base);
8946c29e
YG
1632 gimple_set_location (g, loc);
1633 if (before_p)
1634 gsi_insert_before (iter, g, GSI_SAME_STMT);
1635 else
1636 gsi_insert_after (iter, g, GSI_NEW_STMT);
1637 return gimple_assign_lhs (g);
1638}
1639
a2f581e1
YG
1640/* LEN can already have necessary size and precision;
1641 in that case, do not create a new variable. */
1642
1643tree
1644maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1645 bool before_p)
1646{
1647 if (ptrofftype_p (len))
1648 return len;
355fe088 1649 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
0d0e4a03 1650 NOP_EXPR, len);
a2f581e1
YG
1651 gimple_set_location (g, loc);
1652 if (before_p)
1653 gsi_insert_before (iter, g, GSI_SAME_STMT);
1654 else
1655 gsi_insert_after (iter, g, GSI_NEW_STMT);
1656 return gimple_assign_lhs (g);
1657}
1658
dc29bf1e 1659/* Instrument the memory access instruction BASE. Insert new
25ae5027 1660 statements before or after ITER.
dc29bf1e
DS
1661
1662 Note that the memory access represented by BASE can be either an
1663 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1664 location. IS_STORE is TRUE for a store, FALSE for a load.
25ae5027 1665 BEFORE_P is TRUE for inserting the instrumentation code before
8946c29e
YG
1666 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1667 for a scalar memory access and FALSE for memory region access.
1668 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1669 length. ALIGN tells alignment of accessed memory object.
1670
1671 START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
1672 memory region have already been instrumented.
25ae5027
DS
1673
1674 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1675 statement it was pointing to prior to calling this function,
1676 otherwise, it points to the statement logically following it. */
37d6f666
WM
1677
1678static void
c62ccb9a 1679build_check_stmt (location_t loc, tree base, tree len,
8946c29e 1680 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
c62ccb9a 1681 bool is_non_zero_len, bool before_p, bool is_store,
bdea98ca 1682 bool is_scalar_access, unsigned int align = 0)
37d6f666 1683{
8946c29e 1684 gimple_stmt_iterator gsi = *iter;
355fe088 1685 gimple *g;
8946c29e 1686
c62ccb9a 1687 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
8946c29e 1688
c62ccb9a
YG
1689 gsi = *iter;
1690
1691 base = unshare_expr (base);
1692 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1693
8946c29e 1694 if (len)
a2f581e1
YG
1695 {
1696 len = unshare_expr (len);
1697 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1698 }
8946c29e
YG
1699 else
1700 {
1701 gcc_assert (size_in_bytes != -1);
1702 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1703 }
1704
1705 if (size_in_bytes > 1)
b3f1051b 1706 {
8946c29e
YG
1707 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1708 || size_in_bytes > 16)
c62ccb9a 1709 is_scalar_access = false;
8946c29e
YG
1710 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1711 {
1712 /* On non-strict alignment targets, if
1713 16-byte access is just 8-byte aligned,
1714 this will result in misaligned shadow
1715 memory 2 byte load, but otherwise can
1716 be handled using one read. */
1717 if (size_in_bytes != 16
1718 || STRICT_ALIGNMENT
1719 || align < 8 * BITS_PER_UNIT)
c62ccb9a 1720 is_scalar_access = false;
40f9f6bb 1721 }
f6d98484 1722 }
37d6f666 1723
c62ccb9a
YG
1724 HOST_WIDE_INT flags = 0;
1725 if (is_store)
1726 flags |= ASAN_CHECK_STORE;
1727 if (is_non_zero_len)
1728 flags |= ASAN_CHECK_NON_ZERO_LEN;
1729 if (is_scalar_access)
1730 flags |= ASAN_CHECK_SCALAR_ACCESS;
c62ccb9a 1731
f434eb69 1732 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
c62ccb9a 1733 build_int_cst (integer_type_node, flags),
f434eb69
MZ
1734 base, len,
1735 build_int_cst (integer_type_node,
1736 align / BITS_PER_UNIT));
c62ccb9a
YG
1737 gimple_set_location (g, loc);
1738 if (before_p)
1739 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
8946c29e
YG
1740 else
1741 {
8946c29e 1742 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
c62ccb9a
YG
1743 gsi_next (&gsi);
1744 *iter = gsi;
8946c29e 1745 }
37d6f666
WM
1746}
1747
1748/* If T represents a memory access, add instrumentation code before ITER.
1749 LOCATION is source code location.
25ae5027 1750 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
37d6f666
WM
1751
1752static void
1753instrument_derefs (gimple_stmt_iterator *iter, tree t,
bdcbe80c 1754 location_t location, bool is_store)
37d6f666 1755{
b5ebc991
MO
1756 if (is_store && !ASAN_INSTRUMENT_WRITES)
1757 return;
1758 if (!is_store && !ASAN_INSTRUMENT_READS)
1759 return;
1760
37d6f666 1761 tree type, base;
f6d98484 1762 HOST_WIDE_INT size_in_bytes;
37d6f666
WM
1763
1764 type = TREE_TYPE (t);
37d6f666
WM
1765 switch (TREE_CODE (t))
1766 {
1767 case ARRAY_REF:
1768 case COMPONENT_REF:
1769 case INDIRECT_REF:
1770 case MEM_REF:
59b36ecf 1771 case VAR_DECL:
913f32a1 1772 case BIT_FIELD_REF:
37d6f666 1773 break;
59b36ecf 1774 /* FALLTHRU */
37d6f666
WM
1775 default:
1776 return;
1777 }
f6d98484
JJ
1778
1779 size_in_bytes = int_size_in_bytes (type);
40f9f6bb 1780 if (size_in_bytes <= 0)
f6d98484
JJ
1781 return;
1782
f6d98484
JJ
1783 HOST_WIDE_INT bitsize, bitpos;
1784 tree offset;
ef4bddc2 1785 machine_mode mode;
f6d98484 1786 int volatilep = 0, unsignedp = 0;
59b36ecf 1787 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
b3ecff82 1788 &mode, &unsignedp, &volatilep, false);
87d1d65a
YG
1789
1790 if (TREE_CODE (t) == COMPONENT_REF
1791 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1fe04fdc 1792 {
87d1d65a
YG
1793 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1794 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1795 TREE_OPERAND (t, 0), repr,
1796 NULL_TREE), location, is_store);
1fe04fdc
JJ
1797 return;
1798 }
87d1d65a
YG
1799
1800 if (bitpos % BITS_PER_UNIT
1801 || bitsize != size_in_bytes * BITS_PER_UNIT)
40f9f6bb 1802 return;
f6d98484 1803
59b36ecf
JJ
1804 if (TREE_CODE (inner) == VAR_DECL
1805 && offset == NULL_TREE
1806 && bitpos >= 0
1807 && DECL_SIZE (inner)
1808 && tree_fits_shwi_p (DECL_SIZE (inner))
1809 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1810 {
1811 if (DECL_THREAD_LOCAL_P (inner))
1812 return;
6b98fab5
MZ
1813 if (!ASAN_GLOBALS && is_global_var (inner))
1814 return;
59b36ecf
JJ
1815 if (!TREE_STATIC (inner))
1816 {
1817 /* Automatic vars in the current function will be always
1818 accessible. */
1819 if (decl_function_context (inner) == current_function_decl)
1820 return;
1821 }
1822 /* Always instrument external vars, they might be dynamically
1823 initialized. */
1824 else if (!DECL_EXTERNAL (inner))
1825 {
1826 /* For static vars if they are known not to be dynamically
1827 initialized, they will be always accessible. */
9041d2e6 1828 varpool_node *vnode = varpool_node::get (inner);
59b36ecf
JJ
1829 if (vnode && !vnode->dynamically_initialized)
1830 return;
1831 }
1832 }
1833
f6d98484 1834 base = build_fold_addr_expr (t);
bdcbe80c
DS
1835 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1836 {
8946c29e
YG
1837 unsigned int align = get_object_alignment (t);
1838 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
c62ccb9a 1839 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
8946c29e 1840 is_store, /*is_scalar_access*/true, align);
bdcbe80c
DS
1841 update_mem_ref_hash_table (base, size_in_bytes);
1842 update_mem_ref_hash_table (t, size_in_bytes);
1843 }
1844
25ae5027
DS
1845}
1846
bdea98ca
MO
1847/* Insert a memory reference into the hash table if access length
1848 can be determined in compile time. */
1849
1850static void
1851maybe_update_mem_ref_hash_table (tree base, tree len)
1852{
1853 if (!POINTER_TYPE_P (TREE_TYPE (base))
1854 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1855 return;
1856
1857 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1858
1859 if (size_in_bytes != -1)
1860 update_mem_ref_hash_table (base, size_in_bytes);
1861}
1862
25ae5027
DS
1863/* Instrument an access to a contiguous memory region that starts at
1864 the address pointed to by BASE, over a length of LEN (expressed in
1865 the sizeof (*BASE) bytes). ITER points to the instruction before
1866 which the instrumentation instructions must be inserted. LOCATION
1867 is the source location that the instrumentation instructions must
1868 have. If IS_STORE is true, then the memory access is a store;
1869 otherwise, it's a load. */
1870
1871static void
1872instrument_mem_region_access (tree base, tree len,
1873 gimple_stmt_iterator *iter,
1874 location_t location, bool is_store)
1875{
c63d3b96
JJ
1876 if (!POINTER_TYPE_P (TREE_TYPE (base))
1877 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1878 || integer_zerop (len))
25ae5027
DS
1879 return;
1880
8946c29e 1881 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
bdcbe80c 1882
bdea98ca
MO
1883 if ((size_in_bytes == -1)
1884 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1885 {
1886 build_check_stmt (location, base, len, size_in_bytes, iter,
1887 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1888 is_store, /*is_scalar_access*/false, /*align*/0);
1889 }
b41288b3 1890
bdea98ca 1891 maybe_update_mem_ref_hash_table (base, len);
b41288b3 1892 *iter = gsi_for_stmt (gsi_stmt (*iter));
bdcbe80c 1893}
25ae5027 1894
bdcbe80c
DS
1895/* Instrument the call to a built-in memory access function that is
1896 pointed to by the iterator ITER.
25ae5027 1897
bdcbe80c
DS
1898 Upon completion, return TRUE iff *ITER has been advanced to the
1899 statement following the one it was originally pointing to. */
25ae5027 1900
bdcbe80c
DS
1901static bool
1902instrument_builtin_call (gimple_stmt_iterator *iter)
1903{
b5ebc991
MO
1904 if (!ASAN_MEMINTRIN)
1905 return false;
1906
bdcbe80c 1907 bool iter_advanced_p = false;
538dd0b7 1908 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
25ae5027 1909
bdcbe80c 1910 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
25ae5027 1911
bdcbe80c 1912 location_t loc = gimple_location (call);
25ae5027 1913
bdea98ca
MO
1914 asan_mem_ref src0, src1, dest;
1915 asan_mem_ref_init (&src0, NULL, 1);
1916 asan_mem_ref_init (&src1, NULL, 1);
1917 asan_mem_ref_init (&dest, NULL, 1);
bdcbe80c 1918
bdea98ca
MO
1919 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1920 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1921 dest_is_deref = false, intercepted_p = true;
bdcbe80c 1922
bdea98ca
MO
1923 if (get_mem_refs_of_builtin_call (call,
1924 &src0, &src0_len, &src0_is_store,
1925 &src1, &src1_len, &src1_is_store,
1926 &dest, &dest_len, &dest_is_store,
1927 &dest_is_deref, &intercepted_p))
1928 {
1929 if (dest_is_deref)
bdcbe80c 1930 {
bdea98ca
MO
1931 instrument_derefs (iter, dest.start, loc, dest_is_store);
1932 gsi_next (iter);
1933 iter_advanced_p = true;
1934 }
1935 else if (!intercepted_p
1936 && (src0_len || src1_len || dest_len))
1937 {
1938 if (src0.start != NULL_TREE)
1939 instrument_mem_region_access (src0.start, src0_len,
1940 iter, loc, /*is_store=*/false);
1941 if (src1.start != NULL_TREE)
1942 instrument_mem_region_access (src1.start, src1_len,
1943 iter, loc, /*is_store=*/false);
1944 if (dest.start != NULL_TREE)
1945 instrument_mem_region_access (dest.start, dest_len,
1946 iter, loc, /*is_store=*/true);
1947
1948 *iter = gsi_for_stmt (call);
1949 gsi_next (iter);
1950 iter_advanced_p = true;
1951 }
1952 else
1953 {
1954 if (src0.start != NULL_TREE)
1955 maybe_update_mem_ref_hash_table (src0.start, src0_len);
1956 if (src1.start != NULL_TREE)
1957 maybe_update_mem_ref_hash_table (src1.start, src1_len);
1958 if (dest.start != NULL_TREE)
1959 maybe_update_mem_ref_hash_table (dest.start, dest_len);
bdcbe80c 1960 }
25ae5027 1961 }
bdcbe80c 1962 return iter_advanced_p;
25ae5027
DS
1963}
1964
1965/* Instrument the assignment statement ITER if it is subject to
bdcbe80c
DS
1966 instrumentation. Return TRUE iff instrumentation actually
1967 happened. In that case, the iterator ITER is advanced to the next
1968 logical expression following the one initially pointed to by ITER,
1969 and the relevant memory reference that which access has been
1970 instrumented is added to the memory references hash table. */
25ae5027 1971
bdcbe80c
DS
1972static bool
1973maybe_instrument_assignment (gimple_stmt_iterator *iter)
25ae5027 1974{
355fe088 1975 gimple *s = gsi_stmt (*iter);
25ae5027
DS
1976
1977 gcc_assert (gimple_assign_single_p (s));
1978
bdcbe80c
DS
1979 tree ref_expr = NULL_TREE;
1980 bool is_store, is_instrumented = false;
1981
52f2e7e1 1982 if (gimple_store_p (s))
bdcbe80c
DS
1983 {
1984 ref_expr = gimple_assign_lhs (s);
1985 is_store = true;
1986 instrument_derefs (iter, ref_expr,
1987 gimple_location (s),
1988 is_store);
1989 is_instrumented = true;
1990 }
c1f5ce48 1991
52f2e7e1 1992 if (gimple_assign_load_p (s))
bdcbe80c
DS
1993 {
1994 ref_expr = gimple_assign_rhs1 (s);
1995 is_store = false;
1996 instrument_derefs (iter, ref_expr,
1997 gimple_location (s),
1998 is_store);
1999 is_instrumented = true;
2000 }
2001
2002 if (is_instrumented)
2003 gsi_next (iter);
2004
2005 return is_instrumented;
25ae5027
DS
2006}
2007
2008/* Instrument the function call pointed to by the iterator ITER, if it
2009 is subject to instrumentation. At the moment, the only function
2010 calls that are instrumented are some built-in functions that access
2011 memory. Look at instrument_builtin_call to learn more.
2012
2013 Upon completion return TRUE iff *ITER was advanced to the statement
2014 following the one it was originally pointing to. */
2015
2016static bool
2017maybe_instrument_call (gimple_stmt_iterator *iter)
2018{
355fe088 2019 gimple *stmt = gsi_stmt (*iter);
bdcbe80c
DS
2020 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2021
2022 if (is_builtin && instrument_builtin_call (iter))
2b2571c9 2023 return true;
bdcbe80c 2024
2b2571c9
JJ
2025 if (gimple_call_noreturn_p (stmt))
2026 {
2027 if (is_builtin)
2028 {
2029 tree callee = gimple_call_fndecl (stmt);
2030 switch (DECL_FUNCTION_CODE (callee))
2031 {
2032 case BUILT_IN_UNREACHABLE:
2033 case BUILT_IN_TRAP:
2034 /* Don't instrument these. */
2035 return false;
083e891e
MP
2036 default:
2037 break;
2b2571c9
JJ
2038 }
2039 }
2040 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
355fe088 2041 gimple *g = gimple_build_call (decl, 0);
2b2571c9
JJ
2042 gimple_set_location (g, gimple_location (stmt));
2043 gsi_insert_before (iter, g, GSI_SAME_STMT);
2044 }
25ae5027 2045 return false;
37d6f666
WM
2046}
2047
bdcbe80c
DS
2048/* Walk each instruction of all basic block and instrument those that
2049 represent memory references: loads, stores, or function calls.
2050 In a given basic block, this function avoids instrumenting memory
2051 references that have already been instrumented. */
37d6f666
WM
2052
2053static void
2054transform_statements (void)
2055{
c4bfe8bf 2056 basic_block bb, last_bb = NULL;
37d6f666 2057 gimple_stmt_iterator i;
8b1c6fd7 2058 int saved_last_basic_block = last_basic_block_for_fn (cfun);
37d6f666 2059
11cd3bed 2060 FOR_EACH_BB_FN (bb, cfun)
37d6f666 2061 {
c4bfe8bf 2062 basic_block prev_bb = bb;
bdcbe80c 2063
37d6f666 2064 if (bb->index >= saved_last_basic_block) continue;
c4bfe8bf
JJ
2065
2066 /* Flush the mem ref hash table, if current bb doesn't have
2067 exactly one predecessor, or if that predecessor (skipping
2068 over asan created basic blocks) isn't the last processed
2069 basic block. Thus we effectively flush on extended basic
2070 block boundaries. */
2071 while (single_pred_p (prev_bb))
2072 {
2073 prev_bb = single_pred (prev_bb);
2074 if (prev_bb->index < saved_last_basic_block)
2075 break;
2076 }
2077 if (prev_bb != last_bb)
2078 empty_mem_ref_hash_table ();
2079 last_bb = bb;
2080
25ae5027 2081 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
497a1c66 2082 {
355fe088 2083 gimple *s = gsi_stmt (i);
25ae5027 2084
bdcbe80c
DS
2085 if (has_stmt_been_instrumented_p (s))
2086 gsi_next (&i);
2087 else if (gimple_assign_single_p (s)
e1e160c1 2088 && !gimple_clobber_p (s)
bdcbe80c
DS
2089 && maybe_instrument_assignment (&i))
2090 /* Nothing to do as maybe_instrument_assignment advanced
2091 the iterator I. */;
2092 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2093 /* Nothing to do as maybe_instrument_call
2094 advanced the iterator I. */;
2095 else
25ae5027 2096 {
bdcbe80c
DS
2097 /* No instrumentation happened.
2098
c4bfe8bf
JJ
2099 If the current instruction is a function call that
2100 might free something, let's forget about the memory
2101 references that got instrumented. Otherwise we might
2102 miss some instrumentation opportunities. */
2103 if (is_gimple_call (s) && !nonfreeing_call_p (s))
bdcbe80c
DS
2104 empty_mem_ref_hash_table ();
2105
2106 gsi_next (&i);
25ae5027 2107 }
497a1c66 2108 }
37d6f666 2109 }
bdcbe80c 2110 free_mem_ref_resources ();
37d6f666
WM
2111}
2112
59b36ecf
JJ
2113/* Build
2114 __asan_before_dynamic_init (module_name)
2115 or
2116 __asan_after_dynamic_init ()
2117 call. */
2118
2119tree
2120asan_dynamic_init_call (bool after_p)
2121{
2122 tree fn = builtin_decl_implicit (after_p
2123 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2124 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2125 tree module_name_cst = NULL_TREE;
2126 if (!after_p)
2127 {
2128 pretty_printer module_name_pp;
2129 pp_string (&module_name_pp, main_input_filename);
2130
2131 if (shadow_ptr_types[0] == NULL_TREE)
2132 asan_init_shadow_ptr_types ();
2133 module_name_cst = asan_pp_string (&module_name_pp);
2134 module_name_cst = fold_convert (const_ptr_type_node,
2135 module_name_cst);
2136 }
2137
2138 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2139}
2140
8240018b
JJ
2141/* Build
2142 struct __asan_global
2143 {
2144 const void *__beg;
2145 uptr __size;
2146 uptr __size_with_redzone;
2147 const void *__name;
ef1b3fda 2148 const void *__module_name;
8240018b 2149 uptr __has_dynamic_init;
866e32ad 2150 __asan_global_source_location *__location;
8240018b
JJ
2151 } type. */
2152
2153static tree
2154asan_global_struct (void)
2155{
866e32ad 2156 static const char *field_names[7]
8240018b 2157 = { "__beg", "__size", "__size_with_redzone",
866e32ad
KS
2158 "__name", "__module_name", "__has_dynamic_init", "__location"};
2159 tree fields[7], ret;
8240018b
JJ
2160 int i;
2161
2162 ret = make_node (RECORD_TYPE);
866e32ad 2163 for (i = 0; i < 7; i++)
8240018b
JJ
2164 {
2165 fields[i]
2166 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2167 get_identifier (field_names[i]),
2168 (i == 0 || i == 3) ? const_ptr_type_node
de5a5fa1 2169 : pointer_sized_int_node);
8240018b
JJ
2170 DECL_CONTEXT (fields[i]) = ret;
2171 if (i)
2172 DECL_CHAIN (fields[i - 1]) = fields[i];
2173 }
bebcdc67
MP
2174 tree type_decl = build_decl (input_location, TYPE_DECL,
2175 get_identifier ("__asan_global"), ret);
2176 DECL_IGNORED_P (type_decl) = 1;
2177 DECL_ARTIFICIAL (type_decl) = 1;
8240018b 2178 TYPE_FIELDS (ret) = fields[0];
bebcdc67
MP
2179 TYPE_NAME (ret) = type_decl;
2180 TYPE_STUB_DECL (ret) = type_decl;
8240018b
JJ
2181 layout_type (ret);
2182 return ret;
2183}
2184
2185/* Append description of a single global DECL into vector V.
2186 TYPE is __asan_global struct type as returned by asan_global_struct. */
2187
2188static void
9771b263 2189asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
8240018b
JJ
2190{
2191 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2192 unsigned HOST_WIDE_INT size;
ef1b3fda 2193 tree str_cst, module_name_cst, refdecl = decl;
9771b263 2194 vec<constructor_elt, va_gc> *vinner = NULL;
8240018b 2195
ef1b3fda 2196 pretty_printer asan_pp, module_name_pp;
8240018b 2197
8240018b 2198 if (DECL_NAME (decl))
b066401f 2199 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
8240018b
JJ
2200 else
2201 pp_string (&asan_pp, "<unknown>");
11a877b3 2202 str_cst = asan_pp_string (&asan_pp);
8240018b 2203
ef1b3fda
KS
2204 pp_string (&module_name_pp, main_input_filename);
2205 module_name_cst = asan_pp_string (&module_name_pp);
2206
8240018b
JJ
2207 if (asan_needs_local_alias (decl))
2208 {
2209 char buf[20];
9771b263 2210 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
8240018b
JJ
2211 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2212 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2213 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2214 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2215 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2216 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2217 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2218 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2219 TREE_STATIC (refdecl) = 1;
2220 TREE_PUBLIC (refdecl) = 0;
2221 TREE_USED (refdecl) = 1;
2222 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2223 }
2224
2225 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2226 fold_convert (const_ptr_type_node,
2227 build_fold_addr_expr (refdecl)));
ae7e9ddd 2228 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
8240018b
JJ
2229 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2230 size += asan_red_zone_size (size);
2231 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2232 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2233 fold_convert (const_ptr_type_node, str_cst));
ef1b3fda
KS
2234 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2235 fold_convert (const_ptr_type_node, module_name_cst));
9041d2e6 2236 varpool_node *vnode = varpool_node::get (decl);
59b36ecf
JJ
2237 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2238 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2239 build_int_cst (uptr, has_dynamic_init));
21a82048
JJ
2240 tree locptr = NULL_TREE;
2241 location_t loc = DECL_SOURCE_LOCATION (decl);
2242 expanded_location xloc = expand_location (loc);
2243 if (xloc.file != NULL)
2244 {
2245 static int lasanloccnt = 0;
2246 char buf[25];
2247 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2248 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2249 ubsan_get_source_location_type ());
2250 TREE_STATIC (var) = 1;
2251 TREE_PUBLIC (var) = 0;
2252 DECL_ARTIFICIAL (var) = 1;
2253 DECL_IGNORED_P (var) = 1;
2254 pretty_printer filename_pp;
2255 pp_string (&filename_pp, xloc.file);
2256 tree str = asan_pp_string (&filename_pp);
2257 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2258 NULL_TREE, str, NULL_TREE,
2259 build_int_cst (unsigned_type_node,
2260 xloc.line), NULL_TREE,
2261 build_int_cst (unsigned_type_node,
2262 xloc.column));
2263 TREE_CONSTANT (ctor) = 1;
2264 TREE_STATIC (ctor) = 1;
2265 DECL_INITIAL (var) = ctor;
2266 varpool_node::finalize_decl (var);
2267 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2268 }
2269 else
2270 locptr = build_int_cst (uptr, 0);
2271 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
8240018b
JJ
2272 init = build_constructor (type, vinner);
2273 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2274}
2275
0e668eaf
JJ
2276/* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2277void
2278initialize_sanitizer_builtins (void)
2279{
2280 tree decl;
2281
2282 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2283 return;
2284
2285 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2286 tree BT_FN_VOID_PTR
2287 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
59b36ecf
JJ
2288 tree BT_FN_VOID_CONST_PTR
2289 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
b906f4ca
MP
2290 tree BT_FN_VOID_PTR_PTR
2291 = build_function_type_list (void_type_node, ptr_type_node,
2292 ptr_type_node, NULL_TREE);
de5a5fa1
MP
2293 tree BT_FN_VOID_PTR_PTR_PTR
2294 = build_function_type_list (void_type_node, ptr_type_node,
2295 ptr_type_node, ptr_type_node, NULL_TREE);
0e668eaf
JJ
2296 tree BT_FN_VOID_PTR_PTRMODE
2297 = build_function_type_list (void_type_node, ptr_type_node,
de5a5fa1 2298 pointer_sized_int_node, NULL_TREE);
c954bddd
JJ
2299 tree BT_FN_VOID_INT
2300 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0bae64d5
MP
2301 tree BT_FN_SIZE_CONST_PTR_INT
2302 = build_function_type_list (size_type_node, const_ptr_type_node,
2303 integer_type_node, NULL_TREE);
c954bddd
JJ
2304 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2305 tree BT_FN_IX_CONST_VPTR_INT[5];
2306 tree BT_FN_IX_VPTR_IX_INT[5];
2307 tree BT_FN_VOID_VPTR_IX_INT[5];
2308 tree vptr
2309 = build_pointer_type (build_qualified_type (void_type_node,
2310 TYPE_QUAL_VOLATILE));
2311 tree cvptr
2312 = build_pointer_type (build_qualified_type (void_type_node,
2313 TYPE_QUAL_VOLATILE
2314 |TYPE_QUAL_CONST));
2315 tree boolt
2316 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2317 int i;
2318 for (i = 0; i < 5; i++)
2319 {
2320 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2321 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2322 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2323 integer_type_node, integer_type_node,
2324 NULL_TREE);
2325 BT_FN_IX_CONST_VPTR_INT[i]
2326 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2327 BT_FN_IX_VPTR_IX_INT[i]
2328 = build_function_type_list (ix, vptr, ix, integer_type_node,
2329 NULL_TREE);
2330 BT_FN_VOID_VPTR_IX_INT[i]
2331 = build_function_type_list (void_type_node, vptr, ix,
2332 integer_type_node, NULL_TREE);
2333 }
2334#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2335#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2336#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2337#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2338#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2339#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2340#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2341#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2342#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2343#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2344#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2345#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2346#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2347#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2348#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2349#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2350#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2351#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2352#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2353#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
0e668eaf
JJ
2354#undef ATTR_NOTHROW_LEAF_LIST
2355#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
bc77608b
JJ
2356#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2357#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
0e668eaf
JJ
2358#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2359#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
4088b790
MP
2360#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2361#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2362 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
bc77608b
JJ
2363#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2364#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2365 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
de5a5fa1
MP
2366#undef ATTR_COLD_NOTHROW_LEAF_LIST
2367#define ATTR_COLD_NOTHROW_LEAF_LIST \
2368 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2369#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2370#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2371 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
4088b790
MP
2372#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2373#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2374 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
0bae64d5
MP
2375#undef ATTR_PURE_NOTHROW_LEAF_LIST
2376#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
0e668eaf
JJ
2377#undef DEF_SANITIZER_BUILTIN
2378#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2379 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2380 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2381 set_call_expr_flags (decl, ATTRS); \
2382 set_builtin_decl (ENUM, decl, true);
2383
2384#include "sanitizer.def"
2385
0bae64d5
MP
2386 /* -fsanitize=object-size uses __builtin_object_size, but that might
2387 not be available for e.g. Fortran at this point. We use
2388 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2389 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2390 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2391 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2392 BT_FN_SIZE_CONST_PTR_INT,
2393 ATTR_PURE_NOTHROW_LEAF_LIST)
2394
0e668eaf
JJ
2395#undef DEF_SANITIZER_BUILTIN
2396}
2397
94fce891
JJ
2398/* Called via htab_traverse. Count number of emitted
2399 STRING_CSTs in the constant hash table. */
2400
2a22f99c
TS
2401int
2402count_string_csts (constant_descriptor_tree **slot,
2403 unsigned HOST_WIDE_INT *data)
94fce891 2404{
2a22f99c 2405 struct constant_descriptor_tree *desc = *slot;
94fce891
JJ
2406 if (TREE_CODE (desc->value) == STRING_CST
2407 && TREE_ASM_WRITTEN (desc->value)
2408 && asan_protect_global (desc->value))
2a22f99c 2409 ++*data;
94fce891
JJ
2410 return 1;
2411}
2412
2413/* Helper structure to pass two parameters to
2414 add_string_csts. */
2415
2416struct asan_add_string_csts_data
2417{
2418 tree type;
2419 vec<constructor_elt, va_gc> *v;
2420};
2421
2a22f99c 2422/* Called via hash_table::traverse. Call asan_add_global
94fce891
JJ
2423 on emitted STRING_CSTs from the constant hash table. */
2424
2a22f99c
TS
2425int
2426add_string_csts (constant_descriptor_tree **slot,
2427 asan_add_string_csts_data *aascd)
94fce891 2428{
2a22f99c 2429 struct constant_descriptor_tree *desc = *slot;
94fce891
JJ
2430 if (TREE_CODE (desc->value) == STRING_CST
2431 && TREE_ASM_WRITTEN (desc->value)
2432 && asan_protect_global (desc->value))
2433 {
94fce891
JJ
2434 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2435 aascd->type, aascd->v);
2436 }
2437 return 1;
2438}
2439
8240018b
JJ
2440/* Needs to be GTY(()), because cgraph_build_static_cdtor may
2441 invoke ggc_collect. */
2442static GTY(()) tree asan_ctor_statements;
2443
37d6f666 2444/* Module-level instrumentation.
ef1b3fda 2445 - Insert __asan_init_vN() into the list of CTORs.
37d6f666
WM
2446 - TODO: insert redzones around globals.
2447 */
2448
2449void
2450asan_finish_file (void)
2451{
2c8326a5 2452 varpool_node *vnode;
8240018b
JJ
2453 unsigned HOST_WIDE_INT gcount = 0;
2454
94fce891
JJ
2455 if (shadow_ptr_types[0] == NULL_TREE)
2456 asan_init_shadow_ptr_types ();
2457 /* Avoid instrumenting code in the asan ctors/dtors.
2458 We don't need to insert padding after the description strings,
2459 nor after .LASAN* array. */
de5a5fa1 2460 flag_sanitize &= ~SANITIZE_ADDRESS;
0e668eaf 2461
f1d15bb9
DV
2462 /* For user-space we want asan constructors to run first.
2463 Linux kernel does not support priorities other than default, and the only
2464 other user of constructors is coverage. So we run with the default
2465 priority. */
2466 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2467 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2468
c6d129b0
YG
2469 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2470 {
2471 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2472 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2473 }
8240018b 2474 FOR_EACH_DEFINED_VARIABLE (vnode)
67348ccc
DM
2475 if (TREE_ASM_WRITTEN (vnode->decl)
2476 && asan_protect_global (vnode->decl))
8240018b 2477 ++gcount;
2a22f99c
TS
2478 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2479 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2480 (&gcount);
8240018b
JJ
2481 if (gcount)
2482 {
0e668eaf 2483 tree type = asan_global_struct (), var, ctor;
8240018b 2484 tree dtor_statements = NULL_TREE;
9771b263 2485 vec<constructor_elt, va_gc> *v;
8240018b
JJ
2486 char buf[20];
2487
2488 type = build_array_type_nelts (type, gcount);
2489 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2490 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2491 type);
2492 TREE_STATIC (var) = 1;
2493 TREE_PUBLIC (var) = 0;
2494 DECL_ARTIFICIAL (var) = 1;
2495 DECL_IGNORED_P (var) = 1;
9771b263 2496 vec_alloc (v, gcount);
8240018b 2497 FOR_EACH_DEFINED_VARIABLE (vnode)
67348ccc
DM
2498 if (TREE_ASM_WRITTEN (vnode->decl)
2499 && asan_protect_global (vnode->decl))
2500 asan_add_global (vnode->decl, TREE_TYPE (type), v);
94fce891
JJ
2501 struct asan_add_string_csts_data aascd;
2502 aascd.type = TREE_TYPE (type);
2503 aascd.v = v;
2a22f99c
TS
2504 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2505 (&aascd);
8240018b
JJ
2506 ctor = build_constructor (type, v);
2507 TREE_CONSTANT (ctor) = 1;
2508 TREE_STATIC (ctor) = 1;
2509 DECL_INITIAL (var) = ctor;
9041d2e6 2510 varpool_node::finalize_decl (var);
8240018b 2511
c6d129b0 2512 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
de5a5fa1 2513 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
0e668eaf 2514 append_to_statement_list (build_call_expr (fn, 2,
8240018b 2515 build_fold_addr_expr (var),
de5a5fa1 2516 gcount_tree),
8240018b
JJ
2517 &asan_ctor_statements);
2518
0e668eaf
JJ
2519 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2520 append_to_statement_list (build_call_expr (fn, 2,
8240018b 2521 build_fold_addr_expr (var),
de5a5fa1 2522 gcount_tree),
8240018b 2523 &dtor_statements);
f1d15bb9 2524 cgraph_build_static_cdtor ('D', dtor_statements, priority);
8240018b 2525 }
c6d129b0 2526 if (asan_ctor_statements)
f1d15bb9 2527 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
de5a5fa1 2528 flag_sanitize |= SANITIZE_ADDRESS;
f6d98484
JJ
2529}
2530
c62ccb9a
YG
2531/* Expand the ASAN_{LOAD,STORE} builtins. */
2532
06cefae9 2533bool
c62ccb9a
YG
2534asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2535{
355fe088 2536 gimple *g = gsi_stmt (*iter);
c62ccb9a
YG
2537 location_t loc = gimple_location (g);
2538
fed4de37
YG
2539 bool recover_p
2540 = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2541
c62ccb9a
YG
2542 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2543 gcc_assert (flags < ASAN_CHECK_LAST);
2544 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2545 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2546 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
c62ccb9a
YG
2547
2548 tree base = gimple_call_arg (g, 1);
2549 tree len = gimple_call_arg (g, 2);
f434eb69 2550 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
c62ccb9a
YG
2551
2552 HOST_WIDE_INT size_in_bytes
2553 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2554
2555 if (use_calls)
2556 {
2557 /* Instrument using callbacks. */
355fe088 2558 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
0d0e4a03 2559 NOP_EXPR, base);
c62ccb9a
YG
2560 gimple_set_location (g, loc);
2561 gsi_insert_before (iter, g, GSI_SAME_STMT);
2562 tree base_addr = gimple_assign_lhs (g);
2563
2564 int nargs;
fed4de37 2565 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
c62ccb9a
YG
2566 if (nargs == 1)
2567 g = gimple_build_call (fun, 1, base_addr);
2568 else
2569 {
2570 gcc_assert (nargs == 2);
0d0e4a03
JJ
2571 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2572 NOP_EXPR, len);
c62ccb9a
YG
2573 gimple_set_location (g, loc);
2574 gsi_insert_before (iter, g, GSI_SAME_STMT);
2575 tree sz_arg = gimple_assign_lhs (g);
2576 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2577 }
2578 gimple_set_location (g, loc);
2579 gsi_replace (iter, g, false);
2580 return false;
2581 }
2582
2583 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2584
c62ccb9a
YG
2585 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2586 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2587
2588 gimple_stmt_iterator gsi = *iter;
2589
2590 if (!is_non_zero_len)
2591 {
2592 /* So, the length of the memory area to asan-protect is
2593 non-constant. Let's guard the generated instrumentation code
2594 like:
2595
2596 if (len != 0)
2597 {
2598 //asan instrumentation code goes here.
2599 }
2600 // falltrough instructions, starting with *ITER. */
2601
2602 g = gimple_build_cond (NE_EXPR,
2603 len,
2604 build_int_cst (TREE_TYPE (len), 0),
2605 NULL_TREE, NULL_TREE);
2606 gimple_set_location (g, loc);
2607
2608 basic_block then_bb, fallthrough_bb;
538dd0b7
DM
2609 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2610 /*then_more_likely_p=*/true,
2611 &then_bb, &fallthrough_bb);
c62ccb9a
YG
2612 /* Note that fallthrough_bb starts with the statement that was
2613 pointed to by ITER. */
2614
2615 /* The 'then block' of the 'if (len != 0) condition is where
2616 we'll generate the asan instrumentation code now. */
2617 gsi = gsi_last_bb (then_bb);
2618 }
2619
2620 /* Get an iterator on the point where we can add the condition
2621 statement for the instrumentation. */
2622 basic_block then_bb, else_bb;
2623 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2624 /*then_more_likely_p=*/false,
fed4de37 2625 /*create_then_fallthru_edge*/recover_p,
c62ccb9a
YG
2626 &then_bb,
2627 &else_bb);
2628
0d0e4a03
JJ
2629 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2630 NOP_EXPR, base);
c62ccb9a
YG
2631 gimple_set_location (g, loc);
2632 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2633 tree base_addr = gimple_assign_lhs (g);
2634
2635 tree t = NULL_TREE;
2636 if (real_size_in_bytes >= 8)
2637 {
2638 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2639 shadow_ptr_type);
2640 t = shadow;
2641 }
2642 else
2643 {
2644 /* Slow path for 1, 2 and 4 byte accesses. */
bdea98ca
MO
2645 /* Test (shadow != 0)
2646 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
2647 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2648 shadow_ptr_type);
355fe088 2649 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
bdea98ca
MO
2650 gimple_seq seq = NULL;
2651 gimple_seq_add_stmt (&seq, shadow_test);
2652 /* Aligned (>= 8 bytes) can test just
2653 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2654 to be 0. */
2655 if (align < 8)
c62ccb9a 2656 {
bdea98ca
MO
2657 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2658 base_addr, 7));
2659 gimple_seq_add_stmt (&seq,
2660 build_type_cast (shadow_type,
2661 gimple_seq_last (seq)));
2662 if (real_size_in_bytes > 1)
2663 gimple_seq_add_stmt (&seq,
2664 build_assign (PLUS_EXPR,
2665 gimple_seq_last (seq),
2666 real_size_in_bytes - 1));
2667 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
c62ccb9a 2668 }
bdea98ca
MO
2669 else
2670 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2671 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2672 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2673 gimple_seq_last (seq)));
2674 t = gimple_assign_lhs (gimple_seq_last (seq));
2675 gimple_seq_set_location (seq, loc);
2676 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
c62ccb9a
YG
2677
2678 /* For non-constant, misaligned or otherwise weird access sizes,
bdea98ca
MO
2679 check first and last byte. */
2680 if (size_in_bytes == -1)
c62ccb9a 2681 {
0d0e4a03
JJ
2682 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2683 MINUS_EXPR, len,
2684 build_int_cst (pointer_sized_int_node, 1));
c62ccb9a
YG
2685 gimple_set_location (g, loc);
2686 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2687 tree last = gimple_assign_lhs (g);
0d0e4a03
JJ
2688 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2689 PLUS_EXPR, base_addr, last);
c62ccb9a
YG
2690 gimple_set_location (g, loc);
2691 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2692 tree base_end_addr = gimple_assign_lhs (g);
2693
2694 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2695 shadow_ptr_type);
355fe088 2696 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
c62ccb9a
YG
2697 gimple_seq seq = NULL;
2698 gimple_seq_add_stmt (&seq, shadow_test);
2699 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2700 base_end_addr, 7));
2701 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2702 gimple_seq_last (seq)));
2703 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2704 gimple_seq_last (seq),
2705 shadow));
2706 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2707 gimple_seq_last (seq)));
bdea98ca
MO
2708 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2709 gimple_seq_last (seq)));
c62ccb9a
YG
2710 t = gimple_assign_lhs (gimple_seq_last (seq));
2711 gimple_seq_set_location (seq, loc);
2712 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2713 }
2714 }
2715
2716 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2717 NULL_TREE, NULL_TREE);
2718 gimple_set_location (g, loc);
2719 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2720
2721 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2722 gsi = gsi_start_bb (then_bb);
2723 int nargs;
fed4de37 2724 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
c62ccb9a
YG
2725 g = gimple_build_call (fun, nargs, base_addr, len);
2726 gimple_set_location (g, loc);
2727 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2728
2729 gsi_remove (iter, true);
2730 *iter = gsi_start_bb (else_bb);
2731
2732 return true;
2733}
2734
37d6f666
WM
2735/* Instrument the current function. */
2736
2737static unsigned int
2738asan_instrument (void)
2739{
f6d98484 2740 if (shadow_ptr_types[0] == NULL_TREE)
94fce891 2741 asan_init_shadow_ptr_types ();
37d6f666 2742 transform_statements ();
37d6f666
WM
2743 return 0;
2744}
2745
2746static bool
2747gate_asan (void)
2748{
de5a5fa1 2749 return (flag_sanitize & SANITIZE_ADDRESS) != 0
e664c61c 2750 && !lookup_attribute ("no_sanitize_address",
77bc5132 2751 DECL_ATTRIBUTES (current_function_decl));
37d6f666
WM
2752}
2753
27a4cd48
DM
2754namespace {
2755
2756const pass_data pass_data_asan =
37d6f666 2757{
27a4cd48
DM
2758 GIMPLE_PASS, /* type */
2759 "asan", /* name */
2760 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
2761 TV_NONE, /* tv_id */
2762 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2763 0, /* properties_provided */
2764 0, /* properties_destroyed */
2765 0, /* todo_flags_start */
3bea341f 2766 TODO_update_ssa, /* todo_flags_finish */
37d6f666 2767};
f6d98484 2768
27a4cd48
DM
2769class pass_asan : public gimple_opt_pass
2770{
2771public:
c3284718
RS
2772 pass_asan (gcc::context *ctxt)
2773 : gimple_opt_pass (pass_data_asan, ctxt)
27a4cd48
DM
2774 {}
2775
2776 /* opt_pass methods: */
65d3284b 2777 opt_pass * clone () { return new pass_asan (m_ctxt); }
1a3d085c 2778 virtual bool gate (function *) { return gate_asan (); }
be55bfe6 2779 virtual unsigned int execute (function *) { return asan_instrument (); }
27a4cd48
DM
2780
2781}; // class pass_asan
2782
2783} // anon namespace
2784
2785gimple_opt_pass *
2786make_pass_asan (gcc::context *ctxt)
2787{
2788 return new pass_asan (ctxt);
2789}
2790
27a4cd48
DM
2791namespace {
2792
2793const pass_data pass_data_asan_O0 =
dfb9e332 2794{
27a4cd48
DM
2795 GIMPLE_PASS, /* type */
2796 "asan0", /* name */
2797 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
2798 TV_NONE, /* tv_id */
2799 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2800 0, /* properties_provided */
2801 0, /* properties_destroyed */
2802 0, /* todo_flags_start */
3bea341f 2803 TODO_update_ssa, /* todo_flags_finish */
dfb9e332
JJ
2804};
2805
27a4cd48
DM
2806class pass_asan_O0 : public gimple_opt_pass
2807{
2808public:
c3284718
RS
2809 pass_asan_O0 (gcc::context *ctxt)
2810 : gimple_opt_pass (pass_data_asan_O0, ctxt)
27a4cd48
DM
2811 {}
2812
2813 /* opt_pass methods: */
1a3d085c 2814 virtual bool gate (function *) { return !optimize && gate_asan (); }
be55bfe6 2815 virtual unsigned int execute (function *) { return asan_instrument (); }
27a4cd48
DM
2816
2817}; // class pass_asan_O0
2818
2819} // anon namespace
2820
2821gimple_opt_pass *
2822make_pass_asan_O0 (gcc::context *ctxt)
2823{
2824 return new pass_asan_O0 (ctxt);
2825}
2826
f6d98484 2827#include "gt-asan.h"