]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/asan.c
gcc/
[thirdparty/gcc.git] / gcc / asan.c
CommitLineData
b92cccf4 1/* AddressSanitizer, a fast memory error detector.
d353bf18 2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
b92cccf4 3 Contributed by Kostya Serebryany <kcc@google.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
b20a8bb4 25#include "alias.h"
26#include "symtab.h"
27#include "options.h"
41a8aa41 28#include "tree.h"
b20a8bb4 29#include "fold-const.h"
94ea8568 30#include "predict.h"
94ea8568 31#include "tm.h"
32#include "hard-reg-set.h"
94ea8568 33#include "function.h"
34#include "dominance.h"
35#include "cfg.h"
36#include "cfganal.h"
bc61cadb 37#include "basic-block.h"
38#include "tree-ssa-alias.h"
39#include "internal-fn.h"
40#include "gimple-expr.h"
e795d6e1 41#include "gimple.h"
a8783bee 42#include "gimplify.h"
dcf1a1ec 43#include "gimple-iterator.h"
9ed99284 44#include "calls.h"
45#include "varasm.h"
46#include "stor-layout.h"
b92cccf4 47#include "tree-iterator.h"
1140c305 48#include "plugin-api.h"
49#include "ipa-ref.h"
073c1fd5 50#include "cgraph.h"
9ed99284 51#include "stringpool.h"
073c1fd5 52#include "tree-ssanames.h"
b92cccf4 53#include "tree-pass.h"
b92cccf4 54#include "asan.h"
55#include "gimple-pretty-print.h"
7ad5fd20 56#include "target.h"
d53441c8 57#include "rtl.h"
58#include "flags.h"
d53441c8 59#include "insn-config.h"
60#include "expmed.h"
61#include "dojump.h"
62#include "explow.h"
63#include "emit-rtl.h"
64#include "stmt.h"
3c919612 65#include "expr.h"
34517c64 66#include "insn-codes.h"
3c919612 67#include "optabs.h"
92fc5c48 68#include "output.h"
a4932d0d 69#include "tm_p.h"
b45e34ed 70#include "langhooks.h"
c31c80df 71#include "alloc-pool.h"
f6568ea4 72#include "cfgloop.h"
9a4a3348 73#include "gimple-builder.h"
19b928d9 74#include "ubsan.h"
bf2b7c22 75#include "params.h"
f7715905 76#include "builtins.h"
5cd86e48 77#include "fnmatch.h"
b92cccf4 78
eca932e6 79/* AddressSanitizer finds out-of-bounds and use-after-free bugs
80 with <2x slowdown on average.
81
82 The tool consists of two parts:
83 instrumentation module (this file) and a run-time library.
84 The instrumentation module adds a run-time check before every memory insn.
85 For a 8- or 16- byte load accessing address X:
86 ShadowAddr = (X >> 3) + Offset
87 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
88 if (ShadowValue)
89 __asan_report_load8(X);
90 For a load of N bytes (N=1, 2 or 4) from address X:
91 ShadowAddr = (X >> 3) + Offset
92 ShadowValue = *(char*)ShadowAddr;
93 if (ShadowValue)
94 if ((X & 7) + N - 1 > ShadowValue)
95 __asan_report_loadN(X);
96 Stores are instrumented similarly, but using __asan_report_storeN functions.
1e80ce41 97 A call too __asan_init_vN() is inserted to the list of module CTORs.
98 N is the version number of the AddressSanitizer API. The changes between the
99 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
eca932e6 100
101 The run-time library redefines malloc (so that redzone are inserted around
102 the allocated memory) and free (so that reuse of free-ed memory is delayed),
1e80ce41 103 provides __asan_report* and __asan_init_vN functions.
eca932e6 104
105 Read more:
106 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
107
108 The current implementation supports detection of out-of-bounds and
109 use-after-free in the heap, on the stack and for global variables.
110
111 [Protection of stack variables]
112
113 To understand how detection of out-of-bounds and use-after-free works
114 for stack variables, lets look at this example on x86_64 where the
115 stack grows downward:
3c919612 116
117 int
118 foo ()
119 {
120 char a[23] = {0};
121 int b[2] = {0};
122
123 a[5] = 1;
124 b[1] = 2;
125
126 return a[5] + b[1];
127 }
128
eca932e6 129 For this function, the stack protected by asan will be organized as
130 follows, from the top of the stack to the bottom:
3c919612 131
eca932e6 132 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
3c919612 133
eca932e6 134 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
135 the next slot be 32 bytes aligned; this one is called Partial
136 Redzone; this 32 bytes alignment is an asan constraint]
3c919612 137
eca932e6 138 Slot 3/ [24 bytes for variable 'a']
3c919612 139
eca932e6 140 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
3c919612 141
eca932e6 142 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
3c919612 143
eca932e6 144 Slot 6/ [8 bytes for variable 'b']
3c919612 145
eca932e6 146 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
147 'LEFT RedZone']
3c919612 148
eca932e6 149 The 32 bytes of LEFT red zone at the bottom of the stack can be
150 decomposed as such:
3c919612 151
152 1/ The first 8 bytes contain a magical asan number that is always
153 0x41B58AB3.
154
155 2/ The following 8 bytes contains a pointer to a string (to be
156 parsed at runtime by the runtime asan library), which format is
157 the following:
158
159 "<function-name> <space> <num-of-variables-on-the-stack>
160 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
161 <length-of-var-in-bytes> ){n} "
162
163 where '(...){n}' means the content inside the parenthesis occurs 'n'
164 times, with 'n' being the number of variables on the stack.
e815c2c5 165
1e80ce41 166 3/ The following 8 bytes contain the PC of the current function which
167 will be used by the run-time library to print an error message.
3c919612 168
1e80ce41 169 4/ The following 8 bytes are reserved for internal use by the run-time.
3c919612 170
eca932e6 171 The shadow memory for that stack layout is going to look like this:
3c919612 172
173 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
174 The F1 byte pattern is a magic number called
175 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
176 the memory for that shadow byte is part of a the LEFT red zone
177 intended to seat at the bottom of the variables on the stack.
178
179 - content of shadow memory 8 bytes for slots 6 and 5:
180 0xF4F4F400. The F4 byte pattern is a magic number
181 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
182 memory region for this shadow byte is a PARTIAL red zone
183 intended to pad a variable A, so that the slot following
184 {A,padding} is 32 bytes aligned.
185
186 Note that the fact that the least significant byte of this
187 shadow memory content is 00 means that 8 bytes of its
188 corresponding memory (which corresponds to the memory of
189 variable 'b') is addressable.
190
191 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
192 The F2 byte pattern is a magic number called
193 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
194 region for this shadow byte is a MIDDLE red zone intended to
195 seat between two 32 aligned slots of {variable,padding}.
196
197 - content of shadow memory 8 bytes for slot 3 and 2:
eca932e6 198 0xF4000000. This represents is the concatenation of
3c919612 199 variable 'a' and the partial red zone following it, like what we
200 had for variable 'b'. The least significant 3 bytes being 00
201 means that the 3 bytes of variable 'a' are addressable.
202
eca932e6 203 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
3c919612 204 The F3 byte pattern is a magic number called
205 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
206 region for this shadow byte is a RIGHT red zone intended to seat
207 at the top of the variables of the stack.
208
eca932e6 209 Note that the real variable layout is done in expand_used_vars in
210 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
211 stack variables as well as the different red zones, emits some
212 prologue code to populate the shadow memory as to poison (mark as
213 non-accessible) the regions of the red zones and mark the regions of
214 stack variables as accessible, and emit some epilogue code to
215 un-poison (mark as accessible) the regions of red zones right before
216 the function exits.
92fc5c48 217
eca932e6 218 [Protection of global variables]
92fc5c48 219
eca932e6 220 The basic idea is to insert a red zone between two global variables
221 and install a constructor function that calls the asan runtime to do
222 the populating of the relevant shadow memory regions at load time.
92fc5c48 223
eca932e6 224 So the global variables are laid out as to insert a red zone between
225 them. The size of the red zones is so that each variable starts on a
226 32 bytes boundary.
92fc5c48 227
eca932e6 228 Then a constructor function is installed so that, for each global
229 variable, it calls the runtime asan library function
230 __asan_register_globals_with an instance of this type:
92fc5c48 231
232 struct __asan_global
233 {
234 // Address of the beginning of the global variable.
235 const void *__beg;
236
237 // Initial size of the global variable.
238 uptr __size;
239
240 // Size of the global variable + size of the red zone. This
241 // size is 32 bytes aligned.
242 uptr __size_with_redzone;
243
244 // Name of the global variable.
245 const void *__name;
246
1e80ce41 247 // Name of the module where the global variable is declared.
248 const void *__module_name;
249
085f6ebf 250 // 1 if it has dynamic initialization, 0 otherwise.
92fc5c48 251 uptr __has_dynamic_init;
a9586c9c 252
253 // A pointer to struct that contains source location, could be NULL.
254 __asan_global_source_location *__location;
92fc5c48 255 }
256
eca932e6 257 A destructor function that calls the runtime asan library function
258 _asan_unregister_globals is also installed. */
3c919612 259
cf357977 260static unsigned HOST_WIDE_INT asan_shadow_offset_value;
261static bool asan_shadow_offset_computed;
5cd86e48 262static vec<char *> sanitized_sections;
cf357977 263
264/* Sets shadow offset to value in string VAL. */
265
266bool
267set_asan_shadow_offset (const char *val)
268{
269 char *endp;
e815c2c5 270
cf357977 271 errno = 0;
272#ifdef HAVE_LONG_LONG
273 asan_shadow_offset_value = strtoull (val, &endp, 0);
274#else
275 asan_shadow_offset_value = strtoul (val, &endp, 0);
276#endif
277 if (!(*val != '\0' && *endp == '\0' && errno == 0))
278 return false;
279
280 asan_shadow_offset_computed = true;
281
282 return true;
283}
284
4d3c996b 285/* Set list of user-defined sections that need to be sanitized. */
286
287void
5cd86e48 288set_sanitized_sections (const char *sections)
4d3c996b 289{
5cd86e48 290 char *pat;
291 unsigned i;
292 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
293 free (pat);
294 sanitized_sections.truncate (0);
295
296 for (const char *s = sections; *s; )
297 {
298 const char *end;
299 for (end = s; *end && *end != ','; ++end);
300 size_t len = end - s;
301 sanitized_sections.safe_push (xstrndup (s, len));
302 s = *end ? end + 1 : end;
303 }
4d3c996b 304}
305
306/* Checks whether section SEC should be sanitized. */
307
308static bool
309section_sanitized_p (const char *sec)
310{
5cd86e48 311 char *pat;
312 unsigned i;
313 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
314 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
315 return true;
4d3c996b 316 return false;
317}
318
cf357977 319/* Returns Asan shadow offset. */
320
321static unsigned HOST_WIDE_INT
322asan_shadow_offset ()
323{
324 if (!asan_shadow_offset_computed)
325 {
326 asan_shadow_offset_computed = true;
327 asan_shadow_offset_value = targetm.asan_shadow_offset ();
328 }
329 return asan_shadow_offset_value;
330}
331
3c919612 332alias_set_type asan_shadow_set = -1;
b92cccf4 333
5d5c682b 334/* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
335 alias set is used for all shadow memory accesses. */
336static GTY(()) tree shadow_ptr_types[2];
337
683539f6 338/* Decl for __asan_option_detect_stack_use_after_return. */
339static GTY(()) tree asan_detect_stack_use_after_return;
340
ff326078 341/* Various flags for Asan builtins. */
342enum asan_check_flags
4f86f720 343{
ff326078 344 ASAN_CHECK_STORE = 1 << 0,
345 ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
346 ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
f9acf11a 347 ASAN_CHECK_LAST = 1 << 3
ff326078 348};
4f86f720 349
c31c80df 350/* Hashtable support for memory references used by gimple
351 statements. */
352
353/* This type represents a reference to a memory region. */
354struct asan_mem_ref
355{
a04e8d62 356 /* The expression of the beginning of the memory region. */
c31c80df 357 tree start;
358
86d3a572 359 /* The size of the access. */
360 HOST_WIDE_INT access_size;
c31c80df 361
e815c2c5 362 /* Pool allocation new operator. */
363 inline void *operator new (size_t)
364 {
365 return pool.allocate ();
366 }
c31c80df 367
e815c2c5 368 /* Delete operator utilizing pool allocation. */
369 inline void operator delete (void *ptr)
370 {
371 pool.remove ((asan_mem_ref *) ptr);
372 }
c31c80df 373
e815c2c5 374 /* Memory allocation pool. */
375 static pool_allocator<asan_mem_ref> pool;
376};
377
378pool_allocator<asan_mem_ref> asan_mem_ref::pool ("asan_mem_ref", 10);
c31c80df 379
380/* Initializes an instance of asan_mem_ref. */
381
382static void
86d3a572 383asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
c31c80df 384{
385 ref->start = start;
386 ref->access_size = access_size;
387}
388
389/* Allocates memory for an instance of asan_mem_ref into the memory
390 pool returned by asan_mem_ref_get_alloc_pool and initialize it.
391 START is the address of (or the expression pointing to) the
392 beginning of memory reference. ACCESS_SIZE is the size of the
393 access to the referenced memory. */
394
395static asan_mem_ref*
86d3a572 396asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
c31c80df 397{
e815c2c5 398 asan_mem_ref *ref = new asan_mem_ref;
c31c80df 399
400 asan_mem_ref_init (ref, start, access_size);
401 return ref;
402}
403
404/* This builds and returns a pointer to the end of the memory region
405 that starts at START and of length LEN. */
406
407tree
408asan_mem_ref_get_end (tree start, tree len)
409{
410 if (len == NULL_TREE || integer_zerop (len))
411 return start;
412
0a9f72cf 413 if (!ptrofftype_p (len))
414 len = convert_to_ptrofftype (len);
415
c31c80df 416 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
417}
418
419/* Return a tree expression that represents the end of the referenced
420 memory region. Beware that this function can actually build a new
421 tree expression. */
422
423tree
424asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
425{
426 return asan_mem_ref_get_end (ref->start, len);
427}
428
770ff93b 429struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
c31c80df 430{
9969c043 431 static inline hashval_t hash (const asan_mem_ref *);
432 static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
c31c80df 433};
434
435/* Hash a memory reference. */
436
437inline hashval_t
438asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
439{
f9acf11a 440 return iterative_hash_expr (mem_ref->start, 0);
c31c80df 441}
442
443/* Compare two memory references. We accept the length of either
444 memory references to be NULL_TREE. */
445
446inline bool
447asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
448 const asan_mem_ref *m2)
449{
f9acf11a 450 return operand_equal_p (m1->start, m2->start, 0);
c31c80df 451}
452
c1f445d2 453static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
c31c80df 454
455/* Returns a reference to the hash table containing memory references.
456 This function ensures that the hash table is created. Note that
457 this hash table is updated by the function
458 update_mem_ref_hash_table. */
459
c1f445d2 460static hash_table<asan_mem_ref_hasher> *
c31c80df 461get_mem_ref_hash_table ()
462{
c1f445d2 463 if (!asan_mem_ref_ht)
464 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
c31c80df 465
466 return asan_mem_ref_ht;
467}
468
469/* Clear all entries from the memory references hash table. */
470
471static void
472empty_mem_ref_hash_table ()
473{
c1f445d2 474 if (asan_mem_ref_ht)
475 asan_mem_ref_ht->empty ();
c31c80df 476}
477
478/* Free the memory references hash table. */
479
480static void
481free_mem_ref_resources ()
482{
c1f445d2 483 delete asan_mem_ref_ht;
484 asan_mem_ref_ht = NULL;
c31c80df 485
e815c2c5 486 asan_mem_ref::pool.release ();
c31c80df 487}
488
489/* Return true iff the memory reference REF has been instrumented. */
490
491static bool
86d3a572 492has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
c31c80df 493{
494 asan_mem_ref r;
495 asan_mem_ref_init (&r, ref, access_size);
496
f9acf11a 497 asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
498 return saved_ref && saved_ref->access_size >= access_size;
c31c80df 499}
500
501/* Return true iff the memory reference REF has been instrumented. */
502
503static bool
504has_mem_ref_been_instrumented (const asan_mem_ref *ref)
505{
506 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
507}
508
509/* Return true iff access to memory region starting at REF and of
510 length LEN has been instrumented. */
511
512static bool
513has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
514{
f9acf11a 515 HOST_WIDE_INT size_in_bytes
516 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
c31c80df 517
f9acf11a 518 return size_in_bytes != -1
519 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
c31c80df 520}
521
522/* Set REF to the memory reference present in a gimple assignment
523 ASSIGNMENT. Return true upon successful completion, false
524 otherwise. */
525
526static bool
1a91d914 527get_mem_ref_of_assignment (const gassign *assignment,
c31c80df 528 asan_mem_ref *ref,
529 bool *ref_is_store)
530{
531 gcc_assert (gimple_assign_single_p (assignment));
532
9f559b20 533 if (gimple_store_p (assignment)
534 && !gimple_clobber_p (assignment))
c31c80df 535 {
536 ref->start = gimple_assign_lhs (assignment);
537 *ref_is_store = true;
538 }
539 else if (gimple_assign_load_p (assignment))
540 {
541 ref->start = gimple_assign_rhs1 (assignment);
542 *ref_is_store = false;
543 }
544 else
545 return false;
546
547 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
548 return true;
549}
550
551/* Return the memory references contained in a gimple statement
552 representing a builtin call that has to do with memory access. */
553
554static bool
1a91d914 555get_mem_refs_of_builtin_call (const gcall *call,
c31c80df 556 asan_mem_ref *src0,
557 tree *src0_len,
558 bool *src0_is_store,
559 asan_mem_ref *src1,
560 tree *src1_len,
561 bool *src1_is_store,
562 asan_mem_ref *dst,
563 tree *dst_len,
564 bool *dst_is_store,
f9acf11a 565 bool *dest_is_deref,
566 bool *intercepted_p)
c31c80df 567{
568 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
569
570 tree callee = gimple_call_fndecl (call);
571 tree source0 = NULL_TREE, source1 = NULL_TREE,
572 dest = NULL_TREE, len = NULL_TREE;
573 bool is_store = true, got_reference_p = false;
86d3a572 574 HOST_WIDE_INT access_size = 1;
c31c80df 575
f9acf11a 576 *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
577
c31c80df 578 switch (DECL_FUNCTION_CODE (callee))
579 {
580 /* (s, s, n) style memops. */
581 case BUILT_IN_BCMP:
582 case BUILT_IN_MEMCMP:
583 source0 = gimple_call_arg (call, 0);
584 source1 = gimple_call_arg (call, 1);
585 len = gimple_call_arg (call, 2);
586 break;
587
588 /* (src, dest, n) style memops. */
589 case BUILT_IN_BCOPY:
590 source0 = gimple_call_arg (call, 0);
591 dest = gimple_call_arg (call, 1);
592 len = gimple_call_arg (call, 2);
593 break;
594
595 /* (dest, src, n) style memops. */
596 case BUILT_IN_MEMCPY:
597 case BUILT_IN_MEMCPY_CHK:
598 case BUILT_IN_MEMMOVE:
599 case BUILT_IN_MEMMOVE_CHK:
600 case BUILT_IN_MEMPCPY:
601 case BUILT_IN_MEMPCPY_CHK:
602 dest = gimple_call_arg (call, 0);
603 source0 = gimple_call_arg (call, 1);
604 len = gimple_call_arg (call, 2);
605 break;
606
607 /* (dest, n) style memops. */
608 case BUILT_IN_BZERO:
609 dest = gimple_call_arg (call, 0);
610 len = gimple_call_arg (call, 1);
611 break;
612
613 /* (dest, x, n) style memops*/
614 case BUILT_IN_MEMSET:
615 case BUILT_IN_MEMSET_CHK:
616 dest = gimple_call_arg (call, 0);
617 len = gimple_call_arg (call, 2);
618 break;
619
620 case BUILT_IN_STRLEN:
621 source0 = gimple_call_arg (call, 0);
622 len = gimple_call_lhs (call);
623 break ;
624
625 /* And now the __atomic* and __sync builtins.
626 These are handled differently from the classical memory memory
627 access builtins above. */
628
629 case BUILT_IN_ATOMIC_LOAD_1:
630 case BUILT_IN_ATOMIC_LOAD_2:
631 case BUILT_IN_ATOMIC_LOAD_4:
632 case BUILT_IN_ATOMIC_LOAD_8:
633 case BUILT_IN_ATOMIC_LOAD_16:
634 is_store = false;
635 /* fall through. */
636
637 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
638 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
639 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
640 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
641 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
642
643 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
644 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
645 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
646 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
647 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
648
649 case BUILT_IN_SYNC_FETCH_AND_OR_1:
650 case BUILT_IN_SYNC_FETCH_AND_OR_2:
651 case BUILT_IN_SYNC_FETCH_AND_OR_4:
652 case BUILT_IN_SYNC_FETCH_AND_OR_8:
653 case BUILT_IN_SYNC_FETCH_AND_OR_16:
654
655 case BUILT_IN_SYNC_FETCH_AND_AND_1:
656 case BUILT_IN_SYNC_FETCH_AND_AND_2:
657 case BUILT_IN_SYNC_FETCH_AND_AND_4:
658 case BUILT_IN_SYNC_FETCH_AND_AND_8:
659 case BUILT_IN_SYNC_FETCH_AND_AND_16:
660
661 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
662 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
663 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
664 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
665 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
666
667 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
668 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
669 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
670 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
671
672 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
673 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
674 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
675 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
676 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
677
678 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
679 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
680 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
681 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
682 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
683
684 case BUILT_IN_SYNC_OR_AND_FETCH_1:
685 case BUILT_IN_SYNC_OR_AND_FETCH_2:
686 case BUILT_IN_SYNC_OR_AND_FETCH_4:
687 case BUILT_IN_SYNC_OR_AND_FETCH_8:
688 case BUILT_IN_SYNC_OR_AND_FETCH_16:
689
690 case BUILT_IN_SYNC_AND_AND_FETCH_1:
691 case BUILT_IN_SYNC_AND_AND_FETCH_2:
692 case BUILT_IN_SYNC_AND_AND_FETCH_4:
693 case BUILT_IN_SYNC_AND_AND_FETCH_8:
694 case BUILT_IN_SYNC_AND_AND_FETCH_16:
695
696 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
697 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
698 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
699 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
700 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
701
702 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
703 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
704 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
705 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
706
707 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
708 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
709 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
710 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
711 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
712
713 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
714 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
715 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
716 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
717 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
718
719 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
720 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
721 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
722 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
723 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
724
725 case BUILT_IN_SYNC_LOCK_RELEASE_1:
726 case BUILT_IN_SYNC_LOCK_RELEASE_2:
727 case BUILT_IN_SYNC_LOCK_RELEASE_4:
728 case BUILT_IN_SYNC_LOCK_RELEASE_8:
729 case BUILT_IN_SYNC_LOCK_RELEASE_16:
730
731 case BUILT_IN_ATOMIC_EXCHANGE_1:
732 case BUILT_IN_ATOMIC_EXCHANGE_2:
733 case BUILT_IN_ATOMIC_EXCHANGE_4:
734 case BUILT_IN_ATOMIC_EXCHANGE_8:
735 case BUILT_IN_ATOMIC_EXCHANGE_16:
736
737 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
738 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
739 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
740 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
741 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
742
743 case BUILT_IN_ATOMIC_STORE_1:
744 case BUILT_IN_ATOMIC_STORE_2:
745 case BUILT_IN_ATOMIC_STORE_4:
746 case BUILT_IN_ATOMIC_STORE_8:
747 case BUILT_IN_ATOMIC_STORE_16:
748
749 case BUILT_IN_ATOMIC_ADD_FETCH_1:
750 case BUILT_IN_ATOMIC_ADD_FETCH_2:
751 case BUILT_IN_ATOMIC_ADD_FETCH_4:
752 case BUILT_IN_ATOMIC_ADD_FETCH_8:
753 case BUILT_IN_ATOMIC_ADD_FETCH_16:
754
755 case BUILT_IN_ATOMIC_SUB_FETCH_1:
756 case BUILT_IN_ATOMIC_SUB_FETCH_2:
757 case BUILT_IN_ATOMIC_SUB_FETCH_4:
758 case BUILT_IN_ATOMIC_SUB_FETCH_8:
759 case BUILT_IN_ATOMIC_SUB_FETCH_16:
760
761 case BUILT_IN_ATOMIC_AND_FETCH_1:
762 case BUILT_IN_ATOMIC_AND_FETCH_2:
763 case BUILT_IN_ATOMIC_AND_FETCH_4:
764 case BUILT_IN_ATOMIC_AND_FETCH_8:
765 case BUILT_IN_ATOMIC_AND_FETCH_16:
766
767 case BUILT_IN_ATOMIC_NAND_FETCH_1:
768 case BUILT_IN_ATOMIC_NAND_FETCH_2:
769 case BUILT_IN_ATOMIC_NAND_FETCH_4:
770 case BUILT_IN_ATOMIC_NAND_FETCH_8:
771 case BUILT_IN_ATOMIC_NAND_FETCH_16:
772
773 case BUILT_IN_ATOMIC_XOR_FETCH_1:
774 case BUILT_IN_ATOMIC_XOR_FETCH_2:
775 case BUILT_IN_ATOMIC_XOR_FETCH_4:
776 case BUILT_IN_ATOMIC_XOR_FETCH_8:
777 case BUILT_IN_ATOMIC_XOR_FETCH_16:
778
779 case BUILT_IN_ATOMIC_OR_FETCH_1:
780 case BUILT_IN_ATOMIC_OR_FETCH_2:
781 case BUILT_IN_ATOMIC_OR_FETCH_4:
782 case BUILT_IN_ATOMIC_OR_FETCH_8:
783 case BUILT_IN_ATOMIC_OR_FETCH_16:
784
785 case BUILT_IN_ATOMIC_FETCH_ADD_1:
786 case BUILT_IN_ATOMIC_FETCH_ADD_2:
787 case BUILT_IN_ATOMIC_FETCH_ADD_4:
788 case BUILT_IN_ATOMIC_FETCH_ADD_8:
789 case BUILT_IN_ATOMIC_FETCH_ADD_16:
790
791 case BUILT_IN_ATOMIC_FETCH_SUB_1:
792 case BUILT_IN_ATOMIC_FETCH_SUB_2:
793 case BUILT_IN_ATOMIC_FETCH_SUB_4:
794 case BUILT_IN_ATOMIC_FETCH_SUB_8:
795 case BUILT_IN_ATOMIC_FETCH_SUB_16:
796
797 case BUILT_IN_ATOMIC_FETCH_AND_1:
798 case BUILT_IN_ATOMIC_FETCH_AND_2:
799 case BUILT_IN_ATOMIC_FETCH_AND_4:
800 case BUILT_IN_ATOMIC_FETCH_AND_8:
801 case BUILT_IN_ATOMIC_FETCH_AND_16:
802
803 case BUILT_IN_ATOMIC_FETCH_NAND_1:
804 case BUILT_IN_ATOMIC_FETCH_NAND_2:
805 case BUILT_IN_ATOMIC_FETCH_NAND_4:
806 case BUILT_IN_ATOMIC_FETCH_NAND_8:
807 case BUILT_IN_ATOMIC_FETCH_NAND_16:
808
809 case BUILT_IN_ATOMIC_FETCH_XOR_1:
810 case BUILT_IN_ATOMIC_FETCH_XOR_2:
811 case BUILT_IN_ATOMIC_FETCH_XOR_4:
812 case BUILT_IN_ATOMIC_FETCH_XOR_8:
813 case BUILT_IN_ATOMIC_FETCH_XOR_16:
814
815 case BUILT_IN_ATOMIC_FETCH_OR_1:
816 case BUILT_IN_ATOMIC_FETCH_OR_2:
817 case BUILT_IN_ATOMIC_FETCH_OR_4:
818 case BUILT_IN_ATOMIC_FETCH_OR_8:
819 case BUILT_IN_ATOMIC_FETCH_OR_16:
820 {
821 dest = gimple_call_arg (call, 0);
822 /* DEST represents the address of a memory location.
823 instrument_derefs wants the memory location, so lets
824 dereference the address DEST before handing it to
825 instrument_derefs. */
826 if (TREE_CODE (dest) == ADDR_EXPR)
827 dest = TREE_OPERAND (dest, 0);
4102b43a 828 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
c31c80df 829 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
830 dest, build_int_cst (TREE_TYPE (dest), 0));
831 else
832 gcc_unreachable ();
833
834 access_size = int_size_in_bytes (TREE_TYPE (dest));
835 }
836
837 default:
838 /* The other builtins memory access are not instrumented in this
839 function because they either don't have any length parameter,
840 or their length parameter is just a limit. */
841 break;
842 }
843
844 if (len != NULL_TREE)
845 {
846 if (source0 != NULL_TREE)
847 {
848 src0->start = source0;
849 src0->access_size = access_size;
850 *src0_len = len;
851 *src0_is_store = false;
852 }
853
854 if (source1 != NULL_TREE)
855 {
856 src1->start = source1;
857 src1->access_size = access_size;
858 *src1_len = len;
859 *src1_is_store = false;
860 }
861
862 if (dest != NULL_TREE)
863 {
864 dst->start = dest;
865 dst->access_size = access_size;
866 *dst_len = len;
867 *dst_is_store = true;
868 }
869
870 got_reference_p = true;
871 }
d9dc05a1 872 else if (dest)
873 {
874 dst->start = dest;
875 dst->access_size = access_size;
876 *dst_len = NULL_TREE;
877 *dst_is_store = is_store;
878 *dest_is_deref = true;
879 got_reference_p = true;
880 }
c31c80df 881
d9dc05a1 882 return got_reference_p;
c31c80df 883}
884
885/* Return true iff a given gimple statement has been instrumented.
886 Note that the statement is "defined" by the memory references it
887 contains. */
888
889static bool
890has_stmt_been_instrumented_p (gimple stmt)
891{
892 if (gimple_assign_single_p (stmt))
893 {
894 bool r_is_store;
895 asan_mem_ref r;
896 asan_mem_ref_init (&r, NULL, 1);
897
1a91d914 898 if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
899 &r_is_store))
c31c80df 900 return has_mem_ref_been_instrumented (&r);
901 }
902 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
903 {
904 asan_mem_ref src0, src1, dest;
905 asan_mem_ref_init (&src0, NULL, 1);
906 asan_mem_ref_init (&src1, NULL, 1);
907 asan_mem_ref_init (&dest, NULL, 1);
908
909 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
910 bool src0_is_store = false, src1_is_store = false,
f9acf11a 911 dest_is_store = false, dest_is_deref = false, intercepted_p = true;
1a91d914 912 if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
c31c80df 913 &src0, &src0_len, &src0_is_store,
914 &src1, &src1_len, &src1_is_store,
915 &dest, &dest_len, &dest_is_store,
f9acf11a 916 &dest_is_deref, &intercepted_p))
c31c80df 917 {
918 if (src0.start != NULL_TREE
919 && !has_mem_ref_been_instrumented (&src0, src0_len))
920 return false;
921
922 if (src1.start != NULL_TREE
923 && !has_mem_ref_been_instrumented (&src1, src1_len))
924 return false;
925
926 if (dest.start != NULL_TREE
927 && !has_mem_ref_been_instrumented (&dest, dest_len))
928 return false;
929
930 return true;
931 }
932 }
933 return false;
934}
935
936/* Insert a memory reference into the hash table. */
937
938static void
86d3a572 939update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
c31c80df 940{
c1f445d2 941 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
c31c80df 942
943 asan_mem_ref r;
944 asan_mem_ref_init (&r, ref, access_size);
945
c1f445d2 946 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
f9acf11a 947 if (*slot == NULL || (*slot)->access_size < access_size)
c31c80df 948 *slot = asan_mem_ref_new (ref, access_size);
949}
950
55b58027 951/* Initialize shadow_ptr_types array. */
952
953static void
954asan_init_shadow_ptr_types (void)
955{
956 asan_shadow_set = new_alias_set ();
957 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
958 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
959 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
960 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
961 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
962 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
963 initialize_sanitizer_builtins ();
964}
965
b75d2c15 966/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
92fc5c48 967
968static tree
b75d2c15 969asan_pp_string (pretty_printer *pp)
92fc5c48 970{
b75d2c15 971 const char *buf = pp_formatted_text (pp);
92fc5c48 972 size_t len = strlen (buf);
973 tree ret = build_string (len + 1, buf);
974 TREE_TYPE (ret)
55b58027 975 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
976 build_index_type (size_int (len)));
92fc5c48 977 TREE_READONLY (ret) = 1;
978 TREE_STATIC (ret) = 1;
55b58027 979 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
92fc5c48 980}
981
3c919612 982/* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
983
984static rtx
985asan_shadow_cst (unsigned char shadow_bytes[4])
986{
987 int i;
988 unsigned HOST_WIDE_INT val = 0;
989 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
990 for (i = 0; i < 4; i++)
991 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
992 << (BITS_PER_UNIT * i);
bc89f4e2 993 return gen_int_mode (val, SImode);
3c919612 994}
995
cc72d6e9 996/* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't call a library call here
997 though. */
998
999static void
1000asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
1001{
dbf944e5 1002 rtx_insn *insn, *insns, *jump;
1003 rtx_code_label *top_label;
1004 rtx end, addr, tmp;
cc72d6e9 1005
  /* First try clearing the LEN bytes via clear_storage; the resulting
     sequence is only usable if it did not degenerate into a library
     call (calls are not allowed here, per the comment above).  */
1006 start_sequence ();
1007 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
1008 insns = get_insns ();
1009 end_sequence ();
1010 for (insn = insns; insn; insn = NEXT_INSN (insn))
1011 if (CALL_P (insn))
1012 break;
1013 if (insn == NULL_RTX)
1014 {
1015 emit_insn (insns);
1016 return;
1017 }
1018
  /* Otherwise emit an explicit loop storing SImode zeros, 4 bytes at
     a time, from ADDR up to ADDR + LEN.  LEN must be a multiple of 4.  */
1019 gcc_assert ((len & 3) == 0);
1020 top_label = gen_label_rtx ();
a15fa55a 1021 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
cc72d6e9 1022 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1023 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1024 emit_label (top_label);
1025
1026 emit_move_insn (shadow_mem, const0_rtx);
0359f9f5 1027 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
ff326078 1028 true, OPTAB_LIB_WIDEN);
cc72d6e9 1029 if (tmp != addr)
1030 emit_move_insn (addr, tmp);
1031 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1032 jump = get_last_insn ();
1033 gcc_assert (JUMP_P (jump));
  /* Annotate the backwards branch as taken with 80% probability.  */
9eb946de 1034 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
cc72d6e9 1035}
1036
/* Emit the "LASANPC<funcdef_no>" label at the start of the current
   function's section; the stack-frame descriptor emitted by
   asan_emit_stack_protection refers to this label to identify the
   function.  */

1e80ce41 1037void
1038asan_function_start (void)
1039{
1040 section *fnsec = function_section (current_function_decl);
1041 switch_to_section (fnsec);
1042 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
ff326078 1043 current_function_funcdef_no);
1e80ce41 1044}
1045
3c919612 1046/* Insert code to protect stack vars. The prologue sequence should be emitted
1047 directly, epilogue sequence returned. BASE is the register holding the
1048 stack base, against which OFFSETS array offsets are relative to, OFFSETS
1049 array contains pairs of offsets in reverse order, always the end offset
1050 of some gap that needs protection followed by starting offset,
1051 and DECLS is an array of representative decls for each var partition.
1052 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1053 elements long (OFFSETS include gap before the first variable as well
683539f6 1054 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1055 register which stack vars DECL_RTLs are based on. Either BASE should be
1056 assigned to PBASE, when not doing use after return protection, or
1057 corresponding address based on __asan_stack_malloc* return value. */
3c919612 1058
67ab16d9 1059rtx_insn *
683539f6 1060asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1061 HOST_WIDE_INT *offsets, tree *decls, int length)
3c919612 1062{
79f6a8ed 1063 rtx shadow_base, shadow_mem, ret, mem, orig_base;
1064 rtx_code_label *lab;
67ab16d9 1065 rtx_insn *insns;
1e80ce41 1066 char buf[30];
3c919612 1067 unsigned char shadow_bytes[4];
683539f6 1068 HOST_WIDE_INT base_offset = offsets[length - 1];
1069 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1070 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
3c919612 1071 HOST_WIDE_INT last_offset, last_size;
1072 int l;
1073 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1e80ce41 1074 tree str_cst, decl, id;
683539f6 1075 int use_after_return_class = -1;
3c919612 1076
  /* Lazily create the shadow pointer types on first use.  */
55b58027 1077 if (shadow_ptr_types[0] == NULL_TREE)
1078 asan_init_shadow_ptr_types ();
1079
3c919612 1080 /* First of all, prepare the description string. */
b75d2c15 1081 pretty_printer asan_pp;
eed6bc21 1082
92fc5c48 1083 pp_decimal_int (&asan_pp, length / 2 - 1);
1084 pp_space (&asan_pp);
3c919612 1085 for (l = length - 2; l; l -= 2)
1086 {
1087 tree decl = decls[l / 2 - 1];
92fc5c48 1088 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1089 pp_space (&asan_pp);
1090 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1091 pp_space (&asan_pp);
3c919612 1092 if (DECL_P (decl) && DECL_NAME (decl))
1093 {
92fc5c48 1094 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1095 pp_space (&asan_pp);
a94db6b0 1096 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
3c919612 1097 }
1098 else
92fc5c48 1099 pp_string (&asan_pp, "9 <unknown>");
1100 pp_space (&asan_pp);
3c919612 1101 }
b75d2c15 1102 str_cst = asan_pp_string (&asan_pp);
3c919612 1103
1104 /* Emit the prologue sequence. */
  /* Decide whether use-after-return protection (__asan_stack_malloc_N)
     applies: only for frames between 32 bytes and 64 KiB, and only if
     the required alignment can be guaranteed by the runtime.  */
bf2b7c22 1105 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1106 && ASAN_USE_AFTER_RETURN)
683539f6 1107 {
1108 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1109 /* __asan_stack_malloc_N guarantees alignment
ff326078 1110 N < 6 ? (64 << N) : 4096 bytes. */
683539f6 1111 if (alignb > (use_after_return_class < 6
1112 ? (64U << use_after_return_class) : 4096U))
1113 use_after_return_class = -1;
1114 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1115 base_align_bias = ((asan_frame_size + alignb - 1)
1116 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1117 }
f89175bb 1118 /* Align base if target is STRICT_ALIGNMENT. */
1119 if (STRICT_ALIGNMENT)
1120 base = expand_binop (Pmode, and_optab, base,
1121 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1122 << ASAN_SHADOW_SHIFT)
1123 / BITS_PER_UNIT), Pmode), NULL_RTX,
1124 1, OPTAB_DIRECT);
1125
683539f6 1126 if (use_after_return_class == -1 && pbase)
1127 emit_move_insn (pbase, base);
f89175bb 1128
0359f9f5 1129 base = expand_binop (Pmode, add_optab, base,
683539f6 1130 gen_int_mode (base_offset - base_align_bias, Pmode),
3c919612 1131 NULL_RTX, 1, OPTAB_DIRECT);
683539f6 1132 orig_base = NULL_RTX;
1133 if (use_after_return_class != -1)
1134 {
  /* Declare (once) the runtime flag that enables use-after-return
     detection, then call __asan_stack_malloc_N when it is set; the
     fake frame it returns replaces BASE.  */
1135 if (asan_detect_stack_use_after_return == NULL_TREE)
1136 {
1137 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1138 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1139 integer_type_node);
1140 SET_DECL_ASSEMBLER_NAME (decl, id);
1141 TREE_ADDRESSABLE (decl) = 1;
1142 DECL_ARTIFICIAL (decl) = 1;
1143 DECL_IGNORED_P (decl) = 1;
1144 DECL_EXTERNAL (decl) = 1;
1145 TREE_STATIC (decl) = 1;
1146 TREE_PUBLIC (decl) = 1;
1147 TREE_USED (decl) = 1;
1148 asan_detect_stack_use_after_return = decl;
1149 }
1150 orig_base = gen_reg_rtx (Pmode);
1151 emit_move_insn (orig_base, base);
1152 ret = expand_normal (asan_detect_stack_use_after_return);
1153 lab = gen_label_rtx ();
1154 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1155 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1156 VOIDmode, 0, lab, very_likely);
1157 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1158 use_after_return_class);
1159 ret = init_one_libfunc (buf);
1160 rtx addr = convert_memory_address (ptr_mode, base);
1161 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
1162 GEN_INT (asan_frame_size
1163 + base_align_bias),
1164 TYPE_MODE (pointer_sized_int_node),
1165 addr, ptr_mode);
1166 ret = convert_memory_address (Pmode, ret);
1167 emit_move_insn (base, ret);
1168 emit_label (lab);
1169 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1170 gen_int_mode (base_align_bias
1171 - base_offset, Pmode),
1172 NULL_RTX, 1, OPTAB_DIRECT));
1173 }
  /* Write the frame header: the frame magic, the description string
     and the address of the per-function LASANPC label.  */
3c919612 1174 mem = gen_rtx_MEM (ptr_mode, base);
683539f6 1175 mem = adjust_address (mem, VOIDmode, base_align_bias);
d11aedc7 1176 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
3c919612 1177 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1178 emit_move_insn (mem, expand_normal (str_cst));
1e80ce41 1179 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1180 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1181 id = get_identifier (buf);
1182 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
ff326078 1183 VAR_DECL, id, char_type_node);
1e80ce41 1184 SET_DECL_ASSEMBLER_NAME (decl, id);
1185 TREE_ADDRESSABLE (decl) = 1;
1186 TREE_READONLY (decl) = 1;
1187 DECL_ARTIFICIAL (decl) = 1;
1188 DECL_IGNORED_P (decl) = 1;
1189 TREE_STATIC (decl) = 1;
1190 TREE_PUBLIC (decl) = 0;
1191 TREE_USED (decl) = 1;
7c4fae98 1192 DECL_INITIAL (decl) = decl;
1193 TREE_ASM_WRITTEN (decl) = 1;
1194 TREE_ASM_WRITTEN (id) = 1;
1e80ce41 1195 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
3c919612 1196 shadow_base = expand_binop (Pmode, lshr_optab, base,
1197 GEN_INT (ASAN_SHADOW_SHIFT),
1198 NULL_RTX, 1, OPTAB_DIRECT);
683539f6 1199 shadow_base
1200 = plus_constant (Pmode, shadow_base,
cf357977 1201 asan_shadow_offset ()
683539f6 1202 + (base_align_bias >> ASAN_SHADOW_SHIFT));
3c919612 1203 gcc_assert (asan_shadow_set != -1
1204 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1205 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1206 set_mem_alias_set (shadow_mem, asan_shadow_set);
f89175bb 1207 if (STRICT_ALIGNMENT)
1208 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  /* Poison the red zones: walk the OFFSETS pairs and emit the shadow
     bytes (left/middle/right/partial magic values) for each gap.  */
3c919612 1209 prev_offset = base_offset;
1210 for (l = length; l; l -= 2)
1211 {
1212 if (l == 2)
1213 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1214 offset = offsets[l - 1];
1215 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1216 {
1217 int i;
1218 HOST_WIDE_INT aoff
1219 = base_offset + ((offset - base_offset)
1220 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1221 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1222 (aoff - prev_offset)
1223 >> ASAN_SHADOW_SHIFT);
1224 prev_offset = aoff;
1225 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1226 if (aoff < offset)
1227 {
1228 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1229 shadow_bytes[i] = 0;
1230 else
1231 shadow_bytes[i] = offset - aoff;
1232 }
1233 else
1234 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1235 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1236 offset = aoff;
1237 }
1238 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1239 {
1240 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1241 (offset - prev_offset)
1242 >> ASAN_SHADOW_SHIFT);
1243 prev_offset = offset;
1244 memset (shadow_bytes, cur_shadow_byte, 4);
1245 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1246 offset += ASAN_RED_ZONE_SIZE;
1247 }
1248 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1249 }
1250 do_pending_stack_adjust ();
1251
1252 /* Construct epilogue sequence. */
1253 start_sequence ();
1254
79f6a8ed 1255 lab = NULL;
683539f6 1256 if (use_after_return_class != -1)
1257 {
  /* If a fake frame was allocated, mark it retired, poison it with
     the use-after-return magic and hand it back via __asan_stack_free_N
     when it cannot be poisoned inline.  */
79f6a8ed 1258 rtx_code_label *lab2 = gen_label_rtx ();
683539f6 1259 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1260 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1261 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1262 VOIDmode, 0, lab2, very_likely);
1263 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1264 set_mem_alias_set (shadow_mem, asan_shadow_set);
1265 mem = gen_rtx_MEM (ptr_mode, base);
1266 mem = adjust_address (mem, VOIDmode, base_align_bias);
1267 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1268 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1269 if (use_after_return_class < 5
1270 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1271 BITS_PER_UNIT, true))
1272 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1273 BITS_PER_UNIT, true, 0);
1274 else if (use_after_return_class >= 5
1275 || !set_storage_via_setmem (shadow_mem,
1276 GEN_INT (sz),
1277 gen_int_mode (c, QImode),
1278 BITS_PER_UNIT, BITS_PER_UNIT,
1279 -1, sz, sz, sz))
1280 {
1281 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1282 use_after_return_class);
1283 ret = init_one_libfunc (buf);
1284 rtx addr = convert_memory_address (ptr_mode, base);
1285 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1286 emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1287 GEN_INT (asan_frame_size + base_align_bias),
1288 TYPE_MODE (pointer_sized_int_node),
1289 orig_addr, ptr_mode);
1290 }
1291 lab = gen_label_rtx ();
1292 emit_jump (lab);
1293 emit_label (lab2);
1294 }
1295
  /* Unpoison the frame: clear the shadow of the red zones, merging
     adjacent ranges to minimize the number of clears.  */
3c919612 1296 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1297 set_mem_alias_set (shadow_mem, asan_shadow_set);
f89175bb 1298
1299 if (STRICT_ALIGNMENT)
1300 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1301
3c919612 1302 prev_offset = base_offset;
1303 last_offset = base_offset;
1304 last_size = 0;
1305 for (l = length; l; l -= 2)
1306 {
1307 offset = base_offset + ((offsets[l - 1] - base_offset)
1308 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1309 if (last_offset + last_size != offset)
1310 {
1311 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1312 (last_offset - prev_offset)
1313 >> ASAN_SHADOW_SHIFT);
1314 prev_offset = last_offset;
cc72d6e9 1315 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
3c919612 1316 last_offset = offset;
1317 last_size = 0;
1318 }
1319 last_size += base_offset + ((offsets[l - 2] - base_offset)
1320 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1321 - offset;
1322 }
1323 if (last_size)
1324 {
1325 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1326 (last_offset - prev_offset)
1327 >> ASAN_SHADOW_SHIFT);
cc72d6e9 1328 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
3c919612 1329 }
1330
1331 do_pending_stack_adjust ();
683539f6 1332 if (lab)
1333 emit_label (lab);
3c919612 1334
67ab16d9 1335 insns = get_insns ();
3c919612 1336 end_sequence ();
67ab16d9 1337 return insns;
3c919612 1338}
1339
92fc5c48 1340/* Return true if DECL, a global var, might be overridden and needs
1341 therefore a local alias. */
1342
1343static bool
1344asan_needs_local_alias (tree decl)
1345{
1346 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1347}
1348
1349/* Return true if DECL is a VAR_DECL that should be protected
1350 by Address Sanitizer, by appending a red zone with protected
1351 shadow memory after it and aligning it to at least
1352 ASAN_RED_ZONE_SIZE bytes. */
1353
1354bool
1355asan_protect_global (tree decl)
1356{
bf2b7c22 1357 if (!ASAN_GLOBALS)
1358 return false;
1359
92fc5c48 1360 rtx rtl, symbol;
92fc5c48 1361
55b58027 1362 if (TREE_CODE (decl) == STRING_CST)
1363 {
1364 /* Instrument all STRING_CSTs except those created
1365 by asan_pp_string here. */
1366 if (shadow_ptr_types[0] != NULL_TREE
1367 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1368 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1369 return false;
1370 return true;
1371 }
  /* Reject every variable for which adding a red zone would be
     unsafe, ABI-breaking or meaningless.  The order of these checks
     matters: cheap TREE_CODE/flag checks come before RTL inspection.  */
92fc5c48 1372 if (TREE_CODE (decl) != VAR_DECL
1373 /* TLS vars aren't statically protectable. */
1374 || DECL_THREAD_LOCAL_P (decl)
1375 /* Externs will be protected elsewhere. */
1376 || DECL_EXTERNAL (decl)
92fc5c48 1377 || !DECL_RTL_SET_P (decl)
1378 /* Comdat vars pose an ABI problem, we can't know if
1379 the var that is selected by the linker will have
1380 padding or not. */
1381 || DECL_ONE_ONLY (decl)
aa1dfc5f 1382 /* Similarly for common vars. People can use -fno-common.
1383 Note: Linux kernel is built with -fno-common, so we do instrument
1384 globals there even if it is C. */
1ba1559d 1385 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
92fc5c48 1386 /* Don't protect if using user section, often vars placed
1387 into user section from multiple TUs are then assumed
1388 to be an array of such vars, putting padding in there
1389 breaks this assumption. */
738a6bda 1390 || (DECL_SECTION_NAME (decl) != NULL
4d3c996b 1391 && !symtab_node::get (decl)->implicit_section
1392 && !section_sanitized_p (DECL_SECTION_NAME (decl)))
92fc5c48 1393 || DECL_SIZE (decl) == 0
1394 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1395 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
11d00a0b 1396 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1397 || TREE_TYPE (decl) == ubsan_get_source_location_type ())
92fc5c48 1398 return false;
1399
  /* Only globals whose RTL is memory at a plain SYMBOL_REF can get a
     red zone appended.  */
1400 rtl = DECL_RTL (decl);
1401 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1402 return false;
1403 symbol = XEXP (rtl, 0);
1404
1405 if (CONSTANT_POOL_ADDRESS_P (symbol)
1406 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1407 return false;
1408
  /* A weakref is just an alias; it has no storage of its own.  */
92fc5c48 1409 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1410 return false;
1411
  /* Without ASM_OUTPUT_DEF we cannot emit the local alias that
     interposable globals would require.  */
1412#ifndef ASM_OUTPUT_DEF
1413 if (asan_needs_local_alias (decl))
1414 return false;
1415#endif
1416
eca932e6 1417 return true;
92fc5c48 1418}
1419
86d3a572 1420/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1421 IS_STORE is either 1 (for a store) or 0 (for a load). */
b92cccf4 1422
1423static tree
f4d482a6 1424report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1425 int *nargs)
b92cccf4 1426{
f4d482a6 1427 static enum built_in_function report[2][2][6]
1428 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1429 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1430 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1431 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1432 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1433 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1434 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1435 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1436 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1437 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1438 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1439 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1440 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1441 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1442 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1443 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1444 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1445 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
4f86f720 1446 if (size_in_bytes == -1)
1447 {
1448 *nargs = 2;
f4d482a6 1449 return builtin_decl_implicit (report[recover_p][is_store][5]);
4f86f720 1450 }
1451 *nargs = 1;
f4d482a6 1452 int size_log2 = exact_log2 (size_in_bytes);
1453 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
b92cccf4 1454}
1455
4f86f720 1456/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1457 IS_STORE is either 1 (for a store) or 0 (for a load). */
1458
1459static tree
f4d482a6 1460check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1461 int *nargs)
4f86f720 1462{
f4d482a6 1463 static enum built_in_function check[2][2][6]
1464 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1465 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1466 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1467 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1468 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1469 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1470 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1471 BUILT_IN_ASAN_LOAD2_NOABORT,
1472 BUILT_IN_ASAN_LOAD4_NOABORT,
1473 BUILT_IN_ASAN_LOAD8_NOABORT,
1474 BUILT_IN_ASAN_LOAD16_NOABORT,
1475 BUILT_IN_ASAN_LOADN_NOABORT },
1476 { BUILT_IN_ASAN_STORE1_NOABORT,
1477 BUILT_IN_ASAN_STORE2_NOABORT,
1478 BUILT_IN_ASAN_STORE4_NOABORT,
1479 BUILT_IN_ASAN_STORE8_NOABORT,
1480 BUILT_IN_ASAN_STORE16_NOABORT,
1481 BUILT_IN_ASAN_STOREN_NOABORT } } };
4f86f720 1482 if (size_in_bytes == -1)
1483 {
1484 *nargs = 2;
f4d482a6 1485 return builtin_decl_implicit (check[recover_p][is_store][5]);
4f86f720 1486 }
1487 *nargs = 1;
f4d482a6 1488 int size_log2 = exact_log2 (size_in_bytes);
1489 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
4f86f720 1490}
1491
aab92688 1492/* Split the current basic block and create a condition statement
1ac3509e 1493 insertion point right before or after the statement pointed to by
1494 ITER. Return an iterator to the point at which the caller might
1495 safely insert the condition statement.
aab92688 1496
1497 THEN_BLOCK must be set to the address of an uninitialized instance
1498 of basic_block. The function will then set *THEN_BLOCK to the
1499 'then block' of the condition statement to be inserted by the
1500 caller.
1501
e8d4d8a9 1502 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1503 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1504
aab92688 1505 Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1506 block' of the condition statement to be inserted by the caller.
1507
1508 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1509 statements starting from *ITER, and *THEN_BLOCK is a new empty
1510 block.
1511
1ac3509e 1512 *ITER is adjusted to always point to the first statement
1513 of the basic block * FALLTHROUGH_BLOCK. That statement is the
1514 same as what ITER was pointing to prior to calling this function,
1515 if BEFORE_P is true; otherwise, it is its following statement. */
aab92688 1516
ec08f7b0 1517gimple_stmt_iterator
1ac3509e 1518create_cond_insert_point (gimple_stmt_iterator *iter,
1519 bool before_p,
1520 bool then_more_likely_p,
e8d4d8a9 1521 bool create_then_fallthru_edge,
1ac3509e 1522 basic_block *then_block,
1523 basic_block *fallthrough_block)
aab92688 1524{
1525 gimple_stmt_iterator gsi = *iter;
1526
  /* Split before *ITER when BEFORE_P: step back one statement so the
     split leaves the original statement in the fallthrough block.  */
1ac3509e 1527 if (!gsi_end_p (gsi) && before_p)
aab92688 1528 gsi_prev (&gsi);
1529
1530 basic_block cur_bb = gsi_bb (*iter);
1531
1532 edge e = split_block (cur_bb, gsi_stmt (gsi));
1533
1534 /* Get a hold on the 'condition block', the 'then block' and the
1535 'else block'. */
1536 basic_block cond_bb = e->src;
1537 basic_block fallthru_bb = e->dest;
1538 basic_block then_bb = create_empty_bb (cond_bb);
f6568ea4 1539 if (current_loops)
1540 {
1541 add_bb_to_loop (then_bb, cond_bb->loop_father);
1542 loops_state_set (LOOPS_NEED_FIXUP);
1543 }
aab92688 1544
1545 /* Set up the newly created 'then block'. */
1546 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1547 int fallthrough_probability
1548 = then_more_likely_p
1549 ? PROB_VERY_UNLIKELY
1550 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1551 e->probability = PROB_ALWAYS - fallthrough_probability;
e8d4d8a9 1552 if (create_then_fallthru_edge)
1553 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
aab92688 1554
1555 /* Set up the fallthrough basic block. */
1556 e = find_edge (cond_bb, fallthru_bb);
1557 e->flags = EDGE_FALSE_VALUE;
1558 e->count = cond_bb->count;
1559 e->probability = fallthrough_probability;
1560
1561 /* Update dominance info for the newly created then_bb; note that
1562 fallthru_bb's dominance info has already been updated by
1563 split_block. */
1564 if (dom_info_available_p (CDI_DOMINATORS))
1565 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1566
1567 *then_block = then_bb;
1568 *fallthrough_block = fallthru_bb;
1569 *iter = gsi_start_bb (fallthru_bb);
1570
  /* The caller inserts the condition at the end of the (now shorter)
     condition block.  */
1571 return gsi_last_bb (cond_bb);
1572}
1573
1ac3509e 1574/* Insert an if condition followed by a 'then block' right before the
1575 statement pointed to by ITER. The fallthrough block -- which is the
1576 else block of the condition as well as the destination of the
1577 outcoming edge of the 'then block' -- starts with the statement
1578 pointed to by ITER.
1579
eca932e6 1580 COND is the condition of the if.
1ac3509e 1581
1582 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1583 'then block' is higher than the probability of the edge to the
1584 fallthrough block.
1585
1586 Upon completion of the function, *THEN_BB is set to the newly
1587 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1588 fallthrough block.
1589
1590 *ITER is adjusted to still point to the same statement it was
1591 pointing to initially. */
1592
1593static void
1a91d914 1594insert_if_then_before_iter (gcond *cond,
1ac3509e 1595 gimple_stmt_iterator *iter,
1596 bool then_more_likely_p,
1597 basic_block *then_bb,
1598 basic_block *fallthrough_bb)
1599{
1600 gimple_stmt_iterator cond_insert_point =
1601 create_cond_insert_point (iter,
1602 /*before_p=*/true,
1603 then_more_likely_p,
e8d4d8a9 1604 /*create_then_fallthru_edge=*/true,
1ac3509e 1605 then_bb,
1606 fallthrough_bb);
1607 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1608}
1609
86d3a572 1610/* Build
cf357977 1611 (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
86d3a572 1612
1613static tree
1614build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1615 tree base_addr, tree shadow_ptr_type)
1616{
1617 tree t, uintptr_type = TREE_TYPE (base_addr);
1618 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1619 gimple g;
1620
1621 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
e9cf809e 1622 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1623 base_addr, t);
86d3a572 1624 gimple_set_location (g, location);
1625 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1626
cf357977 1627 t = build_int_cst (uintptr_type, asan_shadow_offset ());
e9cf809e 1628 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1629 gimple_assign_lhs (g), t);
86d3a572 1630 gimple_set_location (g, location);
1631 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1632
e9cf809e 1633 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1634 gimple_assign_lhs (g));
86d3a572 1635 gimple_set_location (g, location);
1636 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1637
1638 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1639 build_int_cst (shadow_ptr_type, 0));
e9cf809e 1640 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
86d3a572 1641 gimple_set_location (g, location);
1642 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1643 return gimple_assign_lhs (g);
1644}
1645
4f86f720 1646/* BASE can already be an SSA_NAME; in that case, do not create a
1647 new SSA_NAME for it. */
1648
1649static tree
1650maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1651 bool before_p)
1652{
1653 if (TREE_CODE (base) == SSA_NAME)
1654 return base;
e9cf809e 1655 gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
1656 TREE_CODE (base), base);
4f86f720 1657 gimple_set_location (g, loc);
1658 if (before_p)
1659 gsi_insert_before (iter, g, GSI_SAME_STMT);
1660 else
1661 gsi_insert_after (iter, g, GSI_NEW_STMT);
1662 return gimple_assign_lhs (g);
1663}
1664
0a9f72cf 1665/* LEN can already have necessary size and precision;
1666 in that case, do not create a new variable. */
1667
1668tree
1669maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1670 bool before_p)
1671{
1672 if (ptrofftype_p (len))
1673 return len;
e9cf809e 1674 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1675 NOP_EXPR, len);
0a9f72cf 1676 gimple_set_location (g, loc);
1677 if (before_p)
1678 gsi_insert_before (iter, g, GSI_SAME_STMT);
1679 else
1680 gsi_insert_after (iter, g, GSI_NEW_STMT);
1681 return gimple_assign_lhs (g);
1682}
1683
c91b0fff 1684/* Instrument the memory access instruction BASE. Insert new
1ac3509e 1685 statements before or after ITER.
c91b0fff 1686
1687 Note that the memory access represented by BASE can be either an
1688 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1689 location. IS_STORE is TRUE for a store, FALSE for a load.
1ac3509e 1690 BEFORE_P is TRUE for inserting the instrumentation code before
4f86f720 1691 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1692 for a scalar memory access and FALSE for memory region access.
1693 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1694 length. ALIGN tells alignment of accessed memory object.
1695
1696 START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
1697 memory region have already been instrumented.
1ac3509e 1698
1699 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1700 statement it was pointing to prior to calling this function,
1701 otherwise, it points to the statement logically following it. */
b92cccf4 1702
1703static void
ff326078 1704build_check_stmt (location_t loc, tree base, tree len,
4f86f720 1705 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
ff326078 1706 bool is_non_zero_len, bool before_p, bool is_store,
f9acf11a 1707 bool is_scalar_access, unsigned int align = 0)
b92cccf4 1708{
4f86f720 1709 gimple_stmt_iterator gsi = *iter;
b92cccf4 1710 gimple g;
4f86f720 1711
  /* A known positive size implies the length is known non-zero.  */
ff326078 1712 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
4f86f720 1713
  /* NOTE(review): gsi was already initialized from *iter above; this
     reassignment appears redundant.  */
ff326078 1714 gsi = *iter;
1715
  /* Make sure BASE is an SSA name so the internal call can use it.  */
1716 base = unshare_expr (base);
1717 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1718
  /* Use the explicit LEN if given, else the constant SIZE_IN_BYTES.  */
4f86f720 1719 if (len)
0a9f72cf 1720 {
1721 len = unshare_expr (len);
1722 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1723 }
4f86f720 1724 else
1725 {
1726 gcc_assert (size_in_bytes != -1);
1727 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1728 }
1729
  /* Demote to a memory-region check when the access cannot be handled
     as a single scalar shadow check: non-power-of-two or > 16 bytes,
     or insufficiently aligned for a single shadow read.  */
1730 if (size_in_bytes > 1)
93e990a3 1731 {
4f86f720 1732 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1733 || size_in_bytes > 16)
ff326078 1734 is_scalar_access = false;
4f86f720 1735 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1736 {
1737 /* On non-strict alignment targets, if
1738 16-byte access is just 8-byte aligned,
1739 this will result in misaligned shadow
1740 memory 2 byte load, but otherwise can
1741 be handled using one read. */
1742 if (size_in_bytes != 16
1743 || STRICT_ALIGNMENT
1744 || align < 8 * BITS_PER_UNIT)
ff326078 1745 is_scalar_access = false;
86d3a572 1746 }
5d5c682b 1747 }
b92cccf4 1748
  /* Encode the access properties into the IFN_ASAN_CHECK flags.  */
ff326078 1749 HOST_WIDE_INT flags = 0;
1750 if (is_store)
1751 flags |= ASAN_CHECK_STORE;
1752 if (is_non_zero_len)
1753 flags |= ASAN_CHECK_NON_ZERO_LEN;
1754 if (is_scalar_access)
1755 flags |= ASAN_CHECK_SCALAR_ACCESS;
ff326078 1756
  /* Emit the internal check call; it is expanded into real checking
     code by a later pass.  */
da81fb00 1757 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
ff326078 1758 build_int_cst (integer_type_node, flags),
da81fb00 1759 base, len,
1760 build_int_cst (integer_type_node,
1761 align / BITS_PER_UNIT));
ff326078 1762 gimple_set_location (g, loc);
1763 if (before_p)
1764 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
4f86f720 1765 else
1766 {
4f86f720 1767 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
ff326078 1768 gsi_next (&gsi);
  /* When inserting after, advance *ITER past the check so the caller
     continues behind it.  */
1769 *iter = gsi;
4f86f720 1770 }
b92cccf4 1771}
1772
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  /* Respect the knobs that disable instrumentation of writes or reads
     wholesale.  */
  if (is_store && !ASAN_INSTRUMENT_WRITES)
    return;
  if (!is_store && !ASAN_INSTRUMENT_READS)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  /* Only these tree codes denote an actual memory access worth
     checking; anything else is left alone.  */
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case BIT_FIELD_REF:
      break;
    default:
      return;
    }

  /* Variable-sized or zero-sized accesses are not instrumented here.  */
  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
				    &mode, &unsignedp, &volatilep, false);

  /* For a bit-field member, instrument the access through the
     representative field instead, which covers whole bytes.  */
  if (TREE_CODE (t) == COMPONENT_REF
      && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
    {
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
      instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
				       TREE_OPERAND (t, 0), repr,
				       NULL_TREE), location, is_store);
      return;
    }

  /* Give up on accesses that are not byte-aligned or whose extracted
     bit-range disagrees with the type size.  */
  if (bitpos % BITS_PER_UNIT
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    return;

  /* When the base object is a VAR_DECL whose bounds are fully known at
     compile time and the access provably stays inside it, several
     classes of variables can be skipped as always accessible.  */
  if (TREE_CODE (inner) == VAR_DECL
      && offset == NULL_TREE
      && bitpos >= 0
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
	return;
      if (!ASAN_GLOBALS && is_global_var (inner))
	return;
      if (!TREE_STATIC (inner))
	{
	  /* Automatic vars in the current function will be always
	     accessible.  */
	  if (decl_function_context (inner) == current_function_decl)
	    return;
	}
      /* Always instrument external vars, they might be dynamically
	 initialized.  */
      else if (!DECL_EXTERNAL (inner))
	{
	  /* For static vars if they are known not to be dynamically
	     initialized, they will be always accessible.  */
	  varpool_node *vnode = varpool_node::get (inner);
	  if (vnode && !vnode->dynamically_initialized)
	    return;
	}
    }

  base = build_fold_addr_expr (t);
  /* Skip references already checked earlier in this extended basic
     block; otherwise emit the check and record both forms (address
     and reference) in the hash table.  */
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
			/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
			is_store, /*is_scalar_access*/true, align);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }

}
1871
f9acf11a 1872/* Insert a memory reference into the hash table if access length
1873 can be determined in compile time. */
1874
1875static void
1876maybe_update_mem_ref_hash_table (tree base, tree len)
1877{
1878 if (!POINTER_TYPE_P (TREE_TYPE (base))
1879 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1880 return;
1881
1882 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1883
1884 if (size_in_bytes != -1)
1885 update_mem_ref_hash_table (base, size_in_bytes);
1886}
1887
1ac3509e 1888/* Instrument an access to a contiguous memory region that starts at
1889 the address pointed to by BASE, over a length of LEN (expressed in
1890 the sizeof (*BASE) bytes). ITER points to the instruction before
1891 which the instrumentation instructions must be inserted. LOCATION
1892 is the source location that the instrumentation instructions must
1893 have. If IS_STORE is true, then the memory access is a store;
1894 otherwise, it's a load. */
1895
1896static void
1897instrument_mem_region_access (tree base, tree len,
1898 gimple_stmt_iterator *iter,
1899 location_t location, bool is_store)
1900{
94dbcbb6 1901 if (!POINTER_TYPE_P (TREE_TYPE (base))
1902 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1903 || integer_zerop (len))
1ac3509e 1904 return;
1905
4f86f720 1906 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
c31c80df 1907
f9acf11a 1908 if ((size_in_bytes == -1)
1909 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1910 {
1911 build_check_stmt (location, base, len, size_in_bytes, iter,
1912 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1913 is_store, /*is_scalar_access*/false, /*align*/0);
1914 }
d9dc05a1 1915
f9acf11a 1916 maybe_update_mem_ref_hash_table (base, len);
d9dc05a1 1917 *iter = gsi_for_stmt (gsi_stmt (*iter));
c31c80df 1918}
1ac3509e 1919
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  /* Instrumentation of mem* builtins can be disabled via parameter.  */
  if (!ASAN_MEMINTRIN)
    return false;

  bool iter_advanced_p = false;
  gcall *call = as_a <gcall *> (gsi_stmt (*iter));

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  location_t loc = gimple_location (call);

  /* A builtin touches at most two source regions and one destination
     region; each slot below describes one of them.  */
  asan_mem_ref src0, src1, dest;
  asan_mem_ref_init (&src0, NULL, 1);
  asan_mem_ref_init (&src1, NULL, 1);
  asan_mem_ref_init (&dest, NULL, 1);

  tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
  bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
    dest_is_deref = false, intercepted_p = true;

  if (get_mem_refs_of_builtin_call (call,
				    &src0, &src0_len, &src0_is_store,
				    &src1, &src1_len, &src1_is_store,
				    &dest, &dest_len, &dest_is_store,
				    &dest_is_deref, &intercepted_p))
    {
      if (dest_is_deref)
	{
	  /* The builtin accesses its destination like a plain scalar
	     dereference; instrument it the same way.  */
	  instrument_derefs (iter, dest.start, loc, dest_is_store);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else if (!intercepted_p
	       && (src0_len || src1_len || dest_len))
	{
	  /* Not intercepted by the runtime library, so emit explicit
	     region checks for every range the builtin touches.  */
	  if (src0.start != NULL_TREE)
	    instrument_mem_region_access (src0.start, src0_len,
					  iter, loc, /*is_store=*/false);
	  if (src1.start != NULL_TREE)
	    instrument_mem_region_access (src1.start, src1_len,
					  iter, loc, /*is_store=*/false);
	  if (dest.start != NULL_TREE)
	    instrument_mem_region_access (dest.start, dest_len,
					  iter, loc, /*is_store=*/true);

	  /* Region instrumentation may have split blocks; re-anchor
	     the iterator on the call before stepping past it.  */
	  *iter = gsi_for_stmt (call);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else
	{
	  /* The call is checked elsewhere (presumably intercepted by
	     the runtime); just record the regions so that subsequent
	     accesses in this extended basic block are not
	     re-instrumented.  */
	  if (src0.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
	  if (src1.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
	  if (dest.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
	}
    }
  return iter_advanced_p;
}
1989
1990/* Instrument the assignment statement ITER if it is subject to
c31c80df 1991 instrumentation. Return TRUE iff instrumentation actually
1992 happened. In that case, the iterator ITER is advanced to the next
1993 logical expression following the one initially pointed to by ITER,
1994 and the relevant memory reference that which access has been
1995 instrumented is added to the memory references hash table. */
1ac3509e 1996
c31c80df 1997static bool
1998maybe_instrument_assignment (gimple_stmt_iterator *iter)
1ac3509e 1999{
2000 gimple s = gsi_stmt (*iter);
2001
2002 gcc_assert (gimple_assign_single_p (s));
2003
c31c80df 2004 tree ref_expr = NULL_TREE;
2005 bool is_store, is_instrumented = false;
2006
38e9269e 2007 if (gimple_store_p (s))
c31c80df 2008 {
2009 ref_expr = gimple_assign_lhs (s);
2010 is_store = true;
2011 instrument_derefs (iter, ref_expr,
2012 gimple_location (s),
2013 is_store);
2014 is_instrumented = true;
2015 }
e815c2c5 2016
38e9269e 2017 if (gimple_assign_load_p (s))
c31c80df 2018 {
2019 ref_expr = gimple_assign_rhs1 (s);
2020 is_store = false;
2021 instrument_derefs (iter, ref_expr,
2022 gimple_location (s),
2023 is_store);
2024 is_instrumented = true;
2025 }
2026
2027 if (is_instrumented)
2028 gsi_next (iter);
2029
2030 return is_instrumented;
1ac3509e 2031}
2032
/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
	{
	  tree callee = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_UNREACHABLE:
	    case BUILT_IN_TRAP:
	      /* Don't instrument these.  */
	      return false;
	    default:
	      break;
	    }
	}
      /* Before any other noreturn call, insert a call to
	 __asan_handle_no_return — presumably so the runtime can clean
	 up shadow state for frames that will never return normally
	 (see libsanitizer for the exact semantics).  */
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }
  /* Iterator was not advanced (the noreturn handling inserts before
     the call without stepping past it).  */
  return false;
}
2072
/* Walk each instruction of all basic block and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  /* Blocks created by the instrumentation itself get indices >= this
     snapshot and are skipped by the loop below.  */
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}
      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      /* Note: no gsi_next in the for-header — each branch below either
	 advances I itself or reports that the helper already did.  */
      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && !gimple_clobber_p (s)
		   && maybe_instrument_assignment (&i))
	    /*  Nothing to do as maybe_instrument_assignment advanced
		the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /*  Nothing to do as maybe_instrument_call
		advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  */
	      if (is_gimple_call (s) && !nonfreeing_call_p (s))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}
2137
085f6ebf 2138/* Build
2139 __asan_before_dynamic_init (module_name)
2140 or
2141 __asan_after_dynamic_init ()
2142 call. */
2143
2144tree
2145asan_dynamic_init_call (bool after_p)
2146{
2147 tree fn = builtin_decl_implicit (after_p
2148 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2149 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2150 tree module_name_cst = NULL_TREE;
2151 if (!after_p)
2152 {
2153 pretty_printer module_name_pp;
2154 pp_string (&module_name_pp, main_input_filename);
2155
2156 if (shadow_ptr_types[0] == NULL_TREE)
2157 asan_init_shadow_ptr_types ();
2158 module_name_cst = asan_pp_string (&module_name_pp);
2159 module_name_cst = fold_convert (const_ptr_type_node,
2160 module_name_cst);
2161 }
2162
2163 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2164}
2165
92fc5c48 2166/* Build
2167 struct __asan_global
2168 {
2169 const void *__beg;
2170 uptr __size;
2171 uptr __size_with_redzone;
2172 const void *__name;
1e80ce41 2173 const void *__module_name;
92fc5c48 2174 uptr __has_dynamic_init;
a9586c9c 2175 __asan_global_source_location *__location;
92fc5c48 2176 } type. */
2177
2178static tree
2179asan_global_struct (void)
2180{
a9586c9c 2181 static const char *field_names[7]
92fc5c48 2182 = { "__beg", "__size", "__size_with_redzone",
a9586c9c 2183 "__name", "__module_name", "__has_dynamic_init", "__location"};
2184 tree fields[7], ret;
92fc5c48 2185 int i;
2186
2187 ret = make_node (RECORD_TYPE);
a9586c9c 2188 for (i = 0; i < 7; i++)
92fc5c48 2189 {
2190 fields[i]
2191 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2192 get_identifier (field_names[i]),
2193 (i == 0 || i == 3) ? const_ptr_type_node
9e46467d 2194 : pointer_sized_int_node);
92fc5c48 2195 DECL_CONTEXT (fields[i]) = ret;
2196 if (i)
2197 DECL_CHAIN (fields[i - 1]) = fields[i];
2198 }
530273ed 2199 tree type_decl = build_decl (input_location, TYPE_DECL,
2200 get_identifier ("__asan_global"), ret);
2201 DECL_IGNORED_P (type_decl) = 1;
2202 DECL_ARTIFICIAL (type_decl) = 1;
92fc5c48 2203 TYPE_FIELDS (ret) = fields[0];
530273ed 2204 TYPE_NAME (ret) = type_decl;
2205 TYPE_STUB_DECL (ret) = type_decl;
92fc5c48 2206 layout_type (ret);
2207 return ret;
2208}
2209
/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  /* UPTR is the pointer-sized-integer type of the second field
     (__size); it is reused for all integer-typed fields.  */
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  /* __name: the variable's identifier, or "<unknown>".  */
  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  /* __module_name: the main input file name.  */
  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  /* Some globals cannot be referred to directly; emit a local
     .LASAN<N> alias and reference that instead.  */
  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  /* Fields are appended in declaration order of __asan_global:
     __beg, __size, __size_with_redzone, __name, __module_name,
     __has_dynamic_init, __location.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
					build_fold_addr_expr (refdecl)));
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  /* __size_with_redzone = __size plus the trailing red zone.  */
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, module_name_cst));
  varpool_node *vnode = varpool_node::get (decl);
  int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  build_int_cst (uptr, has_dynamic_init));
  /* __location: pointer to a static source-location record when the
     declaration's file is known, 0 otherwise.  */
  tree locptr = NULL_TREE;
  location_t loc = DECL_SOURCE_LOCATION (decl);
  expanded_location xloc = expand_location (loc);
  if (xloc.file != NULL)
    {
      static int lasanloccnt = 0;
      char buf[25];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
      tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			     ubsan_get_source_location_type ());
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      pretty_printer filename_pp;
      pp_string (&filename_pp, xloc.file);
      tree str = asan_pp_string (&filename_pp);
      /* { filename, line, column } — the layout of the ubsan source
	 location record.  */
      tree ctor = build_constructor_va (TREE_TYPE (var), 3,
					NULL_TREE, str, NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.line), NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.column));
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      locptr = fold_convert (uptr, build_fold_addr_expr (var));
    }
  else
    locptr = build_int_cst (uptr, 0);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
2300
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */
void
initialize_sanitizer_builtins (void)
{
  tree decl;

  /* __asan_init is used as the sentinel: if it exists, the front end
     already registered the sanitizer builtins.  */
  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  /* Function-type singletons named after the BT_FN_* convention used
     by sanitizer.def.  */
  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_SIZE_CONST_PTR_INT
    = build_function_type_list (size_type_node, const_ptr_type_node,
				integer_type_node, NULL_TREE);
  /* Families indexed by access size: 1 << i bytes for i in [0, 4],
     i.e. 1, 2, 4, 8 and 16 byte variants.  */
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						|TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
  /* Expand the per-size arrays into the individual names that
     sanitizer.def refers to.  */
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
  /* ATTR_* names map sanitizer.def attribute lists onto ECF_* flags.  */
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
			       BUILT_IN_NORMAL, NAME, NULL_TREE);	\
  set_call_expr_flags (decl, ATTRS);					\
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_object_size, but that might
     not be available for e.g. Fortran at this point.  We use
     DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
  if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
      && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
    DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
			   BT_FN_SIZE_CONST_PTR_INT,
			   ATTR_PURE_NOTHROW_LEAF_LIST)

#undef DEF_SANITIZER_BUILTIN
}
2422
55b58027 2423/* Called via htab_traverse. Count number of emitted
2424 STRING_CSTs in the constant hash table. */
2425
2ef51f0e 2426int
2427count_string_csts (constant_descriptor_tree **slot,
2428 unsigned HOST_WIDE_INT *data)
55b58027 2429{
2ef51f0e 2430 struct constant_descriptor_tree *desc = *slot;
55b58027 2431 if (TREE_CODE (desc->value) == STRING_CST
2432 && TREE_ASM_WRITTEN (desc->value)
2433 && asan_protect_global (desc->value))
2ef51f0e 2434 ++*data;
55b58027 2435 return 1;
2436}
2437
/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  /* The __asan_global record type, as built by asan_global_struct.  */
  tree type;
  /* Vector of global descriptors that asan_add_global appends to.  */
  vec<constructor_elt, va_gc> *v;
};
2446
2ef51f0e 2447/* Called via hash_table::traverse. Call asan_add_global
55b58027 2448 on emitted STRING_CSTs from the constant hash table. */
2449
2ef51f0e 2450int
2451add_string_csts (constant_descriptor_tree **slot,
2452 asan_add_string_csts_data *aascd)
55b58027 2453{
2ef51f0e 2454 struct constant_descriptor_tree *desc = *slot;
55b58027 2455 if (TREE_CODE (desc->value) == STRING_CST
2456 && TREE_ASM_WRITTEN (desc->value)
2457 && asan_protect_global (desc->value))
2458 {
55b58027 2459 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2460 aascd->type, aascd->v);
2461 }
2462 return 1;
2463}
2464
/* Statement list accumulating the module constructor body built by
   asan_finish_file (the __asan_init / __asan_register_globals calls).
   Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;
2468
/* Module-level instrumentation.
   - Insert __asan_init_vN() into the list of CTORs.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  /* For user-space we want asan constructors to run first.
     Linux kernel does not support priorities other than default, and the only
     other user of constructors is coverage. So we run with the default
     priority.  */
  int priority = flag_sanitize & SANITIZE_USER_ADDRESS
		 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;

  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
    }
  /* First pass: count protected globals (defined variables plus
     emitted string constants) to size the descriptor array.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
  const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
    (&gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      /* Build the static .LASAN0 array of __asan_global descriptors.  */
      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      /* Second pass: fill it, in the same order as the count above.  */
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
	(&aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);

      /* Register the array in the module ctor, unregister in the dtor.  */
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements, priority);
    }
  if (asan_ctor_statements)
    cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
  /* Restore the bit cleared at the top.  */
  flag_sanitize |= SANITIZE_ADDRESS;
}
2555
ff326078 2556/* Expand the ASAN_{LOAD,STORE} builtins. */
2557
6b5813f5 2558bool
ff326078 2559asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2560{
2561 gimple g = gsi_stmt (*iter);
2562 location_t loc = gimple_location (g);
2563
f4d482a6 2564 bool recover_p
2565 = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2566
ff326078 2567 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2568 gcc_assert (flags < ASAN_CHECK_LAST);
2569 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2570 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2571 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
ff326078 2572
2573 tree base = gimple_call_arg (g, 1);
2574 tree len = gimple_call_arg (g, 2);
da81fb00 2575 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
ff326078 2576
2577 HOST_WIDE_INT size_in_bytes
2578 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2579
2580 if (use_calls)
2581 {
2582 /* Instrument using callbacks. */
e9cf809e 2583 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2584 NOP_EXPR, base);
ff326078 2585 gimple_set_location (g, loc);
2586 gsi_insert_before (iter, g, GSI_SAME_STMT);
2587 tree base_addr = gimple_assign_lhs (g);
2588
2589 int nargs;
f4d482a6 2590 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
ff326078 2591 if (nargs == 1)
2592 g = gimple_build_call (fun, 1, base_addr);
2593 else
2594 {
2595 gcc_assert (nargs == 2);
e9cf809e 2596 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2597 NOP_EXPR, len);
ff326078 2598 gimple_set_location (g, loc);
2599 gsi_insert_before (iter, g, GSI_SAME_STMT);
2600 tree sz_arg = gimple_assign_lhs (g);
2601 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2602 }
2603 gimple_set_location (g, loc);
2604 gsi_replace (iter, g, false);
2605 return false;
2606 }
2607
2608 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2609
ff326078 2610 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2611 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2612
2613 gimple_stmt_iterator gsi = *iter;
2614
2615 if (!is_non_zero_len)
2616 {
2617 /* So, the length of the memory area to asan-protect is
2618 non-constant. Let's guard the generated instrumentation code
2619 like:
2620
2621 if (len != 0)
2622 {
2623 //asan instrumentation code goes here.
2624 }
2625 // falltrough instructions, starting with *ITER. */
2626
2627 g = gimple_build_cond (NE_EXPR,
2628 len,
2629 build_int_cst (TREE_TYPE (len), 0),
2630 NULL_TREE, NULL_TREE);
2631 gimple_set_location (g, loc);
2632
2633 basic_block then_bb, fallthrough_bb;
1a91d914 2634 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2635 /*then_more_likely_p=*/true,
2636 &then_bb, &fallthrough_bb);
ff326078 2637 /* Note that fallthrough_bb starts with the statement that was
2638 pointed to by ITER. */
2639
2640 /* The 'then block' of the 'if (len != 0) condition is where
2641 we'll generate the asan instrumentation code now. */
2642 gsi = gsi_last_bb (then_bb);
2643 }
2644
2645 /* Get an iterator on the point where we can add the condition
2646 statement for the instrumentation. */
2647 basic_block then_bb, else_bb;
2648 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2649 /*then_more_likely_p=*/false,
f4d482a6 2650 /*create_then_fallthru_edge*/recover_p,
ff326078 2651 &then_bb,
2652 &else_bb);
2653
e9cf809e 2654 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2655 NOP_EXPR, base);
ff326078 2656 gimple_set_location (g, loc);
2657 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2658 tree base_addr = gimple_assign_lhs (g);
2659
2660 tree t = NULL_TREE;
2661 if (real_size_in_bytes >= 8)
2662 {
2663 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2664 shadow_ptr_type);
2665 t = shadow;
2666 }
2667 else
2668 {
2669 /* Slow path for 1, 2 and 4 byte accesses. */
f9acf11a 2670 /* Test (shadow != 0)
2671 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
2672 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2673 shadow_ptr_type);
2674 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2675 gimple_seq seq = NULL;
2676 gimple_seq_add_stmt (&seq, shadow_test);
2677 /* Aligned (>= 8 bytes) can test just
2678 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2679 to be 0. */
2680 if (align < 8)
ff326078 2681 {
f9acf11a 2682 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2683 base_addr, 7));
2684 gimple_seq_add_stmt (&seq,
2685 build_type_cast (shadow_type,
2686 gimple_seq_last (seq)));
2687 if (real_size_in_bytes > 1)
2688 gimple_seq_add_stmt (&seq,
2689 build_assign (PLUS_EXPR,
2690 gimple_seq_last (seq),
2691 real_size_in_bytes - 1));
2692 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
ff326078 2693 }
f9acf11a 2694 else
2695 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2696 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2697 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2698 gimple_seq_last (seq)));
2699 t = gimple_assign_lhs (gimple_seq_last (seq));
2700 gimple_seq_set_location (seq, loc);
2701 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
ff326078 2702
2703 /* For non-constant, misaligned or otherwise weird access sizes,
f9acf11a 2704 check first and last byte. */
2705 if (size_in_bytes == -1)
ff326078 2706 {
e9cf809e 2707 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2708 MINUS_EXPR, len,
2709 build_int_cst (pointer_sized_int_node, 1));
ff326078 2710 gimple_set_location (g, loc);
2711 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2712 tree last = gimple_assign_lhs (g);
e9cf809e 2713 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2714 PLUS_EXPR, base_addr, last);
ff326078 2715 gimple_set_location (g, loc);
2716 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2717 tree base_end_addr = gimple_assign_lhs (g);
2718
2719 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2720 shadow_ptr_type);
2721 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2722 gimple_seq seq = NULL;
2723 gimple_seq_add_stmt (&seq, shadow_test);
2724 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2725 base_end_addr, 7));
2726 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2727 gimple_seq_last (seq)));
2728 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2729 gimple_seq_last (seq),
2730 shadow));
2731 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2732 gimple_seq_last (seq)));
f9acf11a 2733 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2734 gimple_seq_last (seq)));
ff326078 2735 t = gimple_assign_lhs (gimple_seq_last (seq));
2736 gimple_seq_set_location (seq, loc);
2737 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2738 }
2739 }
2740
2741 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2742 NULL_TREE, NULL_TREE);
2743 gimple_set_location (g, loc);
2744 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2745
2746 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2747 gsi = gsi_start_bb (then_bb);
2748 int nargs;
f4d482a6 2749 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
ff326078 2750 g = gimple_build_call (fun, nargs, base_addr, len);
2751 gimple_set_location (g, loc);
2752 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2753
2754 gsi_remove (iter, true);
2755 *iter = gsi_start_bb (else_bb);
2756
2757 return true;
2758}
2759
b92cccf4 2760/* Instrument the current function. */
2761
2762static unsigned int
2763asan_instrument (void)
2764{
5d5c682b 2765 if (shadow_ptr_types[0] == NULL_TREE)
55b58027 2766 asan_init_shadow_ptr_types ();
b92cccf4 2767 transform_statements ();
b92cccf4 2768 return 0;
2769}
2770
2771static bool
2772gate_asan (void)
2773{
9e46467d 2774 return (flag_sanitize & SANITIZE_ADDRESS) != 0
a9196da9 2775 && !lookup_attribute ("no_sanitize_address",
d413ffdd 2776 DECL_ATTRIBUTES (current_function_decl));
b92cccf4 2777}
2778
namespace {

/* Pass descriptor for the main ASan pass, run from within the
   optimization pipeline.  Requires SSA/CFG/EH-lowered GIMPLE and
   schedules an SSA update after the instrumentation is inserted.  */

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

/* The AddressSanitizer pass proper.  Gated on gate_asan (i.e. on
   -fsanitize=address and absence of the no_sanitize_address
   attribute); its execute hook calls asan_instrument on the current
   function.  clone allows the pass manager to instantiate it more
   than once in the pipeline.  */

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace
2809
2810gimple_opt_pass *
2811make_pass_asan (gcc::context *ctxt)
2812{
2813 return new pass_asan (ctxt);
2814}
2815
namespace {

/* Pass descriptor for the -O0 variant of the ASan pass ("asan0").
   Identical requirements to the main pass: SSA/CFG/EH-lowered GIMPLE,
   with an SSA update scheduled afterwards.  */

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

/* The ASan pass as run when not optimizing: the gate additionally
   requires !optimize, so exactly one of pass_asan / pass_asan_O0
   instruments a given function.  The execute hook is the same
   asan_instrument used by the main pass.  */

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace
2845
2846gimple_opt_pass *
2847make_pass_asan_O0 (gcc::context *ctxt)
2848{
2849 return new pass_asan_O0 (ctxt);
2850}
2851
5d5c682b 2852#include "gt-asan.h"