gcc/asan.c
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "alias.h"
26 #include "backend.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "rtl.h"
30 #include "options.h"
31 #include "fold-const.h"
32 #include "cfganal.h"
33 #include "internal-fn.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "calls.h"
37 #include "varasm.h"
38 #include "stor-layout.h"
39 #include "tree-iterator.h"
40 #include "cgraph.h"
41 #include "stringpool.h"
42 #include "tree-ssanames.h"
43 #include "tree-pass.h"
44 #include "asan.h"
45 #include "gimple-pretty-print.h"
46 #include "target.h"
47 #include "flags.h"
48 #include "insn-config.h"
49 #include "expmed.h"
50 #include "dojump.h"
51 #include "explow.h"
52 #include "emit-rtl.h"
53 #include "stmt.h"
54 #include "expr.h"
55 #include "insn-codes.h"
56 #include "optabs.h"
57 #include "output.h"
58 #include "tm_p.h"
59 #include "langhooks.h"
60 #include "alloc-pool.h"
61 #include "cfgloop.h"
62 #include "gimple-builder.h"
63 #include "ubsan.h"
64 #include "params.h"
65 #include "builtins.h"
66 #include "fnmatch.h"
67
68 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
69 with <2x slowdown on average.
70
71 The tool consists of two parts:
72 instrumentation module (this file) and a run-time library.
73 The instrumentation module adds a run-time check before every memory insn.
  74     For an 8- or 16-byte load accessing address X:
75 ShadowAddr = (X >> 3) + Offset
76 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
77 if (ShadowValue)
78 __asan_report_load8(X);
79 For a load of N bytes (N=1, 2 or 4) from address X:
80 ShadowAddr = (X >> 3) + Offset
81 ShadowValue = *(char*)ShadowAddr;
82 if (ShadowValue)
  83     if ((X & 7) + N - 1 >= ShadowValue)
84 __asan_report_loadN(X);
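     As a purely illustrative worked example, with the x86_64/Linux
     value Offset = 0x7fff8000, a load from X = 0x602010 inspects the
     shadow byte at (0x602010 >> 3) + 0x7fff8000 = 0x800b8402.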
85 Stores are instrumented similarly, but using __asan_report_storeN functions.
  86     A call to __asan_init_vN() is inserted into the list of module CTORs.
87 N is the version number of the AddressSanitizer API. The changes between the
88 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
89
  90     The run-time library redefines malloc (so that redzones are inserted around
  91     the allocated memory) and free (so that reuse of freed memory is delayed),
92 provides __asan_report* and __asan_init_vN functions.
93
94 Read more:
95 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
96
97 The current implementation supports detection of out-of-bounds and
98 use-after-free in the heap, on the stack and for global variables.
99
100 [Protection of stack variables]
101
102 To understand how detection of out-of-bounds and use-after-free works
 103     for stack variables, let's look at this example on x86_64 where the
104 stack grows downward:
105
106 int
107 foo ()
108 {
109 char a[23] = {0};
110 int b[2] = {0};
111
112 a[5] = 1;
113 b[1] = 2;
114
115 return a[5] + b[1];
116 }
117
118 For this function, the stack protected by asan will be organized as
119 follows, from the top of the stack to the bottom:
120
121 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
122
 123     Slot 2/ [8 bytes of red zone, which added to the space of 'a' makes
 124     the next slot 32-byte aligned; this one is called the Partial
 125     Redzone; this 32-byte alignment is an asan constraint]
126
127 Slot 3/ [24 bytes for variable 'a']
128
129 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
130
 131     Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
132
133 Slot 6/ [8 bytes for variable 'b']
134
135 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
136 'LEFT RedZone']
137
138 The 32 bytes of LEFT red zone at the bottom of the stack can be
139 decomposed as such:
140
141 1/ The first 8 bytes contain a magical asan number that is always
142 0x41B58AB3.
143
 144     2/ The following 8 bytes contain a pointer to a string (to be
 145     parsed at runtime by the asan run-time library), whose format is
 146     the following:
147
148 "<function-name> <space> <num-of-variables-on-the-stack>
149 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
150 <length-of-var-in-bytes> ){n} "
151
 152     where '(...){n}' means the content inside the parentheses occurs 'n'
153 times, with 'n' being the number of variables on the stack.
154
155 3/ The following 8 bytes contain the PC of the current function which
156 will be used by the run-time library to print an error message.
157
158 4/ The following 8 bytes are reserved for internal use by the run-time.
159
160 The shadow memory for that stack layout is going to look like this:
161
162 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
163 The F1 byte pattern is a magic number called
164 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
 165     the memory for that shadow byte is part of the LEFT red zone
 166     intended to sit at the bottom of the variables on the stack.
167
168 - content of shadow memory 8 bytes for slots 6 and 5:
169 0xF4F4F400. The F4 byte pattern is a magic number
170 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
171 memory region for this shadow byte is a PARTIAL red zone
172 intended to pad a variable A, so that the slot following
173 {A,padding} is 32 bytes aligned.
174
175 Note that the fact that the least significant byte of this
176 shadow memory content is 00 means that 8 bytes of its
 177     corresponding memory (which correspond to the memory of
 178     variable 'b') are addressable.
179
180 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
181 The F2 byte pattern is a magic number called
182 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
183 region for this shadow byte is a MIDDLE red zone intended to
 184     sit between two 32-byte aligned slots of {variable,padding}.
185
186 - content of shadow memory 8 bytes for slot 3 and 2:
 187     0xF4000000. The F4 byte pattern is the concatenation of
 188     variable 'a' and the partial red zone following it, like what we
 189     had for variable 'b'. The least significant 3 bytes being 00
 190     means that the 24 bytes of the slot of variable 'a' are addressable.
191
192 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
193 The F3 byte pattern is a magic number called
194 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
 195     region for this shadow byte is a RIGHT red zone intended to sit
196 at the top of the variables of the stack.
197
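     To illustrate how a single shadow byte is decoded by the run-time:
     the value 0 means that all 8 application bytes it describes are
     addressable, a value k in 1..7 means only the first k of them are,
     and a poisoned value such as the 0xF1..0xF4 magic numbers above
     means none of them are.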
198 Note that the real variable layout is done in expand_used_vars in
199 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
200 stack variables as well as the different red zones, emits some
 201     prologue code to populate the shadow memory so as to poison (mark as
 202     non-accessible) the regions of the red zones and mark the regions of
 203     stack variables as accessible, and emits some epilogue code to
204 un-poison (mark as accessible) the regions of red zones right before
205 the function exits.
206
207 [Protection of global variables]
208
209 The basic idea is to insert a red zone between two global variables
210 and install a constructor function that calls the asan runtime to do
211 the populating of the relevant shadow memory regions at load time.
212
 213     So the global variables are laid out so as to insert a red zone between
 214     them. The size of the red zones is chosen so that each variable starts
 215     on a 32-byte boundary.
216
217 Then a constructor function is installed so that, for each global
218 variable, it calls the runtime asan library function
 219     __asan_register_globals with an instance of this type:
220
221 struct __asan_global
222 {
223 // Address of the beginning of the global variable.
224 const void *__beg;
225
226 // Initial size of the global variable.
227 uptr __size;
228
229 // Size of the global variable + size of the red zone. This
230 // size is 32 bytes aligned.
231 uptr __size_with_redzone;
232
233 // Name of the global variable.
234 const void *__name;
235
236 // Name of the module where the global variable is declared.
237 const void *__module_name;
238
239 // 1 if it has dynamic initialization, 0 otherwise.
240 uptr __has_dynamic_init;
241
242 // A pointer to struct that contains source location, could be NULL.
243 __asan_global_source_location *__location;
244 }
245
246 A destructor function that calls the runtime asan library function
 247     __asan_unregister_globals is also installed. */
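/* As a rough sketch (not the exact code GCC emits), the machinery
   described above is the moral equivalent of:

     static struct __asan_global __globals[N] = { ... };

     static void __attribute__ ((constructor))
     asan_ctor (void)                    // name is illustrative only
     {
       __asan_init_vN ();                // see the comment above
       __asan_register_globals (__globals, N);
     }

     static void __attribute__ ((destructor))
     asan_dtor (void)                    // name is illustrative only
     {
       __asan_unregister_globals (__globals, N);
     }
*/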
248
249 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
250 static bool asan_shadow_offset_computed;
251 static vec<char *> sanitized_sections;
252
253 /* Sets shadow offset to value in string VAL. */
254
255 bool
256 set_asan_shadow_offset (const char *val)
257 {
258 char *endp;
259
260 errno = 0;
261 #ifdef HAVE_LONG_LONG
262 asan_shadow_offset_value = strtoull (val, &endp, 0);
263 #else
264 asan_shadow_offset_value = strtoul (val, &endp, 0);
265 #endif
266 if (!(*val != '\0' && *endp == '\0' && errno == 0))
267 return false;
268
269 asan_shadow_offset_computed = true;
270
271 return true;
272 }
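/* For example, the command-line option -fasan-shadow-offset=0x7fff8000
   reaches this function with VAL == "0x7fff8000"; since strtoull is
   called with base 0, decimal, octal and hexadecimal spellings are all
   accepted.  */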
273
274 /* Set list of user-defined sections that need to be sanitized. */
275
276 void
277 set_sanitized_sections (const char *sections)
278 {
279 char *pat;
280 unsigned i;
281 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
282 free (pat);
283 sanitized_sections.truncate (0);
284
285 for (const char *s = sections; *s; )
286 {
287 const char *end;
288 for (end = s; *end && *end != ','; ++end);
289 size_t len = end - s;
290 sanitized_sections.safe_push (xstrndup (s, len));
291 s = *end ? end + 1 : end;
292 }
293 }
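/* For example, -fsanitize-sections=.data.foo,.sec* stores the patterns
   ".data.foo" and ".sec*"; section_sanitized_p below then matches
   section names against them with fnmatch.  */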
294
295 /* Checks whether section SEC should be sanitized. */
296
297 static bool
298 section_sanitized_p (const char *sec)
299 {
300 char *pat;
301 unsigned i;
302 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
303 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
304 return true;
305 return false;
306 }
307
308 /* Returns Asan shadow offset. */
309
310 static unsigned HOST_WIDE_INT
311 asan_shadow_offset ()
312 {
313 if (!asan_shadow_offset_computed)
314 {
315 asan_shadow_offset_computed = true;
316 asan_shadow_offset_value = targetm.asan_shadow_offset ();
317 }
318 return asan_shadow_offset_value;
319 }
320
321 alias_set_type asan_shadow_set = -1;
322
323 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
324 alias set is used for all shadow memory accesses. */
325 static GTY(()) tree shadow_ptr_types[2];
326
327 /* Decl for __asan_option_detect_stack_use_after_return. */
328 static GTY(()) tree asan_detect_stack_use_after_return;
329
330 /* Various flags for Asan builtins. */
331 enum asan_check_flags
332 {
333 ASAN_CHECK_STORE = 1 << 0,
334 ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
335 ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
336 ASAN_CHECK_LAST = 1 << 3
337 };
338
339 /* Hashtable support for memory references used by gimple
340 statements. */
341
342 /* This type represents a reference to a memory region. */
343 struct asan_mem_ref
344 {
345 /* The expression of the beginning of the memory region. */
346 tree start;
347
348 /* The size of the access. */
349 HOST_WIDE_INT access_size;
350
351 /* Pool allocation new operator. */
352 inline void *operator new (size_t)
353 {
354 return pool.allocate ();
355 }
356
357 /* Delete operator utilizing pool allocation. */
358 inline void operator delete (void *ptr)
359 {
360 pool.remove ((asan_mem_ref *) ptr);
361 }
362
363 /* Memory allocation pool. */
364 static pool_allocator<asan_mem_ref> pool;
365 };
366
367 pool_allocator<asan_mem_ref> asan_mem_ref::pool ("asan_mem_ref", 10);
368
369 /* Initializes an instance of asan_mem_ref. */
370
371 static void
372 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
373 {
374 ref->start = start;
375 ref->access_size = access_size;
376 }
377
 378 /* Allocates an instance of asan_mem_ref from its memory pool and
 379     initializes it.
380 START is the address of (or the expression pointing to) the
381 beginning of memory reference. ACCESS_SIZE is the size of the
382 access to the referenced memory. */
383
384 static asan_mem_ref*
385 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
386 {
387 asan_mem_ref *ref = new asan_mem_ref;
388
389 asan_mem_ref_init (ref, start, access_size);
390 return ref;
391 }
392
393 /* This builds and returns a pointer to the end of the memory region
 394     that starts at START and has length LEN. */
395
396 tree
397 asan_mem_ref_get_end (tree start, tree len)
398 {
399 if (len == NULL_TREE || integer_zerop (len))
400 return start;
401
402 if (!ptrofftype_p (len))
403 len = convert_to_ptrofftype (len);
404
405 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
406 }
407
408 /* Return a tree expression that represents the end of the referenced
409 memory region. Beware that this function can actually build a new
410 tree expression. */
411
412 tree
413 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
414 {
415 return asan_mem_ref_get_end (ref->start, len);
416 }
417
418 struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
419 {
420 static inline hashval_t hash (const asan_mem_ref *);
421 static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
422 };
423
424 /* Hash a memory reference. */
425
426 inline hashval_t
427 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
428 {
429 return iterative_hash_expr (mem_ref->start, 0);
430 }
431
432 /* Compare two memory references. We accept the length of either
 433     memory reference to be NULL_TREE. */
434
435 inline bool
436 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
437 const asan_mem_ref *m2)
438 {
439 return operand_equal_p (m1->start, m2->start, 0);
440 }
441
442 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
443
444 /* Returns a reference to the hash table containing memory references.
445 This function ensures that the hash table is created. Note that
446 this hash table is updated by the function
447 update_mem_ref_hash_table. */
448
449 static hash_table<asan_mem_ref_hasher> *
450 get_mem_ref_hash_table ()
451 {
452 if (!asan_mem_ref_ht)
453 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
454
455 return asan_mem_ref_ht;
456 }
457
458 /* Clear all entries from the memory references hash table. */
459
460 static void
461 empty_mem_ref_hash_table ()
462 {
463 if (asan_mem_ref_ht)
464 asan_mem_ref_ht->empty ();
465 }
466
467 /* Free the memory references hash table. */
468
469 static void
470 free_mem_ref_resources ()
471 {
472 delete asan_mem_ref_ht;
473 asan_mem_ref_ht = NULL;
474
475 asan_mem_ref::pool.release ();
476 }
477
478 /* Return true iff the memory reference REF has been instrumented. */
479
480 static bool
481 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
482 {
483 asan_mem_ref r;
484 asan_mem_ref_init (&r, ref, access_size);
485
486 asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
487 return saved_ref && saved_ref->access_size >= access_size;
488 }
489
490 /* Return true iff the memory reference REF has been instrumented. */
491
492 static bool
493 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
494 {
495 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
496 }
497
498 /* Return true iff access to memory region starting at REF and of
499 length LEN has been instrumented. */
500
501 static bool
502 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
503 {
504 HOST_WIDE_INT size_in_bytes
505 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
506
507 return size_in_bytes != -1
508 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
509 }
510
511 /* Set REF to the memory reference present in a gimple assignment
512 ASSIGNMENT. Return true upon successful completion, false
513 otherwise. */
514
515 static bool
516 get_mem_ref_of_assignment (const gassign *assignment,
517 asan_mem_ref *ref,
518 bool *ref_is_store)
519 {
520 gcc_assert (gimple_assign_single_p (assignment));
521
522 if (gimple_store_p (assignment)
523 && !gimple_clobber_p (assignment))
524 {
525 ref->start = gimple_assign_lhs (assignment);
526 *ref_is_store = true;
527 }
528 else if (gimple_assign_load_p (assignment))
529 {
530 ref->start = gimple_assign_rhs1 (assignment);
531 *ref_is_store = false;
532 }
533 else
534 return false;
535
536 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
537 return true;
538 }
539
540 /* Return the memory references contained in a gimple statement
541 representing a builtin call that has to do with memory access. */
542
543 static bool
544 get_mem_refs_of_builtin_call (const gcall *call,
545 asan_mem_ref *src0,
546 tree *src0_len,
547 bool *src0_is_store,
548 asan_mem_ref *src1,
549 tree *src1_len,
550 bool *src1_is_store,
551 asan_mem_ref *dst,
552 tree *dst_len,
553 bool *dst_is_store,
554 bool *dest_is_deref,
555 bool *intercepted_p)
556 {
557 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
558
559 tree callee = gimple_call_fndecl (call);
560 tree source0 = NULL_TREE, source1 = NULL_TREE,
561 dest = NULL_TREE, len = NULL_TREE;
562 bool is_store = true, got_reference_p = false;
563 HOST_WIDE_INT access_size = 1;
564
565 *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
566
567 switch (DECL_FUNCTION_CODE (callee))
568 {
569 /* (s, s, n) style memops. */
570 case BUILT_IN_BCMP:
571 case BUILT_IN_MEMCMP:
572 source0 = gimple_call_arg (call, 0);
573 source1 = gimple_call_arg (call, 1);
574 len = gimple_call_arg (call, 2);
575 break;
576
577 /* (src, dest, n) style memops. */
578 case BUILT_IN_BCOPY:
579 source0 = gimple_call_arg (call, 0);
580 dest = gimple_call_arg (call, 1);
581 len = gimple_call_arg (call, 2);
582 break;
583
584 /* (dest, src, n) style memops. */
585 case BUILT_IN_MEMCPY:
586 case BUILT_IN_MEMCPY_CHK:
587 case BUILT_IN_MEMMOVE:
588 case BUILT_IN_MEMMOVE_CHK:
589 case BUILT_IN_MEMPCPY:
590 case BUILT_IN_MEMPCPY_CHK:
591 dest = gimple_call_arg (call, 0);
592 source0 = gimple_call_arg (call, 1);
593 len = gimple_call_arg (call, 2);
594 break;
595
596 /* (dest, n) style memops. */
597 case BUILT_IN_BZERO:
598 dest = gimple_call_arg (call, 0);
599 len = gimple_call_arg (call, 1);
600 break;
601
 602     /* (dest, x, n) style memops.  */
603 case BUILT_IN_MEMSET:
604 case BUILT_IN_MEMSET_CHK:
605 dest = gimple_call_arg (call, 0);
606 len = gimple_call_arg (call, 2);
607 break;
608
609 case BUILT_IN_STRLEN:
610 source0 = gimple_call_arg (call, 0);
611 len = gimple_call_lhs (call);
612 break ;
613
614 /* And now the __atomic* and __sync builtins.
 615     These are handled differently from the classical memory
616 access builtins above. */
617
618 case BUILT_IN_ATOMIC_LOAD_1:
619 case BUILT_IN_ATOMIC_LOAD_2:
620 case BUILT_IN_ATOMIC_LOAD_4:
621 case BUILT_IN_ATOMIC_LOAD_8:
622 case BUILT_IN_ATOMIC_LOAD_16:
623 is_store = false;
624 /* fall through. */
625
626 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
627 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
628 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
629 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
630 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
631
632 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
633 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
634 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
635 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
636 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
637
638 case BUILT_IN_SYNC_FETCH_AND_OR_1:
639 case BUILT_IN_SYNC_FETCH_AND_OR_2:
640 case BUILT_IN_SYNC_FETCH_AND_OR_4:
641 case BUILT_IN_SYNC_FETCH_AND_OR_8:
642 case BUILT_IN_SYNC_FETCH_AND_OR_16:
643
644 case BUILT_IN_SYNC_FETCH_AND_AND_1:
645 case BUILT_IN_SYNC_FETCH_AND_AND_2:
646 case BUILT_IN_SYNC_FETCH_AND_AND_4:
647 case BUILT_IN_SYNC_FETCH_AND_AND_8:
648 case BUILT_IN_SYNC_FETCH_AND_AND_16:
649
650 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
651 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
652 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
653 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
654 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
655
656 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
657 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
658 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
659 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
660
661 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
662 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
663 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
664 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
665 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
666
667 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
668 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
669 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
670 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
671 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
672
673 case BUILT_IN_SYNC_OR_AND_FETCH_1:
674 case BUILT_IN_SYNC_OR_AND_FETCH_2:
675 case BUILT_IN_SYNC_OR_AND_FETCH_4:
676 case BUILT_IN_SYNC_OR_AND_FETCH_8:
677 case BUILT_IN_SYNC_OR_AND_FETCH_16:
678
679 case BUILT_IN_SYNC_AND_AND_FETCH_1:
680 case BUILT_IN_SYNC_AND_AND_FETCH_2:
681 case BUILT_IN_SYNC_AND_AND_FETCH_4:
682 case BUILT_IN_SYNC_AND_AND_FETCH_8:
683 case BUILT_IN_SYNC_AND_AND_FETCH_16:
684
685 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
686 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
687 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
688 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
689 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
690
691 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
692 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
693 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
694 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
695
696 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
697 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
698 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
699 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
700 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
701
702 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
703 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
704 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
705 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
706 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
707
708 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
709 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
710 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
711 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
712 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
713
714 case BUILT_IN_SYNC_LOCK_RELEASE_1:
715 case BUILT_IN_SYNC_LOCK_RELEASE_2:
716 case BUILT_IN_SYNC_LOCK_RELEASE_4:
717 case BUILT_IN_SYNC_LOCK_RELEASE_8:
718 case BUILT_IN_SYNC_LOCK_RELEASE_16:
719
720 case BUILT_IN_ATOMIC_EXCHANGE_1:
721 case BUILT_IN_ATOMIC_EXCHANGE_2:
722 case BUILT_IN_ATOMIC_EXCHANGE_4:
723 case BUILT_IN_ATOMIC_EXCHANGE_8:
724 case BUILT_IN_ATOMIC_EXCHANGE_16:
725
726 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
727 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
728 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
729 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
730 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
731
732 case BUILT_IN_ATOMIC_STORE_1:
733 case BUILT_IN_ATOMIC_STORE_2:
734 case BUILT_IN_ATOMIC_STORE_4:
735 case BUILT_IN_ATOMIC_STORE_8:
736 case BUILT_IN_ATOMIC_STORE_16:
737
738 case BUILT_IN_ATOMIC_ADD_FETCH_1:
739 case BUILT_IN_ATOMIC_ADD_FETCH_2:
740 case BUILT_IN_ATOMIC_ADD_FETCH_4:
741 case BUILT_IN_ATOMIC_ADD_FETCH_8:
742 case BUILT_IN_ATOMIC_ADD_FETCH_16:
743
744 case BUILT_IN_ATOMIC_SUB_FETCH_1:
745 case BUILT_IN_ATOMIC_SUB_FETCH_2:
746 case BUILT_IN_ATOMIC_SUB_FETCH_4:
747 case BUILT_IN_ATOMIC_SUB_FETCH_8:
748 case BUILT_IN_ATOMIC_SUB_FETCH_16:
749
750 case BUILT_IN_ATOMIC_AND_FETCH_1:
751 case BUILT_IN_ATOMIC_AND_FETCH_2:
752 case BUILT_IN_ATOMIC_AND_FETCH_4:
753 case BUILT_IN_ATOMIC_AND_FETCH_8:
754 case BUILT_IN_ATOMIC_AND_FETCH_16:
755
756 case BUILT_IN_ATOMIC_NAND_FETCH_1:
757 case BUILT_IN_ATOMIC_NAND_FETCH_2:
758 case BUILT_IN_ATOMIC_NAND_FETCH_4:
759 case BUILT_IN_ATOMIC_NAND_FETCH_8:
760 case BUILT_IN_ATOMIC_NAND_FETCH_16:
761
762 case BUILT_IN_ATOMIC_XOR_FETCH_1:
763 case BUILT_IN_ATOMIC_XOR_FETCH_2:
764 case BUILT_IN_ATOMIC_XOR_FETCH_4:
765 case BUILT_IN_ATOMIC_XOR_FETCH_8:
766 case BUILT_IN_ATOMIC_XOR_FETCH_16:
767
768 case BUILT_IN_ATOMIC_OR_FETCH_1:
769 case BUILT_IN_ATOMIC_OR_FETCH_2:
770 case BUILT_IN_ATOMIC_OR_FETCH_4:
771 case BUILT_IN_ATOMIC_OR_FETCH_8:
772 case BUILT_IN_ATOMIC_OR_FETCH_16:
773
774 case BUILT_IN_ATOMIC_FETCH_ADD_1:
775 case BUILT_IN_ATOMIC_FETCH_ADD_2:
776 case BUILT_IN_ATOMIC_FETCH_ADD_4:
777 case BUILT_IN_ATOMIC_FETCH_ADD_8:
778 case BUILT_IN_ATOMIC_FETCH_ADD_16:
779
780 case BUILT_IN_ATOMIC_FETCH_SUB_1:
781 case BUILT_IN_ATOMIC_FETCH_SUB_2:
782 case BUILT_IN_ATOMIC_FETCH_SUB_4:
783 case BUILT_IN_ATOMIC_FETCH_SUB_8:
784 case BUILT_IN_ATOMIC_FETCH_SUB_16:
785
786 case BUILT_IN_ATOMIC_FETCH_AND_1:
787 case BUILT_IN_ATOMIC_FETCH_AND_2:
788 case BUILT_IN_ATOMIC_FETCH_AND_4:
789 case BUILT_IN_ATOMIC_FETCH_AND_8:
790 case BUILT_IN_ATOMIC_FETCH_AND_16:
791
792 case BUILT_IN_ATOMIC_FETCH_NAND_1:
793 case BUILT_IN_ATOMIC_FETCH_NAND_2:
794 case BUILT_IN_ATOMIC_FETCH_NAND_4:
795 case BUILT_IN_ATOMIC_FETCH_NAND_8:
796 case BUILT_IN_ATOMIC_FETCH_NAND_16:
797
798 case BUILT_IN_ATOMIC_FETCH_XOR_1:
799 case BUILT_IN_ATOMIC_FETCH_XOR_2:
800 case BUILT_IN_ATOMIC_FETCH_XOR_4:
801 case BUILT_IN_ATOMIC_FETCH_XOR_8:
802 case BUILT_IN_ATOMIC_FETCH_XOR_16:
803
804 case BUILT_IN_ATOMIC_FETCH_OR_1:
805 case BUILT_IN_ATOMIC_FETCH_OR_2:
806 case BUILT_IN_ATOMIC_FETCH_OR_4:
807 case BUILT_IN_ATOMIC_FETCH_OR_8:
808 case BUILT_IN_ATOMIC_FETCH_OR_16:
809 {
810 dest = gimple_call_arg (call, 0);
811 /* DEST represents the address of a memory location.
 812     instrument_derefs wants the memory location, so let's
813 dereference the address DEST before handing it to
814 instrument_derefs. */
815 if (TREE_CODE (dest) == ADDR_EXPR)
816 dest = TREE_OPERAND (dest, 0);
817 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
818 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
819 dest, build_int_cst (TREE_TYPE (dest), 0));
820 else
821 gcc_unreachable ();
822
823 access_size = int_size_in_bytes (TREE_TYPE (dest));
824 }
825
826 default:
 827     /* The other builtin memory accesses are not instrumented in this
828 function because they either don't have any length parameter,
829 or their length parameter is just a limit. */
830 break;
831 }
832
833 if (len != NULL_TREE)
834 {
835 if (source0 != NULL_TREE)
836 {
837 src0->start = source0;
838 src0->access_size = access_size;
839 *src0_len = len;
840 *src0_is_store = false;
841 }
842
843 if (source1 != NULL_TREE)
844 {
845 src1->start = source1;
846 src1->access_size = access_size;
847 *src1_len = len;
848 *src1_is_store = false;
849 }
850
851 if (dest != NULL_TREE)
852 {
853 dst->start = dest;
854 dst->access_size = access_size;
855 *dst_len = len;
856 *dst_is_store = true;
857 }
858
859 got_reference_p = true;
860 }
861 else if (dest)
862 {
863 dst->start = dest;
864 dst->access_size = access_size;
865 *dst_len = NULL_TREE;
866 *dst_is_store = is_store;
867 *dest_is_deref = true;
868 got_reference_p = true;
869 }
870
871 return got_reference_p;
872 }
873
874 /* Return true iff a given gimple statement has been instrumented.
875 Note that the statement is "defined" by the memory references it
876 contains. */
877
878 static bool
879 has_stmt_been_instrumented_p (gimple stmt)
880 {
881 if (gimple_assign_single_p (stmt))
882 {
883 bool r_is_store;
884 asan_mem_ref r;
885 asan_mem_ref_init (&r, NULL, 1);
886
887 if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
888 &r_is_store))
889 return has_mem_ref_been_instrumented (&r);
890 }
891 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
892 {
893 asan_mem_ref src0, src1, dest;
894 asan_mem_ref_init (&src0, NULL, 1);
895 asan_mem_ref_init (&src1, NULL, 1);
896 asan_mem_ref_init (&dest, NULL, 1);
897
898 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
899 bool src0_is_store = false, src1_is_store = false,
900 dest_is_store = false, dest_is_deref = false, intercepted_p = true;
901 if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
902 &src0, &src0_len, &src0_is_store,
903 &src1, &src1_len, &src1_is_store,
904 &dest, &dest_len, &dest_is_store,
905 &dest_is_deref, &intercepted_p))
906 {
907 if (src0.start != NULL_TREE
908 && !has_mem_ref_been_instrumented (&src0, src0_len))
909 return false;
910
911 if (src1.start != NULL_TREE
912 && !has_mem_ref_been_instrumented (&src1, src1_len))
913 return false;
914
915 if (dest.start != NULL_TREE
916 && !has_mem_ref_been_instrumented (&dest, dest_len))
917 return false;
918
919 return true;
920 }
921 }
922 return false;
923 }
924
925 /* Insert a memory reference into the hash table. */
926
927 static void
928 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
929 {
930 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
931
932 asan_mem_ref r;
933 asan_mem_ref_init (&r, ref, access_size);
934
935 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
936 if (*slot == NULL || (*slot)->access_size < access_size)
937 *slot = asan_mem_ref_new (ref, access_size);
938 }
939
940 /* Initialize shadow_ptr_types array. */
941
942 static void
943 asan_init_shadow_ptr_types (void)
944 {
945 asan_shadow_set = new_alias_set ();
946 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
947 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
948 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
949 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
950 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
951 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
952 initialize_sanitizer_builtins ();
953 }
954
955 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
956
957 static tree
958 asan_pp_string (pretty_printer *pp)
959 {
960 const char *buf = pp_formatted_text (pp);
961 size_t len = strlen (buf);
962 tree ret = build_string (len + 1, buf);
963 TREE_TYPE (ret)
964 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
965 build_index_type (size_int (len)));
966 TREE_READONLY (ret) = 1;
967 TREE_STATIC (ret) = 1;
968 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
969 }
970
 971 /* Return a CONST_INT representing 4 consecutive shadow memory bytes. */
972
973 static rtx
974 asan_shadow_cst (unsigned char shadow_bytes[4])
975 {
976 int i;
977 unsigned HOST_WIDE_INT val = 0;
978 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
979 for (i = 0; i < 4; i++)
980 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
981 << (BITS_PER_UNIT * i);
982 return gen_int_mode (val, SImode);
983 }
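/* For instance, shadow_bytes = { 0xf1, 0xf1, 0x00, 0x04 } yields the
   SImode constant 0x0400f1f1 on a little-endian target and 0xf1f10004
   on a big-endian one, so that the bytes land in shadow memory in the
   order given.  */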
984
 985 /* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't emit a library call here
986 though. */
987
988 static void
989 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
990 {
991 rtx_insn *insn, *insns, *jump;
992 rtx_code_label *top_label;
993 rtx end, addr, tmp;
994
995 start_sequence ();
996 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
997 insns = get_insns ();
998 end_sequence ();
999 for (insn = insns; insn; insn = NEXT_INSN (insn))
1000 if (CALL_P (insn))
1001 break;
1002 if (insn == NULL_RTX)
1003 {
1004 emit_insn (insns);
1005 return;
1006 }
1007
1008 gcc_assert ((len & 3) == 0);
1009 top_label = gen_label_rtx ();
1010 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
1011 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1012 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1013 emit_label (top_label);
1014
1015 emit_move_insn (shadow_mem, const0_rtx);
1016 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1017 true, OPTAB_LIB_WIDEN);
1018 if (tmp != addr)
1019 emit_move_insn (addr, tmp);
1020 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1021 jump = get_last_insn ();
1022 gcc_assert (JUMP_P (jump));
1023 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
1024 }
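/* When clear_storage would have needed a library call, asan_clear_shadow
   above instead emits a loop that is roughly equivalent to:

     uint32_t *p = shadow, *end = p + len / 4;
     do
       *p++ = 0;
     while (p < end);
*/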
1025
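/* Emit the LASANPC label at the start of the current function.  Its
   address is stored into the stack frame description by
   asan_emit_stack_protection below, so that the run-time library can
   report the PC of the instrumented function.  */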
1026 void
1027 asan_function_start (void)
1028 {
1029 section *fnsec = function_section (current_function_decl);
1030 switch_to_section (fnsec);
1031 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1032 current_function_funcdef_no);
1033 }
1034
1035 /* Insert code to protect stack vars. The prologue sequence should be emitted
1036 directly, epilogue sequence returned. BASE is the register holding the
 1037     stack base, to which the OFFSETS array offsets are relative; the OFFSETS
1038 array contains pairs of offsets in reverse order, always the end offset
1039 of some gap that needs protection followed by starting offset,
1040 and DECLS is an array of representative decls for each var partition.
1041 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1042 elements long (OFFSETS include gap before the first variable as well
1043 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1044 register which stack vars DECL_RTLs are based on. Either BASE should be
1045 assigned to PBASE, when not doing use after return protection, or
1046 corresponding address based on __asan_stack_malloc* return value. */
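/* As a C-like approximation (not the exact RTL emitted), the prologue
   built below does:

     base[0] = ASAN_STACK_FRAME_MAGIC;   // 0x41b58ab3 per the layout above
     base[1] = (uptr) frame_description_string;
     base[2] = (uptr) LASANPC;           // see asan_function_start
     ... poison the shadow of the red zones, keep the variables unpoisoned ...

   and the returned epilogue sequence clears that shadow again (or, when
   the use-after-return path took a fake frame from __asan_stack_malloc_N,
   marks the frame retired and releases it via __asan_stack_free_N).  */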
1047
1048 rtx_insn *
1049 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1050 HOST_WIDE_INT *offsets, tree *decls, int length)
1051 {
1052 rtx shadow_base, shadow_mem, ret, mem, orig_base;
1053 rtx_code_label *lab;
1054 rtx_insn *insns;
1055 char buf[30];
1056 unsigned char shadow_bytes[4];
1057 HOST_WIDE_INT base_offset = offsets[length - 1];
1058 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1059 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1060 HOST_WIDE_INT last_offset, last_size;
1061 int l;
1062 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1063 tree str_cst, decl, id;
1064 int use_after_return_class = -1;
1065
1066 if (shadow_ptr_types[0] == NULL_TREE)
1067 asan_init_shadow_ptr_types ();
1068
1069 /* First of all, prepare the description string. */
1070 pretty_printer asan_pp;
1071
1072 pp_decimal_int (&asan_pp, length / 2 - 1);
1073 pp_space (&asan_pp);
1074 for (l = length - 2; l; l -= 2)
1075 {
1076 tree decl = decls[l / 2 - 1];
1077 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1078 pp_space (&asan_pp);
1079 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1080 pp_space (&asan_pp);
1081 if (DECL_P (decl) && DECL_NAME (decl))
1082 {
1083 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1084 pp_space (&asan_pp);
1085 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1086 }
1087 else
1088 pp_string (&asan_pp, "9 <unknown>");
1089 pp_space (&asan_pp);
1090 }
1091 str_cst = asan_pp_string (&asan_pp);
1092
1093 /* Emit the prologue sequence. */
1094 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1095 && ASAN_USE_AFTER_RETURN)
1096 {
1097 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1098 /* __asan_stack_malloc_N guarantees alignment
1099 N < 6 ? (64 << N) : 4096 bytes. */
1100 if (alignb > (use_after_return_class < 6
1101 ? (64U << use_after_return_class) : 4096U))
1102 use_after_return_class = -1;
1103 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1104 base_align_bias = ((asan_frame_size + alignb - 1)
1105 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1106 }
1107 /* Align base if target is STRICT_ALIGNMENT. */
1108 if (STRICT_ALIGNMENT)
1109 base = expand_binop (Pmode, and_optab, base,
1110 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1111 << ASAN_SHADOW_SHIFT)
1112 / BITS_PER_UNIT), Pmode), NULL_RTX,
1113 1, OPTAB_DIRECT);
1114
1115 if (use_after_return_class == -1 && pbase)
1116 emit_move_insn (pbase, base);
1117
1118 base = expand_binop (Pmode, add_optab, base,
1119 gen_int_mode (base_offset - base_align_bias, Pmode),
1120 NULL_RTX, 1, OPTAB_DIRECT);
1121 orig_base = NULL_RTX;
1122 if (use_after_return_class != -1)
1123 {
1124 if (asan_detect_stack_use_after_return == NULL_TREE)
1125 {
1126 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1127 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1128 integer_type_node);
1129 SET_DECL_ASSEMBLER_NAME (decl, id);
1130 TREE_ADDRESSABLE (decl) = 1;
1131 DECL_ARTIFICIAL (decl) = 1;
1132 DECL_IGNORED_P (decl) = 1;
1133 DECL_EXTERNAL (decl) = 1;
1134 TREE_STATIC (decl) = 1;
1135 TREE_PUBLIC (decl) = 1;
1136 TREE_USED (decl) = 1;
1137 asan_detect_stack_use_after_return = decl;
1138 }
1139 orig_base = gen_reg_rtx (Pmode);
1140 emit_move_insn (orig_base, base);
1141 ret = expand_normal (asan_detect_stack_use_after_return);
1142 lab = gen_label_rtx ();
1143 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1144 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1145 VOIDmode, 0, lab, very_likely);
1146 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1147 use_after_return_class);
1148 ret = init_one_libfunc (buf);
1149 rtx addr = convert_memory_address (ptr_mode, base);
1150 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
1151 GEN_INT (asan_frame_size
1152 + base_align_bias),
1153 TYPE_MODE (pointer_sized_int_node),
1154 addr, ptr_mode);
1155 ret = convert_memory_address (Pmode, ret);
1156 emit_move_insn (base, ret);
1157 emit_label (lab);
1158 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1159 gen_int_mode (base_align_bias
1160 - base_offset, Pmode),
1161 NULL_RTX, 1, OPTAB_DIRECT));
1162 }
1163 mem = gen_rtx_MEM (ptr_mode, base);
1164 mem = adjust_address (mem, VOIDmode, base_align_bias);
1165 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1166 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1167 emit_move_insn (mem, expand_normal (str_cst));
1168 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1169 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1170 id = get_identifier (buf);
1171 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1172 VAR_DECL, id, char_type_node);
1173 SET_DECL_ASSEMBLER_NAME (decl, id);
1174 TREE_ADDRESSABLE (decl) = 1;
1175 TREE_READONLY (decl) = 1;
1176 DECL_ARTIFICIAL (decl) = 1;
1177 DECL_IGNORED_P (decl) = 1;
1178 TREE_STATIC (decl) = 1;
1179 TREE_PUBLIC (decl) = 0;
1180 TREE_USED (decl) = 1;
1181 DECL_INITIAL (decl) = decl;
1182 TREE_ASM_WRITTEN (decl) = 1;
1183 TREE_ASM_WRITTEN (id) = 1;
1184 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1185 shadow_base = expand_binop (Pmode, lshr_optab, base,
1186 GEN_INT (ASAN_SHADOW_SHIFT),
1187 NULL_RTX, 1, OPTAB_DIRECT);
1188 shadow_base
1189 = plus_constant (Pmode, shadow_base,
1190 asan_shadow_offset ()
1191 + (base_align_bias >> ASAN_SHADOW_SHIFT));
1192 gcc_assert (asan_shadow_set != -1
1193 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1194 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1195 set_mem_alias_set (shadow_mem, asan_shadow_set);
1196 if (STRICT_ALIGNMENT)
1197 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1198 prev_offset = base_offset;
1199 for (l = length; l; l -= 2)
1200 {
1201 if (l == 2)
1202 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1203 offset = offsets[l - 1];
1204 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1205 {
1206 int i;
1207 HOST_WIDE_INT aoff
1208 = base_offset + ((offset - base_offset)
1209 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1210 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1211 (aoff - prev_offset)
1212 >> ASAN_SHADOW_SHIFT);
1213 prev_offset = aoff;
1214 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1215 if (aoff < offset)
1216 {
1217 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1218 shadow_bytes[i] = 0;
1219 else
1220 shadow_bytes[i] = offset - aoff;
1221 }
1222 else
1223 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1224 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1225 offset = aoff;
1226 }
1227 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1228 {
1229 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1230 (offset - prev_offset)
1231 >> ASAN_SHADOW_SHIFT);
1232 prev_offset = offset;
1233 memset (shadow_bytes, cur_shadow_byte, 4);
1234 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1235 offset += ASAN_RED_ZONE_SIZE;
1236 }
1237 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1238 }
1239 do_pending_stack_adjust ();
1240
1241 /* Construct epilogue sequence. */
1242 start_sequence ();
1243
1244 lab = NULL;
1245 if (use_after_return_class != -1)
1246 {
1247 rtx_code_label *lab2 = gen_label_rtx ();
1248 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1249 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1250 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1251 VOIDmode, 0, lab2, very_likely);
1252 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1253 set_mem_alias_set (shadow_mem, asan_shadow_set);
1254 mem = gen_rtx_MEM (ptr_mode, base);
1255 mem = adjust_address (mem, VOIDmode, base_align_bias);
1256 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1257 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1258 if (use_after_return_class < 5
1259 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1260 BITS_PER_UNIT, true))
1261 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1262 BITS_PER_UNIT, true, 0);
1263 else if (use_after_return_class >= 5
1264 || !set_storage_via_setmem (shadow_mem,
1265 GEN_INT (sz),
1266 gen_int_mode (c, QImode),
1267 BITS_PER_UNIT, BITS_PER_UNIT,
1268 -1, sz, sz, sz))
1269 {
1270 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1271 use_after_return_class);
1272 ret = init_one_libfunc (buf);
1273 rtx addr = convert_memory_address (ptr_mode, base);
1274 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1275 emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1276 GEN_INT (asan_frame_size + base_align_bias),
1277 TYPE_MODE (pointer_sized_int_node),
1278 orig_addr, ptr_mode);
1279 }
1280 lab = gen_label_rtx ();
1281 emit_jump (lab);
1282 emit_label (lab2);
1283 }
1284
1285 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1286 set_mem_alias_set (shadow_mem, asan_shadow_set);
1287
1288 if (STRICT_ALIGNMENT)
1289 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1290
1291 prev_offset = base_offset;
1292 last_offset = base_offset;
1293 last_size = 0;
1294 for (l = length; l; l -= 2)
1295 {
1296 offset = base_offset + ((offsets[l - 1] - base_offset)
1297 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1298 if (last_offset + last_size != offset)
1299 {
1300 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1301 (last_offset - prev_offset)
1302 >> ASAN_SHADOW_SHIFT);
1303 prev_offset = last_offset;
1304 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1305 last_offset = offset;
1306 last_size = 0;
1307 }
1308 last_size += base_offset + ((offsets[l - 2] - base_offset)
1309 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1310 - offset;
1311 }
1312 if (last_size)
1313 {
1314 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1315 (last_offset - prev_offset)
1316 >> ASAN_SHADOW_SHIFT);
1317 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1318 }
1319
1320 do_pending_stack_adjust ();
1321 if (lab)
1322 emit_label (lab);
1323
1324 insns = get_insns ();
1325 end_sequence ();
1326 return insns;
1327 }
1328
 1329 /* Return true if DECL, a global var, might be overridden and therefore
 1330     needs a local alias. */
1331
1332 static bool
1333 asan_needs_local_alias (tree decl)
1334 {
1335 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1336 }
1337
1338 /* Return true if DECL is a VAR_DECL that should be protected
1339 by Address Sanitizer, by appending a red zone with protected
1340 shadow memory after it and aligning it to at least
1341 ASAN_RED_ZONE_SIZE bytes. */
1342
1343 bool
1344 asan_protect_global (tree decl)
1345 {
1346 if (!ASAN_GLOBALS)
1347 return false;
1348
1349 rtx rtl, symbol;
1350
1351 if (TREE_CODE (decl) == STRING_CST)
1352 {
1353 /* Instrument all STRING_CSTs except those created
1354 by asan_pp_string here. */
1355 if (shadow_ptr_types[0] != NULL_TREE
1356 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1357 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1358 return false;
1359 return true;
1360 }
1361 if (TREE_CODE (decl) != VAR_DECL
1362 /* TLS vars aren't statically protectable. */
1363 || DECL_THREAD_LOCAL_P (decl)
1364 /* Externs will be protected elsewhere. */
1365 || DECL_EXTERNAL (decl)
1366 || !DECL_RTL_SET_P (decl)
1367 /* Comdat vars pose an ABI problem, we can't know if
1368 the var that is selected by the linker will have
1369 padding or not. */
1370 || DECL_ONE_ONLY (decl)
1371 /* Similarly for common vars. People can use -fno-common.
1372 Note: Linux kernel is built with -fno-common, so we do instrument
1373 globals there even if it is C. */
1374 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1375 /* Don't protect if using user section, often vars placed
1376 into user section from multiple TUs are then assumed
1377 to be an array of such vars, putting padding in there
1378 breaks this assumption. */
1379 || (DECL_SECTION_NAME (decl) != NULL
1380 && !symtab_node::get (decl)->implicit_section
1381 && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1382 || DECL_SIZE (decl) == 0
1383 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1384 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1385 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1386 || TREE_TYPE (decl) == ubsan_get_source_location_type ())
1387 return false;
1388
1389 rtl = DECL_RTL (decl);
1390 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1391 return false;
1392 symbol = XEXP (rtl, 0);
1393
1394 if (CONSTANT_POOL_ADDRESS_P (symbol)
1395 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1396 return false;
1397
1398 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1399 return false;
1400
1401 #ifndef ASM_OUTPUT_DEF
1402 if (asan_needs_local_alias (decl))
1403 return false;
1404 #endif
1405
1406 return true;
1407 }
1408
1409 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1410 IS_STORE is either 1 (for a store) or 0 (for a load). */
1411
1412 static tree
1413 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1414 int *nargs)
1415 {
1416 static enum built_in_function report[2][2][6]
1417 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1418 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1419 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1420 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1421 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1422 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1423 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1424 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1425 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1426 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1427 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1428 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1429 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1430 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1431 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1432 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1433 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1434 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1435 if (size_in_bytes == -1)
1436 {
1437 *nargs = 2;
1438 return builtin_decl_implicit (report[recover_p][is_store][5]);
1439 }
1440 *nargs = 1;
1441 int size_log2 = exact_log2 (size_in_bytes);
1442 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1443 }
1444
1445 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1446 IS_STORE is either 1 (for a store) or 0 (for a load). */
1447
1448 static tree
1449 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1450 int *nargs)
1451 {
1452 static enum built_in_function check[2][2][6]
1453 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1454 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1455 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1456 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1457 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1458 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1459 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1460 BUILT_IN_ASAN_LOAD2_NOABORT,
1461 BUILT_IN_ASAN_LOAD4_NOABORT,
1462 BUILT_IN_ASAN_LOAD8_NOABORT,
1463 BUILT_IN_ASAN_LOAD16_NOABORT,
1464 BUILT_IN_ASAN_LOADN_NOABORT },
1465 { BUILT_IN_ASAN_STORE1_NOABORT,
1466 BUILT_IN_ASAN_STORE2_NOABORT,
1467 BUILT_IN_ASAN_STORE4_NOABORT,
1468 BUILT_IN_ASAN_STORE8_NOABORT,
1469 BUILT_IN_ASAN_STORE16_NOABORT,
1470 BUILT_IN_ASAN_STOREN_NOABORT } } };
1471 if (size_in_bytes == -1)
1472 {
1473 *nargs = 2;
1474 return builtin_decl_implicit (check[recover_p][is_store][5]);
1475 }
1476 *nargs = 1;
1477 int size_log2 = exact_log2 (size_in_bytes);
1478 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1479 }
1480
1481 /* Split the current basic block and create a condition statement
1482 insertion point right before or after the statement pointed to by
1483 ITER. Return an iterator to the point at which the caller might
1484 safely insert the condition statement.
1485
1486 THEN_BLOCK must be set to the address of an uninitialized instance
1487 of basic_block. The function will then set *THEN_BLOCK to the
1488 'then block' of the condition statement to be inserted by the
1489 caller.
1490
1491 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1492 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1493
 1494     Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1495 block' of the condition statement to be inserted by the caller.
1496
1497 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1498 statements starting from *ITER, and *THEN_BLOCK is a new empty
1499 block.
1500
 1501     *ITER is adjusted to always point to the first statement
 1502     of the basic block *FALLTHROUGH_BLOCK. That statement is the
1503 same as what ITER was pointing to prior to calling this function,
1504 if BEFORE_P is true; otherwise, it is its following statement. */
1505
1506 gimple_stmt_iterator
1507 create_cond_insert_point (gimple_stmt_iterator *iter,
1508 bool before_p,
1509 bool then_more_likely_p,
1510 bool create_then_fallthru_edge,
1511 basic_block *then_block,
1512 basic_block *fallthrough_block)
1513 {
1514 gimple_stmt_iterator gsi = *iter;
1515
1516 if (!gsi_end_p (gsi) && before_p)
1517 gsi_prev (&gsi);
1518
1519 basic_block cur_bb = gsi_bb (*iter);
1520
1521 edge e = split_block (cur_bb, gsi_stmt (gsi));
1522
1523 /* Get a hold on the 'condition block', the 'then block' and the
1524 'else block'. */
1525 basic_block cond_bb = e->src;
1526 basic_block fallthru_bb = e->dest;
1527 basic_block then_bb = create_empty_bb (cond_bb);
1528 if (current_loops)
1529 {
1530 add_bb_to_loop (then_bb, cond_bb->loop_father);
1531 loops_state_set (LOOPS_NEED_FIXUP);
1532 }
1533
1534 /* Set up the newly created 'then block'. */
1535 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1536 int fallthrough_probability
1537 = then_more_likely_p
1538 ? PROB_VERY_UNLIKELY
1539 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1540 e->probability = PROB_ALWAYS - fallthrough_probability;
1541 if (create_then_fallthru_edge)
1542 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1543
1544 /* Set up the fallthrough basic block. */
1545 e = find_edge (cond_bb, fallthru_bb);
1546 e->flags = EDGE_FALSE_VALUE;
1547 e->count = cond_bb->count;
1548 e->probability = fallthrough_probability;
1549
1550 /* Update dominance info for the newly created then_bb; note that
1551 fallthru_bb's dominance info has already been updated by
 1552     split_block. */
1553 if (dom_info_available_p (CDI_DOMINATORS))
1554 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1555
1556 *then_block = then_bb;
1557 *fallthrough_block = fallthru_bb;
1558 *iter = gsi_start_bb (fallthru_bb);
1559
1560 return gsi_last_bb (cond_bb);
1561 }
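/* The resulting control flow around the insertion point looks like:

        cond_bb
        /     \
   true/       \false
      v         v
   then_bb --> fallthru_bb

   where the then_bb -> fallthru_bb edge only exists when
   CREATE_THEN_FALLTHRU_EDGE is true, and *ITER is left pointing at the
   first statement of fallthru_bb.  */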
1562
1563 /* Insert an if condition followed by a 'then block' right before the
1564 statement pointed to by ITER. The fallthrough block -- which is the
1565 else block of the condition as well as the destination of the
 1566     outgoing edge of the 'then block' -- starts with the statement
1567 pointed to by ITER.
1568
1569 COND is the condition of the if.
1570
1571 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1572 'then block' is higher than the probability of the edge to the
1573 fallthrough block.
1574
1575 Upon completion of the function, *THEN_BB is set to the newly
1576 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1577 fallthrough block.
1578
1579 *ITER is adjusted to still point to the same statement it was
1580 pointing to initially. */
1581
1582 static void
1583 insert_if_then_before_iter (gcond *cond,
1584 gimple_stmt_iterator *iter,
1585 bool then_more_likely_p,
1586 basic_block *then_bb,
1587 basic_block *fallthrough_bb)
1588 {
1589 gimple_stmt_iterator cond_insert_point =
1590 create_cond_insert_point (iter,
1591 /*before_p=*/true,
1592 then_more_likely_p,
1593 /*create_then_fallthru_edge=*/true,
1594 then_bb,
1595 fallthrough_bb);
1596 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1597 }
1598
 1599 /* Build and return the shadow memory value loaded from
 1600     (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
1601
1602 static tree
1603 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1604 tree base_addr, tree shadow_ptr_type)
1605 {
1606 tree t, uintptr_type = TREE_TYPE (base_addr);
1607 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1608 gimple g;
1609
1610 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1611 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1612 base_addr, t);
1613 gimple_set_location (g, location);
1614 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1615
1616 t = build_int_cst (uintptr_type, asan_shadow_offset ());
1617 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1618 gimple_assign_lhs (g), t);
1619 gimple_set_location (g, location);
1620 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1621
1622 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1623 gimple_assign_lhs (g));
1624 gimple_set_location (g, location);
1625 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1626
1627 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1628 build_int_cst (shadow_ptr_type, 0));
1629 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
1630 gimple_set_location (g, location);
1631 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1632 return gimple_assign_lhs (g);
1633 }
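/* In C terms, the gimple sequence built above computes roughly

     shadow = *(shadow_type *) ((base_addr >> ASAN_SHADOW_SHIFT)
                                + asan_shadow_offset ());

   and the SSA name holding that loaded shadow value is returned.  */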
1634
1635 /* BASE can already be an SSA_NAME; in that case, do not create a
1636 new SSA_NAME for it. */
1637
1638 static tree
1639 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1640 bool before_p)
1641 {
1642 if (TREE_CODE (base) == SSA_NAME)
1643 return base;
1644 gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
1645 TREE_CODE (base), base);
1646 gimple_set_location (g, loc);
1647 if (before_p)
1648 gsi_insert_before (iter, g, GSI_SAME_STMT);
1649 else
1650 gsi_insert_after (iter, g, GSI_NEW_STMT);
1651 return gimple_assign_lhs (g);
1652 }
1653
1654 /* LEN can already have necessary size and precision;
1655 in that case, do not create a new variable. */
1656
1657 tree
1658 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1659 bool before_p)
1660 {
1661 if (ptrofftype_p (len))
1662 return len;
1663 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1664 NOP_EXPR, len);
1665 gimple_set_location (g, loc);
1666 if (before_p)
1667 gsi_insert_before (iter, g, GSI_SAME_STMT);
1668 else
1669 gsi_insert_after (iter, g, GSI_NEW_STMT);
1670 return gimple_assign_lhs (g);
1671 }
1672
1673 /* Instrument the memory access instruction BASE. Insert new
1674 statements before or after ITER.
1675
1676 Note that the memory access represented by BASE can be either an
1677 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1678 location. IS_STORE is TRUE for a store, FALSE for a load.
1679 BEFORE_P is TRUE for inserting the instrumentation code before
1680 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1681 for a scalar memory access and FALSE for memory region access.
1682 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
 1683     IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
 1684     non-zero length. ALIGN is the alignment of the accessed memory object.
1687
1688 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1689 statement it was pointing to prior to calling this function,
1690 otherwise, it points to the statement logically following it. */
1691
1692 static void
1693 build_check_stmt (location_t loc, tree base, tree len,
1694 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1695 bool is_non_zero_len, bool before_p, bool is_store,
1696 bool is_scalar_access, unsigned int align = 0)
1697 {
1698 gimple_stmt_iterator gsi = *iter;
1699 gimple g;
1700
1701 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1702
1703 gsi = *iter;
1704
1705 base = unshare_expr (base);
1706 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1707
1708 if (len)
1709 {
1710 len = unshare_expr (len);
1711 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1712 }
1713 else
1714 {
1715 gcc_assert (size_in_bytes != -1);
1716 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1717 }
1718
1719 if (size_in_bytes > 1)
1720 {
1721 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1722 || size_in_bytes > 16)
1723 is_scalar_access = false;
1724 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1725 {
1726 /* On non-strict-alignment targets, if a
1727 16-byte access is only 8-byte aligned,
1728 this results in a misaligned 2-byte
1729 shadow memory load, but it can still
1730 be handled with a single read. */
1731 if (size_in_bytes != 16
1732 || STRICT_ALIGNMENT
1733 || align < 8 * BITS_PER_UNIT)
1734 is_scalar_access = false;
1735 }
1736 }
1737
1738 HOST_WIDE_INT flags = 0;
1739 if (is_store)
1740 flags |= ASAN_CHECK_STORE;
1741 if (is_non_zero_len)
1742 flags |= ASAN_CHECK_NON_ZERO_LEN;
1743 if (is_scalar_access)
1744 flags |= ASAN_CHECK_SCALAR_ACCESS;
1745
1746 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1747 build_int_cst (integer_type_node, flags),
1748 base, len,
1749 build_int_cst (integer_type_node,
1750 align / BITS_PER_UNIT));
1751 gimple_set_location (g, loc);
1752 if (before_p)
1753 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1754 else
1755 {
1756 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1757 gsi_next (&gsi);
1758 *iter = gsi;
1759 }
1760 }
1761
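/* A standalone restatement (illustration only, not part of the compiler) of
   the size/alignment screening above that decides whether an access can stay
   a scalar check.  BITS_PER_UNIT is written as 8 and STRICT_ALIGNMENT is
   passed in, since both are target macros; ALIGN is in bits, or 0 if unknown.  */

#include <stdbool.h>

static bool
example_keep_scalar_access (long size_in_bytes, unsigned int align,
                            bool strict_alignment)
{
  if (size_in_bytes <= 1)
    return true;
  /* Only power-of-two sizes up to 16 bytes map onto a single shadow check.  */
  if ((size_in_bytes & (size_in_bytes - 1)) != 0 || size_in_bytes > 16)
    return false;
  if (align && align < size_in_bytes * 8)
    /* An 8-byte-aligned 16-byte access can still use one (possibly
       misaligned) 2-byte shadow load on non-strict-alignment targets.  */
    return size_in_bytes == 16 && !strict_alignment && align >= 8 * 8;
  return true;
}
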
1762 /* If T represents a memory access, add instrumentation code before ITER.
1763 LOCATION is source code location.
1764 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1765
1766 static void
1767 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1768 location_t location, bool is_store)
1769 {
1770 if (is_store && !ASAN_INSTRUMENT_WRITES)
1771 return;
1772 if (!is_store && !ASAN_INSTRUMENT_READS)
1773 return;
1774
1775 tree type, base;
1776 HOST_WIDE_INT size_in_bytes;
1777
1778 type = TREE_TYPE (t);
1779 switch (TREE_CODE (t))
1780 {
1781 case ARRAY_REF:
1782 case COMPONENT_REF:
1783 case INDIRECT_REF:
1784 case MEM_REF:
1785 case VAR_DECL:
1786 case BIT_FIELD_REF:
1787 break;
1788 /* Anything else is not instrumented. */
1789 default:
1790 return;
1791 }
1792
1793 size_in_bytes = int_size_in_bytes (type);
1794 if (size_in_bytes <= 0)
1795 return;
1796
1797 HOST_WIDE_INT bitsize, bitpos;
1798 tree offset;
1799 machine_mode mode;
1800 int volatilep = 0, unsignedp = 0;
1801 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1802 &mode, &unsignedp, &volatilep, false);
1803
1804 if (TREE_CODE (t) == COMPONENT_REF
1805 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1806 {
1807 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1808 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1809 TREE_OPERAND (t, 0), repr,
1810 NULL_TREE), location, is_store);
1811 return;
1812 }
1813
1814 if (bitpos % BITS_PER_UNIT
1815 || bitsize != size_in_bytes * BITS_PER_UNIT)
1816 return;
1817
1818 if (TREE_CODE (inner) == VAR_DECL
1819 && offset == NULL_TREE
1820 && bitpos >= 0
1821 && DECL_SIZE (inner)
1822 && tree_fits_shwi_p (DECL_SIZE (inner))
1823 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1824 {
1825 if (DECL_THREAD_LOCAL_P (inner))
1826 return;
1827 if (!ASAN_GLOBALS && is_global_var (inner))
1828 return;
1829 if (!TREE_STATIC (inner))
1830 {
1831 /* Automatic vars in the current function will always
1832 be accessible. */
1833 if (decl_function_context (inner) == current_function_decl)
1834 return;
1835 }
1836 /* Always instrument external vars, they might be dynamically
1837 initialized. */
1838 else if (!DECL_EXTERNAL (inner))
1839 {
1840 /* Static vars that are known not to be dynamically
1841 initialized will always be accessible. */
1842 varpool_node *vnode = varpool_node::get (inner);
1843 if (vnode && !vnode->dynamically_initialized)
1844 return;
1845 }
1846 }
1847
1848 base = build_fold_addr_expr (t);
1849 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1850 {
1851 unsigned int align = get_object_alignment (t);
1852 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1853 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1854 is_store, /*is_scalar_access*/true, align);
1855 update_mem_ref_hash_table (base, size_in_bytes);
1856 update_mem_ref_hash_table (t, size_in_bytes);
1857 }
1858
1859 }
1860
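/* A user-level C illustration (not GCC internals; the names below are
   hypothetical).  Under the rules above, the constant-index store into the
   local array is provably within the DECL_SIZE of an automatic variable of
   the current function and is not instrumented, while the store through the
   incoming pointer gets a check.  */

static void
example_skipped_vs_checked (char *p)
{
  char buf[16];
  buf[3] = 1;   /* in-bounds access to a local: no instrumentation */
  p[3] = 1;     /* access through a pointer: instrumented */
}
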
1861 /* Insert a memory reference into the hash table if the access
1862 length can be determined at compile time. */
1863
1864 static void
1865 maybe_update_mem_ref_hash_table (tree base, tree len)
1866 {
1867 if (!POINTER_TYPE_P (TREE_TYPE (base))
1868 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1869 return;
1870
1871 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1872
1873 if (size_in_bytes != -1)
1874 update_mem_ref_hash_table (base, size_in_bytes);
1875 }
1876
1877 /* Instrument an access to a contiguous memory region that starts at
1878 the address pointed to by BASE, over a length of LEN (expressed in
1879 units of sizeof (*BASE) bytes). ITER points to the instruction before
1880 which the instrumentation instructions must be inserted. LOCATION
1881 is the source location that the instrumentation instructions must
1882 have. If IS_STORE is true, then the memory access is a store;
1883 otherwise, it's a load. */
1884
1885 static void
1886 instrument_mem_region_access (tree base, tree len,
1887 gimple_stmt_iterator *iter,
1888 location_t location, bool is_store)
1889 {
1890 if (!POINTER_TYPE_P (TREE_TYPE (base))
1891 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1892 || integer_zerop (len))
1893 return;
1894
1895 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1896
1897 if ((size_in_bytes == -1)
1898 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1899 {
1900 build_check_stmt (location, base, len, size_in_bytes, iter,
1901 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1902 is_store, /*is_scalar_access*/false, /*align*/0);
1903 }
1904
1905 maybe_update_mem_ref_hash_table (base, len);
1906 *iter = gsi_for_stmt (gsi_stmt (*iter));
1907 }
1908
1909 /* Instrument the call to a built-in memory access function that is
1910 pointed to by the iterator ITER.
1911
1912 Upon completion, return TRUE iff *ITER has been advanced to the
1913 statement following the one it was originally pointing to. */
1914
1915 static bool
1916 instrument_builtin_call (gimple_stmt_iterator *iter)
1917 {
1918 if (!ASAN_MEMINTRIN)
1919 return false;
1920
1921 bool iter_advanced_p = false;
1922 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1923
1924 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1925
1926 location_t loc = gimple_location (call);
1927
1928 asan_mem_ref src0, src1, dest;
1929 asan_mem_ref_init (&src0, NULL, 1);
1930 asan_mem_ref_init (&src1, NULL, 1);
1931 asan_mem_ref_init (&dest, NULL, 1);
1932
1933 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1934 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1935 dest_is_deref = false, intercepted_p = true;
1936
1937 if (get_mem_refs_of_builtin_call (call,
1938 &src0, &src0_len, &src0_is_store,
1939 &src1, &src1_len, &src1_is_store,
1940 &dest, &dest_len, &dest_is_store,
1941 &dest_is_deref, &intercepted_p))
1942 {
1943 if (dest_is_deref)
1944 {
1945 instrument_derefs (iter, dest.start, loc, dest_is_store);
1946 gsi_next (iter);
1947 iter_advanced_p = true;
1948 }
1949 else if (!intercepted_p
1950 && (src0_len || src1_len || dest_len))
1951 {
1952 if (src0.start != NULL_TREE)
1953 instrument_mem_region_access (src0.start, src0_len,
1954 iter, loc, /*is_store=*/false);
1955 if (src1.start != NULL_TREE)
1956 instrument_mem_region_access (src1.start, src1_len,
1957 iter, loc, /*is_store=*/false);
1958 if (dest.start != NULL_TREE)
1959 instrument_mem_region_access (dest.start, dest_len,
1960 iter, loc, /*is_store=*/true);
1961
1962 *iter = gsi_for_stmt (call);
1963 gsi_next (iter);
1964 iter_advanced_p = true;
1965 }
1966 else
1967 {
1968 if (src0.start != NULL_TREE)
1969 maybe_update_mem_ref_hash_table (src0.start, src0_len);
1970 if (src1.start != NULL_TREE)
1971 maybe_update_mem_ref_hash_table (src1.start, src1_len);
1972 if (dest.start != NULL_TREE)
1973 maybe_update_mem_ref_hash_table (dest.start, dest_len);
1974 }
1975 }
1976 return iter_advanced_p;
1977 }
1978
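/* A user-level C illustration (not GCC internals).  A call like the one
   below touches SRC as an N-byte load and DST as an N-byte store; the code
   above either emits region checks for those two accesses or, when the
   run-time library already intercepts the builtin (the intercepted_p case),
   merely records them in the mem-ref hash table so later accesses to the
   same bytes are not re-instrumented.  */

static void
example_builtin_mem_refs (char *dst, const char *src, unsigned long n)
{
  __builtin_memcpy (dst, src, n);
}
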
1979 /* Instrument the assignment statement ITER if it is subject to
1980 instrumentation. Return TRUE iff instrumentation actually
1981 happened. In that case, the iterator ITER is advanced to the next
1982 logical expression following the one initially pointed to by ITER,
1983 and the memory reference whose access has just been
1984 instrumented is added to the memory references hash table. */
1985
1986 static bool
1987 maybe_instrument_assignment (gimple_stmt_iterator *iter)
1988 {
1989 gimple s = gsi_stmt (*iter);
1990
1991 gcc_assert (gimple_assign_single_p (s));
1992
1993 tree ref_expr = NULL_TREE;
1994 bool is_store, is_instrumented = false;
1995
1996 if (gimple_store_p (s))
1997 {
1998 ref_expr = gimple_assign_lhs (s);
1999 is_store = true;
2000 instrument_derefs (iter, ref_expr,
2001 gimple_location (s),
2002 is_store);
2003 is_instrumented = true;
2004 }
2005
2006 if (gimple_assign_load_p (s))
2007 {
2008 ref_expr = gimple_assign_rhs1 (s);
2009 is_store = false;
2010 instrument_derefs (iter, ref_expr,
2011 gimple_location (s),
2012 is_store);
2013 is_instrumented = true;
2014 }
2015
2016 if (is_instrumented)
2017 gsi_next (iter);
2018
2019 return is_instrumented;
2020 }
2021
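/* A user-level C illustration: a single GIMPLE assignment can be both a
   load and a store, in which case the function above emits one check for
   each side, as for the aggregate copy below.  */

struct example_pair { int a, b; };

static void
example_copy (struct example_pair *dst, const struct example_pair *src)
{
  *dst = *src;   /* RHS checked as a load, LHS checked as a store */
}
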
2022 /* Instrument the function call pointed to by the iterator ITER, if it
2023 is subject to instrumentation. At the moment, the only function
2024 calls that are instrumented are some built-in functions that access
2025 memory. Look at instrument_builtin_call to learn more.
2026
2027 Upon completion return TRUE iff *ITER was advanced to the statement
2028 following the one it was originally pointing to. */
2029
2030 static bool
2031 maybe_instrument_call (gimple_stmt_iterator *iter)
2032 {
2033 gimple stmt = gsi_stmt (*iter);
2034 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2035
2036 if (is_builtin && instrument_builtin_call (iter))
2037 return true;
2038
2039 if (gimple_call_noreturn_p (stmt))
2040 {
2041 if (is_builtin)
2042 {
2043 tree callee = gimple_call_fndecl (stmt);
2044 switch (DECL_FUNCTION_CODE (callee))
2045 {
2046 case BUILT_IN_UNREACHABLE:
2047 case BUILT_IN_TRAP:
2048 /* Don't instrument these. */
2049 return false;
2050 default:
2051 break;
2052 }
2053 }
2054 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2055 gimple g = gimple_build_call (decl, 0);
2056 gimple_set_location (g, gimple_location (stmt));
2057 gsi_insert_before (iter, g, GSI_SAME_STMT);
2058 }
2059 return false;
2060 }
2061
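/* A user-level C illustration: before a noreturn call such as the one
   below, the code above inserts a call to __asan_handle_no_return
   (BUILT_IN_ASAN_HANDLE_NO_RETURN) so the run-time library can clean up
   (e.g. unpoison stack memory this frame will never unwind normally);
   __builtin_trap and __builtin_unreachable are deliberately left alone.  */

extern void exit (int status) __attribute__ ((noreturn));

static void
example_noreturn_call (int status)
{
  /* A __asan_handle_no_return () call is inserted right before this.  */
  exit (status);
}
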
2062 /* Walk each instruction of all basic blocks and instrument those that
2063 represent memory references: loads, stores, or function calls.
2064 In a given basic block, this function avoids instrumenting memory
2065 references that have already been instrumented. */
2066
2067 static void
2068 transform_statements (void)
2069 {
2070 basic_block bb, last_bb = NULL;
2071 gimple_stmt_iterator i;
2072 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2073
2074 FOR_EACH_BB_FN (bb, cfun)
2075 {
2076 basic_block prev_bb = bb;
2077
2078 if (bb->index >= saved_last_basic_block) continue;
2079
2080 /* Flush the mem ref hash table if the current bb doesn't have
2081 exactly one predecessor, or if that predecessor (skipping
2082 over asan-created basic blocks) isn't the last processed
2083 basic block. Thus we effectively flush on extended basic
2084 block boundaries. */
2085 while (single_pred_p (prev_bb))
2086 {
2087 prev_bb = single_pred (prev_bb);
2088 if (prev_bb->index < saved_last_basic_block)
2089 break;
2090 }
2091 if (prev_bb != last_bb)
2092 empty_mem_ref_hash_table ();
2093 last_bb = bb;
2094
2095 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2096 {
2097 gimple s = gsi_stmt (i);
2098
2099 if (has_stmt_been_instrumented_p (s))
2100 gsi_next (&i);
2101 else if (gimple_assign_single_p (s)
2102 && !gimple_clobber_p (s)
2103 && maybe_instrument_assignment (&i))
2104 /* Nothing to do as maybe_instrument_assignment advanced
2105 the iterator I. */;
2106 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2107 /* Nothing to do as maybe_instrument_call
2108 advanced the iterator I. */;
2109 else
2110 {
2111 /* No instrumentation happened.
2112
2113 If the current instruction is a function call that
2114 might free something, let's forget about the memory
2115 references that got instrumented. Otherwise we might
2116 miss some instrumentation opportunities. */
2117 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2118 empty_mem_ref_hash_table ();
2119
2120 gsi_next (&i);
2121 }
2122 }
2123 }
2124 free_mem_ref_resources ();
2125 }
2126
2127 /* Build
2128 __asan_before_dynamic_init (module_name)
2129 or
2130 __asan_after_dynamic_init ()
2131 call. */
2132
2133 tree
2134 asan_dynamic_init_call (bool after_p)
2135 {
2136 tree fn = builtin_decl_implicit (after_p
2137 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2138 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2139 tree module_name_cst = NULL_TREE;
2140 if (!after_p)
2141 {
2142 pretty_printer module_name_pp;
2143 pp_string (&module_name_pp, main_input_filename);
2144
2145 if (shadow_ptr_types[0] == NULL_TREE)
2146 asan_init_shadow_ptr_types ();
2147 module_name_cst = asan_pp_string (&module_name_pp);
2148 module_name_cst = fold_convert (const_ptr_type_node,
2149 module_name_cst);
2150 }
2151
2152 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2153 }
2154
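/* A standalone sketch (illustration only, not part of the compiler) of how
   the two calls built here bracket the dynamic initialization of a module's
   globals.  The declarations use the const void * argument type this
   function converts to; the calls themselves are emitted by this function's
   callers around the initializers, not here, and the file name below is a
   hypothetical placeholder.  */

extern void __asan_before_dynamic_init (const void *module_name);
extern void __asan_after_dynamic_init (void);

static void
example_dynamic_init (void)
{
  __asan_before_dynamic_init ("module.cc");   /* hypothetical file name */
  /* ... run this module's dynamic initializers of protected globals ... */
  __asan_after_dynamic_init ();
}
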
2155 /* Build
2156 struct __asan_global
2157 {
2158 const void *__beg;
2159 uptr __size;
2160 uptr __size_with_redzone;
2161 const void *__name;
2162 const void *__module_name;
2163 uptr __has_dynamic_init;
2164 __asan_global_source_location *__location;
2165 } type. */
2166
2167 static tree
2168 asan_global_struct (void)
2169 {
2170 static const char *field_names[7]
2171 = { "__beg", "__size", "__size_with_redzone",
2172 "__name", "__module_name", "__has_dynamic_init", "__location"};
2173 tree fields[7], ret;
2174 int i;
2175
2176 ret = make_node (RECORD_TYPE);
2177 for (i = 0; i < 7; i++)
2178 {
2179 fields[i]
2180 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2181 get_identifier (field_names[i]),
2182 (i == 0 || i == 3) ? const_ptr_type_node
2183 : pointer_sized_int_node);
2184 DECL_CONTEXT (fields[i]) = ret;
2185 if (i)
2186 DECL_CHAIN (fields[i - 1]) = fields[i];
2187 }
2188 tree type_decl = build_decl (input_location, TYPE_DECL,
2189 get_identifier ("__asan_global"), ret);
2190 DECL_IGNORED_P (type_decl) = 1;
2191 DECL_ARTIFICIAL (type_decl) = 1;
2192 TYPE_FIELDS (ret) = fields[0];
2193 TYPE_NAME (ret) = type_decl;
2194 TYPE_STUB_DECL (ret) = type_decl;
2195 layout_type (ret);
2196 return ret;
2197 }
2198
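/* The same layout written as a plain C declaration, as an illustration of
   the record the run-time library consumes.  The libsanitizer 'uptr' is
   spelled uintptr_t here; the authoritative definition lives in
   libsanitizer/asan/asan_interface_internal.h.  */

#include <stdint.h>

struct example_asan_global
{
  const void *__beg;               /* address of the protected global */
  uintptr_t __size;                /* size of the global itself */
  uintptr_t __size_with_redzone;   /* size including the trailing redzone */
  const void *__name;              /* name of the global, as a string */
  const void *__module_name;       /* main input file name, as a string */
  uintptr_t __has_dynamic_init;    /* non-zero if dynamically initialized */
  const void *__location;          /* __asan_global_source_location or 0 */
};
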
2199 /* Append description of a single global DECL into vector V.
2200 TYPE is __asan_global struct type as returned by asan_global_struct. */
2201
2202 static void
2203 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2204 {
2205 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2206 unsigned HOST_WIDE_INT size;
2207 tree str_cst, module_name_cst, refdecl = decl;
2208 vec<constructor_elt, va_gc> *vinner = NULL;
2209
2210 pretty_printer asan_pp, module_name_pp;
2211
2212 if (DECL_NAME (decl))
2213 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2214 else
2215 pp_string (&asan_pp, "<unknown>");
2216 str_cst = asan_pp_string (&asan_pp);
2217
2218 pp_string (&module_name_pp, main_input_filename);
2219 module_name_cst = asan_pp_string (&module_name_pp);
2220
2221 if (asan_needs_local_alias (decl))
2222 {
2223 char buf[20];
2224 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2225 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2226 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2227 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2228 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2229 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2230 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2231 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2232 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2233 TREE_STATIC (refdecl) = 1;
2234 TREE_PUBLIC (refdecl) = 0;
2235 TREE_USED (refdecl) = 1;
2236 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2237 }
2238
2239 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2240 fold_convert (const_ptr_type_node,
2241 build_fold_addr_expr (refdecl)));
2242 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2243 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2244 size += asan_red_zone_size (size);
2245 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2246 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2247 fold_convert (const_ptr_type_node, str_cst));
2248 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2249 fold_convert (const_ptr_type_node, module_name_cst));
2250 varpool_node *vnode = varpool_node::get (decl);
2251 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2252 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2253 build_int_cst (uptr, has_dynamic_init));
2254 tree locptr = NULL_TREE;
2255 location_t loc = DECL_SOURCE_LOCATION (decl);
2256 expanded_location xloc = expand_location (loc);
2257 if (xloc.file != NULL)
2258 {
2259 static int lasanloccnt = 0;
2260 char buf[25];
2261 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2262 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2263 ubsan_get_source_location_type ());
2264 TREE_STATIC (var) = 1;
2265 TREE_PUBLIC (var) = 0;
2266 DECL_ARTIFICIAL (var) = 1;
2267 DECL_IGNORED_P (var) = 1;
2268 pretty_printer filename_pp;
2269 pp_string (&filename_pp, xloc.file);
2270 tree str = asan_pp_string (&filename_pp);
2271 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2272 NULL_TREE, str, NULL_TREE,
2273 build_int_cst (unsigned_type_node,
2274 xloc.line), NULL_TREE,
2275 build_int_cst (unsigned_type_node,
2276 xloc.column));
2277 TREE_CONSTANT (ctor) = 1;
2278 TREE_STATIC (ctor) = 1;
2279 DECL_INITIAL (var) = ctor;
2280 varpool_node::finalize_decl (var);
2281 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2282 }
2283 else
2284 locptr = build_int_cst (uptr, 0);
2285 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2286 init = build_constructor (type, vinner);
2287 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2288 }
2289
2290 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2291 void
2292 initialize_sanitizer_builtins (void)
2293 {
2294 tree decl;
2295
2296 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2297 return;
2298
2299 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2300 tree BT_FN_VOID_PTR
2301 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2302 tree BT_FN_VOID_CONST_PTR
2303 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2304 tree BT_FN_VOID_PTR_PTR
2305 = build_function_type_list (void_type_node, ptr_type_node,
2306 ptr_type_node, NULL_TREE);
2307 tree BT_FN_VOID_PTR_PTR_PTR
2308 = build_function_type_list (void_type_node, ptr_type_node,
2309 ptr_type_node, ptr_type_node, NULL_TREE);
2310 tree BT_FN_VOID_PTR_PTRMODE
2311 = build_function_type_list (void_type_node, ptr_type_node,
2312 pointer_sized_int_node, NULL_TREE);
2313 tree BT_FN_VOID_INT
2314 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2315 tree BT_FN_SIZE_CONST_PTR_INT
2316 = build_function_type_list (size_type_node, const_ptr_type_node,
2317 integer_type_node, NULL_TREE);
2318 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2319 tree BT_FN_IX_CONST_VPTR_INT[5];
2320 tree BT_FN_IX_VPTR_IX_INT[5];
2321 tree BT_FN_VOID_VPTR_IX_INT[5];
2322 tree vptr
2323 = build_pointer_type (build_qualified_type (void_type_node,
2324 TYPE_QUAL_VOLATILE));
2325 tree cvptr
2326 = build_pointer_type (build_qualified_type (void_type_node,
2327 TYPE_QUAL_VOLATILE
2328 |TYPE_QUAL_CONST));
2329 tree boolt
2330 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2331 int i;
2332 for (i = 0; i < 5; i++)
2333 {
2334 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2335 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2336 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2337 integer_type_node, integer_type_node,
2338 NULL_TREE);
2339 BT_FN_IX_CONST_VPTR_INT[i]
2340 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2341 BT_FN_IX_VPTR_IX_INT[i]
2342 = build_function_type_list (ix, vptr, ix, integer_type_node,
2343 NULL_TREE);
2344 BT_FN_VOID_VPTR_IX_INT[i]
2345 = build_function_type_list (void_type_node, vptr, ix,
2346 integer_type_node, NULL_TREE);
2347 }
2348 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2349 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2350 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2351 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2352 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2353 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2354 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2355 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2356 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2357 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2358 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2359 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2360 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2361 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2362 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2363 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2364 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2365 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2366 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2367 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2368 #undef ATTR_NOTHROW_LEAF_LIST
2369 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2370 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2371 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2372 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2373 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2374 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2375 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2376 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2377 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2378 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2379 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2380 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2381 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2382 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2383 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2384 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2385 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2386 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2387 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2388 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2389 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2390 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2391 #undef DEF_SANITIZER_BUILTIN
2392 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2393 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2394 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2395 set_call_expr_flags (decl, ATTRS); \
2396 set_builtin_decl (ENUM, decl, true);
2397
2398 #include "sanitizer.def"
2399
2400 /* -fsanitize=object-size uses __builtin_object_size, but that might
2401 not be available for e.g. Fortran at this point. We use
2402 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2403 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2404 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2405 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2406 BT_FN_SIZE_CONST_PTR_INT,
2407 ATTR_PURE_NOTHROW_LEAF_LIST)
2408
2409 #undef DEF_SANITIZER_BUILTIN
2410 }
2411
2412 /* Called via hash_table::traverse. Count the number of emitted
2413 STRING_CSTs in the constant hash table. */
2414
2415 int
2416 count_string_csts (constant_descriptor_tree **slot,
2417 unsigned HOST_WIDE_INT *data)
2418 {
2419 struct constant_descriptor_tree *desc = *slot;
2420 if (TREE_CODE (desc->value) == STRING_CST
2421 && TREE_ASM_WRITTEN (desc->value)
2422 && asan_protect_global (desc->value))
2423 ++*data;
2424 return 1;
2425 }
2426
2427 /* Helper structure to pass two parameters to
2428 add_string_csts. */
2429
2430 struct asan_add_string_csts_data
2431 {
2432 tree type;
2433 vec<constructor_elt, va_gc> *v;
2434 };
2435
2436 /* Called via hash_table::traverse. Call asan_add_global
2437 on emitted STRING_CSTs from the constant hash table. */
2438
2439 int
2440 add_string_csts (constant_descriptor_tree **slot,
2441 asan_add_string_csts_data *aascd)
2442 {
2443 struct constant_descriptor_tree *desc = *slot;
2444 if (TREE_CODE (desc->value) == STRING_CST
2445 && TREE_ASM_WRITTEN (desc->value)
2446 && asan_protect_global (desc->value))
2447 {
2448 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2449 aascd->type, aascd->v);
2450 }
2451 return 1;
2452 }
2453
2454 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2455 invoke ggc_collect. */
2456 static GTY(()) tree asan_ctor_statements;
2457
2458 /* Module-level instrumentation.
2459 - Insert __asan_init_vN() into the list of CTORs.
2460 - TODO: insert redzones around globals.
2461 */
2462
2463 void
2464 asan_finish_file (void)
2465 {
2466 varpool_node *vnode;
2467 unsigned HOST_WIDE_INT gcount = 0;
2468
2469 if (shadow_ptr_types[0] == NULL_TREE)
2470 asan_init_shadow_ptr_types ();
2471 /* Avoid instrumenting code in the asan ctors/dtors.
2472 We don't need to insert padding after the description strings,
2473 nor after the .LASAN* array. */
2474 flag_sanitize &= ~SANITIZE_ADDRESS;
2475
2476 /* For user-space we want the asan constructors to run first.
2477 The Linux kernel does not support priorities other than the default;
2478 there, the only other user of constructors is coverage, so we
2479 run with the default priority. */
2480 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2481 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2482
2483 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2484 {
2485 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2486 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2487 }
2488 FOR_EACH_DEFINED_VARIABLE (vnode)
2489 if (TREE_ASM_WRITTEN (vnode->decl)
2490 && asan_protect_global (vnode->decl))
2491 ++gcount;
2492 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2493 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2494 (&gcount);
2495 if (gcount)
2496 {
2497 tree type = asan_global_struct (), var, ctor;
2498 tree dtor_statements = NULL_TREE;
2499 vec<constructor_elt, va_gc> *v;
2500 char buf[20];
2501
2502 type = build_array_type_nelts (type, gcount);
2503 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2504 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2505 type);
2506 TREE_STATIC (var) = 1;
2507 TREE_PUBLIC (var) = 0;
2508 DECL_ARTIFICIAL (var) = 1;
2509 DECL_IGNORED_P (var) = 1;
2510 vec_alloc (v, gcount);
2511 FOR_EACH_DEFINED_VARIABLE (vnode)
2512 if (TREE_ASM_WRITTEN (vnode->decl)
2513 && asan_protect_global (vnode->decl))
2514 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2515 struct asan_add_string_csts_data aascd;
2516 aascd.type = TREE_TYPE (type);
2517 aascd.v = v;
2518 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2519 (&aascd);
2520 ctor = build_constructor (type, v);
2521 TREE_CONSTANT (ctor) = 1;
2522 TREE_STATIC (ctor) = 1;
2523 DECL_INITIAL (var) = ctor;
2524 varpool_node::finalize_decl (var);
2525
2526 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2527 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2528 append_to_statement_list (build_call_expr (fn, 2,
2529 build_fold_addr_expr (var),
2530 gcount_tree),
2531 &asan_ctor_statements);
2532
2533 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2534 append_to_statement_list (build_call_expr (fn, 2,
2535 build_fold_addr_expr (var),
2536 gcount_tree),
2537 &dtor_statements);
2538 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2539 }
2540 if (asan_ctor_statements)
2541 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2542 flag_sanitize |= SANITIZE_ADDRESS;
2543 }
2544
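/* A standalone sketch (illustration only, not part of the compiler): in
   user-space mode the constructor and destructor synthesized above behave
   roughly like the functions below.  All example_* names and the simplified
   argument types are placeholders for the .LASAN0 descriptor array and for
   the BUILT_IN_ASAN_INIT, BUILT_IN_ASAN_REGISTER_GLOBALS and
   BUILT_IN_ASAN_UNREGISTER_GLOBALS callees.  */

extern void example_asan_init (void);
extern void example_asan_register_globals (void *globals, unsigned long n);
extern void example_asan_unregister_globals (void *globals, unsigned long n);

/* Stand-in for the .LASAN0 array of global descriptors built above.  */
static unsigned char example_global_descriptors[64];

static void
example_asan_module_ctor (void)   /* registered as a high-priority 'I' ctor */
{
  example_asan_init ();
  example_asan_register_globals (example_global_descriptors, 1);
}

static void
example_asan_module_dtor (void)   /* registered as a 'D' dtor */
{
  example_asan_unregister_globals (example_global_descriptors, 1);
}
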
2545 /* Expand the ASAN_CHECK (IFN_ASAN_CHECK) internal function. */
2546
2547 bool
2548 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2549 {
2550 gimple g = gsi_stmt (*iter);
2551 location_t loc = gimple_location (g);
2552
2553 bool recover_p
2554 = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2555
2556 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2557 gcc_assert (flags < ASAN_CHECK_LAST);
2558 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2559 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2560 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2561
2562 tree base = gimple_call_arg (g, 1);
2563 tree len = gimple_call_arg (g, 2);
2564 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2565
2566 HOST_WIDE_INT size_in_bytes
2567 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2568
2569 if (use_calls)
2570 {
2571 /* Instrument using callbacks. */
2572 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2573 NOP_EXPR, base);
2574 gimple_set_location (g, loc);
2575 gsi_insert_before (iter, g, GSI_SAME_STMT);
2576 tree base_addr = gimple_assign_lhs (g);
2577
2578 int nargs;
2579 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2580 if (nargs == 1)
2581 g = gimple_build_call (fun, 1, base_addr);
2582 else
2583 {
2584 gcc_assert (nargs == 2);
2585 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2586 NOP_EXPR, len);
2587 gimple_set_location (g, loc);
2588 gsi_insert_before (iter, g, GSI_SAME_STMT);
2589 tree sz_arg = gimple_assign_lhs (g);
2590 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2591 }
2592 gimple_set_location (g, loc);
2593 gsi_replace (iter, g, false);
2594 return false;
2595 }
2596
2597 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2598
2599 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2600 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2601
2602 gimple_stmt_iterator gsi = *iter;
2603
2604 if (!is_non_zero_len)
2605 {
2606 /* So, the length of the memory area to asan-protect is not known
2607 to be non-zero at compile time. Let's guard the generated
2608 instrumentation code like:
2609
2610 if (len != 0)
2611 {
2612 //asan instrumentation code goes here.
2613 }
2614 // fallthrough instructions, starting with *ITER. */
2615
2616 g = gimple_build_cond (NE_EXPR,
2617 len,
2618 build_int_cst (TREE_TYPE (len), 0),
2619 NULL_TREE, NULL_TREE);
2620 gimple_set_location (g, loc);
2621
2622 basic_block then_bb, fallthrough_bb;
2623 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2624 /*then_more_likely_p=*/true,
2625 &then_bb, &fallthrough_bb);
2626 /* Note that fallthrough_bb starts with the statement that was
2627 pointed to by ITER. */
2628
2629 /* The 'then block' of the 'if (len != 0)' condition is where
2630 we'll generate the asan instrumentation code now. */
2631 gsi = gsi_last_bb (then_bb);
2632 }
2633
2634 /* Get an iterator on the point where we can add the condition
2635 statement for the instrumentation. */
2636 basic_block then_bb, else_bb;
2637 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2638 /*then_more_likely_p=*/false,
2639 /*create_then_fallthru_edge*/recover_p,
2640 &then_bb,
2641 &else_bb);
2642
2643 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2644 NOP_EXPR, base);
2645 gimple_set_location (g, loc);
2646 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2647 tree base_addr = gimple_assign_lhs (g);
2648
2649 tree t = NULL_TREE;
2650 if (real_size_in_bytes >= 8)
2651 {
2652 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2653 shadow_ptr_type);
2654 t = shadow;
2655 }
2656 else
2657 {
2658 /* Slow path for 1, 2 and 4 byte accesses. */
2659 /* Test (shadow != 0)
2660 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow). */
2661 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2662 shadow_ptr_type);
2663 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2664 gimple_seq seq = NULL;
2665 gimple_seq_add_stmt (&seq, shadow_test);
2666 /* Aligned (>= 8 bytes) accesses can test just
2667 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2668 to be 0. */
2669 if (align < 8)
2670 {
2671 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2672 base_addr, 7));
2673 gimple_seq_add_stmt (&seq,
2674 build_type_cast (shadow_type,
2675 gimple_seq_last (seq)));
2676 if (real_size_in_bytes > 1)
2677 gimple_seq_add_stmt (&seq,
2678 build_assign (PLUS_EXPR,
2679 gimple_seq_last (seq),
2680 real_size_in_bytes - 1));
2681 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2682 }
2683 else
2684 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2685 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2686 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2687 gimple_seq_last (seq)));
2688 t = gimple_assign_lhs (gimple_seq_last (seq));
2689 gimple_seq_set_location (seq, loc);
2690 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2691
2692 /* For non-constant, misaligned or otherwise weird access sizes,
2693 check first and last byte. */
2694 if (size_in_bytes == -1)
2695 {
2696 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2697 MINUS_EXPR, len,
2698 build_int_cst (pointer_sized_int_node, 1));
2699 gimple_set_location (g, loc);
2700 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2701 tree last = gimple_assign_lhs (g);
2702 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2703 PLUS_EXPR, base_addr, last);
2704 gimple_set_location (g, loc);
2705 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2706 tree base_end_addr = gimple_assign_lhs (g);
2707
2708 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2709 shadow_ptr_type);
2710 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2711 gimple_seq seq = NULL;
2712 gimple_seq_add_stmt (&seq, shadow_test);
2713 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2714 base_end_addr, 7));
2715 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2716 gimple_seq_last (seq)));
2717 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2718 gimple_seq_last (seq),
2719 shadow));
2720 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2721 gimple_seq_last (seq)));
2722 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2723 gimple_seq_last (seq)));
2724 t = gimple_assign_lhs (gimple_seq_last (seq));
2725 gimple_seq_set_location (seq, loc);
2726 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2727 }
2728 }
2729
2730 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2731 NULL_TREE, NULL_TREE);
2732 gimple_set_location (g, loc);
2733 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2734
2735 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2736 gsi = gsi_start_bb (then_bb);
2737 int nargs;
2738 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
2739 g = gimple_build_call (fun, nargs, base_addr, len);
2740 gimple_set_location (g, loc);
2741 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2742
2743 gsi_remove (iter, true);
2744 *iter = gsi_start_bb (else_bb);
2745
2746 return true;
2747 }
2748
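/* A standalone sketch (illustration only, not part of the compiler) of the
   inline slow-path check expanded above for a small (1, 2 or 4 byte)
   access, assuming ASAN_SHADOW_SHIFT == 3 (its value in asan.h).
   example_report_error is a hypothetical stand-in for the
   __asan_report_* callee chosen by report_error_func.  */

#include <stdint.h>
#include <stdbool.h>

extern void example_report_error (uintptr_t addr);   /* hypothetical stub */

static void
example_inline_check (uintptr_t addr, int size_in_bytes,
                      uintptr_t shadow_offset)
{
  signed char shadow = *(signed char *) ((addr >> 3) + shadow_offset);
  bool bad = shadow != 0
             && (int) (addr & 7) + size_in_bytes - 1 >= shadow;
  if (bad)
    example_report_error (addr);
}
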
2749 /* Instrument the current function. */
2750
2751 static unsigned int
2752 asan_instrument (void)
2753 {
2754 if (shadow_ptr_types[0] == NULL_TREE)
2755 asan_init_shadow_ptr_types ();
2756 transform_statements ();
2757 return 0;
2758 }
2759
2760 static bool
2761 gate_asan (void)
2762 {
2763 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2764 && !lookup_attribute ("no_sanitize_address",
2765 DECL_ATTRIBUTES (current_function_decl));
2766 }
2767
2768 namespace {
2769
2770 const pass_data pass_data_asan =
2771 {
2772 GIMPLE_PASS, /* type */
2773 "asan", /* name */
2774 OPTGROUP_NONE, /* optinfo_flags */
2775 TV_NONE, /* tv_id */
2776 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2777 0, /* properties_provided */
2778 0, /* properties_destroyed */
2779 0, /* todo_flags_start */
2780 TODO_update_ssa, /* todo_flags_finish */
2781 };
2782
2783 class pass_asan : public gimple_opt_pass
2784 {
2785 public:
2786 pass_asan (gcc::context *ctxt)
2787 : gimple_opt_pass (pass_data_asan, ctxt)
2788 {}
2789
2790 /* opt_pass methods: */
2791 opt_pass * clone () { return new pass_asan (m_ctxt); }
2792 virtual bool gate (function *) { return gate_asan (); }
2793 virtual unsigned int execute (function *) { return asan_instrument (); }
2794
2795 }; // class pass_asan
2796
2797 } // anon namespace
2798
2799 gimple_opt_pass *
2800 make_pass_asan (gcc::context *ctxt)
2801 {
2802 return new pass_asan (ctxt);
2803 }
2804
2805 namespace {
2806
2807 const pass_data pass_data_asan_O0 =
2808 {
2809 GIMPLE_PASS, /* type */
2810 "asan0", /* name */
2811 OPTGROUP_NONE, /* optinfo_flags */
2812 TV_NONE, /* tv_id */
2813 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2814 0, /* properties_provided */
2815 0, /* properties_destroyed */
2816 0, /* todo_flags_start */
2817 TODO_update_ssa, /* todo_flags_finish */
2818 };
2819
2820 class pass_asan_O0 : public gimple_opt_pass
2821 {
2822 public:
2823 pass_asan_O0 (gcc::context *ctxt)
2824 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2825 {}
2826
2827 /* opt_pass methods: */
2828 virtual bool gate (function *) { return !optimize && gate_asan (); }
2829 virtual unsigned int execute (function *) { return asan_instrument (); }
2830
2831 }; // class pass_asan_O0
2832
2833 } // anon namespace
2834
2835 gimple_opt_pass *
2836 make_pass_asan_O0 (gcc::context *ctxt)
2837 {
2838 return new pass_asan_O0 (ctxt);
2839 }
2840
2841 #include "gt-asan.h"