gcc/asan.c
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "alias.h"
26 #include "backend.h"
27 #include "cfghooks.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "rtl.h"
31 #include "options.h"
32 #include "fold-const.h"
33 #include "cfganal.h"
34 #include "internal-fn.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "calls.h"
38 #include "varasm.h"
39 #include "stor-layout.h"
40 #include "tree-iterator.h"
41 #include "cgraph.h"
42 #include "stringpool.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "asan.h"
46 #include "gimple-pretty-print.h"
47 #include "target.h"
48 #include "flags.h"
49 #include "insn-config.h"
50 #include "expmed.h"
51 #include "dojump.h"
52 #include "explow.h"
53 #include "emit-rtl.h"
54 #include "stmt.h"
55 #include "expr.h"
56 #include "insn-codes.h"
57 #include "optabs.h"
58 #include "output.h"
59 #include "tm_p.h"
60 #include "langhooks.h"
61 #include "alloc-pool.h"
62 #include "cfgloop.h"
63 #include "gimple-builder.h"
64 #include "ubsan.h"
65 #include "params.h"
66 #include "builtins.h"
67 #include "fnmatch.h"
68
69 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
70 with <2x slowdown on average.
71
72 The tool consists of two parts:
73 instrumentation module (this file) and a run-time library.
74 The instrumentation module adds a run-time check before every memory insn.
75 For an 8- or 16-byte load accessing address X:
76 ShadowAddr = (X >> 3) + Offset
77 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
78 if (ShadowValue)
79 __asan_report_load8(X);
80 For a load of N bytes (N=1, 2 or 4) from address X:
81 ShadowAddr = (X >> 3) + Offset
82 ShadowValue = *(char*)ShadowAddr;
83 if (ShadowValue)
84 if ((X & 7) + N - 1 >= ShadowValue)
85 __asan_report_loadN(X);
86 Stores are instrumented similarly, but using __asan_report_storeN functions.
87 A call to __asan_init_vN() is inserted into the list of module CTORs.
88 N is the version number of the AddressSanitizer API. The changes between the
89 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
90
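As a concrete illustration (a sketch; the shadow Offset is target-specific,
here the default x86_64 user-space value 0x7fff8000 is assumed), a 4-byte
load from X = 0x7fffffffe12c is checked roughly as:

ShadowAddr = (0x7fffffffe12c >> 3) + 0x7fff8000 = 0x10007fff7c25
ShadowValue = *(char*)ShadowAddr;
if (ShadowValue != 0 && ((0x7fffffffe12c & 7) + 4 - 1 >= ShadowValue))
__asan_report_load4 (0x7fffffffe12c);

A ShadowValue of 0 means the whole 8-byte word is addressable, a value k in
1..7 means only its first k bytes are, and other values are red zone magic
bytes.
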
91 The run-time library redefines malloc (so that red zones are inserted around
92 the allocated memory) and free (so that reuse of freed memory is delayed), and
93 provides the __asan_report* and __asan_init_vN functions.
94
95 Read more:
96 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
97
98 The current implementation supports detection of out-of-bounds and
99 use-after-free in the heap, on the stack and for global variables.
100
101 [Protection of stack variables]
102
103 To understand how detection of out-of-bounds and use-after-free works
104 for stack variables, let's look at this example on x86_64 where the
105 stack grows downward:
106
107 int
108 foo ()
109 {
110 char a[23] = {0};
111 int b[2] = {0};
112
113 a[5] = 1;
114 b[1] = 2;
115
116 return a[5] + b[1];
117 }
118
119 For this function, the stack protected by asan will be organized as
120 follows, from the top of the stack to the bottom:
121
122 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
123
124 Slot 2/ [8 bytes of red zone, added to the space of 'a' so that
125 the next slot is 32-byte aligned; this one is called a Partial
126 Redzone; this 32-byte alignment is an asan constraint]
127
128 Slot 3/ [24 bytes for variable 'a']
129
130 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
131
132 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
133
134 Slot 6/ [8 bytes for variable 'b']
135
136 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
137 'LEFT RedZone']
138
139 The 32 bytes of LEFT red zone at the bottom of the stack can be
140 decomposed as such:
141
142 1/ The first 8 bytes contain a magical asan number that is always
143 0x41B58AB3.
144
145 2/ The following 8 bytes contain a pointer to a string (to be
146 parsed at run time by the asan run-time library), whose format is
147 the following:
148
149 "<function-name> <space> <num-of-variables-on-the-stack>
150 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
151 <length-of-var-in-bytes> ){n} "
152
153 where '(...){n}' means the content inside the parenthesis occurs 'n'
154 times, with 'n' being the number of variables on the stack.
155
156 3/ The following 8 bytes contain the PC of the current function which
157 will be used by the run-time library to print an error message.
158
159 4/ The following 8 bytes are reserved for internal use by the run-time.
160
161 The shadow memory for that stack layout is going to look like this:
162
163 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
164 The F1 byte pattern is a magic number called
165 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
166 the memory for that shadow byte is part of the LEFT red zone
167 intended to sit at the bottom of the variables on the stack.
168
169 - content of shadow memory 8 bytes for slots 6 and 5:
170 0xF4F4F400. The F4 byte pattern is a magic number
171 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
172 memory region for this shadow byte is a PARTIAL red zone
173 intended to pad a variable A, so that the slot following
174 {A,padding} is 32-byte aligned.
175
176 Note that the fact that the least significant byte of this
177 shadow memory content is 00 means that 8 bytes of its
178 corresponding memory (which correspond to the memory of
179 variable 'b') are addressable.
180
181 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
182 The F2 byte pattern is a magic number called
183 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
184 region for this shadow byte is a MIDDLE red zone intended to
185 sit between two 32-byte aligned slots of {variable,padding}.
186
187 - content of shadow memory 8 bytes for slots 3 and 2:
188 0xF4000000. This represents the concatenation of
189 variable 'a' and the partial red zone following it, like what we
190 had for variable 'b'. The least significant 3 bytes being 00
191 means that the 24 bytes reserved for variable 'a' are addressable.
192
193 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
194 The F3 byte pattern is a magic number called
195 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
196 region for this shadow byte is a RIGHT red zone intended to sit
197 at the top of the variables on the stack.
198
199 Note that the real variable layout is done in expand_used_vars in
200 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
201 stack variables as well as the different red zones, emits some
202 prologue code to populate the shadow memory so as to poison (mark as
203 non-accessible) the regions of the red zones and mark the regions of
204 stack variables as accessible, and emits some epilogue code to
205 un-poison (mark as accessible) the regions of red zones right before
206 the function exits.
207
208 [Protection of global variables]
209
210 The basic idea is to insert a red zone between two global variables
211 and install a constructor function that calls the asan runtime to do
212 the populating of the relevant shadow memory regions at load time.
213
214 So the global variables are laid out so as to insert a red zone between
215 them. The size of the red zones is chosen so that each variable starts on
216 a 32-byte boundary.
217
218 Then a constructor function is installed so that, for each global
219 variable, it calls the runtime asan library function
220 __asan_register_globals with an instance of this type:
221
222 struct __asan_global
223 {
224 // Address of the beginning of the global variable.
225 const void *__beg;
226
227 // Initial size of the global variable.
228 uptr __size;
229
230 // Size of the global variable + size of the red zone. This
231 // size is 32 bytes aligned.
232 uptr __size_with_redzone;
233
234 // Name of the global variable.
235 const void *__name;
236
237 // Name of the module where the global variable is declared.
238 const void *__module_name;
239
240 // 1 if it has dynamic initialization, 0 otherwise.
241 uptr __has_dynamic_init;
242
243 // A pointer to struct that contains source location, could be NULL.
244 __asan_global_source_location *__location;
245 }
246
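For illustration, for a global 'int g;' defined in t.c, the constructor
conceptually performs something like the following (the descriptor values
and names here are made up; the real descriptors are emitted by the
compiler, not written by hand):

static struct __asan_global g_desc
= { &g,          // __beg
sizeof (g),      // __size
32,              // __size_with_redzone (hypothetical)
"g", "t.c", 0, NULL };
__asan_register_globals (&g_desc, 1);
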
247 A destructor function that calls the runtime asan library function
248 __asan_unregister_globals is also installed. */
249
250 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
251 static bool asan_shadow_offset_computed;
252 static vec<char *> sanitized_sections;
253
254 /* Sets shadow offset to value in string VAL. */
255
256 bool
257 set_asan_shadow_offset (const char *val)
258 {
259 char *endp;
260
261 errno = 0;
262 #ifdef HAVE_LONG_LONG
263 asan_shadow_offset_value = strtoull (val, &endp, 0);
264 #else
265 asan_shadow_offset_value = strtoul (val, &endp, 0);
266 #endif
267 if (!(*val != '\0' && *endp == '\0' && errno == 0))
268 return false;
269
270 asan_shadow_offset_computed = true;
271
272 return true;
273 }
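
/* For example (a sketch; this function is reached when parsing the
-fasan-shadow-offset= option), set_asan_shadow_offset ("0x7fff8000")
accepts the value and returns true, while an empty string or trailing
garbage such as "123abc" makes it return false, because the whole
string must parse as a number. */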
274
275 /* Set list of user-defined sections that need to be sanitized. */
276
277 void
278 set_sanitized_sections (const char *sections)
279 {
280 char *pat;
281 unsigned i;
282 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
283 free (pat);
284 sanitized_sections.truncate (0);
285
286 for (const char *s = sections; *s; )
287 {
288 const char *end;
289 for (end = s; *end && *end != ','; ++end);
290 size_t len = end - s;
291 sanitized_sections.safe_push (xstrndup (s, len));
292 s = *end ? end + 1 : end;
293 }
294 }
295
296 /* Checks whether section SEC should be sanitized. */
297
298 static bool
299 section_sanitized_p (const char *sec)
300 {
301 char *pat;
302 unsigned i;
303 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
304 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
305 return true;
306 return false;
307 }
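
/* A small usage sketch (the section names are hypothetical): after
set_sanitized_sections ("mysec*,.foo"), section_sanitized_p ("mysec1")
and section_sanitized_p (".foo") return true, while
section_sanitized_p (".data") returns false; each comma-separated
pattern is matched with fnmatch-style wildcards. */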
308
309 /* Returns Asan shadow offset. */
310
311 static unsigned HOST_WIDE_INT
312 asan_shadow_offset ()
313 {
314 if (!asan_shadow_offset_computed)
315 {
316 asan_shadow_offset_computed = true;
317 asan_shadow_offset_value = targetm.asan_shadow_offset ();
318 }
319 return asan_shadow_offset_value;
320 }
321
322 alias_set_type asan_shadow_set = -1;
323
324 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
325 alias set is used for all shadow memory accesses. */
326 static GTY(()) tree shadow_ptr_types[2];
327
328 /* Decl for __asan_option_detect_stack_use_after_return. */
329 static GTY(()) tree asan_detect_stack_use_after_return;
330
331 /* Various flags for Asan builtins. */
332 enum asan_check_flags
333 {
334 ASAN_CHECK_STORE = 1 << 0,
335 ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
336 ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
337 ASAN_CHECK_LAST = 1 << 3
338 };
339
340 /* Hashtable support for memory references used by gimple
341 statements. */
342
343 /* This type represents a reference to a memory region. */
344 struct asan_mem_ref
345 {
346 /* The expression of the beginning of the memory region. */
347 tree start;
348
349 /* The size of the access. */
350 HOST_WIDE_INT access_size;
351 };
352
353 object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
354
355 /* Initializes an instance of asan_mem_ref. */
356
357 static void
358 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
359 {
360 ref->start = start;
361 ref->access_size = access_size;
362 }
363
364 /* Allocates memory for an instance of asan_mem_ref from the
365 asan_mem_ref_pool allocator and initializes it.
366 START is the address of (or the expression pointing to) the
367 beginning of memory reference. ACCESS_SIZE is the size of the
368 access to the referenced memory. */
369
370 static asan_mem_ref*
371 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
372 {
373 asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
374
375 asan_mem_ref_init (ref, start, access_size);
376 return ref;
377 }
378
379 /* This builds and returns a pointer to the end of the memory region
380 that starts at START and has length LEN. */
381
382 tree
383 asan_mem_ref_get_end (tree start, tree len)
384 {
385 if (len == NULL_TREE || integer_zerop (len))
386 return start;
387
388 if (!ptrofftype_p (len))
389 len = convert_to_ptrofftype (len);
390
391 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
392 }
393
394 /* Return a tree expression that represents the end of the referenced
395 memory region. Beware that this function can actually build a new
396 tree expression. */
397
398 tree
399 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
400 {
401 return asan_mem_ref_get_end (ref->start, len);
402 }
403
404 struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
405 {
406 static inline hashval_t hash (const asan_mem_ref *);
407 static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
408 };
409
410 /* Hash a memory reference. */
411
412 inline hashval_t
413 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
414 {
415 return iterative_hash_expr (mem_ref->start, 0);
416 }
417
418 /* Compare two memory references. We accept the length of either
419 memory reference to be NULL_TREE. */
420
421 inline bool
422 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
423 const asan_mem_ref *m2)
424 {
425 return operand_equal_p (m1->start, m2->start, 0);
426 }
427
428 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
429
430 /* Returns a reference to the hash table containing memory references.
431 This function ensures that the hash table is created. Note that
432 this hash table is updated by the function
433 update_mem_ref_hash_table. */
434
435 static hash_table<asan_mem_ref_hasher> *
436 get_mem_ref_hash_table ()
437 {
438 if (!asan_mem_ref_ht)
439 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
440
441 return asan_mem_ref_ht;
442 }
443
444 /* Clear all entries from the memory references hash table. */
445
446 static void
447 empty_mem_ref_hash_table ()
448 {
449 if (asan_mem_ref_ht)
450 asan_mem_ref_ht->empty ();
451 }
452
453 /* Free the memory references hash table. */
454
455 static void
456 free_mem_ref_resources ()
457 {
458 delete asan_mem_ref_ht;
459 asan_mem_ref_ht = NULL;
460
461 asan_mem_ref_pool.release ();
462 }
463
464 /* Return true iff the memory reference REF has been instrumented. */
465
466 static bool
467 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
468 {
469 asan_mem_ref r;
470 asan_mem_ref_init (&r, ref, access_size);
471
472 asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
473 return saved_ref && saved_ref->access_size >= access_size;
474 }
475
476 /* Return true iff the memory reference REF has been instrumented. */
477
478 static bool
479 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
480 {
481 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
482 }
483
484 /* Return true iff access to memory region starting at REF and of
485 length LEN has been instrumented. */
486
487 static bool
488 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
489 {
490 HOST_WIDE_INT size_in_bytes
491 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
492
493 return size_in_bytes != -1
494 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
495 }
496
497 /* Set REF to the memory reference present in a gimple assignment
498 ASSIGNMENT. Return true upon successful completion, false
499 otherwise. */
500
501 static bool
502 get_mem_ref_of_assignment (const gassign *assignment,
503 asan_mem_ref *ref,
504 bool *ref_is_store)
505 {
506 gcc_assert (gimple_assign_single_p (assignment));
507
508 if (gimple_store_p (assignment)
509 && !gimple_clobber_p (assignment))
510 {
511 ref->start = gimple_assign_lhs (assignment);
512 *ref_is_store = true;
513 }
514 else if (gimple_assign_load_p (assignment))
515 {
516 ref->start = gimple_assign_rhs1 (assignment);
517 *ref_is_store = false;
518 }
519 else
520 return false;
521
522 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
523 return true;
524 }
525
526 /* Return the memory references contained in a gimple statement
527 representing a builtin call that has to do with memory access. */
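/* For example (an informal sketch): for memcpy (d, s, n) it sets *DST to the
region starting at d, *SRC0 to the region starting at s, *DST_LEN and
*SRC0_LEN to n, *DST_IS_STORE to true and *SRC0_IS_STORE to false; for
__atomic_store_4 (p, v, order) only *DST is set, to the dereferenced
location *p, and *DEST_IS_DEREF is set to true. */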
528
529 static bool
530 get_mem_refs_of_builtin_call (const gcall *call,
531 asan_mem_ref *src0,
532 tree *src0_len,
533 bool *src0_is_store,
534 asan_mem_ref *src1,
535 tree *src1_len,
536 bool *src1_is_store,
537 asan_mem_ref *dst,
538 tree *dst_len,
539 bool *dst_is_store,
540 bool *dest_is_deref,
541 bool *intercepted_p)
542 {
543 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
544
545 tree callee = gimple_call_fndecl (call);
546 tree source0 = NULL_TREE, source1 = NULL_TREE,
547 dest = NULL_TREE, len = NULL_TREE;
548 bool is_store = true, got_reference_p = false;
549 HOST_WIDE_INT access_size = 1;
550
551 *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
552
553 switch (DECL_FUNCTION_CODE (callee))
554 {
555 /* (s, s, n) style memops. */
556 case BUILT_IN_BCMP:
557 case BUILT_IN_MEMCMP:
558 source0 = gimple_call_arg (call, 0);
559 source1 = gimple_call_arg (call, 1);
560 len = gimple_call_arg (call, 2);
561 break;
562
563 /* (src, dest, n) style memops. */
564 case BUILT_IN_BCOPY:
565 source0 = gimple_call_arg (call, 0);
566 dest = gimple_call_arg (call, 1);
567 len = gimple_call_arg (call, 2);
568 break;
569
570 /* (dest, src, n) style memops. */
571 case BUILT_IN_MEMCPY:
572 case BUILT_IN_MEMCPY_CHK:
573 case BUILT_IN_MEMMOVE:
574 case BUILT_IN_MEMMOVE_CHK:
575 case BUILT_IN_MEMPCPY:
576 case BUILT_IN_MEMPCPY_CHK:
577 dest = gimple_call_arg (call, 0);
578 source0 = gimple_call_arg (call, 1);
579 len = gimple_call_arg (call, 2);
580 break;
581
582 /* (dest, n) style memops. */
583 case BUILT_IN_BZERO:
584 dest = gimple_call_arg (call, 0);
585 len = gimple_call_arg (call, 1);
586 break;
587
588 /* (dest, x, n) style memops. */
589 case BUILT_IN_MEMSET:
590 case BUILT_IN_MEMSET_CHK:
591 dest = gimple_call_arg (call, 0);
592 len = gimple_call_arg (call, 2);
593 break;
594
595 case BUILT_IN_STRLEN:
596 source0 = gimple_call_arg (call, 0);
597 len = gimple_call_lhs (call);
598 break ;
599
600 /* And now the __atomic* and __sync builtins.
601 These are handled differently from the classical memory
602 access builtins above. */
603
604 case BUILT_IN_ATOMIC_LOAD_1:
605 case BUILT_IN_ATOMIC_LOAD_2:
606 case BUILT_IN_ATOMIC_LOAD_4:
607 case BUILT_IN_ATOMIC_LOAD_8:
608 case BUILT_IN_ATOMIC_LOAD_16:
609 is_store = false;
610 /* fall through. */
611
612 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
613 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
614 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
615 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
616 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
617
618 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
619 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
620 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
621 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
622 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
623
624 case BUILT_IN_SYNC_FETCH_AND_OR_1:
625 case BUILT_IN_SYNC_FETCH_AND_OR_2:
626 case BUILT_IN_SYNC_FETCH_AND_OR_4:
627 case BUILT_IN_SYNC_FETCH_AND_OR_8:
628 case BUILT_IN_SYNC_FETCH_AND_OR_16:
629
630 case BUILT_IN_SYNC_FETCH_AND_AND_1:
631 case BUILT_IN_SYNC_FETCH_AND_AND_2:
632 case BUILT_IN_SYNC_FETCH_AND_AND_4:
633 case BUILT_IN_SYNC_FETCH_AND_AND_8:
634 case BUILT_IN_SYNC_FETCH_AND_AND_16:
635
636 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
637 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
638 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
639 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
640 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
641
642 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
643 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
644 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
645 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
646
647 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
648 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
649 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
650 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
651 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
652
653 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
654 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
655 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
656 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
657 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
658
659 case BUILT_IN_SYNC_OR_AND_FETCH_1:
660 case BUILT_IN_SYNC_OR_AND_FETCH_2:
661 case BUILT_IN_SYNC_OR_AND_FETCH_4:
662 case BUILT_IN_SYNC_OR_AND_FETCH_8:
663 case BUILT_IN_SYNC_OR_AND_FETCH_16:
664
665 case BUILT_IN_SYNC_AND_AND_FETCH_1:
666 case BUILT_IN_SYNC_AND_AND_FETCH_2:
667 case BUILT_IN_SYNC_AND_AND_FETCH_4:
668 case BUILT_IN_SYNC_AND_AND_FETCH_8:
669 case BUILT_IN_SYNC_AND_AND_FETCH_16:
670
671 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
672 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
673 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
674 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
675 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
676
677 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
678 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
679 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
680 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
681
682 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
683 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
684 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
685 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
686 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
687
688 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
689 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
690 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
691 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
692 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
693
694 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
695 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
696 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
697 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
698 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
699
700 case BUILT_IN_SYNC_LOCK_RELEASE_1:
701 case BUILT_IN_SYNC_LOCK_RELEASE_2:
702 case BUILT_IN_SYNC_LOCK_RELEASE_4:
703 case BUILT_IN_SYNC_LOCK_RELEASE_8:
704 case BUILT_IN_SYNC_LOCK_RELEASE_16:
705
706 case BUILT_IN_ATOMIC_EXCHANGE_1:
707 case BUILT_IN_ATOMIC_EXCHANGE_2:
708 case BUILT_IN_ATOMIC_EXCHANGE_4:
709 case BUILT_IN_ATOMIC_EXCHANGE_8:
710 case BUILT_IN_ATOMIC_EXCHANGE_16:
711
712 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
713 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
714 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
715 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
716 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
717
718 case BUILT_IN_ATOMIC_STORE_1:
719 case BUILT_IN_ATOMIC_STORE_2:
720 case BUILT_IN_ATOMIC_STORE_4:
721 case BUILT_IN_ATOMIC_STORE_8:
722 case BUILT_IN_ATOMIC_STORE_16:
723
724 case BUILT_IN_ATOMIC_ADD_FETCH_1:
725 case BUILT_IN_ATOMIC_ADD_FETCH_2:
726 case BUILT_IN_ATOMIC_ADD_FETCH_4:
727 case BUILT_IN_ATOMIC_ADD_FETCH_8:
728 case BUILT_IN_ATOMIC_ADD_FETCH_16:
729
730 case BUILT_IN_ATOMIC_SUB_FETCH_1:
731 case BUILT_IN_ATOMIC_SUB_FETCH_2:
732 case BUILT_IN_ATOMIC_SUB_FETCH_4:
733 case BUILT_IN_ATOMIC_SUB_FETCH_8:
734 case BUILT_IN_ATOMIC_SUB_FETCH_16:
735
736 case BUILT_IN_ATOMIC_AND_FETCH_1:
737 case BUILT_IN_ATOMIC_AND_FETCH_2:
738 case BUILT_IN_ATOMIC_AND_FETCH_4:
739 case BUILT_IN_ATOMIC_AND_FETCH_8:
740 case BUILT_IN_ATOMIC_AND_FETCH_16:
741
742 case BUILT_IN_ATOMIC_NAND_FETCH_1:
743 case BUILT_IN_ATOMIC_NAND_FETCH_2:
744 case BUILT_IN_ATOMIC_NAND_FETCH_4:
745 case BUILT_IN_ATOMIC_NAND_FETCH_8:
746 case BUILT_IN_ATOMIC_NAND_FETCH_16:
747
748 case BUILT_IN_ATOMIC_XOR_FETCH_1:
749 case BUILT_IN_ATOMIC_XOR_FETCH_2:
750 case BUILT_IN_ATOMIC_XOR_FETCH_4:
751 case BUILT_IN_ATOMIC_XOR_FETCH_8:
752 case BUILT_IN_ATOMIC_XOR_FETCH_16:
753
754 case BUILT_IN_ATOMIC_OR_FETCH_1:
755 case BUILT_IN_ATOMIC_OR_FETCH_2:
756 case BUILT_IN_ATOMIC_OR_FETCH_4:
757 case BUILT_IN_ATOMIC_OR_FETCH_8:
758 case BUILT_IN_ATOMIC_OR_FETCH_16:
759
760 case BUILT_IN_ATOMIC_FETCH_ADD_1:
761 case BUILT_IN_ATOMIC_FETCH_ADD_2:
762 case BUILT_IN_ATOMIC_FETCH_ADD_4:
763 case BUILT_IN_ATOMIC_FETCH_ADD_8:
764 case BUILT_IN_ATOMIC_FETCH_ADD_16:
765
766 case BUILT_IN_ATOMIC_FETCH_SUB_1:
767 case BUILT_IN_ATOMIC_FETCH_SUB_2:
768 case BUILT_IN_ATOMIC_FETCH_SUB_4:
769 case BUILT_IN_ATOMIC_FETCH_SUB_8:
770 case BUILT_IN_ATOMIC_FETCH_SUB_16:
771
772 case BUILT_IN_ATOMIC_FETCH_AND_1:
773 case BUILT_IN_ATOMIC_FETCH_AND_2:
774 case BUILT_IN_ATOMIC_FETCH_AND_4:
775 case BUILT_IN_ATOMIC_FETCH_AND_8:
776 case BUILT_IN_ATOMIC_FETCH_AND_16:
777
778 case BUILT_IN_ATOMIC_FETCH_NAND_1:
779 case BUILT_IN_ATOMIC_FETCH_NAND_2:
780 case BUILT_IN_ATOMIC_FETCH_NAND_4:
781 case BUILT_IN_ATOMIC_FETCH_NAND_8:
782 case BUILT_IN_ATOMIC_FETCH_NAND_16:
783
784 case BUILT_IN_ATOMIC_FETCH_XOR_1:
785 case BUILT_IN_ATOMIC_FETCH_XOR_2:
786 case BUILT_IN_ATOMIC_FETCH_XOR_4:
787 case BUILT_IN_ATOMIC_FETCH_XOR_8:
788 case BUILT_IN_ATOMIC_FETCH_XOR_16:
789
790 case BUILT_IN_ATOMIC_FETCH_OR_1:
791 case BUILT_IN_ATOMIC_FETCH_OR_2:
792 case BUILT_IN_ATOMIC_FETCH_OR_4:
793 case BUILT_IN_ATOMIC_FETCH_OR_8:
794 case BUILT_IN_ATOMIC_FETCH_OR_16:
795 {
796 dest = gimple_call_arg (call, 0);
797 /* DEST represents the address of a memory location.
798 instrument_derefs wants the memory location, so let's
799 dereference the address DEST before handing it to
800 instrument_derefs. */
801 if (TREE_CODE (dest) == ADDR_EXPR)
802 dest = TREE_OPERAND (dest, 0);
803 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
804 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
805 dest, build_int_cst (TREE_TYPE (dest), 0));
806 else
807 gcc_unreachable ();
808
809 access_size = int_size_in_bytes (TREE_TYPE (dest));
810 }
811
812 default:
813 /* The other built-in memory accesses are not instrumented in this
814 function because they either don't have any length parameter,
815 or their length parameter is just a limit. */
816 break;
817 }
818
819 if (len != NULL_TREE)
820 {
821 if (source0 != NULL_TREE)
822 {
823 src0->start = source0;
824 src0->access_size = access_size;
825 *src0_len = len;
826 *src0_is_store = false;
827 }
828
829 if (source1 != NULL_TREE)
830 {
831 src1->start = source1;
832 src1->access_size = access_size;
833 *src1_len = len;
834 *src1_is_store = false;
835 }
836
837 if (dest != NULL_TREE)
838 {
839 dst->start = dest;
840 dst->access_size = access_size;
841 *dst_len = len;
842 *dst_is_store = true;
843 }
844
845 got_reference_p = true;
846 }
847 else if (dest)
848 {
849 dst->start = dest;
850 dst->access_size = access_size;
851 *dst_len = NULL_TREE;
852 *dst_is_store = is_store;
853 *dest_is_deref = true;
854 got_reference_p = true;
855 }
856
857 return got_reference_p;
858 }
859
860 /* Return true iff a given gimple statement has been instrumented.
861 Note that the statement is "defined" by the memory references it
862 contains. */
863
864 static bool
865 has_stmt_been_instrumented_p (gimple *stmt)
866 {
867 if (gimple_assign_single_p (stmt))
868 {
869 bool r_is_store;
870 asan_mem_ref r;
871 asan_mem_ref_init (&r, NULL, 1);
872
873 if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
874 &r_is_store))
875 return has_mem_ref_been_instrumented (&r);
876 }
877 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
878 {
879 asan_mem_ref src0, src1, dest;
880 asan_mem_ref_init (&src0, NULL, 1);
881 asan_mem_ref_init (&src1, NULL, 1);
882 asan_mem_ref_init (&dest, NULL, 1);
883
884 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
885 bool src0_is_store = false, src1_is_store = false,
886 dest_is_store = false, dest_is_deref = false, intercepted_p = true;
887 if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
888 &src0, &src0_len, &src0_is_store,
889 &src1, &src1_len, &src1_is_store,
890 &dest, &dest_len, &dest_is_store,
891 &dest_is_deref, &intercepted_p))
892 {
893 if (src0.start != NULL_TREE
894 && !has_mem_ref_been_instrumented (&src0, src0_len))
895 return false;
896
897 if (src1.start != NULL_TREE
898 && !has_mem_ref_been_instrumented (&src1, src1_len))
899 return false;
900
901 if (dest.start != NULL_TREE
902 && !has_mem_ref_been_instrumented (&dest, dest_len))
903 return false;
904
905 return true;
906 }
907 }
908 return false;
909 }
910
911 /* Insert a memory reference into the hash table. */
912
913 static void
914 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
915 {
916 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
917
918 asan_mem_ref r;
919 asan_mem_ref_init (&r, ref, access_size);
920
921 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
922 if (*slot == NULL || (*slot)->access_size < access_size)
923 *slot = asan_mem_ref_new (ref, access_size);
924 }
925
926 /* Initialize shadow_ptr_types array. */
927
928 static void
929 asan_init_shadow_ptr_types (void)
930 {
931 asan_shadow_set = new_alias_set ();
932 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
933 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
934 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
935 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
936 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
937 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
938 initialize_sanitizer_builtins ();
939 }
940
941 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
942
943 static tree
944 asan_pp_string (pretty_printer *pp)
945 {
946 const char *buf = pp_formatted_text (pp);
947 size_t len = strlen (buf);
948 tree ret = build_string (len + 1, buf);
949 TREE_TYPE (ret)
950 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
951 build_index_type (size_int (len)));
952 TREE_READONLY (ret) = 1;
953 TREE_STATIC (ret) = 1;
954 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
955 }
956
957 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
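/* For instance (a worked example), on a little-endian target
shadow_bytes = { 0x00, 0x00, 0xF4, 0xF4 } yields the SImode constant
0xF4F40000 which, once stored, places the bytes in memory in the same
order as the array, i.e. 00 00 F4 F4. */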
958
959 static rtx
960 asan_shadow_cst (unsigned char shadow_bytes[4])
961 {
962 int i;
963 unsigned HOST_WIDE_INT val = 0;
964 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
965 for (i = 0; i < 4; i++)
966 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
967 << (BITS_PER_UNIT * i);
968 return gen_int_mode (val, SImode);
969 }
970
971 /* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't emit a library call
972 here, though. */
973
974 static void
975 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
976 {
977 rtx_insn *insn, *insns, *jump;
978 rtx_code_label *top_label;
979 rtx end, addr, tmp;
980
981 start_sequence ();
982 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
983 insns = get_insns ();
984 end_sequence ();
985 for (insn = insns; insn; insn = NEXT_INSN (insn))
986 if (CALL_P (insn))
987 break;
988 if (insn == NULL_RTX)
989 {
990 emit_insn (insns);
991 return;
992 }
993
994 gcc_assert ((len & 3) == 0);
995 top_label = gen_label_rtx ();
996 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
997 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
998 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
999 emit_label (top_label);
1000
1001 emit_move_insn (shadow_mem, const0_rtx);
1002 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1003 true, OPTAB_LIB_WIDEN);
1004 if (tmp != addr)
1005 emit_move_insn (addr, tmp);
1006 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1007 jump = get_last_insn ();
1008 gcc_assert (JUMP_P (jump));
1009 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
1010 }
1011
1012 void
1013 asan_function_start (void)
1014 {
1015 section *fnsec = function_section (current_function_decl);
1016 switch_to_section (fnsec);
1017 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1018 current_function_funcdef_no);
1019 }
1020
1021 /* Insert code to protect stack vars. The prologue sequence should be emitted
1022 directly, epilogue sequence returned. BASE is the register holding the
1023 stack base, to which the offsets in the OFFSETS array are relative. The
1024 OFFSETS array contains pairs of offsets in reverse order, always the end
1025 offset of some gap that needs protection followed by its starting offset,
1026 and DECLS is an array of representative decls for each var partition.
1027 LENGTH is the length of the OFFSETS array; the DECLS array is LENGTH / 2 - 1
1028 elements long (OFFSETS includes the gap before the first variable as well
1029 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1030 register on which the stack vars' DECL_RTLs are based. Either BASE should be
1031 assigned to PBASE, when not doing use-after-return protection, or the
1032 corresponding address based on the __asan_stack_malloc* return value. */
1033
1034 rtx_insn *
1035 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1036 HOST_WIDE_INT *offsets, tree *decls, int length)
1037 {
1038 rtx shadow_base, shadow_mem, ret, mem, orig_base;
1039 rtx_code_label *lab;
1040 rtx_insn *insns;
1041 char buf[30];
1042 unsigned char shadow_bytes[4];
1043 HOST_WIDE_INT base_offset = offsets[length - 1];
1044 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1045 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1046 HOST_WIDE_INT last_offset, last_size;
1047 int l;
1048 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1049 tree str_cst, decl, id;
1050 int use_after_return_class = -1;
1051
1052 if (shadow_ptr_types[0] == NULL_TREE)
1053 asan_init_shadow_ptr_types ();
1054
1055 /* First of all, prepare the description string. */
1056 pretty_printer asan_pp;
1057
1058 pp_decimal_int (&asan_pp, length / 2 - 1);
1059 pp_space (&asan_pp);
1060 for (l = length - 2; l; l -= 2)
1061 {
1062 tree decl = decls[l / 2 - 1];
1063 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1064 pp_space (&asan_pp);
1065 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1066 pp_space (&asan_pp);
1067 if (DECL_P (decl) && DECL_NAME (decl))
1068 {
1069 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1070 pp_space (&asan_pp);
1071 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1072 }
1073 else
1074 pp_string (&asan_pp, "9 <unknown>");
1075 pp_space (&asan_pp);
1076 }
1077 str_cst = asan_pp_string (&asan_pp);
1078
1079 /* Emit the prologue sequence. */
1080 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1081 && ASAN_USE_AFTER_RETURN)
1082 {
1083 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1084 /* __asan_stack_malloc_N guarantees alignment
1085 N < 6 ? (64 << N) : 4096 bytes. */
1086 if (alignb > (use_after_return_class < 6
1087 ? (64U << use_after_return_class) : 4096U))
1088 use_after_return_class = -1;
1089 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1090 base_align_bias = ((asan_frame_size + alignb - 1)
1091 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1092 }
1093 /* Align base if target is STRICT_ALIGNMENT. */
1094 if (STRICT_ALIGNMENT)
1095 base = expand_binop (Pmode, and_optab, base,
1096 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1097 << ASAN_SHADOW_SHIFT)
1098 / BITS_PER_UNIT), Pmode), NULL_RTX,
1099 1, OPTAB_DIRECT);
1100
1101 if (use_after_return_class == -1 && pbase)
1102 emit_move_insn (pbase, base);
1103
1104 base = expand_binop (Pmode, add_optab, base,
1105 gen_int_mode (base_offset - base_align_bias, Pmode),
1106 NULL_RTX, 1, OPTAB_DIRECT);
1107 orig_base = NULL_RTX;
1108 if (use_after_return_class != -1)
1109 {
1110 if (asan_detect_stack_use_after_return == NULL_TREE)
1111 {
1112 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1113 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1114 integer_type_node);
1115 SET_DECL_ASSEMBLER_NAME (decl, id);
1116 TREE_ADDRESSABLE (decl) = 1;
1117 DECL_ARTIFICIAL (decl) = 1;
1118 DECL_IGNORED_P (decl) = 1;
1119 DECL_EXTERNAL (decl) = 1;
1120 TREE_STATIC (decl) = 1;
1121 TREE_PUBLIC (decl) = 1;
1122 TREE_USED (decl) = 1;
1123 asan_detect_stack_use_after_return = decl;
1124 }
1125 orig_base = gen_reg_rtx (Pmode);
1126 emit_move_insn (orig_base, base);
1127 ret = expand_normal (asan_detect_stack_use_after_return);
1128 lab = gen_label_rtx ();
1129 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1130 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1131 VOIDmode, 0, lab, very_likely);
1132 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1133 use_after_return_class);
1134 ret = init_one_libfunc (buf);
1135 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 1,
1136 GEN_INT (asan_frame_size
1137 + base_align_bias),
1138 TYPE_MODE (pointer_sized_int_node));
1139 /* __asan_stack_malloc_[n] returns a pointer to the fake stack if it
1140 succeeded and NULL otherwise. Check whether RET is NULL here and jump over
1141 the BASE reassignment in that case. Otherwise, reassign BASE to RET. */
1142 int very_unlikely = REG_BR_PROB_BASE / 2000 - 1;
1143 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1144 VOIDmode, 0, lab, very_unlikely);
1145 ret = convert_memory_address (Pmode, ret);
1146 emit_move_insn (base, ret);
1147 emit_label (lab);
1148 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1149 gen_int_mode (base_align_bias
1150 - base_offset, Pmode),
1151 NULL_RTX, 1, OPTAB_DIRECT));
1152 }
1153 mem = gen_rtx_MEM (ptr_mode, base);
1154 mem = adjust_address (mem, VOIDmode, base_align_bias);
1155 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1156 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1157 emit_move_insn (mem, expand_normal (str_cst));
1158 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1159 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1160 id = get_identifier (buf);
1161 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1162 VAR_DECL, id, char_type_node);
1163 SET_DECL_ASSEMBLER_NAME (decl, id);
1164 TREE_ADDRESSABLE (decl) = 1;
1165 TREE_READONLY (decl) = 1;
1166 DECL_ARTIFICIAL (decl) = 1;
1167 DECL_IGNORED_P (decl) = 1;
1168 TREE_STATIC (decl) = 1;
1169 TREE_PUBLIC (decl) = 0;
1170 TREE_USED (decl) = 1;
1171 DECL_INITIAL (decl) = decl;
1172 TREE_ASM_WRITTEN (decl) = 1;
1173 TREE_ASM_WRITTEN (id) = 1;
1174 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1175 shadow_base = expand_binop (Pmode, lshr_optab, base,
1176 GEN_INT (ASAN_SHADOW_SHIFT),
1177 NULL_RTX, 1, OPTAB_DIRECT);
1178 shadow_base
1179 = plus_constant (Pmode, shadow_base,
1180 asan_shadow_offset ()
1181 + (base_align_bias >> ASAN_SHADOW_SHIFT));
1182 gcc_assert (asan_shadow_set != -1
1183 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1184 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1185 set_mem_alias_set (shadow_mem, asan_shadow_set);
1186 if (STRICT_ALIGNMENT)
1187 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1188 prev_offset = base_offset;
1189 for (l = length; l; l -= 2)
1190 {
1191 if (l == 2)
1192 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1193 offset = offsets[l - 1];
1194 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1195 {
1196 int i;
1197 HOST_WIDE_INT aoff
1198 = base_offset + ((offset - base_offset)
1199 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1200 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1201 (aoff - prev_offset)
1202 >> ASAN_SHADOW_SHIFT);
1203 prev_offset = aoff;
1204 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1205 if (aoff < offset)
1206 {
1207 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1208 shadow_bytes[i] = 0;
1209 else
1210 shadow_bytes[i] = offset - aoff;
1211 }
1212 else
1213 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1214 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1215 offset = aoff;
1216 }
1217 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1218 {
1219 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1220 (offset - prev_offset)
1221 >> ASAN_SHADOW_SHIFT);
1222 prev_offset = offset;
1223 memset (shadow_bytes, cur_shadow_byte, 4);
1224 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1225 offset += ASAN_RED_ZONE_SIZE;
1226 }
1227 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1228 }
1229 do_pending_stack_adjust ();
1230
1231 /* Construct epilogue sequence. */
1232 start_sequence ();
1233
1234 lab = NULL;
1235 if (use_after_return_class != -1)
1236 {
1237 rtx_code_label *lab2 = gen_label_rtx ();
1238 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1239 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1240 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1241 VOIDmode, 0, lab2, very_likely);
1242 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1243 set_mem_alias_set (shadow_mem, asan_shadow_set);
1244 mem = gen_rtx_MEM (ptr_mode, base);
1245 mem = adjust_address (mem, VOIDmode, base_align_bias);
1246 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1247 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1248 if (use_after_return_class < 5
1249 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1250 BITS_PER_UNIT, true))
1251 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1252 BITS_PER_UNIT, true, 0);
1253 else if (use_after_return_class >= 5
1254 || !set_storage_via_setmem (shadow_mem,
1255 GEN_INT (sz),
1256 gen_int_mode (c, QImode),
1257 BITS_PER_UNIT, BITS_PER_UNIT,
1258 -1, sz, sz, sz))
1259 {
1260 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1261 use_after_return_class);
1262 ret = init_one_libfunc (buf);
1263 rtx addr = convert_memory_address (ptr_mode, base);
1264 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1265 emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1266 GEN_INT (asan_frame_size + base_align_bias),
1267 TYPE_MODE (pointer_sized_int_node),
1268 orig_addr, ptr_mode);
1269 }
1270 lab = gen_label_rtx ();
1271 emit_jump (lab);
1272 emit_label (lab2);
1273 }
1274
1275 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1276 set_mem_alias_set (shadow_mem, asan_shadow_set);
1277
1278 if (STRICT_ALIGNMENT)
1279 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1280
1281 prev_offset = base_offset;
1282 last_offset = base_offset;
1283 last_size = 0;
1284 for (l = length; l; l -= 2)
1285 {
1286 offset = base_offset + ((offsets[l - 1] - base_offset)
1287 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1288 if (last_offset + last_size != offset)
1289 {
1290 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1291 (last_offset - prev_offset)
1292 >> ASAN_SHADOW_SHIFT);
1293 prev_offset = last_offset;
1294 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1295 last_offset = offset;
1296 last_size = 0;
1297 }
1298 last_size += base_offset + ((offsets[l - 2] - base_offset)
1299 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1300 - offset;
1301 }
1302 if (last_size)
1303 {
1304 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1305 (last_offset - prev_offset)
1306 >> ASAN_SHADOW_SHIFT);
1307 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1308 }
1309
1310 do_pending_stack_adjust ();
1311 if (lab)
1312 emit_label (lab);
1313
1314 insns = get_insns ();
1315 end_sequence ();
1316 return insns;
1317 }
1318
1319 /* Return true if DECL, a global var, might be overridden and therefore
1320 needs a local alias. */
1321
1322 static bool
1323 asan_needs_local_alias (tree decl)
1324 {
1325 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1326 }
1327
1328 /* Return true if DECL is a VAR_DECL that should be protected
1329 by Address Sanitizer, by appending a red zone with protected
1330 shadow memory after it and aligning it to at least
1331 ASAN_RED_ZONE_SIZE bytes. */
1332
1333 bool
1334 asan_protect_global (tree decl)
1335 {
1336 if (!ASAN_GLOBALS)
1337 return false;
1338
1339 rtx rtl, symbol;
1340
1341 if (TREE_CODE (decl) == STRING_CST)
1342 {
1343 /* Instrument all STRING_CSTs except those created
1344 by asan_pp_string here. */
1345 if (shadow_ptr_types[0] != NULL_TREE
1346 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1347 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1348 return false;
1349 return true;
1350 }
1351 if (TREE_CODE (decl) != VAR_DECL
1352 /* TLS vars aren't statically protectable. */
1353 || DECL_THREAD_LOCAL_P (decl)
1354 /* Externs will be protected elsewhere. */
1355 || DECL_EXTERNAL (decl)
1356 || !DECL_RTL_SET_P (decl)
1357 /* Comdat vars pose an ABI problem: we can't know if
1358 the var that is selected by the linker will have
1359 padding or not. */
1360 || DECL_ONE_ONLY (decl)
1361 /* Similarly for common vars. People can use -fno-common.
1362 Note: Linux kernel is built with -fno-common, so we do instrument
1363 globals there even if it is C. */
1364 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1365 /* Don't protect if using a user section: vars placed
1366 into a user section from multiple TUs are often assumed
1367 to form an array of such vars, and putting padding in there
1368 breaks this assumption. */
1369 || (DECL_SECTION_NAME (decl) != NULL
1370 && !symtab_node::get (decl)->implicit_section
1371 && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1372 || DECL_SIZE (decl) == 0
1373 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1374 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1375 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1376 || TREE_TYPE (decl) == ubsan_get_source_location_type ())
1377 return false;
1378
1379 rtl = DECL_RTL (decl);
1380 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1381 return false;
1382 symbol = XEXP (rtl, 0);
1383
1384 if (CONSTANT_POOL_ADDRESS_P (symbol)
1385 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1386 return false;
1387
1388 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1389 return false;
1390
1391 #ifndef ASM_OUTPUT_DEF
1392 if (asan_needs_local_alias (decl))
1393 return false;
1394 #endif
1395
1396 return true;
1397 }
1398
1399 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1400 IS_STORE is either 1 (for a store) or 0 (for a load). */
1401
1402 static tree
1403 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1404 int *nargs)
1405 {
1406 static enum built_in_function report[2][2][6]
1407 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1408 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1409 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1410 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1411 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1412 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1413 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1414 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1415 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1416 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1417 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1418 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1419 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1420 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1421 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1422 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1423 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1424 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1425 if (size_in_bytes == -1)
1426 {
1427 *nargs = 2;
1428 return builtin_decl_implicit (report[recover_p][is_store][5]);
1429 }
1430 *nargs = 1;
1431 int size_log2 = exact_log2 (size_in_bytes);
1432 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1433 }
1434
1435 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1436 IS_STORE is either 1 (for a store) or 0 (for a load). */
1437
1438 static tree
1439 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1440 int *nargs)
1441 {
1442 static enum built_in_function check[2][2][6]
1443 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1444 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1445 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1446 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1447 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1448 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1449 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1450 BUILT_IN_ASAN_LOAD2_NOABORT,
1451 BUILT_IN_ASAN_LOAD4_NOABORT,
1452 BUILT_IN_ASAN_LOAD8_NOABORT,
1453 BUILT_IN_ASAN_LOAD16_NOABORT,
1454 BUILT_IN_ASAN_LOADN_NOABORT },
1455 { BUILT_IN_ASAN_STORE1_NOABORT,
1456 BUILT_IN_ASAN_STORE2_NOABORT,
1457 BUILT_IN_ASAN_STORE4_NOABORT,
1458 BUILT_IN_ASAN_STORE8_NOABORT,
1459 BUILT_IN_ASAN_STORE16_NOABORT,
1460 BUILT_IN_ASAN_STOREN_NOABORT } } };
1461 if (size_in_bytes == -1)
1462 {
1463 *nargs = 2;
1464 return builtin_decl_implicit (check[recover_p][is_store][5]);
1465 }
1466 *nargs = 1;
1467 int size_log2 = exact_log2 (size_in_bytes);
1468 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1469 }
1470
1471 /* Split the current basic block and create a condition statement
1472 insertion point right before or after the statement pointed to by
1473 ITER. Return an iterator to the point at which the caller might
1474 safely insert the condition statement.
1475
1476 THEN_BLOCK must be set to the address of an uninitialized instance
1477 of basic_block. The function will then set *THEN_BLOCK to the
1478 'then block' of the condition statement to be inserted by the
1479 caller.
1480
1481 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1482 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1483
1484 Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1485 block' of the condition statement to be inserted by the caller.
1486
1487 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1488 statements starting from *ITER, and *THEN_BLOCK is a new empty
1489 block.
1490
1491 *ITER is adjusted to always point to the first statement
1492 of the basic block *FALLTHROUGH_BLOCK. That statement is the
1493 same as what ITER was pointing to prior to calling this function,
1494 if BEFORE_P is true; otherwise, it is its following statement. */
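/* Schematically (a rough sketch of the resulting CFG):

     cond_bb ---[EDGE_TRUE_VALUE]---> then_bb
        |                               |
   [EDGE_FALSE_VALUE]     [EDGE_FALLTHRU, if CREATE_THEN_FALLTHRU_EDGE]
        |                               |
        +---------> fallthru_bb <-------+

   cond_bb ends where the caller will insert the condition, then_bb is new
   and empty, and fallthru_bb starts with the statement *ITER pointed to. */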
1495
1496 gimple_stmt_iterator
1497 create_cond_insert_point (gimple_stmt_iterator *iter,
1498 bool before_p,
1499 bool then_more_likely_p,
1500 bool create_then_fallthru_edge,
1501 basic_block *then_block,
1502 basic_block *fallthrough_block)
1503 {
1504 gimple_stmt_iterator gsi = *iter;
1505
1506 if (!gsi_end_p (gsi) && before_p)
1507 gsi_prev (&gsi);
1508
1509 basic_block cur_bb = gsi_bb (*iter);
1510
1511 edge e = split_block (cur_bb, gsi_stmt (gsi));
1512
1513 /* Get a hold on the 'condition block', the 'then block' and the
1514 'else block'. */
1515 basic_block cond_bb = e->src;
1516 basic_block fallthru_bb = e->dest;
1517 basic_block then_bb = create_empty_bb (cond_bb);
1518 if (current_loops)
1519 {
1520 add_bb_to_loop (then_bb, cond_bb->loop_father);
1521 loops_state_set (LOOPS_NEED_FIXUP);
1522 }
1523
1524 /* Set up the newly created 'then block'. */
1525 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1526 int fallthrough_probability
1527 = then_more_likely_p
1528 ? PROB_VERY_UNLIKELY
1529 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1530 e->probability = PROB_ALWAYS - fallthrough_probability;
1531 if (create_then_fallthru_edge)
1532 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1533
1534 /* Set up the fallthrough basic block. */
1535 e = find_edge (cond_bb, fallthru_bb);
1536 e->flags = EDGE_FALSE_VALUE;
1537 e->count = cond_bb->count;
1538 e->probability = fallthrough_probability;
1539
1540 /* Update dominance info for the newly created then_bb; note that
1541 fallthru_bb's dominance info has already been updated by
1542 split_block. */
1543 if (dom_info_available_p (CDI_DOMINATORS))
1544 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1545
1546 *then_block = then_bb;
1547 *fallthrough_block = fallthru_bb;
1548 *iter = gsi_start_bb (fallthru_bb);
1549
1550 return gsi_last_bb (cond_bb);
1551 }
1552
1553 /* Insert an if condition followed by a 'then block' right before the
1554 statement pointed to by ITER. The fallthrough block -- which is the
1555 else block of the condition as well as the destination of the
1556 outgoing edge of the 'then block' -- starts with the statement
1557 pointed to by ITER.
1558
1559 COND is the condition of the if.
1560
1561 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1562 'then block' is higher than the probability of the edge to the
1563 fallthrough block.
1564
1565 Upon completion of the function, *THEN_BB is set to the newly
1566 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1567 fallthrough block.
1568
1569 *ITER is adjusted to still point to the same statement it was
1570 pointing to initially. */
1571
1572 static void
1573 insert_if_then_before_iter (gcond *cond,
1574 gimple_stmt_iterator *iter,
1575 bool then_more_likely_p,
1576 basic_block *then_bb,
1577 basic_block *fallthrough_bb)
1578 {
1579 gimple_stmt_iterator cond_insert_point =
1580 create_cond_insert_point (iter,
1581 /*before_p=*/true,
1582 then_more_likely_p,
1583 /*create_then_fallthru_edge=*/true,
1584 then_bb,
1585 fallthrough_bb);
1586 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1587 }
1588
1589 /* Build
1590 (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
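/* Roughly, the emitted GIMPLE sequence looks like this (a sketch, assuming
ASAN_SHADOW_SHIFT == 3; the SSA names are made up):

     _1 = base_addr >> 3;
     _2 = _1 + asan_shadow_offset ();
     _3 = (shadow_ptr_type) _2;
     _4 = *_3;

   and the SSA name holding the loaded shadow byte (_4) is returned. */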
1591
1592 static tree
1593 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1594 tree base_addr, tree shadow_ptr_type)
1595 {
1596 tree t, uintptr_type = TREE_TYPE (base_addr);
1597 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1598 gimple *g;
1599
1600 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1601 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1602 base_addr, t);
1603 gimple_set_location (g, location);
1604 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1605
1606 t = build_int_cst (uintptr_type, asan_shadow_offset ());
1607 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1608 gimple_assign_lhs (g), t);
1609 gimple_set_location (g, location);
1610 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1611
1612 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1613 gimple_assign_lhs (g));
1614 gimple_set_location (g, location);
1615 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1616
1617 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1618 build_int_cst (shadow_ptr_type, 0));
1619 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
1620 gimple_set_location (g, location);
1621 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1622 return gimple_assign_lhs (g);
1623 }
1624
1625 /* BASE can already be an SSA_NAME; in that case, do not create a
1626 new SSA_NAME for it. */
1627
1628 static tree
1629 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1630 bool before_p)
1631 {
1632 if (TREE_CODE (base) == SSA_NAME)
1633 return base;
1634 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
1635 TREE_CODE (base), base);
1636 gimple_set_location (g, loc);
1637 if (before_p)
1638 gsi_insert_before (iter, g, GSI_SAME_STMT);
1639 else
1640 gsi_insert_after (iter, g, GSI_NEW_STMT);
1641 return gimple_assign_lhs (g);
1642 }
1643
1644 /* LEN can already have the necessary size and precision;
1645 in that case, do not create a new variable. */
1646
1647 tree
1648 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1649 bool before_p)
1650 {
1651 if (ptrofftype_p (len))
1652 return len;
1653 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1654 NOP_EXPR, len);
1655 gimple_set_location (g, loc);
1656 if (before_p)
1657 gsi_insert_before (iter, g, GSI_SAME_STMT);
1658 else
1659 gsi_insert_after (iter, g, GSI_NEW_STMT);
1660 return gimple_assign_lhs (g);
1661 }
1662
1663 /* Instrument the memory access instruction BASE. Insert new
1664 statements before or after ITER.
1665
1666 Note that the memory access represented by BASE can be either an
1667 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1668 location. IS_STORE is TRUE for a store, FALSE for a load.
1669 BEFORE_P is TRUE for inserting the instrumentation code before
1670 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1671 for a scalar memory access and FALSE for memory region access.
1672 IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have a
1673 non-zero length. ALIGN gives the alignment of the accessed memory object.
1677
1678 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1679 statement it was pointing to prior to calling this function,
1680 otherwise, it points to the statement logically following it. */
1681
1682 static void
1683 build_check_stmt (location_t loc, tree base, tree len,
1684 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1685 bool is_non_zero_len, bool before_p, bool is_store,
1686 bool is_scalar_access, unsigned int align = 0)
1687 {
1688 gimple_stmt_iterator gsi = *iter;
1689 gimple *g;
1690
1691 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1692
1693 gsi = *iter;
1694
1695 base = unshare_expr (base);
1696 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1697
1698 if (len)
1699 {
1700 len = unshare_expr (len);
1701 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1702 }
1703 else
1704 {
1705 gcc_assert (size_in_bytes != -1);
1706 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1707 }
1708
1709 if (size_in_bytes > 1)
1710 {
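/* Only a power-of-two access of at most 16 bytes can be checked with a
   single shadow memory read; anything larger or oddly sized is treated
   as a memory region access instead.  */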
1711 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1712 || size_in_bytes > 16)
1713 is_scalar_access = false;
1714 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1715 {
1716 /* On non-strict-alignment targets, if a
1717 16-byte access is only 8-byte aligned,
1718 this results in a misaligned 2-byte load
1719 from shadow memory, but it can still be
1720 handled with a single read. */
1721 if (size_in_bytes != 16
1722 || STRICT_ALIGNMENT
1723 || align < 8 * BITS_PER_UNIT)
1724 is_scalar_access = false;
1725 }
1726 }
1727
1728 HOST_WIDE_INT flags = 0;
1729 if (is_store)
1730 flags |= ASAN_CHECK_STORE;
1731 if (is_non_zero_len)
1732 flags |= ASAN_CHECK_NON_ZERO_LEN;
1733 if (is_scalar_access)
1734 flags |= ASAN_CHECK_SCALAR_ACCESS;
1735
1736 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1737 build_int_cst (integer_type_node, flags),
1738 base, len,
1739 build_int_cst (integer_type_node,
1740 align / BITS_PER_UNIT));
1741 gimple_set_location (g, loc);
1742 if (before_p)
1743 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1744 else
1745 {
1746 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1747 gsi_next (&gsi);
1748 *iter = gsi;
1749 }
1750 }
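
/* For reference only: in GIMPLE dumps the internal call built above shows
   up roughly as

     ASAN_CHECK (7, base.5_1, 4, 4);

   where the first operand packs the ASAN_CHECK_* flag bits set above and
   the remaining operands are the access address, its length and its
   alignment in bytes.  The SSA name and constants here are just an
   example.  */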
1751
1752 /* If T represents a memory access, add instrumentation code before ITER.
1753 LOCATION is the source code location.
1754 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1755
1756 static void
1757 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1758 location_t location, bool is_store)
1759 {
1760 if (is_store && !ASAN_INSTRUMENT_WRITES)
1761 return;
1762 if (!is_store && !ASAN_INSTRUMENT_READS)
1763 return;
1764
1765 tree type, base;
1766 HOST_WIDE_INT size_in_bytes;
1767
1768 type = TREE_TYPE (t);
1769 switch (TREE_CODE (t))
1770 {
1771 case ARRAY_REF:
1772 case COMPONENT_REF:
1773 case INDIRECT_REF:
1774 case MEM_REF:
1775 case VAR_DECL:
1776 case BIT_FIELD_REF:
1777 break;
1779 default:
1780 return;
1781 }
1782
1783 size_in_bytes = int_size_in_bytes (type);
1784 if (size_in_bytes <= 0)
1785 return;
1786
1787 HOST_WIDE_INT bitsize, bitpos;
1788 tree offset;
1789 machine_mode mode;
1790 int volatilep = 0, unsignedp = 0;
1791 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1792 &mode, &unsignedp, &volatilep, false);
1793
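/* An access to a bit-field reads or writes the bytes of its representative
   field, so instrument a COMPONENT_REF of the representative instead of
   the bit-field itself.  */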
1794 if (TREE_CODE (t) == COMPONENT_REF
1795 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1796 {
1797 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1798 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1799 TREE_OPERAND (t, 0), repr,
1800 NULL_TREE), location, is_store);
1801 return;
1802 }
1803
1804 if (bitpos % BITS_PER_UNIT
1805 || bitsize != size_in_bytes * BITS_PER_UNIT)
1806 return;
1807
1808 if (TREE_CODE (inner) == VAR_DECL
1809 && offset == NULL_TREE
1810 && bitpos >= 0
1811 && DECL_SIZE (inner)
1812 && tree_fits_shwi_p (DECL_SIZE (inner))
1813 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1814 {
1815 if (DECL_THREAD_LOCAL_P (inner))
1816 return;
1817 if (!ASAN_GLOBALS && is_global_var (inner))
1818 return;
1819 if (!TREE_STATIC (inner))
1820 {
1821 /* Automatic vars in the current function will always be
1822 accessible. */
1823 if (decl_function_context (inner) == current_function_decl)
1824 return;
1825 }
1826 /* Always instrument external vars; they might be dynamically
1827 initialized. */
1828 else if (!DECL_EXTERNAL (inner))
1829 {
1830 /* For static vars, if they are known not to be dynamically
1831 initialized, they will always be accessible. */
1832 varpool_node *vnode = varpool_node::get (inner);
1833 if (vnode && !vnode->dynamically_initialized)
1834 return;
1835 }
1836 }
1837
1838 base = build_fold_addr_expr (t);
1839 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1840 {
1841 unsigned int align = get_object_alignment (t);
1842 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1843 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1844 is_store, /*is_scalar_access*/true, align);
1845 update_mem_ref_hash_table (base, size_in_bytes);
1846 update_mem_ref_hash_table (t, size_in_bytes);
1847 }
1848
1849 }
1850
1851 /* Insert a memory reference into the hash table if access length
1852 can be determined at compile time. */
1853
1854 static void
1855 maybe_update_mem_ref_hash_table (tree base, tree len)
1856 {
1857 if (!POINTER_TYPE_P (TREE_TYPE (base))
1858 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1859 return;
1860
1861 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1862
1863 if (size_in_bytes != -1)
1864 update_mem_ref_hash_table (base, size_in_bytes);
1865 }
1866
1867 /* Instrument an access to a contiguous memory region that starts at
1868 the address pointed to by BASE, over a length of LEN (expressed in
1869 units of sizeof (*BASE) bytes). ITER points to the instruction before
1870 which the instrumentation instructions must be inserted. LOCATION
1871 is the source location that the instrumentation instructions must
1872 have. If IS_STORE is true, then the memory access is a store;
1873 otherwise, it's a load. */
1874
1875 static void
1876 instrument_mem_region_access (tree base, tree len,
1877 gimple_stmt_iterator *iter,
1878 location_t location, bool is_store)
1879 {
1880 if (!POINTER_TYPE_P (TREE_TYPE (base))
1881 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1882 || integer_zerop (len))
1883 return;
1884
1885 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1886
1887 if ((size_in_bytes == -1)
1888 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1889 {
1890 build_check_stmt (location, base, len, size_in_bytes, iter,
1891 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1892 is_store, /*is_scalar_access*/false, /*align*/0);
1893 }
1894
1895 maybe_update_mem_ref_hash_table (base, len);
1896 *iter = gsi_for_stmt (gsi_stmt (*iter));
1897 }
1898
1899 /* Instrument the call to a built-in memory access function that is
1900 pointed to by the iterator ITER.
1901
1902 Upon completion, return TRUE iff *ITER has been advanced to the
1903 statement following the one it was originally pointing to. */
1904
1905 static bool
1906 instrument_builtin_call (gimple_stmt_iterator *iter)
1907 {
1908 if (!ASAN_MEMINTRIN)
1909 return false;
1910
1911 bool iter_advanced_p = false;
1912 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1913
1914 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1915
1916 location_t loc = gimple_location (call);
1917
1918 asan_mem_ref src0, src1, dest;
1919 asan_mem_ref_init (&src0, NULL, 1);
1920 asan_mem_ref_init (&src1, NULL, 1);
1921 asan_mem_ref_init (&dest, NULL, 1);
1922
1923 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1924 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1925 dest_is_deref = false, intercepted_p = true;
1926
1927 if (get_mem_refs_of_builtin_call (call,
1928 &src0, &src0_len, &src0_is_store,
1929 &src1, &src1_len, &src1_is_store,
1930 &dest, &dest_len, &dest_is_store,
1931 &dest_is_deref, &intercepted_p))
1932 {
1933 if (dest_is_deref)
1934 {
1935 instrument_derefs (iter, dest.start, loc, dest_is_store);
1936 gsi_next (iter);
1937 iter_advanced_p = true;
1938 }
1939 else if (!intercepted_p
1940 && (src0_len || src1_len || dest_len))
1941 {
1942 if (src0.start != NULL_TREE)
1943 instrument_mem_region_access (src0.start, src0_len,
1944 iter, loc, /*is_store=*/false);
1945 if (src1.start != NULL_TREE)
1946 instrument_mem_region_access (src1.start, src1_len,
1947 iter, loc, /*is_store=*/false);
1948 if (dest.start != NULL_TREE)
1949 instrument_mem_region_access (dest.start, dest_len,
1950 iter, loc, /*is_store=*/true);
1951
1952 *iter = gsi_for_stmt (call);
1953 gsi_next (iter);
1954 iter_advanced_p = true;
1955 }
1956 else
1957 {
1958 if (src0.start != NULL_TREE)
1959 maybe_update_mem_ref_hash_table (src0.start, src0_len);
1960 if (src1.start != NULL_TREE)
1961 maybe_update_mem_ref_hash_table (src1.start, src1_len);
1962 if (dest.start != NULL_TREE)
1963 maybe_update_mem_ref_hash_table (dest.start, dest_len);
1964 }
1965 }
1966 return iter_advanced_p;
1967 }
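
/* A sketch of the effect (not an exhaustive description): for a builtin
   that the libasan run time does not intercept, the code above emits
   region checks, load checks over the source range(s) and a store check
   over the destination range; for intercepted builtins such as memcpy
   the run-time library performs the checking itself, so only the
   memory-reference hash table is updated to avoid re-instrumenting the
   same bytes later in the extended basic block.  */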
1968
1969 /* Instrument the assignment statement ITER if it is subject to
1970 instrumentation. Return TRUE iff instrumentation actually
1971 happened. In that case, the iterator ITER is advanced to the next
1972 logical expression following the one initially pointed to by ITER,
1973 and the relevant memory reference whose access has been
1974 instrumented is added to the memory references hash table. */
1975
1976 static bool
1977 maybe_instrument_assignment (gimple_stmt_iterator *iter)
1978 {
1979 gimple *s = gsi_stmt (*iter);
1980
1981 gcc_assert (gimple_assign_single_p (s));
1982
1983 tree ref_expr = NULL_TREE;
1984 bool is_store, is_instrumented = false;
1985
1986 if (gimple_store_p (s))
1987 {
1988 ref_expr = gimple_assign_lhs (s);
1989 is_store = true;
1990 instrument_derefs (iter, ref_expr,
1991 gimple_location (s),
1992 is_store);
1993 is_instrumented = true;
1994 }
1995
1996 if (gimple_assign_load_p (s))
1997 {
1998 ref_expr = gimple_assign_rhs1 (s);
1999 is_store = false;
2000 instrument_derefs (iter, ref_expr,
2001 gimple_location (s),
2002 is_store);
2003 is_instrumented = true;
2004 }
2005
2006 if (is_instrumented)
2007 gsi_next (iter);
2008
2009 return is_instrumented;
2010 }
2011
2012 /* Instrument the function call pointed to by the iterator ITER, if it
2013 is subject to instrumentation. At the moment, the only function
2014 calls that are instrumented are some built-in functions that access
2015 memory. Look at instrument_builtin_call to learn more.
2016
2017 Upon completion return TRUE iff *ITER was advanced to the statement
2018 following the one it was originally pointing to. */
2019
2020 static bool
2021 maybe_instrument_call (gimple_stmt_iterator *iter)
2022 {
2023 gimple *stmt = gsi_stmt (*iter);
2024 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2025
2026 if (is_builtin && instrument_builtin_call (iter))
2027 return true;
2028
2029 if (gimple_call_noreturn_p (stmt))
2030 {
2031 if (is_builtin)
2032 {
2033 tree callee = gimple_call_fndecl (stmt);
2034 switch (DECL_FUNCTION_CODE (callee))
2035 {
2036 case BUILT_IN_UNREACHABLE:
2037 case BUILT_IN_TRAP:
2038 /* Don't instrument these. */
2039 return false;
2040 default:
2041 break;
2042 }
2043 }
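/* The run-time helper __asan_handle_no_return unpoisons the whole stack
   of the current thread, so that frames bypassed by a noreturn call
   (longjmp, exceptions, exit paths) do not leave stale redzones behind
   that would later cause false positives.  */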
2044 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2045 gimple *g = gimple_build_call (decl, 0);
2046 gimple_set_location (g, gimple_location (stmt));
2047 gsi_insert_before (iter, g, GSI_SAME_STMT);
2048 }
2049 return false;
2050 }
2051
2052 /* Walk each instruction of all basic blocks and instrument those that
2053 represent memory references: loads, stores, or function calls.
2054 In a given basic block, this function avoids instrumenting memory
2055 references that have already been instrumented. */
2056
2057 static void
2058 transform_statements (void)
2059 {
2060 basic_block bb, last_bb = NULL;
2061 gimple_stmt_iterator i;
2062 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2063
2064 FOR_EACH_BB_FN (bb, cfun)
2065 {
2066 basic_block prev_bb = bb;
2067
2068 if (bb->index >= saved_last_basic_block) continue;
2069
2070 /* Flush the mem ref hash table, if current bb doesn't have
2071 exactly one predecessor, or if that predecessor (skipping
2072 over asan created basic blocks) isn't the last processed
2073 basic block. Thus we effectively flush on extended basic
2074 block boundaries. */
2075 while (single_pred_p (prev_bb))
2076 {
2077 prev_bb = single_pred (prev_bb);
2078 if (prev_bb->index < saved_last_basic_block)
2079 break;
2080 }
2081 if (prev_bb != last_bb)
2082 empty_mem_ref_hash_table ();
2083 last_bb = bb;
2084
2085 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2086 {
2087 gimple *s = gsi_stmt (i);
2088
2089 if (has_stmt_been_instrumented_p (s))
2090 gsi_next (&i);
2091 else if (gimple_assign_single_p (s)
2092 && !gimple_clobber_p (s)
2093 && maybe_instrument_assignment (&i))
2094 /* Nothing to do as maybe_instrument_assignment advanced
2095 the iterator I. */;
2096 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2097 /* Nothing to do as maybe_instrument_call
2098 advanced the iterator I. */;
2099 else
2100 {
2101 /* No instrumentation happened.
2102
2103 If the current instruction is a function call that
2104 might free something, let's forget about the memory
2105 references that got instrumented. Otherwise we might
2106 miss some instrumentation opportunities. */
2107 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2108 empty_mem_ref_hash_table ();
2109
2110 gsi_next (&i);
2111 }
2112 }
2113 }
2114 free_mem_ref_resources ();
2115 }
2116
2117 /* Build
2118 __asan_before_dynamic_init (module_name)
2119 or
2120 __asan_after_dynamic_init ()
2121 call. */
2122
2123 tree
2124 asan_dynamic_init_call (bool after_p)
2125 {
2126 tree fn = builtin_decl_implicit (after_p
2127 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2128 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2129 tree module_name_cst = NULL_TREE;
2130 if (!after_p)
2131 {
2132 pretty_printer module_name_pp;
2133 pp_string (&module_name_pp, main_input_filename);
2134
2135 if (shadow_ptr_types[0] == NULL_TREE)
2136 asan_init_shadow_ptr_types ();
2137 module_name_cst = asan_pp_string (&module_name_pp);
2138 module_name_cst = fold_convert (const_ptr_type_node,
2139 module_name_cst);
2140 }
2141
2142 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2143 }
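
/* For illustration, for a translation unit foo.c (the string is simply
   main_input_filename) the calls built here bracket the dynamic
   initialization of the TU's globals:

     __asan_before_dynamic_init ("foo.c");
     ... run the TU's dynamic initializers ...
     __asan_after_dynamic_init ();

   which is what lets the run time detect initialization-order bugs
   across translation units.  */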
2144
2145 /* Build
2146 struct __asan_global
2147 {
2148 const void *__beg;
2149 uptr __size;
2150 uptr __size_with_redzone;
2151 const void *__name;
2152 const void *__module_name;
2153 uptr __has_dynamic_init;
2154 __asan_global_source_location *__location;
2155 } type. */
2156
2157 static tree
2158 asan_global_struct (void)
2159 {
2160 static const char *field_names[7]
2161 = { "__beg", "__size", "__size_with_redzone",
2162 "__name", "__module_name", "__has_dynamic_init", "__location"};
2163 tree fields[7], ret;
2164 int i;
2165
2166 ret = make_node (RECORD_TYPE);
2167 for (i = 0; i < 7; i++)
2168 {
2169 fields[i]
2170 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2171 get_identifier (field_names[i]),
2172 (i == 0 || i == 3) ? const_ptr_type_node
2173 : pointer_sized_int_node);
2174 DECL_CONTEXT (fields[i]) = ret;
2175 if (i)
2176 DECL_CHAIN (fields[i - 1]) = fields[i];
2177 }
2178 tree type_decl = build_decl (input_location, TYPE_DECL,
2179 get_identifier ("__asan_global"), ret);
2180 DECL_IGNORED_P (type_decl) = 1;
2181 DECL_ARTIFICIAL (type_decl) = 1;
2182 TYPE_FIELDS (ret) = fields[0];
2183 TYPE_NAME (ret) = type_decl;
2184 TYPE_STUB_DECL (ret) = type_decl;
2185 layout_type (ret);
2186 return ret;
2187 }
2188
2189 /* Append the description of a single global DECL to vector V.
2190 TYPE is the __asan_global struct type as returned by asan_global_struct. */
2191
2192 static void
2193 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2194 {
2195 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2196 unsigned HOST_WIDE_INT size;
2197 tree str_cst, module_name_cst, refdecl = decl;
2198 vec<constructor_elt, va_gc> *vinner = NULL;
2199
2200 pretty_printer asan_pp, module_name_pp;
2201
2202 if (DECL_NAME (decl))
2203 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2204 else
2205 pp_string (&asan_pp, "<unknown>");
2206 str_cst = asan_pp_string (&asan_pp);
2207
2208 pp_string (&module_name_pp, main_input_filename);
2209 module_name_cst = asan_pp_string (&module_name_pp);
2210
2211 if (asan_needs_local_alias (decl))
2212 {
2213 char buf[20];
2214 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2215 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2216 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2217 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2218 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2219 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2220 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2221 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2222 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2223 TREE_STATIC (refdecl) = 1;
2224 TREE_PUBLIC (refdecl) = 0;
2225 TREE_USED (refdecl) = 1;
2226 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2227 }
2228
2229 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2230 fold_convert (const_ptr_type_node,
2231 build_fold_addr_expr (refdecl)));
2232 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2233 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2234 size += asan_red_zone_size (size);
2235 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2236 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2237 fold_convert (const_ptr_type_node, str_cst));
2238 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2239 fold_convert (const_ptr_type_node, module_name_cst));
2240 varpool_node *vnode = varpool_node::get (decl);
2241 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2242 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2243 build_int_cst (uptr, has_dynamic_init));
2244 tree locptr = NULL_TREE;
2245 location_t loc = DECL_SOURCE_LOCATION (decl);
2246 expanded_location xloc = expand_location (loc);
2247 if (xloc.file != NULL)
2248 {
2249 static int lasanloccnt = 0;
2250 char buf[25];
2251 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2252 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2253 ubsan_get_source_location_type ());
2254 TREE_STATIC (var) = 1;
2255 TREE_PUBLIC (var) = 0;
2256 DECL_ARTIFICIAL (var) = 1;
2257 DECL_IGNORED_P (var) = 1;
2258 pretty_printer filename_pp;
2259 pp_string (&filename_pp, xloc.file);
2260 tree str = asan_pp_string (&filename_pp);
2261 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2262 NULL_TREE, str, NULL_TREE,
2263 build_int_cst (unsigned_type_node,
2264 xloc.line), NULL_TREE,
2265 build_int_cst (unsigned_type_node,
2266 xloc.column));
2267 TREE_CONSTANT (ctor) = 1;
2268 TREE_STATIC (ctor) = 1;
2269 DECL_INITIAL (var) = ctor;
2270 varpool_node::finalize_decl (var);
2271 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2272 }
2273 else
2274 locptr = build_int_cst (uptr, 0);
2275 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2276 init = build_constructor (type, vinner);
2277 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2278 }
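
/* An illustrative entry (all values made up) as it would appear in the
   emitted array of __asan_global descriptors for "int g;" in foo.c:

     { &g, 4, 4 + redzone, "g", "foo.c", 0, &.LASANLOC1 }

   i.e. start address, size, size including the trailing redzone, the name
   and module-name strings, the dynamic-initialization flag, and a pointer
   to the source-location record (or 0 if no location is known).  */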
2279
2280 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2281 void
2282 initialize_sanitizer_builtins (void)
2283 {
2284 tree decl;
2285
2286 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2287 return;
2288
2289 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2290 tree BT_FN_VOID_PTR
2291 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2292 tree BT_FN_VOID_CONST_PTR
2293 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2294 tree BT_FN_VOID_PTR_PTR
2295 = build_function_type_list (void_type_node, ptr_type_node,
2296 ptr_type_node, NULL_TREE);
2297 tree BT_FN_VOID_PTR_PTR_PTR
2298 = build_function_type_list (void_type_node, ptr_type_node,
2299 ptr_type_node, ptr_type_node, NULL_TREE);
2300 tree BT_FN_VOID_PTR_PTRMODE
2301 = build_function_type_list (void_type_node, ptr_type_node,
2302 pointer_sized_int_node, NULL_TREE);
2303 tree BT_FN_VOID_INT
2304 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2305 tree BT_FN_SIZE_CONST_PTR_INT
2306 = build_function_type_list (size_type_node, const_ptr_type_node,
2307 integer_type_node, NULL_TREE);
2308 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2309 tree BT_FN_IX_CONST_VPTR_INT[5];
2310 tree BT_FN_IX_VPTR_IX_INT[5];
2311 tree BT_FN_VOID_VPTR_IX_INT[5];
2312 tree vptr
2313 = build_pointer_type (build_qualified_type (void_type_node,
2314 TYPE_QUAL_VOLATILE));
2315 tree cvptr
2316 = build_pointer_type (build_qualified_type (void_type_node,
2317 TYPE_QUAL_VOLATILE
2318 |TYPE_QUAL_CONST));
2319 tree boolt
2320 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2321 int i;
2322 for (i = 0; i < 5; i++)
2323 {
2324 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2325 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2326 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2327 integer_type_node, integer_type_node,
2328 NULL_TREE);
2329 BT_FN_IX_CONST_VPTR_INT[i]
2330 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2331 BT_FN_IX_VPTR_IX_INT[i]
2332 = build_function_type_list (ix, vptr, ix, integer_type_node,
2333 NULL_TREE);
2334 BT_FN_VOID_VPTR_IX_INT[i]
2335 = build_function_type_list (void_type_node, vptr, ix,
2336 integer_type_node, NULL_TREE);
2337 }
2338 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2339 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2340 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2341 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2342 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2343 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2344 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2345 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2346 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2347 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2348 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2349 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2350 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2351 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2352 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2353 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2354 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2355 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2356 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2357 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2358 #undef ATTR_NOTHROW_LEAF_LIST
2359 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2360 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2361 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2362 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2363 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2364 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2365 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2366 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2367 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2368 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2369 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2370 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2371 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2372 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2373 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2374 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2375 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2376 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2377 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2378 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2379 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2380 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2381 #undef DEF_SANITIZER_BUILTIN
2382 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2383 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2384 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2385 set_call_expr_flags (decl, ATTRS); \
2386 set_builtin_decl (ENUM, decl, true);
2387
2388 #include "sanitizer.def"
2389
2390 /* -fsanitize=object-size uses __builtin_object_size, but that might
2391 not be available for e.g. Fortran at this point. We use
2392 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2393 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2394 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2395 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2396 BT_FN_SIZE_CONST_PTR_INT,
2397 ATTR_PURE_NOTHROW_LEAF_LIST)
2398
2399 #undef DEF_SANITIZER_BUILTIN
2400 }
2401
2402 /* Called via hash_table::traverse. Count the number of emitted
2403 STRING_CSTs in the constant hash table. */
2404
2405 int
2406 count_string_csts (constant_descriptor_tree **slot,
2407 unsigned HOST_WIDE_INT *data)
2408 {
2409 struct constant_descriptor_tree *desc = *slot;
2410 if (TREE_CODE (desc->value) == STRING_CST
2411 && TREE_ASM_WRITTEN (desc->value)
2412 && asan_protect_global (desc->value))
2413 ++*data;
2414 return 1;
2415 }
2416
2417 /* Helper structure to pass two parameters to
2418 add_string_csts. */
2419
2420 struct asan_add_string_csts_data
2421 {
2422 tree type;
2423 vec<constructor_elt, va_gc> *v;
2424 };
2425
2426 /* Called via hash_table::traverse. Call asan_add_global
2427 on emitted STRING_CSTs from the constant hash table. */
2428
2429 int
2430 add_string_csts (constant_descriptor_tree **slot,
2431 asan_add_string_csts_data *aascd)
2432 {
2433 struct constant_descriptor_tree *desc = *slot;
2434 if (TREE_CODE (desc->value) == STRING_CST
2435 && TREE_ASM_WRITTEN (desc->value)
2436 && asan_protect_global (desc->value))
2437 {
2438 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2439 aascd->type, aascd->v);
2440 }
2441 return 1;
2442 }
2443
2444 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2445 invoke ggc_collect. */
2446 static GTY(()) tree asan_ctor_statements;
2447
2448 /* Module-level instrumentation.
2449 - Insert __asan_init_vN() into the list of CTORs.
2450 - TODO: insert redzones around globals.
2451 */
2452
2453 void
2454 asan_finish_file (void)
2455 {
2456 varpool_node *vnode;
2457 unsigned HOST_WIDE_INT gcount = 0;
2458
2459 if (shadow_ptr_types[0] == NULL_TREE)
2460 asan_init_shadow_ptr_types ();
2461 /* Avoid instrumenting code in the asan ctors/dtors.
2462 We don't need to insert padding after the description strings,
2463 nor after the .LASAN* array. */
2464 flag_sanitize &= ~SANITIZE_ADDRESS;
2465
2466 /* For user-space we want asan constructors to run first.
2467 The Linux kernel does not support priorities other than the default,
2468 and the only other user of constructors is coverage, so we run with
2469 the default priority. */
2470 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2471 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2472
2473 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2474 {
2475 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2476 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2477 fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
2478 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2479 }
2480 FOR_EACH_DEFINED_VARIABLE (vnode)
2481 if (TREE_ASM_WRITTEN (vnode->decl)
2482 && asan_protect_global (vnode->decl))
2483 ++gcount;
2484 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2485 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2486 (&gcount);
2487 if (gcount)
2488 {
2489 tree type = asan_global_struct (), var, ctor;
2490 tree dtor_statements = NULL_TREE;
2491 vec<constructor_elt, va_gc> *v;
2492 char buf[20];
2493
2494 type = build_array_type_nelts (type, gcount);
2495 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2496 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2497 type);
2498 TREE_STATIC (var) = 1;
2499 TREE_PUBLIC (var) = 0;
2500 DECL_ARTIFICIAL (var) = 1;
2501 DECL_IGNORED_P (var) = 1;
2502 vec_alloc (v, gcount);
2503 FOR_EACH_DEFINED_VARIABLE (vnode)
2504 if (TREE_ASM_WRITTEN (vnode->decl)
2505 && asan_protect_global (vnode->decl))
2506 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2507 struct asan_add_string_csts_data aascd;
2508 aascd.type = TREE_TYPE (type);
2509 aascd.v = v;
2510 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2511 (&aascd);
2512 ctor = build_constructor (type, v);
2513 TREE_CONSTANT (ctor) = 1;
2514 TREE_STATIC (ctor) = 1;
2515 DECL_INITIAL (var) = ctor;
2516 varpool_node::finalize_decl (var);
2517
2518 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2519 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2520 append_to_statement_list (build_call_expr (fn, 2,
2521 build_fold_addr_expr (var),
2522 gcount_tree),
2523 &asan_ctor_statements);
2524
2525 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2526 append_to_statement_list (build_call_expr (fn, 2,
2527 build_fold_addr_expr (var),
2528 gcount_tree),
2529 &dtor_statements);
2530 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2531 }
2532 if (asan_ctor_statements)
2533 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2534 flag_sanitize |= SANITIZE_ADDRESS;
2535 }
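
/* A sketch of the module-level code this emits for a user-space TU with
   instrumented globals (names are illustrative):

     high-priority constructor:
       __asan_init ();
       __asan_version_mismatch_check_vN ();
       __asan_register_globals (&.LASAN0, gcount);
     destructor:
       __asan_unregister_globals (&.LASAN0, gcount);  */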
2536
2537 /* Expand the ASAN_CHECK internal function call pointed to by ITER. */
2538
2539 bool
2540 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2541 {
2542 gimple *g = gsi_stmt (*iter);
2543 location_t loc = gimple_location (g);
2544
2545 bool recover_p
2546 = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2547
2548 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2549 gcc_assert (flags < ASAN_CHECK_LAST);
2550 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2551 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2552 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2553
2554 tree base = gimple_call_arg (g, 1);
2555 tree len = gimple_call_arg (g, 2);
2556 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2557
2558 HOST_WIDE_INT size_in_bytes
2559 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2560
2561 if (use_calls)
2562 {
2563 /* Instrument using callbacks. */
2564 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2565 NOP_EXPR, base);
2566 gimple_set_location (g, loc);
2567 gsi_insert_before (iter, g, GSI_SAME_STMT);
2568 tree base_addr = gimple_assign_lhs (g);
2569
2570 int nargs;
2571 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2572 if (nargs == 1)
2573 g = gimple_build_call (fun, 1, base_addr);
2574 else
2575 {
2576 gcc_assert (nargs == 2);
2577 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2578 NOP_EXPR, len);
2579 gimple_set_location (g, loc);
2580 gsi_insert_before (iter, g, GSI_SAME_STMT);
2581 tree sz_arg = gimple_assign_lhs (g);
2582 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2583 }
2584 gimple_set_location (g, loc);
2585 gsi_replace (iter, g, false);
2586 return false;
2587 }
2588
2589 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2590
2591 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2592 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2593
2594 gimple_stmt_iterator gsi = *iter;
2595
2596 if (!is_non_zero_len)
2597 {
2598 /* So, the length of the memory area to asan-protect is
2599 non-constant. Let's guard the generated instrumentation code
2600 like:
2601
2602 if (len != 0)
2603 {
2604 //asan instrumentation code goes here.
2605 }
2606 // fallthrough instructions, starting with *ITER. */
2607
2608 g = gimple_build_cond (NE_EXPR,
2609 len,
2610 build_int_cst (TREE_TYPE (len), 0),
2611 NULL_TREE, NULL_TREE);
2612 gimple_set_location (g, loc);
2613
2614 basic_block then_bb, fallthrough_bb;
2615 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2616 /*then_more_likely_p=*/true,
2617 &then_bb, &fallthrough_bb);
2618 /* Note that fallthrough_bb starts with the statement that was
2619 pointed to by ITER. */
2620
2621 /* The 'then block' of the 'if (len != 0)' condition is where
2622 we'll generate the asan instrumentation code now. */
2623 gsi = gsi_last_bb (then_bb);
2624 }
2625
2626 /* Get an iterator on the point where we can add the condition
2627 statement for the instrumentation. */
2628 basic_block then_bb, else_bb;
2629 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2630 /*then_more_likely_p=*/false,
2631 /*create_then_fallthru_edge*/recover_p,
2632 &then_bb,
2633 &else_bb);
2634
2635 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2636 NOP_EXPR, base);
2637 gimple_set_location (g, loc);
2638 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2639 tree base_addr = gimple_assign_lhs (g);
2640
2641 tree t = NULL_TREE;
2642 if (real_size_in_bytes >= 8)
2643 {
2644 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2645 shadow_ptr_type);
2646 t = shadow;
2647 }
2648 else
2649 {
2650 /* Slow path for 1, 2 and 4 byte accesses. */
2651 /* Test (shadow != 0)
2652 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow). */
2653 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2654 shadow_ptr_type);
2655 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
2656 gimple_seq seq = NULL;
2657 gimple_seq_add_stmt (&seq, shadow_test);
2658 /* Aligned (>= 8 bytes) can test just
2659 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2660 to be 0. */
2661 if (align < 8)
2662 {
2663 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2664 base_addr, 7));
2665 gimple_seq_add_stmt (&seq,
2666 build_type_cast (shadow_type,
2667 gimple_seq_last (seq)));
2668 if (real_size_in_bytes > 1)
2669 gimple_seq_add_stmt (&seq,
2670 build_assign (PLUS_EXPR,
2671 gimple_seq_last (seq),
2672 real_size_in_bytes - 1));
2673 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2674 }
2675 else
2676 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2677 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2678 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2679 gimple_seq_last (seq)));
2680 t = gimple_assign_lhs (gimple_seq_last (seq));
2681 gimple_seq_set_location (seq, loc);
2682 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2683
2684 /* For non-constant, misaligned or otherwise weird access sizes,
2685 check first and last byte. */
2686 if (size_in_bytes == -1)
2687 {
2688 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2689 MINUS_EXPR, len,
2690 build_int_cst (pointer_sized_int_node, 1));
2691 gimple_set_location (g, loc);
2692 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2693 tree last = gimple_assign_lhs (g);
2694 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2695 PLUS_EXPR, base_addr, last);
2696 gimple_set_location (g, loc);
2697 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2698 tree base_end_addr = gimple_assign_lhs (g);
2699
2700 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2701 shadow_ptr_type);
2702 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
2703 gimple_seq seq = NULL;
2704 gimple_seq_add_stmt (&seq, shadow_test);
2705 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2706 base_end_addr, 7));
2707 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2708 gimple_seq_last (seq)));
2709 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2710 gimple_seq_last (seq),
2711 shadow));
2712 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2713 gimple_seq_last (seq)));
2714 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2715 gimple_seq_last (seq)));
2716 t = gimple_assign_lhs (gimple_seq_last (seq));
2717 gimple_seq_set_location (seq, loc);
2718 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2719 }
2720 }
2721
2722 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2723 NULL_TREE, NULL_TREE);
2724 gimple_set_location (g, loc);
2725 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2726
2727 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2728 gsi = gsi_start_bb (then_bb);
2729 int nargs;
2730 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
2731 g = gimple_build_call (fun, nargs, base_addr, len);
2732 gimple_set_location (g, loc);
2733 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2734
2735 gsi_remove (iter, true);
2736 *iter = gsi_start_bb (else_bb);
2737
2738 return true;
2739 }
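
/* Putting the pieces together, for a naturally aligned 4-byte load the
   inline expansion built above behaves roughly like (shadow offset and
   report callback shown for the usual user-space x86_64 configuration):

     signed char shadow = *(signed char *) ((base >> 3) + 0x7fff8000);
     if (shadow != 0 && ((signed char) (base & 7)) + 3 >= shadow)
       __asan_report_load4 (base);

   while 8- and 16-byte accesses only need the shadow != 0 test.  */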
2740
2741 /* Instrument the current function. */
2742
2743 static unsigned int
2744 asan_instrument (void)
2745 {
2746 if (shadow_ptr_types[0] == NULL_TREE)
2747 asan_init_shadow_ptr_types ();
2748 transform_statements ();
2749 return 0;
2750 }
2751
2752 static bool
2753 gate_asan (void)
2754 {
2755 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2756 && !lookup_attribute ("no_sanitize_address",
2757 DECL_ATTRIBUTES (current_function_decl));
2758 }
2759
2760 namespace {
2761
2762 const pass_data pass_data_asan =
2763 {
2764 GIMPLE_PASS, /* type */
2765 "asan", /* name */
2766 OPTGROUP_NONE, /* optinfo_flags */
2767 TV_NONE, /* tv_id */
2768 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2769 0, /* properties_provided */
2770 0, /* properties_destroyed */
2771 0, /* todo_flags_start */
2772 TODO_update_ssa, /* todo_flags_finish */
2773 };
2774
2775 class pass_asan : public gimple_opt_pass
2776 {
2777 public:
2778 pass_asan (gcc::context *ctxt)
2779 : gimple_opt_pass (pass_data_asan, ctxt)
2780 {}
2781
2782 /* opt_pass methods: */
2783 opt_pass * clone () { return new pass_asan (m_ctxt); }
2784 virtual bool gate (function *) { return gate_asan (); }
2785 virtual unsigned int execute (function *) { return asan_instrument (); }
2786
2787 }; // class pass_asan
2788
2789 } // anon namespace
2790
2791 gimple_opt_pass *
2792 make_pass_asan (gcc::context *ctxt)
2793 {
2794 return new pass_asan (ctxt);
2795 }
2796
2797 namespace {
2798
2799 const pass_data pass_data_asan_O0 =
2800 {
2801 GIMPLE_PASS, /* type */
2802 "asan0", /* name */
2803 OPTGROUP_NONE, /* optinfo_flags */
2804 TV_NONE, /* tv_id */
2805 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2806 0, /* properties_provided */
2807 0, /* properties_destroyed */
2808 0, /* todo_flags_start */
2809 TODO_update_ssa, /* todo_flags_finish */
2810 };
2811
2812 class pass_asan_O0 : public gimple_opt_pass
2813 {
2814 public:
2815 pass_asan_O0 (gcc::context *ctxt)
2816 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2817 {}
2818
2819 /* opt_pass methods: */
2820 virtual bool gate (function *) { return !optimize && gate_asan (); }
2821 virtual unsigned int execute (function *) { return asan_instrument (); }
2822
2823 }; // class pass_asan_O0
2824
2825 } // anon namespace
2826
2827 gimple_opt_pass *
2828 make_pass_asan_O0 (gcc::context *ctxt)
2829 {
2830 return new pass_asan_O0 (ctxt);
2831 }
2832
2833 #include "gt-asan.h"