1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2013 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "gimplify.h"
28 #include "gimple-iterator.h"
29 #include "tree-iterator.h"
30 #include "cgraph.h"
31 #include "tree-ssanames.h"
32 #include "tree-pass.h"
33 #include "asan.h"
34 #include "gimple-pretty-print.h"
35 #include "target.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "output.h"
39 #include "tm_p.h"
40 #include "langhooks.h"
41 #include "hash-table.h"
42 #include "alloc-pool.h"
43 #include "cfgloop.h"
44 #include "gimple-builder.h"
45 #include "ubsan.h"
46
47 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
48 with <2x slowdown on average.
49
50 	 The tool consists of two parts:
51 	 an instrumentation module (this file) and a run-time library.
52 The instrumentation module adds a run-time check before every memory insn.
53 	 For an 8- or 16-byte load accessing address X:
54 ShadowAddr = (X >> 3) + Offset
55 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
56 if (ShadowValue)
57 __asan_report_load8(X);
58 For a load of N bytes (N=1, 2 or 4) from address X:
59 ShadowAddr = (X >> 3) + Offset
60 ShadowValue = *(char*)ShadowAddr;
61 if (ShadowValue)
62 if ((X & 7) + N - 1 > ShadowValue)
63 __asan_report_loadN(X);
64 Stores are instrumented similarly, but using __asan_report_storeN functions.
65 	 A call to __asan_init_vN() is inserted into the list of module CTORs.
66 N is the version number of the AddressSanitizer API. The changes between the
67 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
68
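	 As a concrete illustration, assume the run-time's shadow offset on
	 x86-64 is 0x7fff8000 (the value targetm.asan_shadow_offset returns
	 there; the address below is hypothetical).  A 4-byte load from
	 X = 0x602010 is then checked roughly as:

	   ShadowAddr  = (0x602010 >> 3) + 0x7fff8000   // = 0x800b8402
	   ShadowValue = *(char*)ShadowAddr;
	   if (ShadowValue && ((0x602010 & 7) + 4 - 1 >= ShadowValue))
	     __asan_report_load4 (0x602010);
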
69 	 The run-time library redefines malloc (so that red zones are inserted around
70 	 the allocated memory) and free (so that reuse of freed memory is delayed),
71 	 and provides the __asan_report* and __asan_init_vN functions.
72
73 Read more:
74 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
75
76 The current implementation supports detection of out-of-bounds and
77 use-after-free in the heap, on the stack and for global variables.
78
79 [Protection of stack variables]
80
81 To understand how detection of out-of-bounds and use-after-free works
82 	 for stack variables, let's look at this example on x86_64 where the
83 stack grows downward:
84
85 int
86 foo ()
87 {
88 char a[23] = {0};
89 int b[2] = {0};
90
91 a[5] = 1;
92 b[1] = 2;
93
94 return a[5] + b[1];
95 }
96
97 For this function, the stack protected by asan will be organized as
98 follows, from the top of the stack to the bottom:
99
100 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
101
102 	 Slot 2/ [8 bytes of red zone, added to the space of 'a' to make
103 		 the next slot 32-byte aligned; this one is called a Partial
104 		 Redzone; this 32-byte alignment is an asan constraint]
105
106 Slot 3/ [24 bytes for variable 'a']
107
108 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
109
110 	 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
111
112 Slot 6/ [8 bytes for variable 'b']
113
114 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
115 'LEFT RedZone']
116
117 	 The 32 bytes of LEFT red zone at the bottom of the stack can be
118 	 decomposed as follows:
119
120 1/ The first 8 bytes contain a magical asan number that is always
121 0x41B58AB3.
122
123 	 2/ The following 8 bytes contain a pointer to a string (to be
124 	 parsed at run time by the asan runtime library), whose format
125 	 is the following:
126
127 "<function-name> <space> <num-of-variables-on-the-stack>
128 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
129 <length-of-var-in-bytes> ){n} "
130
131 	 where '(...){n}' means the content inside the parentheses occurs 'n'
132 times, with 'n' being the number of variables on the stack.
133
134 	 3/ The following 8 bytes contain the PC of the current function, which
135 	 will be used by the run-time library to print an error message.
136
137 4/ The following 8 bytes are reserved for internal use by the run-time.
138
139 The shadow memory for that stack layout is going to look like this:
140
141 	 - content of the 4 shadow bytes for slot 7: 0xF1F1F1F1.
142 	   The F1 byte pattern is a magic number called
143 	   ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
144 	   the memory for that shadow byte is part of the LEFT red zone
145 	   intended to sit at the bottom of the variables on the stack.
146
147 	 - content of the 4 shadow bytes for slots 6 and 5:
148 	   0xF4F4F400.  The F4 byte pattern is a magic number
149 	   called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
150 	   memory region for this shadow byte is a PARTIAL red zone
151 	   intended to pad a variable A, so that the slot following
152 	   {A,padding} is 32-byte aligned.
153
154 	   Note that the fact that the least significant byte of this
155 	   shadow memory content is 00 means that the corresponding
156 	   8 bytes of application memory (the memory of variable 'b')
157 	   are addressable.
158
159 	 - content of the 4 shadow bytes for slot 4: 0xF2F2F2F2.
160 	   The F2 byte pattern is a magic number called
161 	   ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
162 	   region for this shadow byte is a MIDDLE red zone intended to
163 	   sit between two 32-byte aligned slots of {variable,padding}.
164
165 	 - content of the 4 shadow bytes for slots 3 and 2:
166 	   0xF4000000.  This represents the concatenation of
167 	   variable 'a' and the partial red zone following it, like what we
168 	   had for variable 'b'.  The least significant 3 bytes being 00
169 	   means that the 24 bytes of variable 'a' are addressable.
170
171 	 - content of the 4 shadow bytes for slot 1: 0xF3F3F3F3.
172 	   The F3 byte pattern is a magic number called
173 	   ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
174 	   region for this shadow byte is a RIGHT red zone intended to sit
175 	   at the top of the variables of the stack.
176
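	 Putting these pieces together: on a little-endian target, the 20
	 shadow bytes describing this 160-byte frame read, from the bottom
	 of the stack (lowest address) to the top:

	   F1 F1 F1 F1   00 F4 F4 F4   F2 F2 F2 F2   00 00 00 F4   F3 F3 F3 F3
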
177 	 Note that the real variable layout is done in expand_used_vars in
178 	 cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
179 	 stack variables as well as the different red zones, emits some
180 	 prologue code to populate the shadow memory so as to poison (mark as
181 	 non-accessible) the regions of the red zones and mark the regions of
182 	 stack variables as accessible, and emits some epilogue code to
183 	 un-poison (mark as accessible) the regions of red zones right before
184 	 the function exits.
185
186 [Protection of global variables]
187
188 The basic idea is to insert a red zone between two global variables
189 and install a constructor function that calls the asan runtime to do
190 the populating of the relevant shadow memory regions at load time.
191
192 	 So the global variables are laid out so as to insert a red zone
193 	 between them.  The red zones are sized so that each variable starts
194 	 on a 32-byte boundary.
195
196 	 Then a constructor function is installed so that, for each global
197 	 variable, it calls the runtime asan library function
198 	 __asan_register_globals with an instance of this type:
199
200 struct __asan_global
201 {
202 // Address of the beginning of the global variable.
203 const void *__beg;
204
205 // Initial size of the global variable.
206 uptr __size;
207
208 // Size of the global variable + size of the red zone. This
209 // size is 32 bytes aligned.
210 uptr __size_with_redzone;
211
212 // Name of the global variable.
213 const void *__name;
214
215 // Name of the module where the global variable is declared.
216 const void *__module_name;
217
218 // This is always set to NULL for now.
219 uptr __has_dynamic_init;
220 }
221
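	 For illustration only, the registration code the compiler emits is
	 conceptually equivalent to this C sketch (the array name, its size
	 and the constructor name are hypothetical; the real code is built
	 directly as GIMPLE):

	   static struct __asan_global __asan_globals[2];  // one per global

	   static void
	   __asan_module_ctor (void)  // appended to the module CTORs
	   {
	     __asan_register_globals (__asan_globals, 2);
	   }
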
222 	 A destructor function that calls the runtime asan library function
223 	 __asan_unregister_globals is also installed.  */
224
225 alias_set_type asan_shadow_set = -1;
226
227 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
228 alias set is used for all shadow memory accesses. */
229 static GTY(()) tree shadow_ptr_types[2];
230
231 /* Hashtable support for memory references used by gimple
232 statements. */
233
234 /* This type represents a reference to a memory region. */
235 struct asan_mem_ref
236 {
237 /* The expression of the beginning of the memory region. */
238 tree start;
239
240 /* The size of the access (can be 1, 2, 4, 8, 16 for now). */
241 char access_size;
242 };
243
244 static alloc_pool asan_mem_ref_alloc_pool;
245
246 /* This creates the alloc pool used to store the instances of
247 asan_mem_ref that are stored in the hash table asan_mem_ref_ht. */
248
249 static alloc_pool
250 asan_mem_ref_get_alloc_pool ()
251 {
252 if (asan_mem_ref_alloc_pool == NULL)
253 asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
254 sizeof (asan_mem_ref),
255 10);
256 return asan_mem_ref_alloc_pool;
257
258 }
259
260 /* Initializes an instance of asan_mem_ref. */
261
262 static void
263 asan_mem_ref_init (asan_mem_ref *ref, tree start, char access_size)
264 {
265 ref->start = start;
266 ref->access_size = access_size;
267 }
268
269 /* Allocates memory for an instance of asan_mem_ref from the memory
270    pool returned by asan_mem_ref_get_alloc_pool and initializes it.
271 START is the address of (or the expression pointing to) the
272 beginning of memory reference. ACCESS_SIZE is the size of the
273 access to the referenced memory. */
274
275 static asan_mem_ref*
276 asan_mem_ref_new (tree start, char access_size)
277 {
278 asan_mem_ref *ref =
279 (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());
280
281 asan_mem_ref_init (ref, start, access_size);
282 return ref;
283 }
284
285 /* This builds and returns a pointer to the end of the memory region
286    that starts at START and has length LEN.  */
287
288 tree
289 asan_mem_ref_get_end (tree start, tree len)
290 {
291 if (len == NULL_TREE || integer_zerop (len))
292 return start;
293
294 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
295 }
296
297 /* Return a tree expression that represents the end of the referenced
298 memory region. Beware that this function can actually build a new
299 tree expression. */
300
301 tree
302 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
303 {
304 return asan_mem_ref_get_end (ref->start, len);
305 }
306
307 struct asan_mem_ref_hasher
308 : typed_noop_remove <asan_mem_ref>
309 {
310 typedef asan_mem_ref value_type;
311 typedef asan_mem_ref compare_type;
312
313 static inline hashval_t hash (const value_type *);
314 static inline bool equal (const value_type *, const compare_type *);
315 };
316
317 /* Hash a memory reference. */
318
319 inline hashval_t
320 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
321 {
322 hashval_t h = iterative_hash_expr (mem_ref->start, 0);
323 h = iterative_hash_hashval_t (h, mem_ref->access_size);
324 return h;
325 }
326
327 /* Compare two memory references.  We accept the length of either
328    memory reference to be NULL_TREE.  */
329
330 inline bool
331 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
332 const asan_mem_ref *m2)
333 {
334 return (m1->access_size == m2->access_size
335 && operand_equal_p (m1->start, m2->start, 0));
336 }
337
338 static hash_table <asan_mem_ref_hasher> asan_mem_ref_ht;
339
340 /* Returns a reference to the hash table containing memory references.
341 This function ensures that the hash table is created. Note that
342 this hash table is updated by the function
343 update_mem_ref_hash_table. */
344
345 static hash_table <asan_mem_ref_hasher> &
346 get_mem_ref_hash_table ()
347 {
348 if (!asan_mem_ref_ht.is_created ())
349 asan_mem_ref_ht.create (10);
350
351 return asan_mem_ref_ht;
352 }
353
354 /* Clear all entries from the memory references hash table. */
355
356 static void
357 empty_mem_ref_hash_table ()
358 {
359 if (asan_mem_ref_ht.is_created ())
360 asan_mem_ref_ht.empty ();
361 }
362
363 /* Free the memory references hash table. */
364
365 static void
366 free_mem_ref_resources ()
367 {
368 if (asan_mem_ref_ht.is_created ())
369 asan_mem_ref_ht.dispose ();
370
371 if (asan_mem_ref_alloc_pool)
372 {
373 free_alloc_pool (asan_mem_ref_alloc_pool);
374 asan_mem_ref_alloc_pool = NULL;
375 }
376 }
377
378 /* Return true iff the memory reference REF has been instrumented. */
379
380 static bool
381 has_mem_ref_been_instrumented (tree ref, char access_size)
382 {
383 asan_mem_ref r;
384 asan_mem_ref_init (&r, ref, access_size);
385
386 return (get_mem_ref_hash_table ().find (&r) != NULL);
387 }
388
389 /* Return true iff the memory reference REF has been instrumented. */
390
391 static bool
392 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
393 {
394 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
395 }
396
397 /* Return true iff access to memory region starting at REF and of
398 length LEN has been instrumented. */
399
400 static bool
401 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
402 {
403 /* First let's see if the address of the beginning of REF has been
404 instrumented. */
405 if (!has_mem_ref_been_instrumented (ref))
406 return false;
407
408 if (len != 0)
409 {
410 /* Let's see if the end of the region has been instrumented. */
411 if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
412 ref->access_size))
413 return false;
414 }
415 return true;
416 }
417
418 /* Set REF to the memory reference present in a gimple assignment
419 ASSIGNMENT. Return true upon successful completion, false
420 otherwise. */
421
422 static bool
423 get_mem_ref_of_assignment (const gimple assignment,
424 asan_mem_ref *ref,
425 bool *ref_is_store)
426 {
427 gcc_assert (gimple_assign_single_p (assignment));
428
429 if (gimple_store_p (assignment)
430 && !gimple_clobber_p (assignment))
431 {
432 ref->start = gimple_assign_lhs (assignment);
433 *ref_is_store = true;
434 }
435 else if (gimple_assign_load_p (assignment))
436 {
437 ref->start = gimple_assign_rhs1 (assignment);
438 *ref_is_store = false;
439 }
440 else
441 return false;
442
443 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
444 return true;
445 }
446
447 /* Collect the memory references contained in a gimple statement
448    representing a builtin call that has to do with memory access.  */
449
450 static bool
451 get_mem_refs_of_builtin_call (const gimple call,
452 asan_mem_ref *src0,
453 tree *src0_len,
454 bool *src0_is_store,
455 asan_mem_ref *src1,
456 tree *src1_len,
457 bool *src1_is_store,
458 asan_mem_ref *dst,
459 tree *dst_len,
460 bool *dst_is_store,
461 bool *dest_is_deref)
462 {
463 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
464
465 tree callee = gimple_call_fndecl (call);
466 tree source0 = NULL_TREE, source1 = NULL_TREE,
467 dest = NULL_TREE, len = NULL_TREE;
468 bool is_store = true, got_reference_p = false;
469 char access_size = 1;
470
471 switch (DECL_FUNCTION_CODE (callee))
472 {
473 /* (s, s, n) style memops. */
474 case BUILT_IN_BCMP:
475 case BUILT_IN_MEMCMP:
476 source0 = gimple_call_arg (call, 0);
477 source1 = gimple_call_arg (call, 1);
478 len = gimple_call_arg (call, 2);
479 break;
480
481 /* (src, dest, n) style memops. */
482 case BUILT_IN_BCOPY:
483 source0 = gimple_call_arg (call, 0);
484 dest = gimple_call_arg (call, 1);
485 len = gimple_call_arg (call, 2);
486 break;
487
488 /* (dest, src, n) style memops. */
489 case BUILT_IN_MEMCPY:
490 case BUILT_IN_MEMCPY_CHK:
491 case BUILT_IN_MEMMOVE:
492 case BUILT_IN_MEMMOVE_CHK:
493 case BUILT_IN_MEMPCPY:
494 case BUILT_IN_MEMPCPY_CHK:
495 dest = gimple_call_arg (call, 0);
496 source0 = gimple_call_arg (call, 1);
497 len = gimple_call_arg (call, 2);
498 break;
499
500 /* (dest, n) style memops. */
501 case BUILT_IN_BZERO:
502 dest = gimple_call_arg (call, 0);
503 len = gimple_call_arg (call, 1);
504 break;
505
506     /* (dest, x, n) style memops.  */
507 case BUILT_IN_MEMSET:
508 case BUILT_IN_MEMSET_CHK:
509 dest = gimple_call_arg (call, 0);
510 len = gimple_call_arg (call, 2);
511 break;
512
513 case BUILT_IN_STRLEN:
514 source0 = gimple_call_arg (call, 0);
515 len = gimple_call_lhs (call);
516       break;
517
518     /* And now the __atomic* and __sync builtins.
519        These are handled differently from the classical memory
520        access builtins above.  */
521
522 case BUILT_IN_ATOMIC_LOAD_1:
523 case BUILT_IN_ATOMIC_LOAD_2:
524 case BUILT_IN_ATOMIC_LOAD_4:
525 case BUILT_IN_ATOMIC_LOAD_8:
526 case BUILT_IN_ATOMIC_LOAD_16:
527 is_store = false;
528 /* fall through. */
529
530 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
531 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
532 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
533 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
534 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
535
536 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
537 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
538 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
539 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
540 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
541
542 case BUILT_IN_SYNC_FETCH_AND_OR_1:
543 case BUILT_IN_SYNC_FETCH_AND_OR_2:
544 case BUILT_IN_SYNC_FETCH_AND_OR_4:
545 case BUILT_IN_SYNC_FETCH_AND_OR_8:
546 case BUILT_IN_SYNC_FETCH_AND_OR_16:
547
548 case BUILT_IN_SYNC_FETCH_AND_AND_1:
549 case BUILT_IN_SYNC_FETCH_AND_AND_2:
550 case BUILT_IN_SYNC_FETCH_AND_AND_4:
551 case BUILT_IN_SYNC_FETCH_AND_AND_8:
552 case BUILT_IN_SYNC_FETCH_AND_AND_16:
553
554 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
555 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
556 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
557 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
558 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
559
560 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
561 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
562 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
563 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
564
565 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
566 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
567 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
568 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
569 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
570
571 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
572 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
573 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
574 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
575 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
576
577 case BUILT_IN_SYNC_OR_AND_FETCH_1:
578 case BUILT_IN_SYNC_OR_AND_FETCH_2:
579 case BUILT_IN_SYNC_OR_AND_FETCH_4:
580 case BUILT_IN_SYNC_OR_AND_FETCH_8:
581 case BUILT_IN_SYNC_OR_AND_FETCH_16:
582
583 case BUILT_IN_SYNC_AND_AND_FETCH_1:
584 case BUILT_IN_SYNC_AND_AND_FETCH_2:
585 case BUILT_IN_SYNC_AND_AND_FETCH_4:
586 case BUILT_IN_SYNC_AND_AND_FETCH_8:
587 case BUILT_IN_SYNC_AND_AND_FETCH_16:
588
589 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
590 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
591 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
592 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
593 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
594
595 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
596 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
597 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
598 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
599
600 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
601 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
602 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
603 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
604 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
605
606 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
607 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
608 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
609 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
610 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
611
612 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
613 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
614 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
615 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
616 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
617
618 case BUILT_IN_SYNC_LOCK_RELEASE_1:
619 case BUILT_IN_SYNC_LOCK_RELEASE_2:
620 case BUILT_IN_SYNC_LOCK_RELEASE_4:
621 case BUILT_IN_SYNC_LOCK_RELEASE_8:
622 case BUILT_IN_SYNC_LOCK_RELEASE_16:
623
624 case BUILT_IN_ATOMIC_EXCHANGE_1:
625 case BUILT_IN_ATOMIC_EXCHANGE_2:
626 case BUILT_IN_ATOMIC_EXCHANGE_4:
627 case BUILT_IN_ATOMIC_EXCHANGE_8:
628 case BUILT_IN_ATOMIC_EXCHANGE_16:
629
630 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
631 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
632 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
633 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
634 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
635
636 case BUILT_IN_ATOMIC_STORE_1:
637 case BUILT_IN_ATOMIC_STORE_2:
638 case BUILT_IN_ATOMIC_STORE_4:
639 case BUILT_IN_ATOMIC_STORE_8:
640 case BUILT_IN_ATOMIC_STORE_16:
641
642 case BUILT_IN_ATOMIC_ADD_FETCH_1:
643 case BUILT_IN_ATOMIC_ADD_FETCH_2:
644 case BUILT_IN_ATOMIC_ADD_FETCH_4:
645 case BUILT_IN_ATOMIC_ADD_FETCH_8:
646 case BUILT_IN_ATOMIC_ADD_FETCH_16:
647
648 case BUILT_IN_ATOMIC_SUB_FETCH_1:
649 case BUILT_IN_ATOMIC_SUB_FETCH_2:
650 case BUILT_IN_ATOMIC_SUB_FETCH_4:
651 case BUILT_IN_ATOMIC_SUB_FETCH_8:
652 case BUILT_IN_ATOMIC_SUB_FETCH_16:
653
654 case BUILT_IN_ATOMIC_AND_FETCH_1:
655 case BUILT_IN_ATOMIC_AND_FETCH_2:
656 case BUILT_IN_ATOMIC_AND_FETCH_4:
657 case BUILT_IN_ATOMIC_AND_FETCH_8:
658 case BUILT_IN_ATOMIC_AND_FETCH_16:
659
660 case BUILT_IN_ATOMIC_NAND_FETCH_1:
661 case BUILT_IN_ATOMIC_NAND_FETCH_2:
662 case BUILT_IN_ATOMIC_NAND_FETCH_4:
663 case BUILT_IN_ATOMIC_NAND_FETCH_8:
664 case BUILT_IN_ATOMIC_NAND_FETCH_16:
665
666 case BUILT_IN_ATOMIC_XOR_FETCH_1:
667 case BUILT_IN_ATOMIC_XOR_FETCH_2:
668 case BUILT_IN_ATOMIC_XOR_FETCH_4:
669 case BUILT_IN_ATOMIC_XOR_FETCH_8:
670 case BUILT_IN_ATOMIC_XOR_FETCH_16:
671
672 case BUILT_IN_ATOMIC_OR_FETCH_1:
673 case BUILT_IN_ATOMIC_OR_FETCH_2:
674 case BUILT_IN_ATOMIC_OR_FETCH_4:
675 case BUILT_IN_ATOMIC_OR_FETCH_8:
676 case BUILT_IN_ATOMIC_OR_FETCH_16:
677
678 case BUILT_IN_ATOMIC_FETCH_ADD_1:
679 case BUILT_IN_ATOMIC_FETCH_ADD_2:
680 case BUILT_IN_ATOMIC_FETCH_ADD_4:
681 case BUILT_IN_ATOMIC_FETCH_ADD_8:
682 case BUILT_IN_ATOMIC_FETCH_ADD_16:
683
684 case BUILT_IN_ATOMIC_FETCH_SUB_1:
685 case BUILT_IN_ATOMIC_FETCH_SUB_2:
686 case BUILT_IN_ATOMIC_FETCH_SUB_4:
687 case BUILT_IN_ATOMIC_FETCH_SUB_8:
688 case BUILT_IN_ATOMIC_FETCH_SUB_16:
689
690 case BUILT_IN_ATOMIC_FETCH_AND_1:
691 case BUILT_IN_ATOMIC_FETCH_AND_2:
692 case BUILT_IN_ATOMIC_FETCH_AND_4:
693 case BUILT_IN_ATOMIC_FETCH_AND_8:
694 case BUILT_IN_ATOMIC_FETCH_AND_16:
695
696 case BUILT_IN_ATOMIC_FETCH_NAND_1:
697 case BUILT_IN_ATOMIC_FETCH_NAND_2:
698 case BUILT_IN_ATOMIC_FETCH_NAND_4:
699 case BUILT_IN_ATOMIC_FETCH_NAND_8:
700 case BUILT_IN_ATOMIC_FETCH_NAND_16:
701
702 case BUILT_IN_ATOMIC_FETCH_XOR_1:
703 case BUILT_IN_ATOMIC_FETCH_XOR_2:
704 case BUILT_IN_ATOMIC_FETCH_XOR_4:
705 case BUILT_IN_ATOMIC_FETCH_XOR_8:
706 case BUILT_IN_ATOMIC_FETCH_XOR_16:
707
708 case BUILT_IN_ATOMIC_FETCH_OR_1:
709 case BUILT_IN_ATOMIC_FETCH_OR_2:
710 case BUILT_IN_ATOMIC_FETCH_OR_4:
711 case BUILT_IN_ATOMIC_FETCH_OR_8:
712 case BUILT_IN_ATOMIC_FETCH_OR_16:
713 {
714 dest = gimple_call_arg (call, 0);
715 	/* DEST represents the address of a memory location.
716 	   instrument_derefs wants the memory location, so let's
717 	   dereference the address DEST before handing it to
718 	   instrument_derefs.  */
719 if (TREE_CODE (dest) == ADDR_EXPR)
720 dest = TREE_OPERAND (dest, 0);
721 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
722 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
723 dest, build_int_cst (TREE_TYPE (dest), 0));
724 else
725 gcc_unreachable ();
726
727 access_size = int_size_in_bytes (TREE_TYPE (dest));
728 }
729
730 default:
731       /* The other memory-access builtins are not instrumented in this
732 	 function because they either don't have any length parameter,
733 	 or their length parameter is just a limit.  */
734 break;
735 }
736
737 if (len != NULL_TREE)
738 {
739 if (source0 != NULL_TREE)
740 {
741 src0->start = source0;
742 src0->access_size = access_size;
743 *src0_len = len;
744 *src0_is_store = false;
745 }
746
747 if (source1 != NULL_TREE)
748 {
749 src1->start = source1;
750 src1->access_size = access_size;
751 *src1_len = len;
752 *src1_is_store = false;
753 }
754
755 if (dest != NULL_TREE)
756 {
757 dst->start = dest;
758 dst->access_size = access_size;
759 *dst_len = len;
760 *dst_is_store = true;
761 }
762
763 got_reference_p = true;
764 }
765 else if (dest)
766 {
767 dst->start = dest;
768 dst->access_size = access_size;
769 *dst_len = NULL_TREE;
770 *dst_is_store = is_store;
771 *dest_is_deref = true;
772 got_reference_p = true;
773 }
774
775 return got_reference_p;
776 }
777
778 /* Return true iff a given gimple statement has been instrumented.
779 Note that the statement is "defined" by the memory references it
780 contains. */
781
782 static bool
783 has_stmt_been_instrumented_p (gimple stmt)
784 {
785 if (gimple_assign_single_p (stmt))
786 {
787 bool r_is_store;
788 asan_mem_ref r;
789 asan_mem_ref_init (&r, NULL, 1);
790
791 if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
792 return has_mem_ref_been_instrumented (&r);
793 }
794 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
795 {
796 asan_mem_ref src0, src1, dest;
797 asan_mem_ref_init (&src0, NULL, 1);
798 asan_mem_ref_init (&src1, NULL, 1);
799 asan_mem_ref_init (&dest, NULL, 1);
800
801 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
802 bool src0_is_store = false, src1_is_store = false,
803 dest_is_store = false, dest_is_deref = false;
804 if (get_mem_refs_of_builtin_call (stmt,
805 &src0, &src0_len, &src0_is_store,
806 &src1, &src1_len, &src1_is_store,
807 &dest, &dest_len, &dest_is_store,
808 &dest_is_deref))
809 {
810 if (src0.start != NULL_TREE
811 && !has_mem_ref_been_instrumented (&src0, src0_len))
812 return false;
813
814 if (src1.start != NULL_TREE
815 && !has_mem_ref_been_instrumented (&src1, src1_len))
816 return false;
817
818 if (dest.start != NULL_TREE
819 && !has_mem_ref_been_instrumented (&dest, dest_len))
820 return false;
821
822 return true;
823 }
824 }
825 return false;
826 }
827
828 /* Insert a memory reference into the hash table. */
829
830 static void
831 update_mem_ref_hash_table (tree ref, char access_size)
832 {
833 hash_table <asan_mem_ref_hasher> ht = get_mem_ref_hash_table ();
834
835 asan_mem_ref r;
836 asan_mem_ref_init (&r, ref, access_size);
837
838 asan_mem_ref **slot = ht.find_slot (&r, INSERT);
839 if (*slot == NULL)
840 *slot = asan_mem_ref_new (ref, access_size);
841 }
842
843 /* Initialize shadow_ptr_types array. */
844
845 static void
846 asan_init_shadow_ptr_types (void)
847 {
848 asan_shadow_set = new_alias_set ();
849 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
850 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
851 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
852 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
853 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
854 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
855 initialize_sanitizer_builtins ();
856 }
857
858 /* Create an ADDR_EXPR of a STRING_CST holding the text of pretty printer PP.  */
859
860 static tree
861 asan_pp_string (pretty_printer *pp)
862 {
863 const char *buf = pp_formatted_text (pp);
864 size_t len = strlen (buf);
865 tree ret = build_string (len + 1, buf);
866 TREE_TYPE (ret)
867 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
868 build_index_type (size_int (len)));
869 TREE_READONLY (ret) = 1;
870 TREE_STATIC (ret) = 1;
871 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
872 }
873
874 /* Return a CONST_INT representing 4 consecutive shadow memory bytes.  */
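/* For example, on a little-endian target the shadow bytes
   { 0x00, 0x00, 0xF4, 0xF4 } yield the SImode constant 0xF4F40000.  */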
875
876 static rtx
877 asan_shadow_cst (unsigned char shadow_bytes[4])
878 {
879 int i;
880 unsigned HOST_WIDE_INT val = 0;
881 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
882 for (i = 0; i < 4; i++)
883 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
884 << (BITS_PER_UNIT * i);
885 return gen_int_mode (val, SImode);
886 }
887
888 /* Clear LEN bytes of shadow memory at SHADOW_MEM.  We cannot issue
889    a library call here, though.  */
890
891 static void
892 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
893 {
894 rtx insn, insns, top_label, end, addr, tmp, jump;
895
896 start_sequence ();
897 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
898 insns = get_insns ();
899 end_sequence ();
900 for (insn = insns; insn; insn = NEXT_INSN (insn))
901 if (CALL_P (insn))
902 break;
903 if (insn == NULL_RTX)
904 {
905 emit_insn (insns);
906 return;
907 }
908
909 gcc_assert ((len & 3) == 0);
910 top_label = gen_label_rtx ();
911 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
912 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
913 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
914 emit_label (top_label);
915
916 emit_move_insn (shadow_mem, const0_rtx);
917 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
918 true, OPTAB_LIB_WIDEN);
919 if (tmp != addr)
920 emit_move_insn (addr, tmp);
921 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
922 jump = get_last_insn ();
923 gcc_assert (JUMP_P (jump));
924 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
925 }
926
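/* Emit the LASANPC label at the beginning of the current function;
   asan_emit_stack_protection stores the address of this label in the
   stack frame description, so the run-time library can report the PC
   of the function on error.  */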
927 void
928 asan_function_start (void)
929 {
930 section *fnsec = function_section (current_function_decl);
931 switch_to_section (fnsec);
932 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
933 current_function_funcdef_no);
934 }
935
936 /* Insert code to protect stack vars.  The prologue sequence should be emitted
937    directly, the epilogue sequence returned.  BASE is the register holding the
938    stack base, against which the offsets in the OFFSETS array are relative.
939    The OFFSETS array contains pairs of offsets in reverse order: always the
940    end offset of some gap that needs protection, followed by its starting
941    offset.  DECLS is an array of representative decls for each var partition.
942    LENGTH is the length of the OFFSETS array; the DECLS array is LENGTH / 2 - 1
943    elements long (OFFSETS includes the gap before the first variable as well
944    as gaps after each stack variable).  */
945
946 rtx
947 asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
948 int length)
949 {
950 rtx shadow_base, shadow_mem, ret, mem;
951 char buf[30];
952 unsigned char shadow_bytes[4];
953 HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
954 HOST_WIDE_INT last_offset, last_size;
955 int l;
956 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
957 tree str_cst, decl, id;
958
959 if (shadow_ptr_types[0] == NULL_TREE)
960 asan_init_shadow_ptr_types ();
961
962 /* First of all, prepare the description string. */
963 pretty_printer asan_pp;
964
965 pp_decimal_int (&asan_pp, length / 2 - 1);
966 pp_space (&asan_pp);
967 for (l = length - 2; l; l -= 2)
968 {
969 tree decl = decls[l / 2 - 1];
970 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
971 pp_space (&asan_pp);
972 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
973 pp_space (&asan_pp);
974 if (DECL_P (decl) && DECL_NAME (decl))
975 {
976 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
977 pp_space (&asan_pp);
978 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
979 }
980 else
981 pp_string (&asan_pp, "9 <unknown>");
982 pp_space (&asan_pp);
983 }
984 str_cst = asan_pp_string (&asan_pp);
985
986 /* Emit the prologue sequence. */
987 base = expand_binop (Pmode, add_optab, base,
988 gen_int_mode (base_offset, Pmode),
989 NULL_RTX, 1, OPTAB_DIRECT);
990 mem = gen_rtx_MEM (ptr_mode, base);
991 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
992 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
993 emit_move_insn (mem, expand_normal (str_cst));
994 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
995 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
996 id = get_identifier (buf);
997 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
998 VAR_DECL, id, char_type_node);
999 SET_DECL_ASSEMBLER_NAME (decl, id);
1000 TREE_ADDRESSABLE (decl) = 1;
1001 TREE_READONLY (decl) = 1;
1002 DECL_ARTIFICIAL (decl) = 1;
1003 DECL_IGNORED_P (decl) = 1;
1004 TREE_STATIC (decl) = 1;
1005 TREE_PUBLIC (decl) = 0;
1006 TREE_USED (decl) = 1;
1007 DECL_INITIAL (decl) = decl;
1008 TREE_ASM_WRITTEN (decl) = 1;
1009 TREE_ASM_WRITTEN (id) = 1;
1010 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1011 shadow_base = expand_binop (Pmode, lshr_optab, base,
1012 GEN_INT (ASAN_SHADOW_SHIFT),
1013 NULL_RTX, 1, OPTAB_DIRECT);
1014 shadow_base = expand_binop (Pmode, add_optab, shadow_base,
1015 gen_int_mode (targetm.asan_shadow_offset (),
1016 Pmode),
1017 NULL_RTX, 1, OPTAB_DIRECT);
1018 gcc_assert (asan_shadow_set != -1
1019 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1020 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1021 set_mem_alias_set (shadow_mem, asan_shadow_set);
1022 prev_offset = base_offset;
1023 for (l = length; l; l -= 2)
1024 {
1025 if (l == 2)
1026 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1027 offset = offsets[l - 1];
1028 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1029 {
1030 int i;
1031 HOST_WIDE_INT aoff
1032 = base_offset + ((offset - base_offset)
1033 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1034 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1035 (aoff - prev_offset)
1036 >> ASAN_SHADOW_SHIFT);
1037 prev_offset = aoff;
1038 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1039 if (aoff < offset)
1040 {
1041 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1042 shadow_bytes[i] = 0;
1043 else
1044 shadow_bytes[i] = offset - aoff;
1045 }
1046 else
1047 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1048 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1049 offset = aoff;
1050 }
1051 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1052 {
1053 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1054 (offset - prev_offset)
1055 >> ASAN_SHADOW_SHIFT);
1056 prev_offset = offset;
1057 memset (shadow_bytes, cur_shadow_byte, 4);
1058 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1059 offset += ASAN_RED_ZONE_SIZE;
1060 }
1061 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1062 }
1063 do_pending_stack_adjust ();
1064
1065 /* Construct epilogue sequence. */
1066 start_sequence ();
1067
1068 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1069 set_mem_alias_set (shadow_mem, asan_shadow_set);
1070 prev_offset = base_offset;
1071 last_offset = base_offset;
1072 last_size = 0;
1073 for (l = length; l; l -= 2)
1074 {
1075 offset = base_offset + ((offsets[l - 1] - base_offset)
1076 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1077 if (last_offset + last_size != offset)
1078 {
1079 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1080 (last_offset - prev_offset)
1081 >> ASAN_SHADOW_SHIFT);
1082 prev_offset = last_offset;
1083 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1084 last_offset = offset;
1085 last_size = 0;
1086 }
1087 last_size += base_offset + ((offsets[l - 2] - base_offset)
1088 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1089 - offset;
1090 }
1091 if (last_size)
1092 {
1093 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1094 (last_offset - prev_offset)
1095 >> ASAN_SHADOW_SHIFT);
1096 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1097 }
1098
1099 do_pending_stack_adjust ();
1100
1101 ret = get_insns ();
1102 end_sequence ();
1103 return ret;
1104 }
1105
1106 /* Return true if DECL, a global var, might be overridden and
1107    therefore needs a local alias.  */
1108
1109 static bool
1110 asan_needs_local_alias (tree decl)
1111 {
1112 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1113 }
1114
1115 /* Return true if DECL is a VAR_DECL that should be protected
1116 by Address Sanitizer, by appending a red zone with protected
1117 shadow memory after it and aligning it to at least
1118 ASAN_RED_ZONE_SIZE bytes. */
1119
1120 bool
1121 asan_protect_global (tree decl)
1122 {
1123 rtx rtl, symbol;
1124
1125 if (TREE_CODE (decl) == STRING_CST)
1126 {
1127 /* Instrument all STRING_CSTs except those created
1128 by asan_pp_string here. */
1129 if (shadow_ptr_types[0] != NULL_TREE
1130 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1131 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1132 return false;
1133 return true;
1134 }
1135 if (TREE_CODE (decl) != VAR_DECL
1136 /* TLS vars aren't statically protectable. */
1137 || DECL_THREAD_LOCAL_P (decl)
1138 /* Externs will be protected elsewhere. */
1139 || DECL_EXTERNAL (decl)
1140 || !DECL_RTL_SET_P (decl)
1141       /* Comdat vars pose an ABI problem; we can't know if
1142 	 the var that is selected by the linker will have
1143 	 padding or not.  */
1144 || DECL_ONE_ONLY (decl)
1145 /* Similarly for common vars. People can use -fno-common. */
1146 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1147       /* Don't protect if using a user section; often vars placed
1148 	 into a user section from multiple TUs are then assumed
1149 	 to be an array of such vars, and putting padding in there
1150 	 breaks this assumption.  */
1151 || (DECL_SECTION_NAME (decl) != NULL_TREE
1152 && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
1153 || DECL_SIZE (decl) == 0
1154 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1155 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1156 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
1157 return false;
1158
1159 rtl = DECL_RTL (decl);
1160 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1161 return false;
1162 symbol = XEXP (rtl, 0);
1163
1164 if (CONSTANT_POOL_ADDRESS_P (symbol)
1165 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1166 return false;
1167
1168 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1169 return false;
1170
1171 #ifndef ASM_OUTPUT_DEF
1172 if (asan_needs_local_alias (decl))
1173 return false;
1174 #endif
1175
1176 return true;
1177 }
1178
1179 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
1180 IS_STORE is either 1 (for a store) or 0 (for a load).
1181 SIZE_IN_BYTES is one of 1, 2, 4, 8, 16. */
1182
1183 static tree
1184 report_error_func (bool is_store, int size_in_bytes)
1185 {
1186 static enum built_in_function report[2][5]
1187 = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1188 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1189 BUILT_IN_ASAN_REPORT_LOAD16 },
1190 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1191 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1192 BUILT_IN_ASAN_REPORT_STORE16 } };
1193 return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
1194 }
1195
1196 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 2000 - 1)
1197 #define PROB_ALWAYS (REG_BR_PROB_BASE)
1198
1199 /* Split the current basic block and create a condition statement
1200 insertion point right before or after the statement pointed to by
1201 ITER. Return an iterator to the point at which the caller might
1202 safely insert the condition statement.
1203
1204 THEN_BLOCK must be set to the address of an uninitialized instance
1205 of basic_block. The function will then set *THEN_BLOCK to the
1206 'then block' of the condition statement to be inserted by the
1207 caller.
1208
1209 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1210 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1211
1212    Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1213    block' of the condition statement to be inserted by the caller.
1214
1215 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1216 statements starting from *ITER, and *THEN_BLOCK is a new empty
1217 block.
1218
1219    *ITER is adjusted to always point to the first statement of the
1220    basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
1221    ITER was pointing to prior to calling this function, if BEFORE_P is
1222    true; otherwise, it is its following statement.  */
1223
1224 static gimple_stmt_iterator
1225 create_cond_insert_point (gimple_stmt_iterator *iter,
1226 bool before_p,
1227 bool then_more_likely_p,
1228 bool create_then_fallthru_edge,
1229 basic_block *then_block,
1230 basic_block *fallthrough_block)
1231 {
1232 gimple_stmt_iterator gsi = *iter;
1233
1234 if (!gsi_end_p (gsi) && before_p)
1235 gsi_prev (&gsi);
1236
1237 basic_block cur_bb = gsi_bb (*iter);
1238
1239 edge e = split_block (cur_bb, gsi_stmt (gsi));
1240
1241 /* Get a hold on the 'condition block', the 'then block' and the
1242 'else block'. */
1243 basic_block cond_bb = e->src;
1244 basic_block fallthru_bb = e->dest;
1245 basic_block then_bb = create_empty_bb (cond_bb);
1246 if (current_loops)
1247 {
1248 add_bb_to_loop (then_bb, cond_bb->loop_father);
1249 loops_state_set (LOOPS_NEED_FIXUP);
1250 }
1251
1252 /* Set up the newly created 'then block'. */
1253 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1254 int fallthrough_probability
1255 = then_more_likely_p
1256 ? PROB_VERY_UNLIKELY
1257 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1258 e->probability = PROB_ALWAYS - fallthrough_probability;
1259 if (create_then_fallthru_edge)
1260 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1261
1262 /* Set up the fallthrough basic block. */
1263 e = find_edge (cond_bb, fallthru_bb);
1264 e->flags = EDGE_FALSE_VALUE;
1265 e->count = cond_bb->count;
1266 e->probability = fallthrough_probability;
1267
1268   /* Update dominance info for the newly created then_bb; note that
1269      fallthru_bb's dominance info has already been updated by
1270      split_block.  */
1271 if (dom_info_available_p (CDI_DOMINATORS))
1272 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1273
1274 *then_block = then_bb;
1275 *fallthrough_block = fallthru_bb;
1276 *iter = gsi_start_bb (fallthru_bb);
1277
1278 return gsi_last_bb (cond_bb);
1279 }
1280
1281 /* Insert an if condition followed by a 'then block' right before the
1282 statement pointed to by ITER. The fallthrough block -- which is the
1283 else block of the condition as well as the destination of the
1284    outgoing edge of the 'then block' -- starts with the statement
1285 pointed to by ITER.
1286
1287 COND is the condition of the if.
1288
1289 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1290 'then block' is higher than the probability of the edge to the
1291 fallthrough block.
1292
1293 Upon completion of the function, *THEN_BB is set to the newly
1294 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1295 fallthrough block.
1296
1297 *ITER is adjusted to still point to the same statement it was
1298 pointing to initially. */
1299
1300 static void
1301 insert_if_then_before_iter (gimple cond,
1302 gimple_stmt_iterator *iter,
1303 bool then_more_likely_p,
1304 basic_block *then_bb,
1305 basic_block *fallthrough_bb)
1306 {
1307 gimple_stmt_iterator cond_insert_point =
1308 create_cond_insert_point (iter,
1309 /*before_p=*/true,
1310 then_more_likely_p,
1311 /*create_then_fallthru_edge=*/true,
1312 then_bb,
1313 fallthrough_bb);
1314 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1315 }
1316
1317 /* Instrument the memory access instruction BASE. Insert new
1318 statements before or after ITER.
1319
1320 Note that the memory access represented by BASE can be either an
1321 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1322 location. IS_STORE is TRUE for a store, FALSE for a load.
1323 BEFORE_P is TRUE for inserting the instrumentation code before
1324 ITER, FALSE for inserting it after ITER. SIZE_IN_BYTES is one of
1325 1, 2, 4, 8, 16.
1326
1327 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1328 statement it was pointing to prior to calling this function,
1329 otherwise, it points to the statement logically following it. */
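/* For a 4-byte load on x86-64 (shadow offset 0x7fff8000, the value of
   targetm.asan_shadow_offset there), the inserted code is roughly
   equivalent to this GIMPLE sketch (SSA names and exact statement
   shapes are illustrative):

     _1 = (unsigned long) base;
     _2 = _1 >> 3;                  // ASAN_SHADOW_SHIFT
     _3 = _2 + 0x7fff8000;          // targetm.asan_shadow_offset ()
     _4 = (signed char *) _3;
     shadow_5 = *_4;
     _6 = _1 & 7;                   // slow path, size_in_bytes < 8
     _7 = (signed char) _6;
     _8 = _7 + 3;                   // size_in_bytes - 1
     _9 = _8 >= shadow_5;
     _10 = (shadow_5 != 0) & _9;
     if (_10 != 0)
       __asan_report_load4 (_1);    // in the 'then block'  */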
1330
1331 static void
1332 build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
1333 bool before_p, bool is_store, int size_in_bytes)
1334 {
1335 gimple_stmt_iterator gsi;
1336 basic_block then_bb, else_bb;
1337 tree t, base_addr, shadow;
1338 gimple g;
1339 tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
1340 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1341 tree uintptr_type
1342 = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
1343 tree base_ssa = base;
1344
1345 /* Get an iterator on the point where we can add the condition
1346 statement for the instrumentation. */
1347 gsi = create_cond_insert_point (iter, before_p,
1348 /*then_more_likely_p=*/false,
1349 /*create_then_fallthru_edge=*/false,
1350 &then_bb,
1351 &else_bb);
1352
1353 base = unshare_expr (base);
1354
1355 /* BASE can already be an SSA_NAME; in that case, do not create a
1356 new SSA_NAME for it. */
1357 if (TREE_CODE (base) != SSA_NAME)
1358 {
1359 g = gimple_build_assign_with_ops (TREE_CODE (base),
1360 make_ssa_name (TREE_TYPE (base), NULL),
1361 base, NULL_TREE);
1362 gimple_set_location (g, location);
1363 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1364 base_ssa = gimple_assign_lhs (g);
1365 }
1366
1367 g = gimple_build_assign_with_ops (NOP_EXPR,
1368 make_ssa_name (uintptr_type, NULL),
1369 base_ssa, NULL_TREE);
1370 gimple_set_location (g, location);
1371 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1372 base_addr = gimple_assign_lhs (g);
1373
1374 /* Build
1375 (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
1376
1377 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1378 g = gimple_build_assign_with_ops (RSHIFT_EXPR,
1379 make_ssa_name (uintptr_type, NULL),
1380 base_addr, t);
1381 gimple_set_location (g, location);
1382 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1383
1384 t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
1385 g = gimple_build_assign_with_ops (PLUS_EXPR,
1386 make_ssa_name (uintptr_type, NULL),
1387 gimple_assign_lhs (g), t);
1388 gimple_set_location (g, location);
1389 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1390
1391 g = gimple_build_assign_with_ops (NOP_EXPR,
1392 make_ssa_name (shadow_ptr_type, NULL),
1393 gimple_assign_lhs (g), NULL_TREE);
1394 gimple_set_location (g, location);
1395 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1396
1397 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1398 build_int_cst (shadow_ptr_type, 0));
1399 g = gimple_build_assign_with_ops (MEM_REF,
1400 make_ssa_name (shadow_type, NULL),
1401 t, NULL_TREE);
1402 gimple_set_location (g, location);
1403 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1404 shadow = gimple_assign_lhs (g);
1405
1406 if (size_in_bytes < 8)
1407 {
1408       /* Slow path for 1, 2 and 4 byte accesses.
1409 	 Test ((shadow != 0)
1410 	       & (((base_addr & 7) + (size_in_bytes - 1)) >= shadow)).  */
1411 gimple_seq seq = NULL;
1412 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
1413 gimple_seq_add_stmt (&seq, shadow_test);
1414 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, base_addr, 7));
1415 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
1416 gimple_seq_last (seq)));
1417 if (size_in_bytes > 1)
1418 gimple_seq_add_stmt (&seq,
1419 build_assign (PLUS_EXPR, gimple_seq_last (seq),
1420 size_in_bytes - 1));
1421 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, gimple_seq_last (seq),
1422 shadow));
1423 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
1424 gimple_seq_last (seq)));
1425 t = gimple_assign_lhs (gimple_seq_last (seq));
1426 gimple_seq_set_location (seq, location);
1427 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
1428 }
1429 else
1430 t = shadow;
1431
1432 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
1433 NULL_TREE, NULL_TREE);
1434 gimple_set_location (g, location);
1435 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1436
1437 /* Generate call to the run-time library (e.g. __asan_report_load8). */
1438 gsi = gsi_start_bb (then_bb);
1439 g = gimple_build_call (report_error_func (is_store, size_in_bytes),
1440 1, base_addr);
1441 gimple_set_location (g, location);
1442 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1443
1444 *iter = gsi_start_bb (else_bb);
1445 }
1446
1447 /* If T represents a memory access, add instrumentation code before ITER.
1448    LOCATION is the source code location.
1449 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1450
1451 static void
1452 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1453 location_t location, bool is_store)
1454 {
1455 tree type, base;
1456 HOST_WIDE_INT size_in_bytes;
1457
1458 type = TREE_TYPE (t);
1459 switch (TREE_CODE (t))
1460 {
1461 case ARRAY_REF:
1462 case COMPONENT_REF:
1463 case INDIRECT_REF:
1464 case MEM_REF:
1465 break;
1466 default:
1467 return;
1468 }
1469
1470 size_in_bytes = int_size_in_bytes (type);
1471 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1472 || (unsigned HOST_WIDE_INT) size_in_bytes - 1 >= 16)
1473 return;
1474
1475 HOST_WIDE_INT bitsize, bitpos;
1476 tree offset;
1477 enum machine_mode mode;
1478 int volatilep = 0, unsignedp = 0;
1479 get_inner_reference (t, &bitsize, &bitpos, &offset,
1480 &mode, &unsignedp, &volatilep, false);
1481 if (bitpos % (size_in_bytes * BITS_PER_UNIT)
1482 || bitsize != size_in_bytes * BITS_PER_UNIT)
1483 {
1484 if (TREE_CODE (t) == COMPONENT_REF
1485 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1486 {
1487 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1488 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1489 TREE_OPERAND (t, 0), repr,
1490 NULL_TREE), location, is_store);
1491 }
1492 return;
1493 }
1494
1495 base = build_fold_addr_expr (t);
1496 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1497 {
1498 build_check_stmt (location, base, iter, /*before_p=*/true,
1499 is_store, size_in_bytes);
1500 update_mem_ref_hash_table (base, size_in_bytes);
1501 update_mem_ref_hash_table (t, size_in_bytes);
1502 }
1503
1504 }
1505
1506 /* Instrument an access to a contiguous memory region that starts at
1507    the address pointed to by BASE, over a length of LEN (expressed in
1508    units of sizeof (*BASE) bytes).  ITER points to the instruction before
1509 which the instrumentation instructions must be inserted. LOCATION
1510 is the source location that the instrumentation instructions must
1511 have. If IS_STORE is true, then the memory access is a store;
1512 otherwise, it's a load. */
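/* For example, for a call memset (p, 0, n) with non-constant N, the
   checks inserted before the call are conceptually (a sketch):

     if (n != 0)
       {
	 check p[0];            // start of the region
	 _1 = p + (n - 1);
	 check *_1;             // end of the region
       }
     memset (p, 0, n);  */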
1513
1514 static void
1515 instrument_mem_region_access (tree base, tree len,
1516 gimple_stmt_iterator *iter,
1517 location_t location, bool is_store)
1518 {
1519 if (!POINTER_TYPE_P (TREE_TYPE (base))
1520 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1521 || integer_zerop (len))
1522 return;
1523
1524 gimple_stmt_iterator gsi = *iter;
1525
1526 basic_block fallthrough_bb = NULL, then_bb = NULL;
1527
1528 /* If the beginning of the memory region has already been
1529 instrumented, do not instrument it. */
1530 bool start_instrumented = has_mem_ref_been_instrumented (base, 1);
1531
1532 /* If the end of the memory region has already been instrumented, do
1533 not instrument it. */
1534 tree end = asan_mem_ref_get_end (base, len);
1535 bool end_instrumented = has_mem_ref_been_instrumented (end, 1);
1536
1537 if (start_instrumented && end_instrumented)
1538 return;
1539
1540 if (!is_gimple_constant (len))
1541 {
1542 /* So, the length of the memory area to asan-protect is
1543 non-constant. Let's guard the generated instrumentation code
1544 like:
1545
1546 if (len != 0)
1547 {
1548 //asan instrumentation code goes here.
1549 }
1550 	 // fallthrough instructions, starting with *ITER.  */
1551
1552 gimple g = gimple_build_cond (NE_EXPR,
1553 len,
1554 build_int_cst (TREE_TYPE (len), 0),
1555 NULL_TREE, NULL_TREE);
1556 gimple_set_location (g, location);
1557 insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
1558 &then_bb, &fallthrough_bb);
1559 /* Note that fallthrough_bb starts with the statement that was
1560 pointed to by ITER. */
1561
1562       /* The 'then block' of the 'if (len != 0)' condition is where
1563 	 we'll generate the asan instrumentation code now.  */
1564 gsi = gsi_last_bb (then_bb);
1565 }
1566
1567 if (!start_instrumented)
1568 {
1569 /* Instrument the beginning of the memory region to be accessed,
1570 	 and arrange for the rest of the instrumentation code to be
1571 inserted in the then block *after* the current gsi. */
1572 build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);
1573
1574 if (then_bb)
1575 /* We are in the case where the length of the region is not
1576 constant; so instrumentation code is being generated in the
1577 	   'then block' of the 'if (len != 0)' condition.  Let's arrange
1578 for the subsequent instrumentation statements to go in the
1579 'then block'. */
1580 gsi = gsi_last_bb (then_bb);
1581 else
1582 {
1583 *iter = gsi;
1584 	  /* The length is constant here, so remember this access as
1585 	     instrumented.  With an unknown length it might be zero and
1586 	     not actually instrumented, so we couldn't rely on it.  */
1587 update_mem_ref_hash_table (base, 1);
1588 }
1589 }
1590
1591 if (end_instrumented)
1592 return;
1593
1594 /* We want to instrument the access at the end of the memory region,
1595 which is at (base + len - 1). */
1596
1597 /* offset = len - 1; */
1598 len = unshare_expr (len);
1599 tree offset;
1600 gimple_seq seq = NULL;
1601 if (TREE_CODE (len) == INTEGER_CST)
1602 offset = fold_build2 (MINUS_EXPR, size_type_node,
1603 fold_convert (size_type_node, len),
1604 build_int_cst (size_type_node, 1));
1605 else
1606 {
1607 gimple g;
1608 tree t;
1609
1610 if (TREE_CODE (len) != SSA_NAME)
1611 {
1612 t = make_ssa_name (TREE_TYPE (len), NULL);
1613 g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
1614 gimple_set_location (g, location);
1615 gimple_seq_add_stmt_without_update (&seq, g);
1616 len = t;
1617 }
1618 if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
1619 {
1620 t = make_ssa_name (size_type_node, NULL);
1621 g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
1622 gimple_set_location (g, location);
1623 gimple_seq_add_stmt_without_update (&seq, g);
1624 len = t;
1625 }
1626
1627 t = make_ssa_name (size_type_node, NULL);
1628 g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
1629 build_int_cst (size_type_node, 1));
1630 gimple_set_location (g, location);
1631 gimple_seq_add_stmt_without_update (&seq, g);
1632 offset = gimple_assign_lhs (g);
1633 }
1634
1635 /* _1 = base; */
1636 base = unshare_expr (base);
1637 gimple region_end =
1638 gimple_build_assign_with_ops (TREE_CODE (base),
1639 make_ssa_name (TREE_TYPE (base), NULL),
1640 base, NULL);
1641 gimple_set_location (region_end, location);
1642 gimple_seq_add_stmt_without_update (&seq, region_end);
1643
1644 /* _2 = _1 + offset; */
1645 region_end =
1646 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1647 make_ssa_name (TREE_TYPE (base), NULL),
1648 gimple_assign_lhs (region_end),
1649 offset);
1650 gimple_set_location (region_end, location);
1651 gimple_seq_add_stmt_without_update (&seq, region_end);
1652 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
1653
1654 /* instrument access at _2; */
1655 gsi = gsi_for_stmt (region_end);
1656 build_check_stmt (location, gimple_assign_lhs (region_end),
1657 &gsi, /*before_p=*/false, is_store, 1);
1658
1659 if (then_bb == NULL)
1660 update_mem_ref_hash_table (end, 1);
1661
1662 *iter = gsi_for_stmt (gsi_stmt (*iter));
1663 }
1664
1665 /* Instrument the call (to the builtin strlen function) pointed to by
1666 ITER.
1667
1668 This function instruments the access to the first byte of the
1669 argument, right before the call. After the call it instruments the
1670 access to the last byte of the argument; it uses the result of the
1671 call to deduce the offset of that last byte.
1672
1673 Upon completion, iff the call has actually been instrumented, this
1674 function returns TRUE and *ITER points to the statement logically
1675 following the built-in strlen function call *ITER was initially
1676 pointing to. Otherwise, the function returns FALSE and *ITER
1677 remains unchanged. */
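/* In other words, a statement like

     n_2 = strlen (str_1);

   is conceptually surrounded by checks like these (a sketch):

     _3 = (char *) str_1;
     check *_3;                // first byte, before the call
     n_2 = strlen (str_1);
     _4 = _3 + n_2;
     check *_4;                // byte str[n], after the call  */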
1678
1679 static bool
1680 instrument_strlen_call (gimple_stmt_iterator *iter)
1681 {
1682 gimple call = gsi_stmt (*iter);
1683 gcc_assert (is_gimple_call (call));
1684
1685 tree callee = gimple_call_fndecl (call);
1686 gcc_assert (is_builtin_fn (callee)
1687 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
1688 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);
1689
1690 tree len = gimple_call_lhs (call);
1691 if (len == NULL)
1692 /* Some passes might clear the return value of the strlen call;
1693 bail out in that case. Return FALSE as we are not advancing
1694 *ITER. */
1695 return false;
1696 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));
1697
1698 location_t loc = gimple_location (call);
1699 tree str_arg = gimple_call_arg (call, 0);
1700
1701 /* Instrument the access to the first byte of str_arg, i.e.:
1702
1703 _1 = str_arg; instrument (_1); */
1704 tree cptr_type = build_pointer_type (char_type_node);
1705 gimple str_arg_ssa =
1706 gimple_build_assign_with_ops (NOP_EXPR,
1707 make_ssa_name (cptr_type, NULL),
1708 str_arg, NULL);
1709 gimple_set_location (str_arg_ssa, loc);
1710 gimple_stmt_iterator gsi = *iter;
1711 gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
1712 build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
1713 /*before_p=*/false, /*is_store=*/false, 1);
1714
1715 /* If we initially had an instruction like:
1716
1717 int n = strlen (str);
1718
1719 we now want to instrument the access to str[n], after the
1720 instruction above. */
1721
1722 /* So let's build the access to str[n], that is, the access through
1723 the pointer_plus expr (_1 + len). */
1724 gimple stmt =
1725 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1726 make_ssa_name (cptr_type, NULL),
1727 gimple_assign_lhs (str_arg_ssa),
1728 len);
1729 gimple_set_location (stmt, loc);
1730 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
1731
1732 build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
1733 /*before_p=*/false, /*is_store=*/false, 1);
1734
1735 /* Ensure that iter points to the statement logically following the
1736 one it was initially pointing to. */
1737 *iter = gsi;
1738 /* As *ITER has been advanced to point to the next statement, let's
1739 return true to inform transform_statements that it shouldn't
1740 advance *ITER anymore; otherwise it would skip that next
1741 statement, which wouldn't be instrumented. */
1742 return true;
1743 }
1744
1745 /* Instrument the call to a built-in memory access function that is
1746 pointed to by the iterator ITER.
1747
1748 Upon completion, return TRUE iff *ITER has been advanced to the
1749 statement following the one it was originally pointing to. */
1750
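/* For example (a sketch, not the full list of handled builtins), for

     __builtin_memcpy (d, s, n);

   get_mem_refs_of_builtin_call below fills SRC0 = s and DEST = d, both
   with length N, so the S region is instrumented as a load and the D
   region as a store by instrument_mem_region_access.  */
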
1751 static bool
1752 instrument_builtin_call (gimple_stmt_iterator *iter)
1753 {
1754 bool iter_advanced_p = false;
1755 gimple call = gsi_stmt (*iter);
1756
1757 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1758
1759 tree callee = gimple_call_fndecl (call);
1760 location_t loc = gimple_location (call);
1761
1762 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
1763 iter_advanced_p = instrument_strlen_call (iter);
1764 else
1765 {
1766 asan_mem_ref src0, src1, dest;
1767 asan_mem_ref_init (&src0, NULL, 1);
1768 asan_mem_ref_init (&src1, NULL, 1);
1769 asan_mem_ref_init (&dest, NULL, 1);
1770
1771 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1772 bool src0_is_store = false, src1_is_store = false,
1773 dest_is_store = false, dest_is_deref = false;
1774
1775 if (get_mem_refs_of_builtin_call (call,
1776 &src0, &src0_len, &src0_is_store,
1777 &src1, &src1_len, &src1_is_store,
1778 &dest, &dest_len, &dest_is_store,
1779 &dest_is_deref))
1780 {
1781 if (dest_is_deref)
1782 {
1783 instrument_derefs (iter, dest.start, loc, dest_is_store);
1784 gsi_next (iter);
1785 iter_advanced_p = true;
1786 }
1787 else if (src0_len || src1_len || dest_len)
1788 {
1789 if (src0.start != NULL_TREE)
1790 instrument_mem_region_access (src0.start, src0_len,
1791 iter, loc, /*is_store=*/false);
1792 if (src1.start != NULL_TREE)
1793 instrument_mem_region_access (src1.start, src1_len,
1794 iter, loc, /*is_store=*/false);
1795 if (dest.start != NULL_TREE)
1796 instrument_mem_region_access (dest.start, dest_len,
1797 iter, loc, /*is_store=*/true);
1798 *iter = gsi_for_stmt (call);
1799 gsi_next (iter);
1800 iter_advanced_p = true;
1801 }
1802 }
1803 }
1804 return iter_advanced_p;
1805 }
1806
1807 /* Instrument the assignment statement ITER if it is subject to
1808 instrumentation. Return TRUE iff instrumentation actually
1809 happened. In that case, the iterator ITER is advanced to the
1810 statement logically following the one initially pointed to by ITER,
1811 and the memory reference whose access has been instrumented is
1812 added to the memory references hash table. */
1813
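/* Note that the two IFs in the function below are not mutually
   exclusive: a GIMPLE aggregate copy such as

     *p_1 = *q_2;

   is at the same time a store (through its LHS) and a load (through
   its RHS), so a single statement can get both references
   instrumented.  */
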
1814 static bool
1815 maybe_instrument_assignment (gimple_stmt_iterator *iter)
1816 {
1817 gimple s = gsi_stmt (*iter);
1818
1819 gcc_assert (gimple_assign_single_p (s));
1820
1821 tree ref_expr = NULL_TREE;
1822 bool is_store, is_instrumented = false;
1823
1824 if (gimple_store_p (s))
1825 {
1826 ref_expr = gimple_assign_lhs (s);
1827 is_store = true;
1828 instrument_derefs (iter, ref_expr,
1829 gimple_location (s),
1830 is_store);
1831 is_instrumented = true;
1832 }
1833
1834 if (gimple_assign_load_p (s))
1835 {
1836 ref_expr = gimple_assign_rhs1 (s);
1837 is_store = false;
1838 instrument_derefs (iter, ref_expr,
1839 gimple_location (s),
1840 is_store);
1841 is_instrumented = true;
1842 }
1843
1844 if (is_instrumented)
1845 gsi_next (iter);
1846
1847 return is_instrumented;
1848 }
1849
1850 /* Instrument the function call pointed to by the iterator ITER, if it
1851 is subject to instrumentation. At the moment, the only function
1852 calls that are instrumented are some built-in functions that access
1853 memory. Look at instrument_builtin_call to learn more.
1854
1855 Upon completion return TRUE iff *ITER was advanced to the statement
1856 following the one it was originally pointing to. */
1857
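/* A sketch of the noreturn handling done below: before a call that
   cannot return, e.g.

     abort ();

   the pass emits

     __asan_handle_no_return ();
     abort ();

   giving the runtime a chance to clean up (e.g. unpoison the stack)
   before control leaves the function for good.  */
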
1858 static bool
1859 maybe_instrument_call (gimple_stmt_iterator *iter)
1860 {
1861 gimple stmt = gsi_stmt (*iter);
1862 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
1863
1864 if (is_builtin && instrument_builtin_call (iter))
1865 return true;
1866
1867 if (gimple_call_noreturn_p (stmt))
1868 {
1869 if (is_builtin)
1870 {
1871 tree callee = gimple_call_fndecl (stmt);
1872 switch (DECL_FUNCTION_CODE (callee))
1873 {
1874 case BUILT_IN_UNREACHABLE:
1875 case BUILT_IN_TRAP:
1876 /* Don't instrument these. */
1877 return false;
1878 }
1879 }
1880 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
1881 gimple g = gimple_build_call (decl, 0);
1882 gimple_set_location (g, gimple_location (stmt));
1883 gsi_insert_before (iter, g, GSI_SAME_STMT);
1884 }
1885 return false;
1886 }
1887
1888 /* Walk each instruction of all basic blocks and instrument those that
1889 represent memory references: loads, stores, or function calls.
1890 In a given basic block, this function avoids instrumenting memory
1891 references that have already been instrumented. */
1892
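/* For instance, within a single extended basic block

     _1 = *p_2;
     _3 = *p_2;

   only the first load of *p_2 gets a check; the second one is found in
   the memory references hash table by has_stmt_been_instrumented_p and
   skipped.  */
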
1893 static void
1894 transform_statements (void)
1895 {
1896 basic_block bb, last_bb = NULL;
1897 gimple_stmt_iterator i;
1898 int saved_last_basic_block = last_basic_block;
1899
1900 FOR_EACH_BB (bb)
1901 {
1902 basic_block prev_bb = bb;
1903
1904 if (bb->index >= saved_last_basic_block) continue;
1905
1906 /* Flush the mem ref hash table, if current bb doesn't have
1907 exactly one predecessor, or if that predecessor (skipping
1908 over asan created basic blocks) isn't the last processed
1909 basic block. Thus we effectively flush on extended basic
1910 block boundaries. */
1911 while (single_pred_p (prev_bb))
1912 {
1913 prev_bb = single_pred (prev_bb);
1914 if (prev_bb->index < saved_last_basic_block)
1915 break;
1916 }
1917 if (prev_bb != last_bb)
1918 empty_mem_ref_hash_table ();
1919 last_bb = bb;
1920
1921 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
1922 {
1923 gimple s = gsi_stmt (i);
1924
1925 if (has_stmt_been_instrumented_p (s))
1926 gsi_next (&i);
1927 else if (gimple_assign_single_p (s)
1928 && maybe_instrument_assignment (&i))
1929 /* Nothing to do as maybe_instrument_assignment advanced
1930 the iterator I. */;
1931 else if (is_gimple_call (s) && maybe_instrument_call (&i))
1932 /* Nothing to do as maybe_instrument_call
1933 advanced the iterator I. */;
1934 else
1935 {
1936 /* No instrumentation happened.
1937
1938 If the current instruction is a function call that
1939 might free something, let's forget about the memory
1940 references that got instrumented. Otherwise we might
1941 miss some instrumentation opportunities. */
1942 if (is_gimple_call (s) && !nonfreeing_call_p (s))
1943 empty_mem_ref_hash_table ();
1944
1945 gsi_next (&i);
1946 }
1947 }
1948 }
1949 free_mem_ref_resources ();
1950 }
1951
1952 /* Build
1953 struct __asan_global
1954 {
1955 const void *__beg;
1956 uptr __size;
1957 uptr __size_with_redzone;
1958 const void *__name;
1959 const void *__module_name;
1960 uptr __has_dynamic_init;
1961 } type. */
1962
1963 static tree
1964 asan_global_struct (void)
1965 {
1966 static const char *field_names[6]
1967 = { "__beg", "__size", "__size_with_redzone",
1968 "__name", "__module_name", "__has_dynamic_init" };
1969 tree fields[6], ret;
1970 int i;
1971
1972 ret = make_node (RECORD_TYPE);
1973 for (i = 0; i < 6; i++)
1974 {
1975 fields[i]
1976 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
1977 get_identifier (field_names[i]),
1978 (i == 0 || i == 3) ? const_ptr_type_node
1979 : pointer_sized_int_node);
1980 DECL_CONTEXT (fields[i]) = ret;
1981 if (i)
1982 DECL_CHAIN (fields[i - 1]) = fields[i];
1983 }
1984 TYPE_FIELDS (ret) = fields[0];
1985 TYPE_NAME (ret) = get_identifier ("__asan_global");
1986 layout_type (ret);
1987 return ret;
1988 }
1989
1990 /* Append description of a single global DECL into vector V.
1991 TYPE is __asan_global struct type as returned by asan_global_struct. */
1992
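/* E.g. for a global "int g;" defined in foo.c, the descriptor built
   below is, schematically,

     {&g, sizeof (g), sizeof (g) + <redzone>, "g", "foo.c", 0}

   (values illustrative: <redzone> comes from asan_red_zone_size, and
   the address recorded may actually be that of a local .LASAN* alias
   when asan_needs_local_alias holds).  */
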
1993 static void
1994 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
1995 {
1996 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
1997 unsigned HOST_WIDE_INT size;
1998 tree str_cst, module_name_cst, refdecl = decl;
1999 vec<constructor_elt, va_gc> *vinner = NULL;
2000
2001 pretty_printer asan_pp, module_name_pp;
2002
2003 if (DECL_NAME (decl))
2004 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2005 else
2006 pp_string (&asan_pp, "<unknown>");
2007 str_cst = asan_pp_string (&asan_pp);
2008
2009 pp_string (&module_name_pp, main_input_filename);
2010 module_name_cst = asan_pp_string (&module_name_pp);
2011
2012 if (asan_needs_local_alias (decl))
2013 {
2014 char buf[20];
2015 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2016 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2017 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2018 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2019 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2020 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2021 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2022 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2023 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2024 TREE_STATIC (refdecl) = 1;
2025 TREE_PUBLIC (refdecl) = 0;
2026 TREE_USED (refdecl) = 1;
2027 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2028 }
2029
2030 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2031 fold_convert (const_ptr_type_node,
2032 build_fold_addr_expr (refdecl)));
2033 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2034 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2035 size += asan_red_zone_size (size);
2036 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2037 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2038 fold_convert (const_ptr_type_node, str_cst));
2039 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2040 fold_convert (const_ptr_type_node, module_name_cst));
2041 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, 0));
2042 init = build_constructor (type, vinner);
2043 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2044 }
2045
2046 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2047 void
2048 initialize_sanitizer_builtins (void)
2049 {
2050 tree decl;
2051
2052 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2053 return;
2054
2055 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2056 tree BT_FN_VOID_PTR
2057 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2058 tree BT_FN_VOID_PTR_PTR
2059 = build_function_type_list (void_type_node, ptr_type_node,
2060 ptr_type_node, NULL_TREE);
2061 tree BT_FN_VOID_PTR_PTR_PTR
2062 = build_function_type_list (void_type_node, ptr_type_node,
2063 ptr_type_node, ptr_type_node, NULL_TREE);
2064 tree BT_FN_VOID_PTR_PTRMODE
2065 = build_function_type_list (void_type_node, ptr_type_node,
2066 pointer_sized_int_node, NULL_TREE);
2067 tree BT_FN_VOID_INT
2068 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2069 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2070 tree BT_FN_IX_CONST_VPTR_INT[5];
2071 tree BT_FN_IX_VPTR_IX_INT[5];
2072 tree BT_FN_VOID_VPTR_IX_INT[5];
2073 tree vptr
2074 = build_pointer_type (build_qualified_type (void_type_node,
2075 TYPE_QUAL_VOLATILE));
2076 tree cvptr
2077 = build_pointer_type (build_qualified_type (void_type_node,
2078 TYPE_QUAL_VOLATILE
2079 | TYPE_QUAL_CONST));
2080 tree boolt
2081 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2082 int i;
2083 for (i = 0; i < 5; i++)
2084 {
2085 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2086 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2087 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2088 integer_type_node, integer_type_node,
2089 NULL_TREE);
2090 BT_FN_IX_CONST_VPTR_INT[i]
2091 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2092 BT_FN_IX_VPTR_IX_INT[i]
2093 = build_function_type_list (ix, vptr, ix, integer_type_node,
2094 NULL_TREE);
2095 BT_FN_VOID_VPTR_IX_INT[i]
2096 = build_function_type_list (void_type_node, vptr, ix,
2097 integer_type_node, NULL_TREE);
2098 }
2099 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2100 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2101 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2102 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2103 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2104 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2105 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2106 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2107 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2108 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2109 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2110 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2111 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2112 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2113 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2114 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2115 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2116 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2117 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2118 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2119 #undef ATTR_NOTHROW_LEAF_LIST
2120 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2121 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2122 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2123 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2124 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2125 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2126 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2127 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2128 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2129 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2130 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2131 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2132 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2133 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2134 #undef DEF_SANITIZER_BUILTIN
2135 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2136 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2137 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2138 set_call_expr_flags (decl, ATTRS); \
2139 set_builtin_decl (ENUM, decl, true);
2140
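/* A representative entry from sanitizer.def (a sketch; see that file
   for the authoritative list and the current __asan_init version):

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_HANDLE_NO_RETURN,
			    "__asan_handle_no_return",
			    BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   Each such entry expands, via the macro above, into the
   add_builtin_function / set_call_expr_flags / set_builtin_decl
   triple.  */
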
2141 #include "sanitizer.def"
2142
2143 #undef DEF_SANITIZER_BUILTIN
2144 }
2145
2146 /* Called via htab_traverse. Count number of emitted
2147 STRING_CSTs in the constant hash table. */
2148
2149 static int
2150 count_string_csts (void **slot, void *data)
2151 {
2152 struct constant_descriptor_tree *desc
2153 = (struct constant_descriptor_tree *) *slot;
2154 if (TREE_CODE (desc->value) == STRING_CST
2155 && TREE_ASM_WRITTEN (desc->value)
2156 && asan_protect_global (desc->value))
2157 ++*((unsigned HOST_WIDE_INT *) data);
2158 return 1;
2159 }
2160
2161 /* Helper structure to pass two parameters to
2162 add_string_csts. */
2163
2164 struct asan_add_string_csts_data
2165 {
2166 tree type;
2167 vec<constructor_elt, va_gc> *v;
2168 };
2169
2170 /* Called via htab_traverse. Call asan_add_global
2171 on emitted STRING_CSTs from the constant hash table. */
2172
2173 static int
2174 add_string_csts (void **slot, void *data)
2175 {
2176 struct constant_descriptor_tree *desc
2177 = (struct constant_descriptor_tree *) *slot;
2178 if (TREE_CODE (desc->value) == STRING_CST
2179 && TREE_ASM_WRITTEN (desc->value)
2180 && asan_protect_global (desc->value))
2181 {
2182 struct asan_add_string_csts_data *aascd
2183 = (struct asan_add_string_csts_data *) data;
2184 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2185 aascd->type, aascd->v);
2186 }
2187 return 1;
2188 }
2189
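/* Typical use of the traversal callbacks above, mirroring what
   asan_finish_file does below:

     struct asan_add_string_csts_data aascd = { type, v };
     htab_traverse (constant_pool_htab (), add_string_csts, &aascd);  */
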
2190 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2191 invoke ggc_collect. */
2192 static GTY(()) tree asan_ctor_statements;
2193
2194 /* Module-level instrumentation.
2195 - Insert __asan_init_vN() into the list of CTORs.
2196 - TODO: insert redzones around globals.
2197 */
2198
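/* Schematically, for a translation unit with GCOUNT protected
   globals, the code below emits (names like .LASAN0 are illustrative):

     static struct __asan_global .LASAN0[GCOUNT] = { ... };

     ctor:  __asan_init_vN ();
	    __asan_register_globals (&.LASAN0, GCOUNT);
     dtor:  __asan_unregister_globals (&.LASAN0, GCOUNT);  */
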
2199 void
2200 asan_finish_file (void)
2201 {
2202 struct varpool_node *vnode;
2203 unsigned HOST_WIDE_INT gcount = 0;
2204
2205 if (shadow_ptr_types[0] == NULL_TREE)
2206 asan_init_shadow_ptr_types ();
2207 /* Avoid instrumenting code in the asan ctors/dtors.
2208 We don't need to insert padding after the description strings,
2209 nor after the .LASAN* array. */
2210 flag_sanitize &= ~SANITIZE_ADDRESS;
2211
2212 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2213 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2214 FOR_EACH_DEFINED_VARIABLE (vnode)
2215 if (TREE_ASM_WRITTEN (vnode->decl)
2216 && asan_protect_global (vnode->decl))
2217 ++gcount;
2218 htab_t const_desc_htab = constant_pool_htab ();
2219 htab_traverse (const_desc_htab, count_string_csts, &gcount);
2220 if (gcount)
2221 {
2222 tree type = asan_global_struct (), var, ctor;
2223 tree dtor_statements = NULL_TREE;
2224 vec<constructor_elt, va_gc> *v;
2225 char buf[20];
2226
2227 type = build_array_type_nelts (type, gcount);
2228 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2229 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2230 type);
2231 TREE_STATIC (var) = 1;
2232 TREE_PUBLIC (var) = 0;
2233 DECL_ARTIFICIAL (var) = 1;
2234 DECL_IGNORED_P (var) = 1;
2235 vec_alloc (v, gcount);
2236 FOR_EACH_DEFINED_VARIABLE (vnode)
2237 if (TREE_ASM_WRITTEN (vnode->decl)
2238 && asan_protect_global (vnode->decl))
2239 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2240 struct asan_add_string_csts_data aascd;
2241 aascd.type = TREE_TYPE (type);
2242 aascd.v = v;
2243 htab_traverse (const_desc_htab, add_string_csts, &aascd);
2244 ctor = build_constructor (type, v);
2245 TREE_CONSTANT (ctor) = 1;
2246 TREE_STATIC (ctor) = 1;
2247 DECL_INITIAL (var) = ctor;
2248 varpool_assemble_decl (varpool_node_for_decl (var));
2249
2250 fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2251 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2252 append_to_statement_list (build_call_expr (fn, 2,
2253 build_fold_addr_expr (var),
2254 gcount_tree),
2255 &asan_ctor_statements);
2256
2257 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2258 append_to_statement_list (build_call_expr (fn, 2,
2259 build_fold_addr_expr (var),
2260 gcount_tree),
2261 &dtor_statements);
2262 cgraph_build_static_cdtor ('D', dtor_statements,
2263 MAX_RESERVED_INIT_PRIORITY - 1);
2264 }
2265 cgraph_build_static_cdtor ('I', asan_ctor_statements,
2266 MAX_RESERVED_INIT_PRIORITY - 1);
2267 flag_sanitize |= SANITIZE_ADDRESS;
2268 }
2269
2270 /* Instrument the current function. */
2271
2272 static unsigned int
2273 asan_instrument (void)
2274 {
2275 if (shadow_ptr_types[0] == NULL_TREE)
2276 asan_init_shadow_ptr_types ();
2277 transform_statements ();
2278 return 0;
2279 }
2280
2281 static bool
2282 gate_asan (void)
2283 {
2284 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2285 && !lookup_attribute ("no_sanitize_address",
2286 DECL_ATTRIBUTES (current_function_decl));
2287 }
2288
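/* E.g. a function declared as

     __attribute__ ((no_sanitize_address)) void f (void);

   is left uninstrumented even when -fsanitize=address is enabled.  */
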
2289 namespace {
2290
2291 const pass_data pass_data_asan =
2292 {
2293 GIMPLE_PASS, /* type */
2294 "asan", /* name */
2295 OPTGROUP_NONE, /* optinfo_flags */
2296 true, /* has_gate */
2297 true, /* has_execute */
2298 TV_NONE, /* tv_id */
2299 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2300 0, /* properties_provided */
2301 0, /* properties_destroyed */
2302 0, /* todo_flags_start */
2303 ( TODO_verify_flow | TODO_verify_stmts
2304 | TODO_update_ssa ), /* todo_flags_finish */
2305 };
2306
2307 class pass_asan : public gimple_opt_pass
2308 {
2309 public:
2310 pass_asan (gcc::context *ctxt)
2311 : gimple_opt_pass (pass_data_asan, ctxt)
2312 {}
2313
2314 /* opt_pass methods: */
2315 opt_pass * clone () { return new pass_asan (m_ctxt); }
2316 bool gate () { return gate_asan (); }
2317 unsigned int execute () { return asan_instrument (); }
2318
2319 }; // class pass_asan
2320
2321 } // anon namespace
2322
2323 gimple_opt_pass *
2324 make_pass_asan (gcc::context *ctxt)
2325 {
2326 return new pass_asan (ctxt);
2327 }
2328
2329 static bool
2330 gate_asan_O0 (void)
2331 {
2332 return !optimize && gate_asan ();
2333 }
2334
2335 namespace {
2336
2337 const pass_data pass_data_asan_O0 =
2338 {
2339 GIMPLE_PASS, /* type */
2340 "asan0", /* name */
2341 OPTGROUP_NONE, /* optinfo_flags */
2342 true, /* has_gate */
2343 true, /* has_execute */
2344 TV_NONE, /* tv_id */
2345 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2346 0, /* properties_provided */
2347 0, /* properties_destroyed */
2348 0, /* todo_flags_start */
2349 ( TODO_verify_flow | TODO_verify_stmts
2350 | TODO_update_ssa ), /* todo_flags_finish */
2351 };
2352
2353 class pass_asan_O0 : public gimple_opt_pass
2354 {
2355 public:
2356 pass_asan_O0 (gcc::context *ctxt)
2357 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2358 {}
2359
2360 /* opt_pass methods: */
2361 bool gate () { return gate_asan_O0 (); }
2362 unsigned int execute () { return asan_instrument (); }
2363
2364 }; // class pass_asan_O0
2365
2366 } // anon namespace
2367
2368 gimple_opt_pass *
2369 make_pass_asan_O0 (gcc::context *ctxt)
2370 {
2371 return new pass_asan_O0 (ctxt);
2372 }
2373
2374 /* Perform expansion and optimization of sanitizer function calls. */
2375
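/* Currently the only transformation performed below: an internal call

     IFN_UBSAN_NULL (ptr_1, ...);

   emitted by -fsanitize=null instrumentation is expanded by
   ubsan_expand_null_ifn into an explicit "if (ptr_1 == 0) <report>"
   sequence; see ubsan.c for the details.  */
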
2376 static unsigned int
2377 execute_sanopt (void)
2378 {
2379 basic_block bb;
2380
2381 FOR_EACH_BB (bb)
2382 {
2383 gimple_stmt_iterator gsi;
2384 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2385 {
2386 gimple stmt = gsi_stmt (gsi);
2387
2388 if (!is_gimple_call (stmt))
2389 continue;
2390
2391 if (gimple_call_internal_p (stmt))
2392 switch (gimple_call_internal_fn (stmt))
2393 {
2394 case IFN_UBSAN_NULL:
2395 ubsan_expand_null_ifn (gsi);
2396 break;
2397 default:
2398 break;
2399 }
2400
2401 if (dump_file && (dump_flags & TDF_DETAILS))
2402 {
2403 fprintf (dump_file, "Optimized\n ");
2404 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2405 fprintf (dump_file, "\n");
2406 }
2407 }
2408 }
2409 return 0;
2410 }
2411
2412 static bool
2413 gate_sanopt (void)
2414 {
2415 return flag_sanitize;
2416 }
2417
2418 namespace {
2419
2420 const pass_data pass_data_sanopt =
2421 {
2422 GIMPLE_PASS, /* type */
2423 "sanopt", /* name */
2424 OPTGROUP_NONE, /* optinfo_flags */
2425 true, /* has_gate */
2426 true, /* has_execute */
2427 TV_NONE, /* tv_id */
2428 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2429 0, /* properties_provided */
2430 0, /* properties_destroyed */
2431 0, /* todo_flags_start */
2432 ( TODO_verify_flow | TODO_verify_stmts
2433 | TODO_update_ssa ), /* todo_flags_finish */
2434 };
2435
2436 class pass_sanopt : public gimple_opt_pass
2437 {
2438 public:
2439 pass_sanopt (gcc::context *ctxt)
2440 : gimple_opt_pass (pass_data_sanopt, ctxt)
2441 {}
2442
2443 /* opt_pass methods: */
2444 bool gate () { return gate_sanopt (); }
2445 unsigned int execute () { return execute_sanopt (); }
2446
2447 }; // class pass_sanopt
2448
2449 } // anon namespace
2450
2451 gimple_opt_pass *
2452 make_pass_sanopt (gcc::context *ctxt)
2453 {
2454 return new pass_sanopt (ctxt);
2455 }
2456
2457 #include "gt-asan.h"