]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/asan.c
sanitizer.def (BUILT_IN_ASAN_REPORT_LOAD_N, [...]): New.
[thirdparty/gcc.git] / gcc / asan.c
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2014 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "hash-table.h"
27 #include "basic-block.h"
28 #include "tree-ssa-alias.h"
29 #include "internal-fn.h"
30 #include "gimple-expr.h"
31 #include "is-a.h"
32 #include "gimple.h"
33 #include "gimplify.h"
34 #include "gimple-iterator.h"
35 #include "calls.h"
36 #include "varasm.h"
37 #include "stor-layout.h"
38 #include "tree-iterator.h"
39 #include "cgraph.h"
40 #include "stringpool.h"
41 #include "tree-ssanames.h"
42 #include "tree-pass.h"
43 #include "asan.h"
44 #include "gimple-pretty-print.h"
45 #include "target.h"
46 #include "expr.h"
47 #include "optabs.h"
48 #include "output.h"
49 #include "tm_p.h"
50 #include "langhooks.h"
51 #include "alloc-pool.h"
52 #include "cfgloop.h"
53 #include "gimple-builder.h"
54 #include "ubsan.h"
55 #include "predict.h"
56 #include "params.h"
57
58 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
59 with <2x slowdown on average.
60
61 The tool consists of two parts:
62 instrumentation module (this file) and a run-time library.
63 The instrumentation module adds a run-time check before every memory insn.
64 For a 8- or 16- byte load accessing address X:
65 ShadowAddr = (X >> 3) + Offset
66 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
67 if (ShadowValue)
68 __asan_report_load8(X);
69 For a load of N bytes (N=1, 2 or 4) from address X:
70 ShadowAddr = (X >> 3) + Offset
71 ShadowValue = *(char*)ShadowAddr;
72 if (ShadowValue)
73 if ((X & 7) + N - 1 > ShadowValue)
74 __asan_report_loadN(X);
75 Stores are instrumented similarly, but using __asan_report_storeN functions.
76    A call to __asan_init_vN() is inserted to the list of module CTORs.
77 N is the version number of the AddressSanitizer API. The changes between the
78 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
79
80    The run-time library redefines malloc (so that red zones are inserted around
81 the allocated memory) and free (so that reuse of free-ed memory is delayed),
82 provides __asan_report* and __asan_init_vN functions.
83
84 Read more:
85 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
86
87 The current implementation supports detection of out-of-bounds and
88 use-after-free in the heap, on the stack and for global variables.
89
90 [Protection of stack variables]
91
92 To understand how detection of out-of-bounds and use-after-free works
93 for stack variables, lets look at this example on x86_64 where the
94 stack grows downward:
95
96 int
97 foo ()
98 {
99 char a[23] = {0};
100 int b[2] = {0};
101
102 a[5] = 1;
103 b[1] = 2;
104
105 return a[5] + b[1];
106 }
107
108 For this function, the stack protected by asan will be organized as
109 follows, from the top of the stack to the bottom:
110
111 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
112
113 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
114 the next slot be 32 bytes aligned; this one is called Partial
115 Redzone; this 32 bytes alignment is an asan constraint]
116
117 Slot 3/ [24 bytes for variable 'a']
118
119 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
120
121    Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
122
123 Slot 6/ [8 bytes for variable 'b']
124
125 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
126 'LEFT RedZone']
127
128 The 32 bytes of LEFT red zone at the bottom of the stack can be
129 decomposed as such:
130
131 1/ The first 8 bytes contain a magical asan number that is always
132 0x41B58AB3.
133
134 2/ The following 8 bytes contains a pointer to a string (to be
135 parsed at runtime by the runtime asan library), which format is
136 the following:
137
138 "<function-name> <space> <num-of-variables-on-the-stack>
139 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
140 <length-of-var-in-bytes> ){n} "
141
142 where '(...){n}' means the content inside the parenthesis occurs 'n'
143 times, with 'n' being the number of variables on the stack.
144
145 3/ The following 8 bytes contain the PC of the current function which
146 will be used by the run-time library to print an error message.
147
148 4/ The following 8 bytes are reserved for internal use by the run-time.
149
150 The shadow memory for that stack layout is going to look like this:
151
152 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
153 The F1 byte pattern is a magic number called
154 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
155         the memory for that shadow byte is part of the LEFT red zone
156 intended to seat at the bottom of the variables on the stack.
157
158 - content of shadow memory 8 bytes for slots 6 and 5:
159 0xF4F4F400. The F4 byte pattern is a magic number
160 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
161 memory region for this shadow byte is a PARTIAL red zone
162 intended to pad a variable A, so that the slot following
163 {A,padding} is 32 bytes aligned.
164
165 Note that the fact that the least significant byte of this
166 shadow memory content is 00 means that 8 bytes of its
167 corresponding memory (which corresponds to the memory of
168 variable 'b') is addressable.
169
170 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
171 The F2 byte pattern is a magic number called
172 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
173 region for this shadow byte is a MIDDLE red zone intended to
174 seat between two 32 aligned slots of {variable,padding}.
175
176 - content of shadow memory 8 bytes for slot 3 and 2:
177         0xF4000000.  This represents the concatenation of
178 variable 'a' and the partial red zone following it, like what we
179 had for variable 'b'. The least significant 3 bytes being 00
180 means that the 3 bytes of variable 'a' are addressable.
181
182 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
183 The F3 byte pattern is a magic number called
184 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
185 region for this shadow byte is a RIGHT red zone intended to seat
186 at the top of the variables of the stack.
187
188 Note that the real variable layout is done in expand_used_vars in
189 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
190 stack variables as well as the different red zones, emits some
191 prologue code to populate the shadow memory as to poison (mark as
192 non-accessible) the regions of the red zones and mark the regions of
193 stack variables as accessible, and emit some epilogue code to
194 un-poison (mark as accessible) the regions of red zones right before
195 the function exits.
196
197 [Protection of global variables]
198
199 The basic idea is to insert a red zone between two global variables
200 and install a constructor function that calls the asan runtime to do
201 the populating of the relevant shadow memory regions at load time.
202
203 So the global variables are laid out as to insert a red zone between
204 them. The size of the red zones is so that each variable starts on a
205 32 bytes boundary.
206
207 Then a constructor function is installed so that, for each global
208 variable, it calls the runtime asan library function
209    __asan_register_globals with an instance of this type:
210
211 struct __asan_global
212 {
213 // Address of the beginning of the global variable.
214 const void *__beg;
215
216 // Initial size of the global variable.
217 uptr __size;
218
219 // Size of the global variable + size of the red zone. This
220 // size is 32 bytes aligned.
221 uptr __size_with_redzone;
222
223 // Name of the global variable.
224 const void *__name;
225
226 // Name of the module where the global variable is declared.
227 const void *__module_name;
228
229 // 1 if it has dynamic initialization, 0 otherwise.
230 uptr __has_dynamic_init;
231 }
232
233 A destructor function that calls the runtime asan library function
234    __asan_unregister_globals is also installed.  */
235
236 alias_set_type asan_shadow_set = -1;
237
238 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
239 alias set is used for all shadow memory accesses. */
240 static GTY(()) tree shadow_ptr_types[2];
241
242 /* Decl for __asan_option_detect_stack_use_after_return. */
243 static GTY(()) tree asan_detect_stack_use_after_return;
244
245 /* Hashtable support for memory references used by gimple
246 statements. */
247
/* This type represents a reference to a memory region.  It is the key
   of the hash table (asan_mem_ref_ht) that records which accesses have
   already been instrumented.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size in bytes of the access (set from int_size_in_bytes of the
     accessed type, or 1 when only the start address matters).  */
  HOST_WIDE_INT access_size;
};
257
258 static alloc_pool asan_mem_ref_alloc_pool;
259
260 /* This creates the alloc pool used to store the instances of
261 asan_mem_ref that are stored in the hash table asan_mem_ref_ht. */
262
263 static alloc_pool
264 asan_mem_ref_get_alloc_pool ()
265 {
266 if (asan_mem_ref_alloc_pool == NULL)
267 asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
268 sizeof (asan_mem_ref),
269 10);
270 return asan_mem_ref_alloc_pool;
271
272 }
273
274 /* Initializes an instance of asan_mem_ref. */
275
276 static void
277 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
278 {
279 ref->start = start;
280 ref->access_size = access_size;
281 }
282
283 /* Allocates memory for an instance of asan_mem_ref into the memory
284 pool returned by asan_mem_ref_get_alloc_pool and initialize it.
285 START is the address of (or the expression pointing to) the
286 beginning of memory reference. ACCESS_SIZE is the size of the
287 access to the referenced memory. */
288
289 static asan_mem_ref*
290 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
291 {
292 asan_mem_ref *ref =
293 (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());
294
295 asan_mem_ref_init (ref, start, access_size);
296 return ref;
297 }
298
299 /* This builds and returns a pointer to the end of the memory region
300 that starts at START and of length LEN. */
301
302 tree
303 asan_mem_ref_get_end (tree start, tree len)
304 {
305 if (len == NULL_TREE || integer_zerop (len))
306 return start;
307
308 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
309 }
310
/* Return a tree expression that represents the end of the memory
   region referenced by REF, i.e. REF->start advanced by LEN bytes.
   Beware that this function can actually build a new tree
   expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
320
/* Hash-table traits for asan_mem_ref entries.  Hashing and equality
   are implemented by the two inline members defined below; removal is
   a no-op (typed_noop_remove) because the entries live in an alloc
   pool that is freed wholesale by free_mem_ref_resources.  */
struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref value_type;
  typedef asan_mem_ref compare_type;

  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
330
331 /* Hash a memory reference. */
332
333 inline hashval_t
334 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
335 {
336 hashval_t h = iterative_hash_expr (mem_ref->start, 0);
337 h = iterative_hash_host_wide_int (mem_ref->access_size, h);
338 return h;
339 }
340
341 /* Compare two memory references. We accept the length of either
342 memory references to be NULL_TREE. */
343
344 inline bool
345 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
346 const asan_mem_ref *m2)
347 {
348 return (m1->access_size == m2->access_size
349 && operand_equal_p (m1->start, m2->start, 0));
350 }
351
352 static hash_table <asan_mem_ref_hasher> asan_mem_ref_ht;
353
354 /* Returns a reference to the hash table containing memory references.
355 This function ensures that the hash table is created. Note that
356 this hash table is updated by the function
357 update_mem_ref_hash_table. */
358
359 static hash_table <asan_mem_ref_hasher> &
360 get_mem_ref_hash_table ()
361 {
362 if (!asan_mem_ref_ht.is_created ())
363 asan_mem_ref_ht.create (10);
364
365 return asan_mem_ref_ht;
366 }
367
368 /* Clear all entries from the memory references hash table. */
369
370 static void
371 empty_mem_ref_hash_table ()
372 {
373 if (asan_mem_ref_ht.is_created ())
374 asan_mem_ref_ht.empty ();
375 }
376
377 /* Free the memory references hash table. */
378
379 static void
380 free_mem_ref_resources ()
381 {
382 if (asan_mem_ref_ht.is_created ())
383 asan_mem_ref_ht.dispose ();
384
385 if (asan_mem_ref_alloc_pool)
386 {
387 free_alloc_pool (asan_mem_ref_alloc_pool);
388 asan_mem_ref_alloc_pool = NULL;
389 }
390 }
391
392 /* Return true iff the memory reference REF has been instrumented. */
393
394 static bool
395 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
396 {
397 asan_mem_ref r;
398 asan_mem_ref_init (&r, ref, access_size);
399
400 return (get_mem_ref_hash_table ().find (&r) != NULL);
401 }
402
/* Return true iff the memory reference REF has been instrumented.
   Convenience overload delegating to the (tree, size) variant.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}
410
411 /* Return true iff access to memory region starting at REF and of
412 length LEN has been instrumented. */
413
414 static bool
415 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
416 {
417 /* First let's see if the address of the beginning of REF has been
418 instrumented. */
419 if (!has_mem_ref_been_instrumented (ref))
420 return false;
421
422 if (len != 0)
423 {
424 /* Let's see if the end of the region has been instrumented. */
425 if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
426 ref->access_size))
427 return false;
428 }
429 return true;
430 }
431
432 /* Set REF to the memory reference present in a gimple assignment
433 ASSIGNMENT. Return true upon successful completion, false
434 otherwise. */
435
436 static bool
437 get_mem_ref_of_assignment (const gimple assignment,
438 asan_mem_ref *ref,
439 bool *ref_is_store)
440 {
441 gcc_assert (gimple_assign_single_p (assignment));
442
443 if (gimple_store_p (assignment)
444 && !gimple_clobber_p (assignment))
445 {
446 ref->start = gimple_assign_lhs (assignment);
447 *ref_is_store = true;
448 }
449 else if (gimple_assign_load_p (assignment))
450 {
451 ref->start = gimple_assign_rhs1 (assignment);
452 *ref_is_store = false;
453 }
454 else
455 return false;
456
457 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
458 return true;
459 }
460
461 /* Return the memory references contained in a gimple statement
462 representing a builtin call that has to do with memory access. */
463
464 static bool
465 get_mem_refs_of_builtin_call (const gimple call,
466 asan_mem_ref *src0,
467 tree *src0_len,
468 bool *src0_is_store,
469 asan_mem_ref *src1,
470 tree *src1_len,
471 bool *src1_is_store,
472 asan_mem_ref *dst,
473 tree *dst_len,
474 bool *dst_is_store,
475 bool *dest_is_deref)
476 {
477 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
478
479 tree callee = gimple_call_fndecl (call);
480 tree source0 = NULL_TREE, source1 = NULL_TREE,
481 dest = NULL_TREE, len = NULL_TREE;
482 bool is_store = true, got_reference_p = false;
483 HOST_WIDE_INT access_size = 1;
484
485 switch (DECL_FUNCTION_CODE (callee))
486 {
487 /* (s, s, n) style memops. */
488 case BUILT_IN_BCMP:
489 case BUILT_IN_MEMCMP:
490 source0 = gimple_call_arg (call, 0);
491 source1 = gimple_call_arg (call, 1);
492 len = gimple_call_arg (call, 2);
493 break;
494
495 /* (src, dest, n) style memops. */
496 case BUILT_IN_BCOPY:
497 source0 = gimple_call_arg (call, 0);
498 dest = gimple_call_arg (call, 1);
499 len = gimple_call_arg (call, 2);
500 break;
501
502 /* (dest, src, n) style memops. */
503 case BUILT_IN_MEMCPY:
504 case BUILT_IN_MEMCPY_CHK:
505 case BUILT_IN_MEMMOVE:
506 case BUILT_IN_MEMMOVE_CHK:
507 case BUILT_IN_MEMPCPY:
508 case BUILT_IN_MEMPCPY_CHK:
509 dest = gimple_call_arg (call, 0);
510 source0 = gimple_call_arg (call, 1);
511 len = gimple_call_arg (call, 2);
512 break;
513
514 /* (dest, n) style memops. */
515 case BUILT_IN_BZERO:
516 dest = gimple_call_arg (call, 0);
517 len = gimple_call_arg (call, 1);
518 break;
519
520 /* (dest, x, n) style memops*/
521 case BUILT_IN_MEMSET:
522 case BUILT_IN_MEMSET_CHK:
523 dest = gimple_call_arg (call, 0);
524 len = gimple_call_arg (call, 2);
525 break;
526
527 case BUILT_IN_STRLEN:
528 source0 = gimple_call_arg (call, 0);
529 len = gimple_call_lhs (call);
530 break ;
531
532 /* And now the __atomic* and __sync builtins.
533 These are handled differently from the classical memory memory
534 access builtins above. */
535
536 case BUILT_IN_ATOMIC_LOAD_1:
537 case BUILT_IN_ATOMIC_LOAD_2:
538 case BUILT_IN_ATOMIC_LOAD_4:
539 case BUILT_IN_ATOMIC_LOAD_8:
540 case BUILT_IN_ATOMIC_LOAD_16:
541 is_store = false;
542 /* fall through. */
543
544 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
545 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
546 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
547 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
548 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
549
550 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
551 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
552 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
553 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
554 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
555
556 case BUILT_IN_SYNC_FETCH_AND_OR_1:
557 case BUILT_IN_SYNC_FETCH_AND_OR_2:
558 case BUILT_IN_SYNC_FETCH_AND_OR_4:
559 case BUILT_IN_SYNC_FETCH_AND_OR_8:
560 case BUILT_IN_SYNC_FETCH_AND_OR_16:
561
562 case BUILT_IN_SYNC_FETCH_AND_AND_1:
563 case BUILT_IN_SYNC_FETCH_AND_AND_2:
564 case BUILT_IN_SYNC_FETCH_AND_AND_4:
565 case BUILT_IN_SYNC_FETCH_AND_AND_8:
566 case BUILT_IN_SYNC_FETCH_AND_AND_16:
567
568 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
569 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
570 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
571 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
572 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
573
574 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
575 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
576 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
577 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
578
579 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
580 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
581 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
582 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
583 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
584
585 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
586 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
587 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
588 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
589 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
590
591 case BUILT_IN_SYNC_OR_AND_FETCH_1:
592 case BUILT_IN_SYNC_OR_AND_FETCH_2:
593 case BUILT_IN_SYNC_OR_AND_FETCH_4:
594 case BUILT_IN_SYNC_OR_AND_FETCH_8:
595 case BUILT_IN_SYNC_OR_AND_FETCH_16:
596
597 case BUILT_IN_SYNC_AND_AND_FETCH_1:
598 case BUILT_IN_SYNC_AND_AND_FETCH_2:
599 case BUILT_IN_SYNC_AND_AND_FETCH_4:
600 case BUILT_IN_SYNC_AND_AND_FETCH_8:
601 case BUILT_IN_SYNC_AND_AND_FETCH_16:
602
603 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
604 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
605 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
606 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
607 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
608
609 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
610 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
611 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
612 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
613
614 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
615 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
616 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
617 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
618 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
619
620 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
621 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
622 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
623 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
624 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
625
626 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
627 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
628 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
629 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
630 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
631
632 case BUILT_IN_SYNC_LOCK_RELEASE_1:
633 case BUILT_IN_SYNC_LOCK_RELEASE_2:
634 case BUILT_IN_SYNC_LOCK_RELEASE_4:
635 case BUILT_IN_SYNC_LOCK_RELEASE_8:
636 case BUILT_IN_SYNC_LOCK_RELEASE_16:
637
638 case BUILT_IN_ATOMIC_EXCHANGE_1:
639 case BUILT_IN_ATOMIC_EXCHANGE_2:
640 case BUILT_IN_ATOMIC_EXCHANGE_4:
641 case BUILT_IN_ATOMIC_EXCHANGE_8:
642 case BUILT_IN_ATOMIC_EXCHANGE_16:
643
644 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
645 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
646 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
647 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
648 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
649
650 case BUILT_IN_ATOMIC_STORE_1:
651 case BUILT_IN_ATOMIC_STORE_2:
652 case BUILT_IN_ATOMIC_STORE_4:
653 case BUILT_IN_ATOMIC_STORE_8:
654 case BUILT_IN_ATOMIC_STORE_16:
655
656 case BUILT_IN_ATOMIC_ADD_FETCH_1:
657 case BUILT_IN_ATOMIC_ADD_FETCH_2:
658 case BUILT_IN_ATOMIC_ADD_FETCH_4:
659 case BUILT_IN_ATOMIC_ADD_FETCH_8:
660 case BUILT_IN_ATOMIC_ADD_FETCH_16:
661
662 case BUILT_IN_ATOMIC_SUB_FETCH_1:
663 case BUILT_IN_ATOMIC_SUB_FETCH_2:
664 case BUILT_IN_ATOMIC_SUB_FETCH_4:
665 case BUILT_IN_ATOMIC_SUB_FETCH_8:
666 case BUILT_IN_ATOMIC_SUB_FETCH_16:
667
668 case BUILT_IN_ATOMIC_AND_FETCH_1:
669 case BUILT_IN_ATOMIC_AND_FETCH_2:
670 case BUILT_IN_ATOMIC_AND_FETCH_4:
671 case BUILT_IN_ATOMIC_AND_FETCH_8:
672 case BUILT_IN_ATOMIC_AND_FETCH_16:
673
674 case BUILT_IN_ATOMIC_NAND_FETCH_1:
675 case BUILT_IN_ATOMIC_NAND_FETCH_2:
676 case BUILT_IN_ATOMIC_NAND_FETCH_4:
677 case BUILT_IN_ATOMIC_NAND_FETCH_8:
678 case BUILT_IN_ATOMIC_NAND_FETCH_16:
679
680 case BUILT_IN_ATOMIC_XOR_FETCH_1:
681 case BUILT_IN_ATOMIC_XOR_FETCH_2:
682 case BUILT_IN_ATOMIC_XOR_FETCH_4:
683 case BUILT_IN_ATOMIC_XOR_FETCH_8:
684 case BUILT_IN_ATOMIC_XOR_FETCH_16:
685
686 case BUILT_IN_ATOMIC_OR_FETCH_1:
687 case BUILT_IN_ATOMIC_OR_FETCH_2:
688 case BUILT_IN_ATOMIC_OR_FETCH_4:
689 case BUILT_IN_ATOMIC_OR_FETCH_8:
690 case BUILT_IN_ATOMIC_OR_FETCH_16:
691
692 case BUILT_IN_ATOMIC_FETCH_ADD_1:
693 case BUILT_IN_ATOMIC_FETCH_ADD_2:
694 case BUILT_IN_ATOMIC_FETCH_ADD_4:
695 case BUILT_IN_ATOMIC_FETCH_ADD_8:
696 case BUILT_IN_ATOMIC_FETCH_ADD_16:
697
698 case BUILT_IN_ATOMIC_FETCH_SUB_1:
699 case BUILT_IN_ATOMIC_FETCH_SUB_2:
700 case BUILT_IN_ATOMIC_FETCH_SUB_4:
701 case BUILT_IN_ATOMIC_FETCH_SUB_8:
702 case BUILT_IN_ATOMIC_FETCH_SUB_16:
703
704 case BUILT_IN_ATOMIC_FETCH_AND_1:
705 case BUILT_IN_ATOMIC_FETCH_AND_2:
706 case BUILT_IN_ATOMIC_FETCH_AND_4:
707 case BUILT_IN_ATOMIC_FETCH_AND_8:
708 case BUILT_IN_ATOMIC_FETCH_AND_16:
709
710 case BUILT_IN_ATOMIC_FETCH_NAND_1:
711 case BUILT_IN_ATOMIC_FETCH_NAND_2:
712 case BUILT_IN_ATOMIC_FETCH_NAND_4:
713 case BUILT_IN_ATOMIC_FETCH_NAND_8:
714 case BUILT_IN_ATOMIC_FETCH_NAND_16:
715
716 case BUILT_IN_ATOMIC_FETCH_XOR_1:
717 case BUILT_IN_ATOMIC_FETCH_XOR_2:
718 case BUILT_IN_ATOMIC_FETCH_XOR_4:
719 case BUILT_IN_ATOMIC_FETCH_XOR_8:
720 case BUILT_IN_ATOMIC_FETCH_XOR_16:
721
722 case BUILT_IN_ATOMIC_FETCH_OR_1:
723 case BUILT_IN_ATOMIC_FETCH_OR_2:
724 case BUILT_IN_ATOMIC_FETCH_OR_4:
725 case BUILT_IN_ATOMIC_FETCH_OR_8:
726 case BUILT_IN_ATOMIC_FETCH_OR_16:
727 {
728 dest = gimple_call_arg (call, 0);
729 /* DEST represents the address of a memory location.
730 instrument_derefs wants the memory location, so lets
731 dereference the address DEST before handing it to
732 instrument_derefs. */
733 if (TREE_CODE (dest) == ADDR_EXPR)
734 dest = TREE_OPERAND (dest, 0);
735 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
736 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
737 dest, build_int_cst (TREE_TYPE (dest), 0));
738 else
739 gcc_unreachable ();
740
741 access_size = int_size_in_bytes (TREE_TYPE (dest));
742 }
743
744 default:
745 /* The other builtins memory access are not instrumented in this
746 function because they either don't have any length parameter,
747 or their length parameter is just a limit. */
748 break;
749 }
750
751 if (len != NULL_TREE)
752 {
753 if (source0 != NULL_TREE)
754 {
755 src0->start = source0;
756 src0->access_size = access_size;
757 *src0_len = len;
758 *src0_is_store = false;
759 }
760
761 if (source1 != NULL_TREE)
762 {
763 src1->start = source1;
764 src1->access_size = access_size;
765 *src1_len = len;
766 *src1_is_store = false;
767 }
768
769 if (dest != NULL_TREE)
770 {
771 dst->start = dest;
772 dst->access_size = access_size;
773 *dst_len = len;
774 *dst_is_store = true;
775 }
776
777 got_reference_p = true;
778 }
779 else if (dest)
780 {
781 dst->start = dest;
782 dst->access_size = access_size;
783 *dst_len = NULL_TREE;
784 *dst_is_store = is_store;
785 *dest_is_deref = true;
786 got_reference_p = true;
787 }
788
789 return got_reference_p;
790 }
791
792 /* Return true iff a given gimple statement has been instrumented.
793 Note that the statement is "defined" by the memory references it
794 contains. */
795
796 static bool
797 has_stmt_been_instrumented_p (gimple stmt)
798 {
799 if (gimple_assign_single_p (stmt))
800 {
801 bool r_is_store;
802 asan_mem_ref r;
803 asan_mem_ref_init (&r, NULL, 1);
804
805 if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
806 return has_mem_ref_been_instrumented (&r);
807 }
808 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
809 {
810 asan_mem_ref src0, src1, dest;
811 asan_mem_ref_init (&src0, NULL, 1);
812 asan_mem_ref_init (&src1, NULL, 1);
813 asan_mem_ref_init (&dest, NULL, 1);
814
815 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
816 bool src0_is_store = false, src1_is_store = false,
817 dest_is_store = false, dest_is_deref = false;
818 if (get_mem_refs_of_builtin_call (stmt,
819 &src0, &src0_len, &src0_is_store,
820 &src1, &src1_len, &src1_is_store,
821 &dest, &dest_len, &dest_is_store,
822 &dest_is_deref))
823 {
824 if (src0.start != NULL_TREE
825 && !has_mem_ref_been_instrumented (&src0, src0_len))
826 return false;
827
828 if (src1.start != NULL_TREE
829 && !has_mem_ref_been_instrumented (&src1, src1_len))
830 return false;
831
832 if (dest.start != NULL_TREE
833 && !has_mem_ref_been_instrumented (&dest, dest_len))
834 return false;
835
836 return true;
837 }
838 }
839 return false;
840 }
841
842 /* Insert a memory reference into the hash table. */
843
844 static void
845 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
846 {
847 hash_table <asan_mem_ref_hasher> ht = get_mem_ref_hash_table ();
848
849 asan_mem_ref r;
850 asan_mem_ref_init (&r, ref, access_size);
851
852 asan_mem_ref **slot = ht.find_slot (&r, INSERT);
853 if (*slot == NULL)
854 *slot = asan_mem_ref_new (ref, access_size);
855 }
856
857 /* Initialize shadow_ptr_types array. */
858
859 static void
860 asan_init_shadow_ptr_types (void)
861 {
862 asan_shadow_set = new_alias_set ();
863 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
864 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
865 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
866 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
867 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
868 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
869 initialize_sanitizer_builtins ();
870 }
871
872 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
873
874 static tree
875 asan_pp_string (pretty_printer *pp)
876 {
877 const char *buf = pp_formatted_text (pp);
878 size_t len = strlen (buf);
879 tree ret = build_string (len + 1, buf);
880 TREE_TYPE (ret)
881 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
882 build_index_type (size_int (len)));
883 TREE_READONLY (ret) = 1;
884 TREE_STATIC (ret) = 1;
885 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
886 }
887
888 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
889
890 static rtx
891 asan_shadow_cst (unsigned char shadow_bytes[4])
892 {
893 int i;
894 unsigned HOST_WIDE_INT val = 0;
895 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
896 for (i = 0; i < 4; i++)
897 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
898 << (BITS_PER_UNIT * i);
899 return gen_int_mode (val, SImode);
900 }
901
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't call a library
   call here though, since this runs inside the instrumented function's
   own prologue/epilogue code.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx insn, insns, top_label, end, addr, tmp, jump;

  /* First try the generic block-clear expansion in a scratch
     sequence...  */
  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  /* ...and use it only if it did not degenerate into a library call
     (e.g. memset), which is forbidden here.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  /* Otherwise emit a simple SImode-word-at-a-time clearing loop:
       addr = start; end = start + len;
       do { *(SI *)addr = 0; addr += 4; } while (addr < end);
     LEN must therefore be a multiple of 4.  */
  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  /* Mark the backward branch as very likely taken (80%).  */
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
940
941 void
942 asan_function_start (void)
943 {
944 section *fnsec = function_section (current_function_decl);
945 switch_to_section (fnsec);
946 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
947 current_function_funcdef_no);
948 }
949
950 /* Insert code to protect stack vars. The prologue sequence should be emitted
951 directly, epilogue sequence returned. BASE is the register holding the
952 stack base, against which OFFSETS array offsets are relative to, OFFSETS
953 array contains pairs of offsets in reverse order, always the end offset
954 of some gap that needs protection followed by starting offset,
955 and DECLS is an array of representative decls for each var partition.
956 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
957 elements long (OFFSETS include gap before the first variable as well
958 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
959 register which stack vars DECL_RTLs are based on. Either BASE should be
960 assigned to PBASE, when not doing use after return protection, or
961 corresponding address based on __asan_stack_malloc* return value. */
962
rtx
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base, lab;
  char buf[30];
  unsigned char shadow_bytes[4];
  /* Per the contract above, OFFSETS[0] is the frame end and
     OFFSETS[length - 1] the frame start (lowest offset).  */
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  Its layout is
     "<nvars> {<offset> <size> <name-len> <name> }*", one triple per
     protected variable partition; the runtime parses it when reporting
     errors against this frame.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	/* Anonymous partitions get a fixed "<unknown>" placeholder
	   (the leading 9 is its name length).  */
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);

  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      /* Class N serves frame sizes in (32 << N, 64 << N]; frames of
	 33..65536 bytes map to classes 0..10.  */
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	/* Round the frame size up to a multiple of the alignment and
	   remember the padding so the fake-stack frame stays aligned.  */
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      /* Lazily create the extern flag the asan runtime sets when
	 stack-use-after-return checking is enabled.  */
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      /* Remember the real stack address; BASE may be replaced below by
	 the fake-stack allocation from __asan_stack_malloc_N.  */
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      /* Skip the runtime call when detection is disabled (flag == 0).  */
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
				    GEN_INT (asan_frame_size
					     + base_align_bias),
				    TYPE_MODE (pointer_sized_int_node),
				    addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      /* Expose the (possibly fake-stack) frame base to the caller via
	 PBASE, rebased so DECL_RTLs keep their original offsets.  */
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  /* Write the frame header: magic word, address of the description
     string, then the address of this function's LASANPC label.  */
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  /* Build an artificial decl referring to the LASANPC label emitted by
     asan_function_start so its address can be stored in the header.  */
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  /* Compute the shadow address of the frame:
     (base >> ASAN_SHADOW_SHIFT) + shadow offset (+ alignment bias).  */
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     targetm.asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  /* The loops below write the shadow four bytes (one red-zone unit)
     at a time, so ASAN_RED_ZONE_SIZE must map to exactly 4 shadow
     bytes.  */
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  /* Poison the red zones, walking the (end, start) offset pairs from
     lowest to highest address; left, middle and right zones get their
     distinct magic bytes.  */
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  /* The zone boundary is not red-zone aligned: emit one partial
	     shadow word describing the tail bytes of the variable that
	     precedes it.  */
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;	/* Fully addressable granule.  */
		else
		  shadow_bytes[i] = offset - aoff;  /* Partial granule.  */
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      /* Poison whole red-zone units up to the start of the next
	 variable.  */
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL_RTX;
  if (use_after_return_class != -1)
    {
      /* Fake-stack path: if BASE differs from ORIG_BASE the frame came
	 from __asan_stack_malloc_N; mark it retired and poison it with
	 the use-after-return magic, inline when cheap enough, else via
	 __asan_stack_free_N.  */
      rtx lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  /* Normal path: unpoison the frame's shadow memory, coalescing
     adjacent red-zone-aligned ranges into as few clears as possible.  */
  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  /* Flush the accumulated contiguous range before starting a
	     new one.  */
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  /* Hand the recorded epilogue insns back to the caller, who emits
     them before each return.  */
  ret = get_insns ();
  end_sequence ();
  return ret;
}
1241
1242 /* Return true if DECL, a global var, might be overridden and needs
1243 therefore a local alias. */
1244
1245 static bool
1246 asan_needs_local_alias (tree decl)
1247 {
1248 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1249 }
1250
1251 /* Return true if DECL is a VAR_DECL that should be protected
1252 by Address Sanitizer, by appending a red zone with protected
1253 shadow memory after it and aligning it to at least
1254 ASAN_RED_ZONE_SIZE bytes. */
1255
1256 bool
1257 asan_protect_global (tree decl)
1258 {
1259 if (!ASAN_GLOBALS)
1260 return false;
1261
1262 rtx rtl, symbol;
1263
1264 if (TREE_CODE (decl) == STRING_CST)
1265 {
1266 /* Instrument all STRING_CSTs except those created
1267 by asan_pp_string here. */
1268 if (shadow_ptr_types[0] != NULL_TREE
1269 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1270 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1271 return false;
1272 return true;
1273 }
1274 if (TREE_CODE (decl) != VAR_DECL
1275 /* TLS vars aren't statically protectable. */
1276 || DECL_THREAD_LOCAL_P (decl)
1277 /* Externs will be protected elsewhere. */
1278 || DECL_EXTERNAL (decl)
1279 || !DECL_RTL_SET_P (decl)
1280 /* Comdat vars pose an ABI problem, we can't know if
1281 the var that is selected by the linker will have
1282 padding or not. */
1283 || DECL_ONE_ONLY (decl)
1284 /* Similarly for common vars. People can use -fno-common. */
1285 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1286 /* Don't protect if using user section, often vars placed
1287 into user section from multiple TUs are then assumed
1288 to be an array of such vars, putting padding in there
1289 breaks this assumption. */
1290 || (DECL_SECTION_NAME (decl) != NULL_TREE
1291 && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
1292 || DECL_SIZE (decl) == 0
1293 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1294 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1295 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
1296 return false;
1297
1298 rtl = DECL_RTL (decl);
1299 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1300 return false;
1301 symbol = XEXP (rtl, 0);
1302
1303 if (CONSTANT_POOL_ADDRESS_P (symbol)
1304 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1305 return false;
1306
1307 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1308 return false;
1309
1310 #ifndef ASM_OUTPUT_DEF
1311 if (asan_needs_local_alias (decl))
1312 return false;
1313 #endif
1314
1315 return true;
1316 }
1317
1318 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1319 IS_STORE is either 1 (for a store) or 0 (for a load). */
1320
1321 static tree
1322 report_error_func (bool is_store, HOST_WIDE_INT size_in_bytes)
1323 {
1324 static enum built_in_function report[2][6]
1325 = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1326 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1327 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1328 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1329 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1330 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } };
1331 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1332 || size_in_bytes > 16)
1333 return builtin_decl_implicit (report[is_store][5]);
1334 return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
1335 }
1336
1337 /* Split the current basic block and create a condition statement
1338 insertion point right before or after the statement pointed to by
1339 ITER. Return an iterator to the point at which the caller might
1340 safely insert the condition statement.
1341
1342 THEN_BLOCK must be set to the address of an uninitialized instance
1343 of basic_block. The function will then set *THEN_BLOCK to the
1344 'then block' of the condition statement to be inserted by the
1345 caller.
1346
1347 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1348 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1349
   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.
1352
1353 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1354 statements starting from *ITER, and *THEN_BLOCK is a new empty
1355 block.
1356
   *ITER is adjusted to always point to the first statement
   of the basic block *FALLTHROUGH_BLOCK.  That statement is the
1359 same as what ITER was pointing to prior to calling this function,
1360 if BEFORE_P is true; otherwise, it is its following statement. */
1361
gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  /* When inserting before ITER, split after the previous statement so
     the statement at ITER becomes the first of the fallthrough
     block.  */
  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      /* Keep the loop tree usable and request a later fixup pass to
	 recompute exact loop membership.  */
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  /* The caller inserts the condition statement at the end of the
     condition block.  */
  return gsi_last_bb (cond_bb);
}
1418
1419 /* Insert an if condition followed by a 'then block' right before the
1420 statement pointed to by ITER. The fallthrough block -- which is the
1421 else block of the condition as well as the destination of the
1422 outcoming edge of the 'then block' -- starts with the statement
1423 pointed to by ITER.
1424
1425 COND is the condition of the if.
1426
1427 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1428 'then block' is higher than the probability of the edge to the
1429 fallthrough block.
1430
1431 Upon completion of the function, *THEN_BB is set to the newly
1432 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1433 fallthrough block.
1434
1435 *ITER is adjusted to still point to the same statement it was
1436 pointing to initially. */
1437
1438 static void
1439 insert_if_then_before_iter (gimple cond,
1440 gimple_stmt_iterator *iter,
1441 bool then_more_likely_p,
1442 basic_block *then_bb,
1443 basic_block *fallthrough_bb)
1444 {
1445 gimple_stmt_iterator cond_insert_point =
1446 create_cond_insert_point (iter,
1447 /*before_p=*/true,
1448 then_more_likely_p,
1449 /*create_then_fallthru_edge=*/true,
1450 then_bb,
1451 fallthrough_bb);
1452 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1453 }
1454
1455 /* Build
1456 (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
1457
1458 static tree
1459 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1460 tree base_addr, tree shadow_ptr_type)
1461 {
1462 tree t, uintptr_type = TREE_TYPE (base_addr);
1463 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1464 gimple g;
1465
1466 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1467 g = gimple_build_assign_with_ops (RSHIFT_EXPR,
1468 make_ssa_name (uintptr_type, NULL),
1469 base_addr, t);
1470 gimple_set_location (g, location);
1471 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1472
1473 t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
1474 g = gimple_build_assign_with_ops (PLUS_EXPR,
1475 make_ssa_name (uintptr_type, NULL),
1476 gimple_assign_lhs (g), t);
1477 gimple_set_location (g, location);
1478 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1479
1480 g = gimple_build_assign_with_ops (NOP_EXPR,
1481 make_ssa_name (shadow_ptr_type, NULL),
1482 gimple_assign_lhs (g), NULL_TREE);
1483 gimple_set_location (g, location);
1484 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1485
1486 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1487 build_int_cst (shadow_ptr_type, 0));
1488 g = gimple_build_assign_with_ops (MEM_REF,
1489 make_ssa_name (shadow_type, NULL),
1490 t, NULL_TREE);
1491 gimple_set_location (g, location);
1492 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1493 return gimple_assign_lhs (g);
1494 }
1495
1496 /* Instrument the memory access instruction BASE. Insert new
1497 statements before or after ITER.
1498
1499 Note that the memory access represented by BASE can be either an
1500 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1501 location. IS_STORE is TRUE for a store, FALSE for a load.
1502 BEFORE_P is TRUE for inserting the instrumentation code before
1503 ITER, FALSE for inserting it after ITER.
1504
1505 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1506 statement it was pointing to prior to calling this function,
1507 otherwise, it points to the statement logically following it. */
1508
static void
build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
		  bool before_p, bool is_store, HOST_WIDE_INT size_in_bytes)
{
  gimple_stmt_iterator gsi;
  basic_block then_bb, else_bb;
  tree t, base_addr, shadow;
  gimple g;
  /* 16-byte accesses use the second shadow pointer type, everything
     else the first.  */
  tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  /* Unsigned integer with the pointer's precision, for the address
     arithmetic below.  */
  tree uintptr_type
    = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
  tree base_ssa = base;
  HOST_WIDE_INT real_size_in_bytes = size_in_bytes;
  tree sz_arg = NULL_TREE;

  /* Non-power-of-two or larger-than-16 sizes are handled by checking
     just the first and last byte of the region (see below).  */
  if ((size_in_bytes & (size_in_bytes - 1)) != 0
      || size_in_bytes > 16)
    real_size_in_bytes = 1;

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  gsi = create_cond_insert_point (iter, before_p,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge=*/false,
				  &then_bb,
				  &else_bb);

  base = unshare_expr (base);

  /* BASE can already be an SSA_NAME; in that case, do not create a
     new SSA_NAME for it.  */
  if (TREE_CODE (base) != SSA_NAME)
    {
      g = gimple_build_assign_with_ops (TREE_CODE (base),
					make_ssa_name (TREE_TYPE (base), NULL),
					base, NULL_TREE);
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      base_ssa = gimple_assign_lhs (g);
    }

  /* base_addr = (uintptr_type) base;  */
  g = gimple_build_assign_with_ops (NOP_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    base_ssa, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  base_addr = gimple_assign_lhs (g);

  /* Build
     (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset ().  */
  shadow = build_shadow_mem_access (&gsi, location, base_addr,
				    shadow_ptr_type);

  if (real_size_in_bytes < 8)
    {
      /* Slow path for 1, 2 and 4 byte accesses.
	 Test (shadow != 0)
	 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow).  */
      gimple_seq seq = NULL;
      gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq_add_stmt (&seq, shadow_test);
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, base_addr, 7));
      gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						  gimple_seq_last (seq)));
      if (real_size_in_bytes > 1)
	gimple_seq_add_stmt (&seq,
			     build_assign (PLUS_EXPR, gimple_seq_last (seq),
					   real_size_in_bytes - 1));
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, gimple_seq_last (seq),
					       shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
					       gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, location);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
      /* For weird access sizes, check first and last byte.  */
      if (real_size_in_bytes != size_in_bytes)
	{
	  /* base_end_addr = base_addr + size_in_bytes - 1;  */
	  g = gimple_build_assign_with_ops (PLUS_EXPR,
					    make_ssa_name (uintptr_type, NULL),
					    base_addr,
					    build_int_cst (uintptr_type,
							   size_in_bytes - 1));
	  gimple_set_location (g, location);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree base_end_addr = gimple_assign_lhs (g);

	  /* Repeat the same shadow check for the last byte of the
	     region.  */
	  shadow = build_shadow_mem_access (&gsi, location, base_end_addr,
					    shadow_ptr_type);
	  seq = NULL;
	  shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_end_addr, 7));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   shadow));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  /* Report if either the first-byte or the last-byte check
	     fired.  */
	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
						   gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, location);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	  /* The generic _N report function takes the access size as an
	     extra argument.  */
	  sz_arg = build_int_cst (pointer_sized_int_node, size_in_bytes);
	}
    }
  else
    /* For 8 and 16 byte accesses any nonzero shadow byte is a fault.  */
    t = shadow;

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  g = gimple_build_call (report_error_func (is_store, size_in_bytes),
			 sz_arg ? 2 : 1, base_addr, sz_arg);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  *iter = gsi_start_bb (else_bb);
}
1636
1637 /* If T represents a memory access, add instrumentation code before ITER.
1638 LOCATION is source code location.
1639 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1640
static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  /* Honor the -fsanitize read/write instrumentation switches.  */
  if (is_store && !ASAN_INSTRUMENT_WRITES)
    return;
  if (!is_store && !ASAN_INSTRUMENT_READS)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  /* Only these tree codes represent memory accesses this pass knows
     how to instrument; anything else is ignored.  */
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  /* Skip zero-sized or variable-sized accesses.  */
  if (size_in_bytes <= 0)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
				    &mode, &unsignedp, &volatilep, false);
  /* Misaligned power-of-two accesses and accesses whose bit size
     doesn't match the type size (bit-fields): instrument the
     representative field covering the bit-field, if there is one.  */
  if (((size_in_bytes & (size_in_bytes - 1)) == 0
       && (bitpos % (size_in_bytes * BITS_PER_UNIT)))
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    {
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
	{
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
	  instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
					   TREE_OPERAND (t, 0), repr,
					   NULL_TREE), location, is_store);
	}
      return;
    }
  /* Accesses not starting on a byte boundary can't be checked.  */
  if (bitpos % BITS_PER_UNIT)
    return;

  /* An access provably contained within a declared variable may be
     skipped when that variable is known to be always accessible.  */
  if (TREE_CODE (inner) == VAR_DECL
      && offset == NULL_TREE
      && bitpos >= 0
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
	return;
      if (!TREE_STATIC (inner))
	{
	  /* Automatic vars in the current function will be always
	     accessible.  */
	  if (decl_function_context (inner) == current_function_decl)
	    return;
	}
      /* Always instrument external vars, they might be dynamically
	 initialized.  */
      else if (!DECL_EXTERNAL (inner))
	{
	  /* For static vars if they are known not to be dynamically
	     initialized, they will be always accessible.  */
	  varpool_node *vnode = varpool_get_node (inner);
	  if (vnode && !vnode->dynamically_initialized)
	    return;
	}
    }

  /* Emit the check, unless an identical access was already
     instrumented earlier in this basic block region.  */
  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      build_check_stmt (location, base, iter, /*before_p=*/true,
			is_store, size_in_bytes);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }

}
1732
1733 /* Instrument an access to a contiguous memory region that starts at
1734 the address pointed to by BASE, over a length of LEN (expressed in
1735 the sizeof (*BASE) bytes). ITER points to the instruction before
1736 which the instrumentation instructions must be inserted. LOCATION
1737 is the source location that the instrumentation instructions must
1738 have. If IS_STORE is true, then the memory access is a store;
1739 otherwise, it's a load. */
1740
static void
instrument_mem_region_access (tree base, tree len,
			      gimple_stmt_iterator *iter,
			      location_t location, bool is_store)
{
  /* Nothing to do for a non-pointer base, a non-integral length, or a
     statically zero length.  */
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  gimple_stmt_iterator gsi = *iter;

  basic_block fallthrough_bb = NULL, then_bb = NULL;

  /* If the beginning of the memory region has already been
     instrumented, do not instrument it.  */
  bool start_instrumented = has_mem_ref_been_instrumented (base, 1);

  /* If the end of the memory region has already been instrumented, do
     not instrument it.  */
  tree end = asan_mem_ref_get_end (base, len);
  bool end_instrumented = has_mem_ref_been_instrumented (end, 1);

  if (start_instrumented && end_instrumented)
    return;

  if (!is_gimple_constant (len))
    {
      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     // asan instrumentation code goes here.
	   }
	 // fallthrough instructions, starting with *ITER.  */

      gimple g = gimple_build_cond (NE_EXPR,
				    len,
				    build_int_cst (TREE_TYPE (len), 0),
				    NULL_TREE, NULL_TREE);
      gimple_set_location (g, location);
      insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  if (!start_instrumented)
    {
      /* Instrument the beginning of the memory region to be accessed,
	 and arrange for the rest of the instrumentation code to be
	 inserted in the then block *after* the current gsi.  */
      build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);

      if (then_bb)
	/* We are in the case where the length of the region is not
	   constant; so instrumentation code is being generated in the
	   'then block' of the 'if (len != 0)' condition.  Let's arrange
	   for the subsequent instrumentation statements to go in the
	   'then block'.  */
	gsi = gsi_last_bb (then_bb);
      else
	{
	  *iter = gsi;
	  /* Don't remember this access as instrumented, if length
	     is unknown.  It might be zero and not being actually
	     instrumented, so we can't rely on it being instrumented.  */
	  update_mem_ref_hash_table (base, 1);
	}
    }

  if (end_instrumented)
    return;

  /* We want to instrument the access at the end of the memory region,
     which is at (base + len - 1).  */

  /* offset = len - 1;  */
  len = unshare_expr (len);
  tree offset;
  gimple_seq seq = NULL;
  if (TREE_CODE (len) == INTEGER_CST)
    offset = fold_build2 (MINUS_EXPR, size_type_node,
			  fold_convert (size_type_node, len),
			  build_int_cst (size_type_node, 1));
  else
    {
      gimple g;
      tree t;

      /* First materialize LEN as an SSA name if it isn't one.  */
      if (TREE_CODE (len) != SSA_NAME)
	{
	  t = make_ssa_name (TREE_TYPE (len), NULL);
	  g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
	  gimple_set_location (g, location);
	  gimple_seq_add_stmt_without_update (&seq, g);
	  len = t;
	}
      /* Then convert it to size_type_node if needed.  */
      if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
	{
	  t = make_ssa_name (size_type_node, NULL);
	  g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
	  gimple_set_location (g, location);
	  gimple_seq_add_stmt_without_update (&seq, g);
	  len = t;
	}

      t = make_ssa_name (size_type_node, NULL);
      g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
					build_int_cst (size_type_node, 1));
      gimple_set_location (g, location);
      gimple_seq_add_stmt_without_update (&seq, g);
      offset = gimple_assign_lhs (g);
    }

  /* _1 = base;  */
  base = unshare_expr (base);
  gimple region_end =
    gimple_build_assign_with_ops (TREE_CODE (base),
				  make_ssa_name (TREE_TYPE (base), NULL),
				  base, NULL);
  gimple_set_location (region_end, location);
  gimple_seq_add_stmt_without_update (&seq, region_end);

  /* _2 = _1 + offset;  */
  region_end =
    gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
				  make_ssa_name (TREE_TYPE (base), NULL),
				  gimple_assign_lhs (region_end),
				  offset);
  gimple_set_location (region_end, location);
  gimple_seq_add_stmt_without_update (&seq, region_end);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  /* instrument access at _2;  */
  gsi = gsi_for_stmt (region_end);
  build_check_stmt (location, gimple_assign_lhs (region_end),
		    &gsi, /*before_p=*/false, is_store, 1);

  if (then_bb == NULL)
    update_mem_ref_hash_table (end, 1);

  /* Re-acquire *ITER: the block splits performed above may have left
     the caller's iterator stale.  */
  *iter = gsi_for_stmt (gsi_stmt (*iter));
}
1891
/* Instrument the call (to the builtin strlen function) pointed to by
   ITER.

   This function instruments the access to the first byte of the
   argument, right before the call.  After the call it instruments the
   access to the last byte of the argument; it uses the result of the
   call to deduce the offset of that last byte.

   Upon completion, iff the call has actually been instrumented, this
   function returns TRUE and *ITER points to the statement logically
   following the built-in strlen function call *ITER was initially
   pointing to.  Otherwise, the function returns FALSE and *ITER
   remains unchanged.  */

static bool
instrument_strlen_call (gimple_stmt_iterator *iter)
{
  gimple call = gsi_stmt (*iter);
  gcc_assert (is_gimple_call (call));

  tree callee = gimple_call_fndecl (call);
  gcc_assert (is_builtin_fn (callee)
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);

  tree len = gimple_call_lhs (call);
  if (len == NULL)
    /* Some passes might clear the return value of the strlen call;
       bail out in that case.  Return FALSE as we are not advancing
       *ITER.  */
    return false;
  gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));

  location_t loc = gimple_location (call);
  tree str_arg = gimple_call_arg (call, 0);

  /* Instrument the access to the first byte of str_arg. i.e:

     _1 = str_arg; instrument (_1);

     The argument is first copied into a fresh char* SSA name so that
     build_check_stmt has an SSA operand to instrument.  */
  tree cptr_type = build_pointer_type (char_type_node);
  gimple str_arg_ssa =
    gimple_build_assign_with_ops (NOP_EXPR,
				  make_ssa_name (cptr_type, NULL),
				  str_arg, NULL);
  gimple_set_location (str_arg_ssa, loc);
  gimple_stmt_iterator gsi = *iter;
  gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
  /* Check a 1-byte read at the start of the string, before the call.  */
  build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
		    /*before_p=*/false, /*is_store=*/false, 1);

  /* If we initially had an instruction like:

	 int n = strlen (str)

     we now want to instrument the access to str[n], after the
     instruction above.*/

  /* So let's build the access to str[n] that is, access through the
     pointer_plus expr: (_1 + len).  This is inserted after the call so
     that LEN (the call's LHS) is already defined at that point.  */
  gimple stmt =
    gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
				  make_ssa_name (cptr_type, NULL),
				  gimple_assign_lhs (str_arg_ssa),
				  len);
  gimple_set_location (stmt, loc);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);

  /* Check a 1-byte read of the terminating NUL at str[len].  */
  build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
		    /*before_p=*/false, /*is_store=*/false, 1);

  /* Ensure that iter points to the statement logically following the
     one it was initially pointing to.  */
  *iter = gsi;
  /* As *ITER has been advanced to point to the next statement, let's
     return true to inform transform_statements that it shouldn't
     advance *ITER anymore; otherwises it will skip that next
     statement, which wouldn't be instrumented.  */
  return true;
}
1971
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  /* Instrumentation of builtin memory intrinsics can be disabled
     (ASAN_MEMINTRIN parameter); in that case do nothing.  */
  if (!ASAN_MEMINTRIN)
    return false;

  bool iter_advanced_p = false;
  gimple call = gsi_stmt (*iter);

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  location_t loc = gimple_location (call);

  if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
    iter_advanced_p = instrument_strlen_call (iter);
  else
    {
      /* A builtin may reference up to two source regions and one
	 destination; get_mem_refs_of_builtin_call fills in whichever
	 apply to this particular builtin.  */
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false;

      if (get_mem_refs_of_builtin_call (call,
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref))
	{
	  if (dest_is_deref)
	    {
	      /* The builtin amounts to a simple pointer dereference;
		 instrument it like an ordinary memory access.  */
	      instrument_derefs (iter, dest.start, loc, dest_is_store);
	      gsi_next (iter);
	      iter_advanced_p = true;
	    }
	  else if (src0_len || src1_len || dest_len)
	    {
	      /* Region accesses: instrument each region that the
		 builtin touches.  */
	      if (src0.start != NULL_TREE)
		instrument_mem_region_access (src0.start, src0_len,
					      iter, loc, /*is_store=*/false);
	      if (src1.start != NULL_TREE)
		instrument_mem_region_access (src1.start, src1_len,
					      iter, loc, /*is_store=*/false);
	      if (dest.start != NULL_TREE)
		instrument_mem_region_access (dest.start, dest_len,
					      iter, loc, /*is_store=*/true);
	      /* Re-acquire an iterator on the call statement (the
		 instrumentation above may have invalidated *ITER) and
		 step past it.  */
	      *iter = gsi_for_stmt (call);
	      gsi_next (iter);
	      iter_advanced_p = true;
	    }
	}
    }
  return iter_advanced_p;
}
2036
2037 /* Instrument the assignment statement ITER if it is subject to
2038 instrumentation. Return TRUE iff instrumentation actually
2039 happened. In that case, the iterator ITER is advanced to the next
2040 logical expression following the one initially pointed to by ITER,
2041 and the relevant memory reference that which access has been
2042 instrumented is added to the memory references hash table. */
2043
2044 static bool
2045 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2046 {
2047 gimple s = gsi_stmt (*iter);
2048
2049 gcc_assert (gimple_assign_single_p (s));
2050
2051 tree ref_expr = NULL_TREE;
2052 bool is_store, is_instrumented = false;
2053
2054 if (gimple_store_p (s))
2055 {
2056 ref_expr = gimple_assign_lhs (s);
2057 is_store = true;
2058 instrument_derefs (iter, ref_expr,
2059 gimple_location (s),
2060 is_store);
2061 is_instrumented = true;
2062 }
2063
2064 if (gimple_assign_load_p (s))
2065 {
2066 ref_expr = gimple_assign_rhs1 (s);
2067 is_store = false;
2068 instrument_derefs (iter, ref_expr,
2069 gimple_location (s),
2070 is_store);
2071 is_instrumented = true;
2072 }
2073
2074 if (is_instrumented)
2075 gsi_next (iter);
2076
2077 return is_instrumented;
2078 }
2079
2080 /* Instrument the function call pointed to by the iterator ITER, if it
2081 is subject to instrumentation. At the moment, the only function
2082 calls that are instrumented are some built-in functions that access
2083 memory. Look at instrument_builtin_call to learn more.
2084
2085 Upon completion return TRUE iff *ITER was advanced to the statement
2086 following the one it was originally pointing to. */
2087
2088 static bool
2089 maybe_instrument_call (gimple_stmt_iterator *iter)
2090 {
2091 gimple stmt = gsi_stmt (*iter);
2092 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2093
2094 if (is_builtin && instrument_builtin_call (iter))
2095 return true;
2096
2097 if (gimple_call_noreturn_p (stmt))
2098 {
2099 if (is_builtin)
2100 {
2101 tree callee = gimple_call_fndecl (stmt);
2102 switch (DECL_FUNCTION_CODE (callee))
2103 {
2104 case BUILT_IN_UNREACHABLE:
2105 case BUILT_IN_TRAP:
2106 /* Don't instrument these. */
2107 return false;
2108 }
2109 }
2110 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2111 gimple g = gimple_build_call (decl, 0);
2112 gimple_set_location (g, gimple_location (stmt));
2113 gsi_insert_before (iter, g, GSI_SAME_STMT);
2114 }
2115 return false;
2116 }
2117
/* Walk each instruction of all basic block and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  /* Instrumentation inserts new basic blocks; remember the current
     count so blocks created below can be recognized and skipped.  */
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      /* Skip basic blocks that were created by the instrumentation
	 itself.  */
      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}
      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      /* Note: no gsi_next in the loop header; the instrumentation
	 helpers advance I themselves when they fire.  */
      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && maybe_instrument_assignment (&i))
	    /*  Nothing to do as maybe_instrument_assignment advanced
		the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /*  Nothing to do as maybe_instrument_call
		advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  */
	      if (is_gimple_call (s) && !nonfreeing_call_p (s))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}
2181
2182 /* Build
2183 __asan_before_dynamic_init (module_name)
2184 or
2185 __asan_after_dynamic_init ()
2186 call. */
2187
2188 tree
2189 asan_dynamic_init_call (bool after_p)
2190 {
2191 tree fn = builtin_decl_implicit (after_p
2192 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2193 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2194 tree module_name_cst = NULL_TREE;
2195 if (!after_p)
2196 {
2197 pretty_printer module_name_pp;
2198 pp_string (&module_name_pp, main_input_filename);
2199
2200 if (shadow_ptr_types[0] == NULL_TREE)
2201 asan_init_shadow_ptr_types ();
2202 module_name_cst = asan_pp_string (&module_name_pp);
2203 module_name_cst = fold_convert (const_ptr_type_node,
2204 module_name_cst);
2205 }
2206
2207 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2208 }
2209
2210 /* Build
2211 struct __asan_global
2212 {
2213 const void *__beg;
2214 uptr __size;
2215 uptr __size_with_redzone;
2216 const void *__name;
2217 const void *__module_name;
2218 uptr __has_dynamic_init;
2219 } type. */
2220
2221 static tree
2222 asan_global_struct (void)
2223 {
2224 static const char *field_names[6]
2225 = { "__beg", "__size", "__size_with_redzone",
2226 "__name", "__module_name", "__has_dynamic_init" };
2227 tree fields[6], ret;
2228 int i;
2229
2230 ret = make_node (RECORD_TYPE);
2231 for (i = 0; i < 6; i++)
2232 {
2233 fields[i]
2234 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2235 get_identifier (field_names[i]),
2236 (i == 0 || i == 3) ? const_ptr_type_node
2237 : pointer_sized_int_node);
2238 DECL_CONTEXT (fields[i]) = ret;
2239 if (i)
2240 DECL_CHAIN (fields[i - 1]) = fields[i];
2241 }
2242 TYPE_FIELDS (ret) = fields[0];
2243 TYPE_NAME (ret) = get_identifier ("__asan_global");
2244 layout_type (ret);
2245 return ret;
2246 }
2247
/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.
   The constructor elements appended to VINNER below must match the
   field order of that struct exactly.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  /* UPTR is the type of the second field (__size), i.e. the
     pointer-sized integer type.  */
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  /* Build the string constant holding the global's name for error
     reports.  */
  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  if (asan_needs_local_alias (decl))
    {
      /* Emit a local .LASANn alias for DECL and reference that
	 instead, mirroring DECL's relevant flags on the alias.  */
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  /* __beg.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
			  build_fold_addr_expr (refdecl)));
  /* __size.  */
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  /* __size_with_redzone.  */
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  /* __name.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  /* __module_name.  */
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, module_name_cst));
  /* __has_dynamic_init.  */
  varpool_node *vnode = varpool_get_node (decl);
  int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  build_int_cst (uptr, has_dynamic_init));
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
2306
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.
   Idempotent: returns immediately if BUILT_IN_ASAN_INIT already has a
   declaration.  */
void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  /* Function types referenced by sanitizer.def.  The names encode the
     signature (return type, then parameter types).  */
  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  /* Families of types indexed by log2 of the access size in bytes
     (1, 2, 4, 8, 16 — see the loop below).  */
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						|TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      /* IX is the unsigned integer type of BITS_PER_UNIT * 2^i bits,
	 i.e. the 1-, 2-, 4-, 8- and 16-byte access types.  */
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
/* Expand the per-size array entries into the individual names that
   sanitizer.def refers to.  */
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
/* Map the attribute-list names used by sanitizer.def onto ECF_* call
   flags for set_call_expr_flags.  */
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
/* Register one builtin per DEF_SANITIZER_BUILTIN entry.  */
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
			       BUILT_IN_NORMAL, NAME, NULL_TREE);	\
  set_call_expr_flags (decl, ATTRS);					\
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

#undef DEF_SANITIZER_BUILTIN
}
2408
2409 /* Called via htab_traverse. Count number of emitted
2410 STRING_CSTs in the constant hash table. */
2411
2412 static int
2413 count_string_csts (void **slot, void *data)
2414 {
2415 struct constant_descriptor_tree *desc
2416 = (struct constant_descriptor_tree *) *slot;
2417 if (TREE_CODE (desc->value) == STRING_CST
2418 && TREE_ASM_WRITTEN (desc->value)
2419 && asan_protect_global (desc->value))
2420 ++*((unsigned HOST_WIDE_INT *) data);
2421 return 1;
2422 }
2423
/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  /* __asan_global struct type, as returned by asan_global_struct.  */
  tree type;
  /* Vector of global descriptors to append to.  */
  vec<constructor_elt, va_gc> *v;
};
2432
2433 /* Called via htab_traverse. Call asan_add_global
2434 on emitted STRING_CSTs from the constant hash table. */
2435
2436 static int
2437 add_string_csts (void **slot, void *data)
2438 {
2439 struct constant_descriptor_tree *desc
2440 = (struct constant_descriptor_tree *) *slot;
2441 if (TREE_CODE (desc->value) == STRING_CST
2442 && TREE_ASM_WRITTEN (desc->value)
2443 && asan_protect_global (desc->value))
2444 {
2445 struct asan_add_string_csts_data *aascd
2446 = (struct asan_add_string_csts_data *) data;
2447 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2448 aascd->type, aascd->v);
2449 }
2450 return 1;
2451 }
2452
/* Statement list accumulating the calls emitted into the module
   constructor (see asan_finish_file).  Needs to be GTY(()), because
   cgraph_build_static_cdtor may invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;
2456
/* Module-level instrumentation.
   - Insert __asan_init_vN() into the list of CTORs.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
  append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
  /* Count the protected globals: instrumented defined variables plus
     emitted string constants from the constant pool.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  htab_t const_desc_htab = constant_pool_htab ();
  htab_traverse (const_desc_htab, count_string_csts, &gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      /* Build a static .LASAN0 array of GCOUNT __asan_global
	 descriptors.  */
      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      htab_traverse (const_desc_htab, add_string_csts, &aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_assemble_decl (varpool_node_for_decl (var));

      /* Register the array with the runtime in the module ctor...  */
      fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      /* ... and unregister it in a matching module dtor.  */
      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements,
				 MAX_RESERVED_INIT_PRIORITY - 1);
    }
  cgraph_build_static_cdtor ('I', asan_ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
  /* Restore the flag cleared above.  */
  flag_sanitize |= SANITIZE_ADDRESS;
}
2532
2533 /* Instrument the current function. */
2534
2535 static unsigned int
2536 asan_instrument (void)
2537 {
2538 if (shadow_ptr_types[0] == NULL_TREE)
2539 asan_init_shadow_ptr_types ();
2540 transform_statements ();
2541 return 0;
2542 }
2543
2544 static bool
2545 gate_asan (void)
2546 {
2547 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2548 && !lookup_attribute ("no_sanitize_address",
2549 DECL_ATTRIBUTES (current_function_decl));
2550 }
2551
namespace {

/* Pass descriptor for the main AddressSanitizer instrumentation
   pass.  */

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  /* The pass is instantiated more than once in the pass pipeline,
     hence the clone method.  */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace
2583
2584 gimple_opt_pass *
2585 make_pass_asan (gcc::context *ctxt)
2586 {
2587 return new pass_asan (ctxt);
2588 }
2589
namespace {

/* Pass descriptor for the -O0 variant of the AddressSanitizer
   pass.  */

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  /* Runs only when not optimizing; the optimizing pipeline uses
     pass_asan instead.  */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace
2620
2621 gimple_opt_pass *
2622 make_pass_asan_O0 (gcc::context *ctxt)
2623 {
2624 return new pass_asan_O0 (ctxt);
2625 }
2626
/* Perform optimization of sanitize functions.  */

namespace {

const pass_data pass_data_sanopt =
{
  GIMPLE_PASS, /* type */
  "sanopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sanopt : public gimple_opt_pass
{
public:
  pass_sanopt (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sanopt, ctxt)
  {}

  /* opt_pass methods: */
  /* Gated on any sanitizer being active.  */
  virtual bool gate (function *) { return flag_sanitize; }
  virtual unsigned int execute (function *);

}; // class pass_sanopt

/* Expand sanitizer internal function calls (currently only
   IFN_UBSAN_NULL) into real code.  */

unsigned int
pass_sanopt::execute (function *fun)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (!is_gimple_call (stmt))
	    continue;

	  if (gimple_call_internal_p (stmt))
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_NULL:
		ubsan_expand_null_ifn (gsi);
		break;
	      default:
		break;
	      }

	  /* NOTE(review): this dumps every call statement it sees,
	     not only the ones expanded above — confirm whether that
	     is intended.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Optimized\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	}
    }
  return 0;
}

} // anon namespace
2695
2696 gimple_opt_pass *
2697 make_pass_sanopt (gcc::context *ctxt)
2698 {
2699 return new pass_sanopt (ctxt);
2700 }
2701
2702 #include "gt-asan.h"