 1/* AddressSanitizer, a fast memory error detector.
 2 Copyright (C) 2012-2013 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
 25#include "tree.h"
26#include "hash-table.h"
27#include "basic-block.h"
28#include "tree-ssa-alias.h"
29#include "internal-fn.h"
30#include "gimple-expr.h"
31#include "is-a.h"
 32#include "gimple.h"
 33#include "gimplify.h"
 34#include "gimple-iterator.h"
35#include "calls.h"
36#include "varasm.h"
37#include "stor-layout.h"
 38#include "tree-iterator.h"
 39#include "cgraph.h"
 40#include "stringpool.h"
 41#include "tree-ssanames.h"
 42#include "tree-pass.h"
43#include "asan.h"
44#include "gimple-pretty-print.h"
 45#include "target.h"
46#include "expr.h"
47#include "optabs.h"
 48#include "output.h"
 49#include "tm_p.h"
 50#include "langhooks.h"
 51#include "alloc-pool.h"
 52#include "cfgloop.h"
 53#include "gimple-builder.h"
 54#include "ubsan.h"
 55
56/* AddressSanitizer finds out-of-bounds and use-after-free bugs
57 with <2x slowdown on average.
58
59 The tool consists of two parts:
60 instrumentation module (this file) and a run-time library.
61 The instrumentation module adds a run-time check before every memory insn.
 62 For an 8- or 16-byte load accessing address X:
63 ShadowAddr = (X >> 3) + Offset
64 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
65 if (ShadowValue)
66 __asan_report_load8(X);
67 For a load of N bytes (N=1, 2 or 4) from address X:
68 ShadowAddr = (X >> 3) + Offset
69 ShadowValue = *(char*)ShadowAddr;
70 if (ShadowValue)
71 if ((X & 7) + N - 1 > ShadowValue)
72 __asan_report_loadN(X);
73 Stores are instrumented similarly, but using __asan_report_storeN functions.
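   As a worked example (with purely illustrative numbers): for a 4-byte load
   from an address X with (X & 7) == 4 whose shadow byte holds 6 (meaning only
   the first 6 bytes of that 8-byte granule are addressable), the slow path
   fires because (X & 7) + N - 1 == 4 + 4 - 1 == 7 > 6, and
   __asan_report_load4(X) is called. Had the shadow byte been 0, the initial
   "if (ShadowValue)" test would have let the access through without any
   further comparison.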
 74 A call to __asan_init_vN() is inserted into the list of module CTORs.
75 N is the version number of the AddressSanitizer API. The changes between the
76 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
 77
 78 The run-time library redefines malloc (so that redzones are inserted around
 79 the allocated memory) and free (so that reuse of freed memory is delayed),
 80 and provides the __asan_report* and __asan_init_vN functions.
81
82 Read more:
83 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
84
85 The current implementation supports detection of out-of-bounds and
86 use-after-free in the heap, on the stack and for global variables.
87
88 [Protection of stack variables]
89
90 To understand how detection of out-of-bounds and use-after-free works
 91 for stack variables, let's look at this example on x86_64 where the
 92 stack grows downward:
93
94 int
95 foo ()
96 {
97 char a[23] = {0};
98 int b[2] = {0};
99
100 a[5] = 1;
101 b[1] = 2;
102
103 return a[5] + b[1];
104 }
105
106 For this function, the stack protected by asan will be organized as
107 follows, from the top of the stack to the bottom:
 108
 109 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
 110
111 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
112 the next slot be 32 bytes aligned; this one is called Partial
113 Redzone; this 32 bytes alignment is an asan constraint]
 114
 115 Slot 3/ [24 bytes for variable 'a']
 116
 117 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
 118
 119 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
 120
 121 Slot 6/ [8 bytes for variable 'b']
 122
123 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
124 'LEFT RedZone']
 125
126 The 32 bytes of LEFT red zone at the bottom of the stack can be
127 decomposed as such:
128
129 1/ The first 8 bytes contain a magical asan number that is always
130 0x41B58AB3.
131
 132 2/ The following 8 bytes contain a pointer to a string (to be
 133 parsed at run time by the asan run-time library), whose format is
 134 the following:
135
136 "<function-name> <space> <num-of-variables-on-the-stack>
137 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
138 <length-of-var-in-bytes> ){n} "
139
140 where '(...){n}' means the content inside the parenthesis occurs 'n'
141 times, with 'n' being the number of variables on the stack.
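   As a purely hypothetical illustration of that format, a frame of function
   'foo' with two variables, one 24 bytes long at offset 32 and one 8 bytes
   long at offset 96, could be described by the string "foo 2 32 24 96 8 "
   (the offsets are invented here; the string actually built by
   asan_emit_stack_protection below also records each variable's name and
   name length).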
142
143 3/ The following 8 bytes contain the PC of the current function which
144 will be used by the run-time library to print an error message.
 145
 146 4/ The following 8 bytes are reserved for internal use by the run-time.
 147
 148 The shadow memory for that stack layout is going to look like this:
149
150 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
151 The F1 byte pattern is a magic number called
152 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
 153 the memory for that shadow byte is part of the LEFT red zone
 154 intended to sit at the bottom of the variables on the stack.
155
156 - content of shadow memory 8 bytes for slots 6 and 5:
157 0xF4F4F400. The F4 byte pattern is a magic number
158 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
159 memory region for this shadow byte is a PARTIAL red zone
160 intended to pad a variable A, so that the slot following
161 {A,padding} is 32 bytes aligned.
162
163 Note that the fact that the least significant byte of this
164 shadow memory content is 00 means that 8 bytes of its
165 corresponding memory (which corresponds to the memory of
 166 variable 'b') are addressable.
167
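   (More generally: a shadow byte of 0 means all 8 bytes of the corresponding
   granule are addressable, a value k in 1..7 means only the first k bytes
   are, and the large magic values such as the 0xF1-0xF4 patterns described
   here mark the different kinds of red zones.)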
168 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
169 The F2 byte pattern is a magic number called
170 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
171 region for this shadow byte is a MIDDLE red zone intended to
 172 sit between two 32-byte aligned slots of {variable,padding}.
173
174 - content of shadow memory 8 bytes for slot 3 and 2:
 175 0xF4000000. This represents the concatenation of
176 variable 'a' and the partial red zone following it, like what we
177 had for variable 'b'. The least significant 3 bytes being 00
 178 means that the 24 bytes of the slot holding variable 'a' are addressable.
179
 180 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
181 The F3 byte pattern is a magic number called
182 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
 183 region for this shadow byte is a RIGHT red zone intended to sit
184 at the top of the variables of the stack.
185
186 Note that the real variable layout is done in expand_used_vars in
187 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
188 stack variables as well as the different red zones, emits some
 189 prologue code to populate the shadow memory so as to poison (mark as
 190 non-accessible) the regions of the red zones and mark the regions of
 191 stack variables as accessible, and emits some epilogue code to
192 un-poison (mark as accessible) the regions of red zones right before
193 the function exits.
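   As a simplified sketch (not the exact RTL emitted by
   asan_emit_stack_protection below), the prologue conceptually performs:

     frame[0] = 0x41B58AB3;          // the magic number above
     frame[1] = &description_string; // the variables description
     frame[2] = &LASANPC;            // PC of the current function
     shadow = (frame_base >> 3) + asan_shadow_offset;
     write the 0xF1/0xF2/0xF3 patterns, partial sizes and 0x00 bytes
     into the corresponding shadow bytes;

   and the epilogue stores zeros over those same shadow bytes so that the
   frame's memory is considered addressable again once the function returns.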
 194
 195 [Protection of global variables]
 196
197 The basic idea is to insert a red zone between two global variables
198 and install a constructor function that calls the asan runtime to do
199 the populating of the relevant shadow memory regions at load time.
 200
 201 So the global variables are laid out so as to insert a red zone between
 202 them. The red zones are sized so that each variable starts on a
 203 32-byte boundary.
 204
205 Then a constructor function is installed so that, for each global
206 variable, it calls the runtime asan library function
 207 __asan_register_globals with an instance of this type:
208
209 struct __asan_global
210 {
211 // Address of the beginning of the global variable.
212 const void *__beg;
213
214 // Initial size of the global variable.
215 uptr __size;
216
217 // Size of the global variable + size of the red zone. This
218 // size is 32 bytes aligned.
219 uptr __size_with_redzone;
220
221 // Name of the global variable.
222 const void *__name;
223
224 // Name of the module where the global variable is declared.
225 const void *__module_name;
226
227 // This is always set to NULL for now.
228 uptr __has_dynamic_init;
229 }
230
231 A destructor function that calls the runtime asan library function
 232 __asan_unregister_globals is also installed. */
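/* As a rough sketch (hypothetical names and values, not code emitted
   verbatim by this file), the constructor installed for a translation unit
   containing a single global "int g;" conceptually does:

     static struct __asan_global globals[1]
       = { { &g, 4, 32, "g", "foo.c", 0 } };
     static void ctor (void) { __asan_register_globals (globals, 1); }
     static void dtor (void) { __asan_unregister_globals (globals, 1); }

   with ctor and dtor added to the module's constructor and destructor lists
   next to the __asan_init_vN call mentioned above. */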
233
234alias_set_type asan_shadow_set = -1;
 235
236/* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
237 alias set is used for all shadow memory accesses. */
238static GTY(()) tree shadow_ptr_types[2];
239
240/* Hashtable support for memory references used by gimple
241 statements. */
242
243/* This type represents a reference to a memory region. */
244struct asan_mem_ref
245{
 246 /* The expression of the beginning of the memory region. */
247 tree start;
248
249 /* The size of the access (can be 1, 2, 4, 8, 16 for now). */
250 char access_size;
251};
252
253static alloc_pool asan_mem_ref_alloc_pool;
254
255/* This creates the alloc pool used to store the instances of
256 asan_mem_ref that are stored in the hash table asan_mem_ref_ht. */
257
258static alloc_pool
259asan_mem_ref_get_alloc_pool ()
260{
261 if (asan_mem_ref_alloc_pool == NULL)
262 asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
263 sizeof (asan_mem_ref),
264 10);
265 return asan_mem_ref_alloc_pool;
266
267}
268
269/* Initializes an instance of asan_mem_ref. */
270
271static void
272asan_mem_ref_init (asan_mem_ref *ref, tree start, char access_size)
273{
274 ref->start = start;
275 ref->access_size = access_size;
276}
277
278/* Allocates memory for an instance of asan_mem_ref into the memory
 279 pool returned by asan_mem_ref_get_alloc_pool and initializes it.
280 START is the address of (or the expression pointing to) the
281 beginning of memory reference. ACCESS_SIZE is the size of the
282 access to the referenced memory. */
283
284static asan_mem_ref*
285asan_mem_ref_new (tree start, char access_size)
286{
287 asan_mem_ref *ref =
288 (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());
289
290 asan_mem_ref_init (ref, start, access_size);
291 return ref;
292}
293
294/* This builds and returns a pointer to the end of the memory region
295 that starts at START and of length LEN. */
296
297tree
298asan_mem_ref_get_end (tree start, tree len)
299{
300 if (len == NULL_TREE || integer_zerop (len))
301 return start;
302
303 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
304}
305
306/* Return a tree expression that represents the end of the referenced
307 memory region. Beware that this function can actually build a new
308 tree expression. */
309
310tree
311asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
312{
313 return asan_mem_ref_get_end (ref->start, len);
314}
315
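/* Hashing and equality traits for the hash table of asan_mem_ref instances
   (asan_mem_ref_ht below). */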
316struct asan_mem_ref_hasher
317 : typed_noop_remove <asan_mem_ref>
318{
319 typedef asan_mem_ref value_type;
320 typedef asan_mem_ref compare_type;
321
322 static inline hashval_t hash (const value_type *);
323 static inline bool equal (const value_type *, const compare_type *);
324};
325
326/* Hash a memory reference. */
327
328inline hashval_t
329asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
330{
331 hashval_t h = iterative_hash_expr (mem_ref->start, 0);
332 h = iterative_hash_hashval_t (h, mem_ref->access_size);
333 return h;
334}
335
336/* Compare two memory references. We accept the length of either
 337 memory reference to be NULL_TREE. */
338
339inline bool
340asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
341 const asan_mem_ref *m2)
342{
343 return (m1->access_size == m2->access_size
344 && operand_equal_p (m1->start, m2->start, 0));
345}
346
347static hash_table <asan_mem_ref_hasher> asan_mem_ref_ht;
348
349/* Returns a reference to the hash table containing memory references.
350 This function ensures that the hash table is created. Note that
351 this hash table is updated by the function
352 update_mem_ref_hash_table. */
353
354static hash_table <asan_mem_ref_hasher> &
355get_mem_ref_hash_table ()
356{
357 if (!asan_mem_ref_ht.is_created ())
358 asan_mem_ref_ht.create (10);
359
360 return asan_mem_ref_ht;
361}
362
363/* Clear all entries from the memory references hash table. */
364
365static void
366empty_mem_ref_hash_table ()
367{
368 if (asan_mem_ref_ht.is_created ())
369 asan_mem_ref_ht.empty ();
370}
371
372/* Free the memory references hash table. */
373
374static void
375free_mem_ref_resources ()
376{
377 if (asan_mem_ref_ht.is_created ())
378 asan_mem_ref_ht.dispose ();
379
380 if (asan_mem_ref_alloc_pool)
381 {
382 free_alloc_pool (asan_mem_ref_alloc_pool);
383 asan_mem_ref_alloc_pool = NULL;
384 }
385}
386
387/* Return true iff the memory reference REF has been instrumented. */
388
389static bool
390has_mem_ref_been_instrumented (tree ref, char access_size)
391{
392 asan_mem_ref r;
393 asan_mem_ref_init (&r, ref, access_size);
394
395 return (get_mem_ref_hash_table ().find (&r) != NULL);
396}
397
398/* Return true iff the memory reference REF has been instrumented. */
399
400static bool
401has_mem_ref_been_instrumented (const asan_mem_ref *ref)
402{
403 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
404}
405
406/* Return true iff access to memory region starting at REF and of
407 length LEN has been instrumented. */
408
409static bool
410has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
411{
412 /* First let's see if the address of the beginning of REF has been
413 instrumented. */
414 if (!has_mem_ref_been_instrumented (ref))
415 return false;
416
417 if (len != 0)
418 {
419 /* Let's see if the end of the region has been instrumented. */
420 if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
421 ref->access_size))
422 return false;
423 }
424 return true;
425}
426
427/* Set REF to the memory reference present in a gimple assignment
428 ASSIGNMENT. Return true upon successful completion, false
429 otherwise. */
430
431static bool
432get_mem_ref_of_assignment (const gimple assignment,
433 asan_mem_ref *ref,
434 bool *ref_is_store)
435{
436 gcc_assert (gimple_assign_single_p (assignment));
437
438 if (gimple_store_p (assignment)
439 && !gimple_clobber_p (assignment))
440 {
441 ref->start = gimple_assign_lhs (assignment);
442 *ref_is_store = true;
443 }
444 else if (gimple_assign_load_p (assignment))
445 {
446 ref->start = gimple_assign_rhs1 (assignment);
447 *ref_is_store = false;
448 }
449 else
450 return false;
451
452 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
453 return true;
454}
455
456/* Return the memory references contained in a gimple statement
457 representing a builtin call that has to do with memory access. */
458
459static bool
460get_mem_refs_of_builtin_call (const gimple call,
461 asan_mem_ref *src0,
462 tree *src0_len,
463 bool *src0_is_store,
464 asan_mem_ref *src1,
465 tree *src1_len,
466 bool *src1_is_store,
467 asan_mem_ref *dst,
468 tree *dst_len,
469 bool *dst_is_store,
470 bool *dest_is_deref)
471{
472 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
473
474 tree callee = gimple_call_fndecl (call);
475 tree source0 = NULL_TREE, source1 = NULL_TREE,
476 dest = NULL_TREE, len = NULL_TREE;
477 bool is_store = true, got_reference_p = false;
478 char access_size = 1;
479
480 switch (DECL_FUNCTION_CODE (callee))
481 {
482 /* (s, s, n) style memops. */
483 case BUILT_IN_BCMP:
484 case BUILT_IN_MEMCMP:
485 source0 = gimple_call_arg (call, 0);
486 source1 = gimple_call_arg (call, 1);
487 len = gimple_call_arg (call, 2);
488 break;
489
490 /* (src, dest, n) style memops. */
491 case BUILT_IN_BCOPY:
492 source0 = gimple_call_arg (call, 0);
493 dest = gimple_call_arg (call, 1);
494 len = gimple_call_arg (call, 2);
495 break;
496
497 /* (dest, src, n) style memops. */
498 case BUILT_IN_MEMCPY:
499 case BUILT_IN_MEMCPY_CHK:
500 case BUILT_IN_MEMMOVE:
501 case BUILT_IN_MEMMOVE_CHK:
502 case BUILT_IN_MEMPCPY:
503 case BUILT_IN_MEMPCPY_CHK:
504 dest = gimple_call_arg (call, 0);
505 source0 = gimple_call_arg (call, 1);
506 len = gimple_call_arg (call, 2);
507 break;
508
509 /* (dest, n) style memops. */
510 case BUILT_IN_BZERO:
511 dest = gimple_call_arg (call, 0);
512 len = gimple_call_arg (call, 1);
513 break;
514
 515 /* (dest, x, n) style memops. */
516 case BUILT_IN_MEMSET:
517 case BUILT_IN_MEMSET_CHK:
518 dest = gimple_call_arg (call, 0);
519 len = gimple_call_arg (call, 2);
520 break;
521
522 case BUILT_IN_STRLEN:
523 source0 = gimple_call_arg (call, 0);
524 len = gimple_call_lhs (call);
525 break ;
526
527 /* And now the __atomic* and __sync builtins.
 528 These are handled differently from the classical memory
529 access builtins above. */
530
531 case BUILT_IN_ATOMIC_LOAD_1:
532 case BUILT_IN_ATOMIC_LOAD_2:
533 case BUILT_IN_ATOMIC_LOAD_4:
534 case BUILT_IN_ATOMIC_LOAD_8:
535 case BUILT_IN_ATOMIC_LOAD_16:
536 is_store = false;
537 /* fall through. */
538
539 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
540 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
541 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
542 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
543 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
544
545 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
546 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
547 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
548 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
549 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
550
551 case BUILT_IN_SYNC_FETCH_AND_OR_1:
552 case BUILT_IN_SYNC_FETCH_AND_OR_2:
553 case BUILT_IN_SYNC_FETCH_AND_OR_4:
554 case BUILT_IN_SYNC_FETCH_AND_OR_8:
555 case BUILT_IN_SYNC_FETCH_AND_OR_16:
556
557 case BUILT_IN_SYNC_FETCH_AND_AND_1:
558 case BUILT_IN_SYNC_FETCH_AND_AND_2:
559 case BUILT_IN_SYNC_FETCH_AND_AND_4:
560 case BUILT_IN_SYNC_FETCH_AND_AND_8:
561 case BUILT_IN_SYNC_FETCH_AND_AND_16:
562
563 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
564 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
565 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
566 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
567 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
568
569 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
570 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
571 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
572 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
573
574 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
575 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
576 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
577 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
578 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
579
580 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
581 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
582 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
583 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
584 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
585
586 case BUILT_IN_SYNC_OR_AND_FETCH_1:
587 case BUILT_IN_SYNC_OR_AND_FETCH_2:
588 case BUILT_IN_SYNC_OR_AND_FETCH_4:
589 case BUILT_IN_SYNC_OR_AND_FETCH_8:
590 case BUILT_IN_SYNC_OR_AND_FETCH_16:
591
592 case BUILT_IN_SYNC_AND_AND_FETCH_1:
593 case BUILT_IN_SYNC_AND_AND_FETCH_2:
594 case BUILT_IN_SYNC_AND_AND_FETCH_4:
595 case BUILT_IN_SYNC_AND_AND_FETCH_8:
596 case BUILT_IN_SYNC_AND_AND_FETCH_16:
597
598 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
599 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
600 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
601 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
602 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
603
604 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
605 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
606 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
607 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
608
609 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
610 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
611 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
612 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
613 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
614
615 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
616 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
617 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
618 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
619 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
620
621 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
622 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
623 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
624 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
625 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
626
627 case BUILT_IN_SYNC_LOCK_RELEASE_1:
628 case BUILT_IN_SYNC_LOCK_RELEASE_2:
629 case BUILT_IN_SYNC_LOCK_RELEASE_4:
630 case BUILT_IN_SYNC_LOCK_RELEASE_8:
631 case BUILT_IN_SYNC_LOCK_RELEASE_16:
632
633 case BUILT_IN_ATOMIC_EXCHANGE_1:
634 case BUILT_IN_ATOMIC_EXCHANGE_2:
635 case BUILT_IN_ATOMIC_EXCHANGE_4:
636 case BUILT_IN_ATOMIC_EXCHANGE_8:
637 case BUILT_IN_ATOMIC_EXCHANGE_16:
638
639 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
640 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
641 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
642 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
643 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
644
645 case BUILT_IN_ATOMIC_STORE_1:
646 case BUILT_IN_ATOMIC_STORE_2:
647 case BUILT_IN_ATOMIC_STORE_4:
648 case BUILT_IN_ATOMIC_STORE_8:
649 case BUILT_IN_ATOMIC_STORE_16:
650
651 case BUILT_IN_ATOMIC_ADD_FETCH_1:
652 case BUILT_IN_ATOMIC_ADD_FETCH_2:
653 case BUILT_IN_ATOMIC_ADD_FETCH_4:
654 case BUILT_IN_ATOMIC_ADD_FETCH_8:
655 case BUILT_IN_ATOMIC_ADD_FETCH_16:
656
657 case BUILT_IN_ATOMIC_SUB_FETCH_1:
658 case BUILT_IN_ATOMIC_SUB_FETCH_2:
659 case BUILT_IN_ATOMIC_SUB_FETCH_4:
660 case BUILT_IN_ATOMIC_SUB_FETCH_8:
661 case BUILT_IN_ATOMIC_SUB_FETCH_16:
662
663 case BUILT_IN_ATOMIC_AND_FETCH_1:
664 case BUILT_IN_ATOMIC_AND_FETCH_2:
665 case BUILT_IN_ATOMIC_AND_FETCH_4:
666 case BUILT_IN_ATOMIC_AND_FETCH_8:
667 case BUILT_IN_ATOMIC_AND_FETCH_16:
668
669 case BUILT_IN_ATOMIC_NAND_FETCH_1:
670 case BUILT_IN_ATOMIC_NAND_FETCH_2:
671 case BUILT_IN_ATOMIC_NAND_FETCH_4:
672 case BUILT_IN_ATOMIC_NAND_FETCH_8:
673 case BUILT_IN_ATOMIC_NAND_FETCH_16:
674
675 case BUILT_IN_ATOMIC_XOR_FETCH_1:
676 case BUILT_IN_ATOMIC_XOR_FETCH_2:
677 case BUILT_IN_ATOMIC_XOR_FETCH_4:
678 case BUILT_IN_ATOMIC_XOR_FETCH_8:
679 case BUILT_IN_ATOMIC_XOR_FETCH_16:
680
681 case BUILT_IN_ATOMIC_OR_FETCH_1:
682 case BUILT_IN_ATOMIC_OR_FETCH_2:
683 case BUILT_IN_ATOMIC_OR_FETCH_4:
684 case BUILT_IN_ATOMIC_OR_FETCH_8:
685 case BUILT_IN_ATOMIC_OR_FETCH_16:
686
687 case BUILT_IN_ATOMIC_FETCH_ADD_1:
688 case BUILT_IN_ATOMIC_FETCH_ADD_2:
689 case BUILT_IN_ATOMIC_FETCH_ADD_4:
690 case BUILT_IN_ATOMIC_FETCH_ADD_8:
691 case BUILT_IN_ATOMIC_FETCH_ADD_16:
692
693 case BUILT_IN_ATOMIC_FETCH_SUB_1:
694 case BUILT_IN_ATOMIC_FETCH_SUB_2:
695 case BUILT_IN_ATOMIC_FETCH_SUB_4:
696 case BUILT_IN_ATOMIC_FETCH_SUB_8:
697 case BUILT_IN_ATOMIC_FETCH_SUB_16:
698
699 case BUILT_IN_ATOMIC_FETCH_AND_1:
700 case BUILT_IN_ATOMIC_FETCH_AND_2:
701 case BUILT_IN_ATOMIC_FETCH_AND_4:
702 case BUILT_IN_ATOMIC_FETCH_AND_8:
703 case BUILT_IN_ATOMIC_FETCH_AND_16:
704
705 case BUILT_IN_ATOMIC_FETCH_NAND_1:
706 case BUILT_IN_ATOMIC_FETCH_NAND_2:
707 case BUILT_IN_ATOMIC_FETCH_NAND_4:
708 case BUILT_IN_ATOMIC_FETCH_NAND_8:
709 case BUILT_IN_ATOMIC_FETCH_NAND_16:
710
711 case BUILT_IN_ATOMIC_FETCH_XOR_1:
712 case BUILT_IN_ATOMIC_FETCH_XOR_2:
713 case BUILT_IN_ATOMIC_FETCH_XOR_4:
714 case BUILT_IN_ATOMIC_FETCH_XOR_8:
715 case BUILT_IN_ATOMIC_FETCH_XOR_16:
716
717 case BUILT_IN_ATOMIC_FETCH_OR_1:
718 case BUILT_IN_ATOMIC_FETCH_OR_2:
719 case BUILT_IN_ATOMIC_FETCH_OR_4:
720 case BUILT_IN_ATOMIC_FETCH_OR_8:
721 case BUILT_IN_ATOMIC_FETCH_OR_16:
722 {
723 dest = gimple_call_arg (call, 0);
724 /* DEST represents the address of a memory location.
 725 instrument_derefs wants the memory location, so let's
726 dereference the address DEST before handing it to
727 instrument_derefs. */
728 if (TREE_CODE (dest) == ADDR_EXPR)
729 dest = TREE_OPERAND (dest, 0);
 730 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
731 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
732 dest, build_int_cst (TREE_TYPE (dest), 0));
733 else
734 gcc_unreachable ();
735
736 access_size = int_size_in_bytes (TREE_TYPE (dest));
737 }
738
739 default:
 740 /* The other built-in memory accesses are not instrumented in this
741 function because they either don't have any length parameter,
742 or their length parameter is just a limit. */
743 break;
744 }
745
746 if (len != NULL_TREE)
747 {
748 if (source0 != NULL_TREE)
749 {
750 src0->start = source0;
751 src0->access_size = access_size;
752 *src0_len = len;
753 *src0_is_store = false;
754 }
755
756 if (source1 != NULL_TREE)
757 {
758 src1->start = source1;
759 src1->access_size = access_size;
760 *src1_len = len;
761 *src1_is_store = false;
762 }
763
764 if (dest != NULL_TREE)
765 {
766 dst->start = dest;
767 dst->access_size = access_size;
768 *dst_len = len;
769 *dst_is_store = true;
770 }
771
772 got_reference_p = true;
773 }
774 else if (dest)
775 {
776 dst->start = dest;
777 dst->access_size = access_size;
778 *dst_len = NULL_TREE;
779 *dst_is_store = is_store;
780 *dest_is_deref = true;
781 got_reference_p = true;
782 }
 783
 784 return got_reference_p;
785}
786
787/* Return true iff a given gimple statement has been instrumented.
788 Note that the statement is "defined" by the memory references it
789 contains. */
790
791static bool
792has_stmt_been_instrumented_p (gimple stmt)
793{
794 if (gimple_assign_single_p (stmt))
795 {
796 bool r_is_store;
797 asan_mem_ref r;
798 asan_mem_ref_init (&r, NULL, 1);
799
800 if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
801 return has_mem_ref_been_instrumented (&r);
802 }
803 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
804 {
805 asan_mem_ref src0, src1, dest;
806 asan_mem_ref_init (&src0, NULL, 1);
807 asan_mem_ref_init (&src1, NULL, 1);
808 asan_mem_ref_init (&dest, NULL, 1);
809
810 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
811 bool src0_is_store = false, src1_is_store = false,
812 dest_is_store = false, dest_is_deref = false;
813 if (get_mem_refs_of_builtin_call (stmt,
814 &src0, &src0_len, &src0_is_store,
815 &src1, &src1_len, &src1_is_store,
816 &dest, &dest_len, &dest_is_store,
817 &dest_is_deref))
818 {
819 if (src0.start != NULL_TREE
820 && !has_mem_ref_been_instrumented (&src0, src0_len))
821 return false;
822
823 if (src1.start != NULL_TREE
824 && !has_mem_ref_been_instrumented (&src1, src1_len))
825 return false;
826
827 if (dest.start != NULL_TREE
828 && !has_mem_ref_been_instrumented (&dest, dest_len))
829 return false;
830
831 return true;
832 }
833 }
834 return false;
835}
836
837/* Insert a memory reference into the hash table. */
838
839static void
840update_mem_ref_hash_table (tree ref, char access_size)
841{
842 hash_table <asan_mem_ref_hasher> ht = get_mem_ref_hash_table ();
843
844 asan_mem_ref r;
845 asan_mem_ref_init (&r, ref, access_size);
846
847 asan_mem_ref **slot = ht.find_slot (&r, INSERT);
848 if (*slot == NULL)
849 *slot = asan_mem_ref_new (ref, access_size);
850}
851
852/* Initialize shadow_ptr_types array. */
853
854static void
855asan_init_shadow_ptr_types (void)
856{
857 asan_shadow_set = new_alias_set ();
858 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
859 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
860 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
861 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
862 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
863 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
864 initialize_sanitizer_builtins ();
865}
866
 867/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
868
869static tree
 870asan_pp_string (pretty_printer *pp)
 871{
 872 const char *buf = pp_formatted_text (pp);
873 size_t len = strlen (buf);
874 tree ret = build_string (len + 1, buf);
875 TREE_TYPE (ret)
876 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
877 build_index_type (size_int (len)));
878 TREE_READONLY (ret) = 1;
879 TREE_STATIC (ret) = 1;
 880 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
881}
882
883/* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
884
885static rtx
886asan_shadow_cst (unsigned char shadow_bytes[4])
887{
888 int i;
889 unsigned HOST_WIDE_INT val = 0;
890 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
891 for (i = 0; i < 4; i++)
892 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
893 << (BITS_PER_UNIT * i);
 894 return gen_int_mode (val, SImode);
895}
896
 897/* Clear shadow memory at SHADOW_MEM, LEN bytes. We can't emit a library
 898 call here, though. */
899
900static void
901asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
902{
903 rtx insn, insns, top_label, end, addr, tmp, jump;
904
905 start_sequence ();
906 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
907 insns = get_insns ();
908 end_sequence ();
909 for (insn = insns; insn; insn = NEXT_INSN (insn))
910 if (CALL_P (insn))
911 break;
912 if (insn == NULL_RTX)
913 {
914 emit_insn (insns);
915 return;
916 }
917
918 gcc_assert ((len & 3) == 0);
919 top_label = gen_label_rtx ();
 920 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
921 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
922 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
923 emit_label (top_label);
924
925 emit_move_insn (shadow_mem, const0_rtx);
 926 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
927 true, OPTAB_LIB_WIDEN);
928 if (tmp != addr)
929 emit_move_insn (addr, tmp);
930 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
931 jump = get_last_insn ();
932 gcc_assert (JUMP_P (jump));
 933 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
934}
935
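/* Output the LASANPC label that identifies the start of the current
   function; its address is stored into the stack frame by
   asan_emit_stack_protection below (the "PC of the current function" slot
   described in the file header comment). */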
936void
937asan_function_start (void)
938{
939 section *fnsec = function_section (current_function_decl);
940 switch_to_section (fnsec);
941 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
942 current_function_funcdef_no);
943}
944
 945/* Insert code to protect stack vars. The prologue sequence should be emitted
 946 directly, epilogue sequence returned. BASE is the register holding the
 947 stack base, relative to which the OFFSETS array offsets are expressed.
 948 OFFSETS contains pairs of offsets in reverse order, always the end offset
 949 of some gap that needs protection followed by its starting offset,
 950 and DECLS is an array of representative decls for each var partition.
 951 LENGTH is the length of the OFFSETS array; the DECLS array is LENGTH / 2 - 1
 952 elements long (OFFSETS includes the gap before the first variable as well
 953 as gaps after each stack variable). */
954
955rtx
956asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
957 int length)
958{
959 rtx shadow_base, shadow_mem, ret, mem;
 960 char buf[30];
961 unsigned char shadow_bytes[4];
962 HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
963 HOST_WIDE_INT last_offset, last_size;
964 int l;
965 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
 966 tree str_cst, decl, id;
 967
968 if (shadow_ptr_types[0] == NULL_TREE)
969 asan_init_shadow_ptr_types ();
970
 971 /* First of all, prepare the description string. */
 972 pretty_printer asan_pp;
 973
974 pp_decimal_int (&asan_pp, length / 2 - 1);
975 pp_space (&asan_pp);
976 for (l = length - 2; l; l -= 2)
977 {
978 tree decl = decls[l / 2 - 1];
979 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
980 pp_space (&asan_pp);
981 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
982 pp_space (&asan_pp);
983 if (DECL_P (decl) && DECL_NAME (decl))
984 {
985 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
986 pp_space (&asan_pp);
 987 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
988 }
989 else
990 pp_string (&asan_pp, "9 <unknown>");
991 pp_space (&asan_pp);
 992 }
 993 str_cst = asan_pp_string (&asan_pp);
994
995 /* Emit the prologue sequence. */
996 base = expand_binop (Pmode, add_optab, base,
997 gen_int_mode (base_offset, Pmode),
998 NULL_RTX, 1, OPTAB_DIRECT);
999 mem = gen_rtx_MEM (ptr_mode, base);
 1000 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1001 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1002 emit_move_insn (mem, expand_normal (str_cst));
1003 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1004 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1005 id = get_identifier (buf);
1006 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1007 VAR_DECL, id, char_type_node);
1008 SET_DECL_ASSEMBLER_NAME (decl, id);
1009 TREE_ADDRESSABLE (decl) = 1;
1010 TREE_READONLY (decl) = 1;
1011 DECL_ARTIFICIAL (decl) = 1;
1012 DECL_IGNORED_P (decl) = 1;
1013 TREE_STATIC (decl) = 1;
1014 TREE_PUBLIC (decl) = 0;
1015 TREE_USED (decl) = 1;
1016 DECL_INITIAL (decl) = decl;
1017 TREE_ASM_WRITTEN (decl) = 1;
1018 TREE_ASM_WRITTEN (id) = 1;
 1019 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1020 shadow_base = expand_binop (Pmode, lshr_optab, base,
1021 GEN_INT (ASAN_SHADOW_SHIFT),
1022 NULL_RTX, 1, OPTAB_DIRECT);
1023 shadow_base = expand_binop (Pmode, add_optab, shadow_base,
1024 gen_int_mode (targetm.asan_shadow_offset (),
1025 Pmode),
1026 NULL_RTX, 1, OPTAB_DIRECT);
1027 gcc_assert (asan_shadow_set != -1
1028 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1029 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1030 set_mem_alias_set (shadow_mem, asan_shadow_set);
1031 prev_offset = base_offset;
1032 for (l = length; l; l -= 2)
1033 {
1034 if (l == 2)
1035 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1036 offset = offsets[l - 1];
1037 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1038 {
1039 int i;
1040 HOST_WIDE_INT aoff
1041 = base_offset + ((offset - base_offset)
1042 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1043 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1044 (aoff - prev_offset)
1045 >> ASAN_SHADOW_SHIFT);
1046 prev_offset = aoff;
1047 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1048 if (aoff < offset)
1049 {
1050 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1051 shadow_bytes[i] = 0;
1052 else
1053 shadow_bytes[i] = offset - aoff;
1054 }
1055 else
1056 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1057 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1058 offset = aoff;
1059 }
1060 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1061 {
1062 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1063 (offset - prev_offset)
1064 >> ASAN_SHADOW_SHIFT);
1065 prev_offset = offset;
1066 memset (shadow_bytes, cur_shadow_byte, 4);
1067 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1068 offset += ASAN_RED_ZONE_SIZE;
1069 }
1070 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1071 }
1072 do_pending_stack_adjust ();
1073
1074 /* Construct epilogue sequence. */
1075 start_sequence ();
1076
1077 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1078 set_mem_alias_set (shadow_mem, asan_shadow_set);
1079 prev_offset = base_offset;
1080 last_offset = base_offset;
1081 last_size = 0;
1082 for (l = length; l; l -= 2)
1083 {
1084 offset = base_offset + ((offsets[l - 1] - base_offset)
1085 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1086 if (last_offset + last_size != offset)
1087 {
1088 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1089 (last_offset - prev_offset)
1090 >> ASAN_SHADOW_SHIFT);
1091 prev_offset = last_offset;
 1092 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1093 last_offset = offset;
1094 last_size = 0;
1095 }
1096 last_size += base_offset + ((offsets[l - 2] - base_offset)
1097 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1098 - offset;
1099 }
1100 if (last_size)
1101 {
1102 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1103 (last_offset - prev_offset)
1104 >> ASAN_SHADOW_SHIFT);
 1105 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1106 }
1107
1108 do_pending_stack_adjust ();
1109
1110 ret = get_insns ();
1111 end_sequence ();
1112 return ret;
1113}
1114
 1115/* Return true if DECL, a global var, might be overridden and therefore
 1116 needs a local alias. */
1117
1118static bool
1119asan_needs_local_alias (tree decl)
1120{
1121 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1122}
1123
1124/* Return true if DECL is a VAR_DECL that should be protected
1125 by Address Sanitizer, by appending a red zone with protected
1126 shadow memory after it and aligning it to at least
1127 ASAN_RED_ZONE_SIZE bytes. */
1128
1129bool
1130asan_protect_global (tree decl)
1131{
1132 rtx rtl, symbol;
 1133
1134 if (TREE_CODE (decl) == STRING_CST)
1135 {
1136 /* Instrument all STRING_CSTs except those created
1137 by asan_pp_string here. */
1138 if (shadow_ptr_types[0] != NULL_TREE
1139 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1140 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1141 return false;
1142 return true;
1143 }
1144 if (TREE_CODE (decl) != VAR_DECL
1145 /* TLS vars aren't statically protectable. */
1146 || DECL_THREAD_LOCAL_P (decl)
1147 /* Externs will be protected elsewhere. */
1148 || DECL_EXTERNAL (decl)
1149 || !DECL_RTL_SET_P (decl)
1150 /* Comdat vars pose an ABI problem, we can't know if
1151 the var that is selected by the linker will have
1152 padding or not. */
1153 || DECL_ONE_ONLY (decl)
1154 /* Similarly for common vars. People can use -fno-common. */
 1155 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1156 /* Don't protect if using user section, often vars placed
1157 into user section from multiple TUs are then assumed
1158 to be an array of such vars, putting padding in there
1159 breaks this assumption. */
1160 || (DECL_SECTION_NAME (decl) != NULL_TREE
1161 && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
1162 || DECL_SIZE (decl) == 0
1163 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1164 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1165 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
1166 return false;
1167
1168 rtl = DECL_RTL (decl);
1169 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1170 return false;
1171 symbol = XEXP (rtl, 0);
1172
1173 if (CONSTANT_POOL_ADDRESS_P (symbol)
1174 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1175 return false;
1176
1177 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1178 return false;
1179
1180#ifndef ASM_OUTPUT_DEF
1181 if (asan_needs_local_alias (decl))
1182 return false;
1183#endif
1184
 1185 return true;
1186}
1187
1188/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
1189 IS_STORE is either 1 (for a store) or 0 (for a load).
1190 SIZE_IN_BYTES is one of 1, 2, 4, 8, 16. */
1191
1192static tree
 1193report_error_func (bool is_store, int size_in_bytes)
 1194{
1195 static enum built_in_function report[2][5]
1196 = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1197 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1198 BUILT_IN_ASAN_REPORT_LOAD16 },
1199 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1200 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1201 BUILT_IN_ASAN_REPORT_STORE16 } };
1202 return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
1203}
1204
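/* Branch probabilities, in units of REG_BR_PROB_BASE, used below when
   splitting basic blocks to insert the instrumentation checks. */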
1205#define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 2000 - 1)
1206#define PROB_ALWAYS (REG_BR_PROB_BASE)
1207
 1208/* Split the current basic block and create a condition statement
 1209 insertion point right before or after the statement pointed to by
 1210 ITER. Return an iterator to the point at which the caller might
 1211 safely insert the condition statement.
1212
1213 THEN_BLOCK must be set to the address of an uninitialized instance
1214 of basic_block. The function will then set *THEN_BLOCK to the
1215 'then block' of the condition statement to be inserted by the
1216 caller.
1217
1218 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1219 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1220
 1221 Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1222 block' of the condition statement to be inserted by the caller.
1223
1224 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1225 statements starting from *ITER, and *THEN_BLOCK is a new empty
1226 block.
1227
 1228 *ITER is adjusted to always point to the first statement
 1229 of the basic block *FALLTHROUGH_BLOCK. That statement is the
1230 same as what ITER was pointing to prior to calling this function,
1231 if BEFORE_P is true; otherwise, it is its following statement. */
1232
1233static gimple_stmt_iterator
1234create_cond_insert_point (gimple_stmt_iterator *iter,
1235 bool before_p,
1236 bool then_more_likely_p,
 1237 bool create_then_fallthru_edge,
1238 basic_block *then_block,
1239 basic_block *fallthrough_block)
1240{
1241 gimple_stmt_iterator gsi = *iter;
1242
 1243 if (!gsi_end_p (gsi) && before_p)
1244 gsi_prev (&gsi);
1245
1246 basic_block cur_bb = gsi_bb (*iter);
1247
1248 edge e = split_block (cur_bb, gsi_stmt (gsi));
1249
1250 /* Get a hold on the 'condition block', the 'then block' and the
1251 'else block'. */
1252 basic_block cond_bb = e->src;
1253 basic_block fallthru_bb = e->dest;
1254 basic_block then_bb = create_empty_bb (cond_bb);
1255 if (current_loops)
1256 {
1257 add_bb_to_loop (then_bb, cond_bb->loop_father);
1258 loops_state_set (LOOPS_NEED_FIXUP);
1259 }
1260
1261 /* Set up the newly created 'then block'. */
1262 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1263 int fallthrough_probability
1264 = then_more_likely_p
1265 ? PROB_VERY_UNLIKELY
1266 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1267 e->probability = PROB_ALWAYS - fallthrough_probability;
1268 if (create_then_fallthru_edge)
1269 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1270
1271 /* Set up the fallthrough basic block. */
1272 e = find_edge (cond_bb, fallthru_bb);
1273 e->flags = EDGE_FALSE_VALUE;
1274 e->count = cond_bb->count;
1275 e->probability = fallthrough_probability;
1276
1277 /* Update dominance info for the newly created then_bb; note that
1278 fallthru_bb's dominance info has already been updated by
 1279 split_block. */
1280 if (dom_info_available_p (CDI_DOMINATORS))
1281 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1282
1283 *then_block = then_bb;
1284 *fallthrough_block = fallthru_bb;
1285 *iter = gsi_start_bb (fallthru_bb);
1286
1287 return gsi_last_bb (cond_bb);
1288}
1289
1290/* Insert an if condition followed by a 'then block' right before the
1291 statement pointed to by ITER. The fallthrough block -- which is the
1292 else block of the condition as well as the destination of the
 1293 outgoing edge of the 'then block' -- starts with the statement
1294 pointed to by ITER.
1295
 1296 COND is the condition of the if.
1297
1298 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1299 'then block' is higher than the probability of the edge to the
1300 fallthrough block.
1301
1302 Upon completion of the function, *THEN_BB is set to the newly
1303 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1304 fallthrough block.
1305
1306 *ITER is adjusted to still point to the same statement it was
1307 pointing to initially. */
1308
1309static void
1310insert_if_then_before_iter (gimple cond,
1311 gimple_stmt_iterator *iter,
1312 bool then_more_likely_p,
1313 basic_block *then_bb,
1314 basic_block *fallthrough_bb)
1315{
1316 gimple_stmt_iterator cond_insert_point =
1317 create_cond_insert_point (iter,
1318 /*before_p=*/true,
1319 then_more_likely_p,
 1320 /*create_then_fallthru_edge=*/true,
1321 then_bb,
1322 fallthrough_bb);
1323 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1324}
1325
 1326/* Instrument the memory access instruction BASE. Insert new
 1327 statements before or after ITER.
1328
1329 Note that the memory access represented by BASE can be either an
1330 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1331 location. IS_STORE is TRUE for a store, FALSE for a load.
1332 BEFORE_P is TRUE for inserting the instrumentation code before
1333 ITER, FALSE for inserting it after ITER. SIZE_IN_BYTES is one of
1334 1, 2, 4, 8, 16.
1335
1336 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1337 statement it was pointing to prior to calling this function,
1338 otherwise, it points to the statement logically following it. */
1339
1340static void
1341build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
1342 bool before_p, bool is_store, int size_in_bytes)
1343{
1344 gimple_stmt_iterator gsi;
 1345 basic_block then_bb, else_bb;
 1346 tree t, base_addr, shadow;
 1347 gimple g;
1348 tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
1349 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1350 tree uintptr_type
1351 = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
 1352 tree base_ssa = base;
 1353
1354 /* Get an iterator on the point where we can add the condition
1355 statement for the instrumentation. */
1356 gsi = create_cond_insert_point (iter, before_p,
1357 /*then_more_likely_p=*/false,
 1358 /*create_then_fallthru_edge=*/false,
1359 &then_bb,
1360 &else_bb);
 1361
 1362 base = unshare_expr (base);
 1363
1364 /* BASE can already be an SSA_NAME; in that case, do not create a
1365 new SSA_NAME for it. */
1366 if (TREE_CODE (base) != SSA_NAME)
1367 {
1368 g = gimple_build_assign_with_ops (TREE_CODE (base),
1369 make_ssa_name (TREE_TYPE (base), NULL),
1370 base, NULL_TREE);
1371 gimple_set_location (g, location);
1372 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1373 base_ssa = gimple_assign_lhs (g);
1374 }
 1375
1376 g = gimple_build_assign_with_ops (NOP_EXPR,
1377 make_ssa_name (uintptr_type, NULL),
 1378 base_ssa, NULL_TREE);
 1379 gimple_set_location (g, location);
1380 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1381 base_addr = gimple_assign_lhs (g);
 1382
1383 /* Build
1384 (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
 1385
1386 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1387 g = gimple_build_assign_with_ops (RSHIFT_EXPR,
1388 make_ssa_name (uintptr_type, NULL),
1389 base_addr, t);
 1390 gimple_set_location (g, location);
1391 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1392
1393 t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
1394 g = gimple_build_assign_with_ops (PLUS_EXPR,
1395 make_ssa_name (uintptr_type, NULL),
1396 gimple_assign_lhs (g), t);
 1397 gimple_set_location (g, location);
 1398 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
 1399
1400 g = gimple_build_assign_with_ops (NOP_EXPR,
1401 make_ssa_name (shadow_ptr_type, NULL),
1402 gimple_assign_lhs (g), NULL_TREE);
1403 gimple_set_location (g, location);
1404 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
 1405
1406 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1407 build_int_cst (shadow_ptr_type, 0));
1408 g = gimple_build_assign_with_ops (MEM_REF,
1409 make_ssa_name (shadow_type, NULL),
1410 t, NULL_TREE);
1411 gimple_set_location (g, location);
1412 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1413 shadow = gimple_assign_lhs (g);
1414
1415 if (size_in_bytes < 8)
1416 {
1417 /* Slow path for 1, 2 and 4 byte accesses.
1418 Test (shadow != 0)
1419 & ((base_addr & 7) + (size_in_bytes - 1)) >= shadow). */
1420 gimple_seq seq = NULL;
1421 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
1422 gimple_seq_add_stmt (&seq, shadow_test);
1423 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, base_addr, 7));
1424 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
1425 gimple_seq_last (seq)));
 1426 if (size_in_bytes > 1)
1427 gimple_seq_add_stmt (&seq,
1428 build_assign (PLUS_EXPR, gimple_seq_last (seq),
1429 size_in_bytes - 1));
1430 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, gimple_seq_last (seq),
1431 shadow));
1432 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
1433 gimple_seq_last (seq)));
1434 t = gimple_assign_lhs (gimple_seq_last (seq));
1435 gimple_seq_set_location (seq, location);
1436 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
1437 }
1438 else
1439 t = shadow;
 1440
1441 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
1442 NULL_TREE, NULL_TREE);
1443 gimple_set_location (g, location);
1444 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
 1445
 1446 /* Generate call to the run-time library (e.g. __asan_report_load8). */
 1447 gsi = gsi_start_bb (then_bb);
1448 g = gimple_build_call (report_error_func (is_store, size_in_bytes),
1449 1, base_addr);
1450 gimple_set_location (g, location);
1451 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
 1452
 1453 *iter = gsi_start_bb (else_bb);
1454}
1455
1456/* If T represents a memory access, add instrumentation code before ITER.
1457 LOCATION is source code location.
 1458 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1459
1460static void
1461instrument_derefs (gimple_stmt_iterator *iter, tree t,
 1462 location_t location, bool is_store)
1463{
1464 tree type, base;
 1465 HOST_WIDE_INT size_in_bytes;
1466
1467 type = TREE_TYPE (t);
1468 switch (TREE_CODE (t))
1469 {
1470 case ARRAY_REF:
1471 case COMPONENT_REF:
1472 case INDIRECT_REF:
1473 case MEM_REF:
1474 break;
1475 default:
1476 return;
1477 }
1478
1479 size_in_bytes = int_size_in_bytes (type);
1480 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1481 || (unsigned HOST_WIDE_INT) size_in_bytes - 1 >= 16)
1482 return;
1483
1484 HOST_WIDE_INT bitsize, bitpos;
1485 tree offset;
1486 enum machine_mode mode;
1487 int volatilep = 0, unsignedp = 0;
1488 get_inner_reference (t, &bitsize, &bitpos, &offset,
1489 &mode, &unsignedp, &volatilep, false);
1490 if (bitpos % (size_in_bytes * BITS_PER_UNIT)
1491 || bitsize != size_in_bytes * BITS_PER_UNIT)
1492 {
1493 if (TREE_CODE (t) == COMPONENT_REF
1494 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1495 {
1496 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1497 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1498 TREE_OPERAND (t, 0), repr,
1499 NULL_TREE), location, is_store);
1500 }
1501 return;
1502 }
1503
1504 base = build_fold_addr_expr (t);
1505 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1506 {
1507 build_check_stmt (location, base, iter, /*before_p=*/true,
1508 is_store, size_in_bytes);
1509 update_mem_ref_hash_table (base, size_in_bytes);
1510 update_mem_ref_hash_table (t, size_in_bytes);
1511 }
1512
1513}
1514
1515/* Instrument an access to a contiguous memory region that starts at
1516 the address pointed to by BASE, over a length of LEN (expressed in
1517 the sizeof (*BASE) bytes). ITER points to the instruction before
1518 which the instrumentation instructions must be inserted. LOCATION
1519 is the source location that the instrumentation instructions must
1520 have. If IS_STORE is true, then the memory access is a store;
1521 otherwise, it's a load. */
1522
1523static void
1524instrument_mem_region_access (tree base, tree len,
1525 gimple_stmt_iterator *iter,
1526 location_t location, bool is_store)
1527{
1528 if (!POINTER_TYPE_P (TREE_TYPE (base))
1529 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1530 || integer_zerop (len))
1531 return;
1532
1533 gimple_stmt_iterator gsi = *iter;
1534
1535 basic_block fallthrough_bb = NULL, then_bb = NULL;
1536
1537 /* If the beginning of the memory region has already been
1538 instrumented, do not instrument it. */
1539 bool start_instrumented = has_mem_ref_been_instrumented (base, 1);
1540
1541 /* If the end of the memory region has already been instrumented, do
1542 not instrument it. */
1543 tree end = asan_mem_ref_get_end (base, len);
1544 bool end_instrumented = has_mem_ref_been_instrumented (end, 1);
1545
1546 if (start_instrumented && end_instrumented)
1547 return;
 1548
1549 if (!is_gimple_constant (len))
1550 {
1551 /* So, the length of the memory area to asan-protect is
1552 non-constant. Let's guard the generated instrumentation code
1553 like:
1554
1555 if (len != 0)
1556 {
1557 //asan instrumentation code goes here.
 1558 }
 1559 // fallthrough instructions, starting with *ITER. */
1560
1561 gimple g = gimple_build_cond (NE_EXPR,
1562 len,
1563 build_int_cst (TREE_TYPE (len), 0),
1564 NULL_TREE, NULL_TREE);
1565 gimple_set_location (g, location);
1566 insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
1567 &then_bb, &fallthrough_bb);
1568 /* Note that fallthrough_bb starts with the statement that was
1569 pointed to by ITER. */
1570
 1571 /* The 'then block' of the 'if (len != 0)' condition is where
 1572 we'll generate the asan instrumentation code now. */
 1573 gsi = gsi_last_bb (then_bb);
1574 }
1575
1576 if (!start_instrumented)
1577 {
1578 /* Instrument the beginning of the memory region to be accessed,
 1579 and arrange for the rest of the instrumentation code to be
1580 inserted in the then block *after* the current gsi. */
1581 build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);
1582
1583 if (then_bb)
1584 /* We are in the case where the length of the region is not
1585 constant; so instrumentation code is being generated in the
 1586 'then block' of the 'if (len != 0)' condition. Let's arrange
1587 for the subsequent instrumentation statements to go in the
1588 'then block'. */
1589 gsi = gsi_last_bb (then_bb);
1590 else
1591 {
1592 *iter = gsi;
1593 /* Don't remember this access as instrumented, if length
1594 is unknown. It might be zero and not being actually
1595 instrumented, so we can't rely on it being instrumented. */
1596 update_mem_ref_hash_table (base, 1);
1597 }
1598 }
 1599
1600 if (end_instrumented)
1601 return;
 1602
1603 /* We want to instrument the access at the end of the memory region,
1604 which is at (base + len - 1). */
1605
1606 /* offset = len - 1; */
1607 len = unshare_expr (len);
1608 tree offset;
1609 gimple_seq seq = NULL;
1610 if (TREE_CODE (len) == INTEGER_CST)
1611 offset = fold_build2 (MINUS_EXPR, size_type_node,
1612 fold_convert (size_type_node, len),
1613 build_int_cst (size_type_node, 1));
1614 else
1615 {
1616 gimple g;
1617 tree t;
1618
1619 if (TREE_CODE (len) != SSA_NAME)
1620 {
1621 t = make_ssa_name (TREE_TYPE (len), NULL);
1622 g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
1623 gimple_set_location (g, location);
1624 gimple_seq_add_stmt_without_update (&seq, g);
1625 len = t;
1626 }
1627 if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
1628 {
1629 t = make_ssa_name (size_type_node, NULL);
1630 g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
1631 gimple_set_location (g, location);
1632 gimple_seq_add_stmt_without_update (&seq, g);
1633 len = t;
1634 }
1635
1636 t = make_ssa_name (size_type_node, NULL);
1637 g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
1638 build_int_cst (size_type_node, 1));
1639 gimple_set_location (g, location);
bdcbe80c
DS
1640 gimple_seq_add_stmt_without_update (&seq, g);
1641 offset = gimple_assign_lhs (g);
1642 }
25ae5027 1643
bdcbe80c
DS
1644 /* _1 = base; */
1645 base = unshare_expr (base);
1646 gimple region_end =
1647 gimple_build_assign_with_ops (TREE_CODE (base),
1648 make_ssa_name (TREE_TYPE (base), NULL),
1649 base, NULL);
1650 gimple_set_location (region_end, location);
1651 gimple_seq_add_stmt_without_update (&seq, region_end);
25ae5027 1652
bdcbe80c
DS
1653 /* _2 = _1 + offset; */
1654 region_end =
1655 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1656 make_ssa_name (TREE_TYPE (base), NULL),
1657 gimple_assign_lhs (region_end),
1658 offset);
1659 gimple_set_location (region_end, location);
b41288b3
JJ
1660 gimple_seq_add_stmt_without_update (&seq, region_end);
1661 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
25ae5027 1662
bdcbe80c 1663 /* instrument access at _2; */
b41288b3 1664 gsi = gsi_for_stmt (region_end);
bdcbe80c
DS
1665 build_check_stmt (location, gimple_assign_lhs (region_end),
1666 &gsi, /*before_p=*/false, is_store, 1);
25ae5027 1667
b41288b3
JJ
1668 if (then_bb == NULL)
1669 update_mem_ref_hash_table (end, 1);
1670
1671 *iter = gsi_for_stmt (gsi_stmt (*iter));
bdcbe80c 1672}
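/* To make the above concrete: the sketch below (illustration only, not part
   of this pass) shows in plain C roughly what the emitted checks amount to
   for a call memcpy (d, s, n) with a non-constant N.  __asan_check_byte is a
   hypothetical stand-in for the shadow-memory check that build_check_stmt
   actually emits.  */
#if 0
#include <string.h>

static void
__asan_check_byte (const void *p, int is_store)
{
  /* Placeholder only: the real check reads the shadow byte for P and
     calls __asan_report_load1/__asan_report_store1 on failure.  */
  (void) p;
  (void) is_store;
}

static void *
instrumented_memcpy (void *d, const void *s, size_t n)
{
  /* Guard generated because N is not a compile-time constant.  */
  if (n != 0)
    {
      __asan_check_byte (s, 0);                        /* start of source region */
      __asan_check_byte ((const char *) s + n - 1, 0); /* end of source region   */
      __asan_check_byte (d, 1);                        /* start of dest region   */
      __asan_check_byte ((char *) d + n - 1, 1);       /* end of dest region     */
    }
  return memcpy (d, s, n);
}
#endif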
25ae5027 1673
bdcbe80c
DS
1674/* Instrument the call (to the builtin strlen function) pointed to by
1675 ITER.
25ae5027 1676
bdcbe80c
DS
1677 This function instruments the access to the first byte of the
1678 argument, right before the call. After the call it instruments the
1679 access to the last byte of the argument; it uses the result of the
1680 call to deduce the offset of that last byte.
25ae5027 1681
99c2bd54 1682 Upon completion, iff the call has actually been instrumented, this
bdcbe80c
DS
1683 function returns TRUE and *ITER points to the statement logically
1684 following the built-in strlen function call *ITER was initially
1685 pointing to. Otherwise, the function returns FALSE and *ITER
1686 remains unchanged. */
25ae5027 1687
bdcbe80c
DS
1688static bool
1689instrument_strlen_call (gimple_stmt_iterator *iter)
1690{
1691 gimple call = gsi_stmt (*iter);
1692 gcc_assert (is_gimple_call (call));
25ae5027 1693
bdcbe80c
DS
1694 tree callee = gimple_call_fndecl (call);
1695 gcc_assert (is_builtin_fn (callee)
1696 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
1697 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);
25ae5027 1698
bdcbe80c
DS
1699 tree len = gimple_call_lhs (call);
1700 if (len == NULL)
1701 /* Some passes might clear the return value of the strlen call;
1702 bail out in that case. Return FALSE as we are not advancing
1703 *ITER. */
1704 return false;
1705 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));
25ae5027 1706
bdcbe80c
DS
1707 location_t loc = gimple_location (call);
1708 tree str_arg = gimple_call_arg (call, 0);
25ae5027 1709
bdcbe80c 1710 /* Instrument the access to the first byte of str_arg, i.e.:
25ae5027 1711
bdcbe80c 1712 _1 = str_arg; instrument (_1); */
99c2bd54 1713 tree cptr_type = build_pointer_type (char_type_node);
bdcbe80c
DS
1714 gimple str_arg_ssa =
1715 gimple_build_assign_with_ops (NOP_EXPR,
99c2bd54 1716 make_ssa_name (cptr_type, NULL),
bdcbe80c
DS
1717 str_arg, NULL);
1718 gimple_set_location (str_arg_ssa, loc);
1719 gimple_stmt_iterator gsi = *iter;
1720 gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
1721 build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
1722 /*before_p=*/false, /*is_store=*/false, 1);
25ae5027 1723
bdcbe80c 1724 /* If we initially had an instruction like:
25ae5027 1725
bdcbe80c 1726 int n = strlen (str)
25ae5027 1727
bdcbe80c
DS
1728 we now want to instrument the access to str[n], after the
1729	 instruction above. */
25ae5027 1730
bdcbe80c
DS
1731	 /* So let's build the access to str[n], that is, the access through the
1732 pointer_plus expr: (_1 + len). */
1733 gimple stmt =
1734 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
99c2bd54 1735 make_ssa_name (cptr_type, NULL),
bdcbe80c
DS
1736 gimple_assign_lhs (str_arg_ssa),
1737 len);
1738 gimple_set_location (stmt, loc);
1739 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
25ae5027 1740
bdcbe80c
DS
1741 build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
1742 /*before_p=*/false, /*is_store=*/false, 1);
25ae5027 1743
bdcbe80c
DS
1744 /* Ensure that iter points to the statement logically following the
1745 one it was initially pointing to. */
1746 *iter = gsi;
1747 /* As *ITER has been advanced to point to the next statement, let's
1748 return true to inform transform_statements that it shouldn't
1749	 advance *ITER anymore; otherwise it will skip that next
1750 statement, which wouldn't be instrumented. */
1751 return true;
1752}
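/* Sketch (illustration only, not part of this pass) of the transformation
   above, written as plain C.  __asan_check_byte is the same hypothetical
   stand-in for the emitted shadow check used in the memcpy sketch earlier.  */
#if 0
#include <string.h>

static void
__asan_check_byte (const void *p, int is_store)
{
  (void) p;
  (void) is_store;
}

static size_t
instrumented_strlen (const char *s)
{
  __asan_check_byte (s, 0);       /* first byte, checked before the call */
  size_t n = strlen (s);
  __asan_check_byte (s + n, 0);   /* last byte (the NUL), checked after  */
  return n;
}
#endif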
25ae5027 1753
bdcbe80c
DS
1754/* Instrument the call to a built-in memory access function that is
1755 pointed to by the iterator ITER.
25ae5027 1756
bdcbe80c
DS
1757 Upon completion, return TRUE iff *ITER has been advanced to the
1758 statement following the one it was originally pointing to. */
25ae5027 1759
bdcbe80c
DS
1760static bool
1761instrument_builtin_call (gimple_stmt_iterator *iter)
1762{
1763 bool iter_advanced_p = false;
1764 gimple call = gsi_stmt (*iter);
25ae5027 1765
bdcbe80c 1766 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
25ae5027 1767
bdcbe80c
DS
1768 tree callee = gimple_call_fndecl (call);
1769 location_t loc = gimple_location (call);
25ae5027 1770
bdcbe80c
DS
1771 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
1772 iter_advanced_p = instrument_strlen_call (iter);
1773 else
25ae5027 1774 {
bdcbe80c
DS
1775 asan_mem_ref src0, src1, dest;
1776 asan_mem_ref_init (&src0, NULL, 1);
1777 asan_mem_ref_init (&src1, NULL, 1);
1778 asan_mem_ref_init (&dest, NULL, 1);
1779
1780 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1781 bool src0_is_store = false, src1_is_store = false,
1782 dest_is_store = false, dest_is_deref = false;
1783
1784 if (get_mem_refs_of_builtin_call (call,
1785 &src0, &src0_len, &src0_is_store,
8fb06726 1786 &src1, &src1_len, &src1_is_store,
bdcbe80c
DS
1787 &dest, &dest_len, &dest_is_store,
1788 &dest_is_deref))
1789 {
1790 if (dest_is_deref)
1791 {
1792 instrument_derefs (iter, dest.start, loc, dest_is_store);
1793 gsi_next (iter);
1794 iter_advanced_p = true;
1795 }
1796 else if (src0_len || src1_len || dest_len)
1797 {
b41288b3 1798 if (src0.start != NULL_TREE)
bdcbe80c
DS
1799 instrument_mem_region_access (src0.start, src0_len,
1800 iter, loc, /*is_store=*/false);
1801 if (src1.start != NULL_TREE)
1802 instrument_mem_region_access (src1.start, src1_len,
1803 iter, loc, /*is_store=*/false);
1804 if (dest.start != NULL_TREE)
1805 instrument_mem_region_access (dest.start, dest_len,
1806 iter, loc, /*is_store=*/true);
1807 *iter = gsi_for_stmt (call);
1808 gsi_next (iter);
1809 iter_advanced_p = true;
1810 }
1811 }
25ae5027 1812 }
bdcbe80c 1813 return iter_advanced_p;
25ae5027
DS
1814}
1815
1816/* Instrument the assignment statement ITER if it is subject to
bdcbe80c
DS
1817 instrumentation. Return TRUE iff instrumentation actually
1818 happened. In that case, the iterator ITER is advanced to the next
1819 logical expression following the one initially pointed to by ITER,
1820	 and the relevant memory reference whose access has been
1821 instrumented is added to the memory references hash table. */
25ae5027 1822
bdcbe80c
DS
1823static bool
1824maybe_instrument_assignment (gimple_stmt_iterator *iter)
25ae5027
DS
1825{
1826 gimple s = gsi_stmt (*iter);
1827
1828 gcc_assert (gimple_assign_single_p (s));
1829
bdcbe80c
DS
1830 tree ref_expr = NULL_TREE;
1831 bool is_store, is_instrumented = false;
1832
52f2e7e1 1833 if (gimple_store_p (s))
bdcbe80c
DS
1834 {
1835 ref_expr = gimple_assign_lhs (s);
1836 is_store = true;
1837 instrument_derefs (iter, ref_expr,
1838 gimple_location (s),
1839 is_store);
1840 is_instrumented = true;
1841 }
1842
52f2e7e1 1843 if (gimple_assign_load_p (s))
bdcbe80c
DS
1844 {
1845 ref_expr = gimple_assign_rhs1 (s);
1846 is_store = false;
1847 instrument_derefs (iter, ref_expr,
1848 gimple_location (s),
1849 is_store);
1850 is_instrumented = true;
1851 }
1852
1853 if (is_instrumented)
1854 gsi_next (iter);
1855
1856 return is_instrumented;
25ae5027
DS
1857}
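/* For example (a sketch, not emitted verbatim by this pass): given

     tmp_1 = a.x;        // gimple_assign_load_p  -> check &a.x as a load
     *p_2 = tmp_1;       // gimple_store_p        -> check p_2 as a store

   instrument_derefs inserts the corresponding shadow check right before
   each statement, and the accessed reference is recorded in the memory
   references hash table so that later identical accesses in the same
   extended basic block are not re-instrumented.  */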
1858
1859/* Instrument the function call pointed to by the iterator ITER, if it
1860 is subject to instrumentation. At the moment, the only function
1861 calls that are instrumented are some built-in functions that access
1862 memory. Look at instrument_builtin_call to learn more.
1863
1864 Upon completion return TRUE iff *ITER was advanced to the statement
1865 following the one it was originally pointing to. */
1866
1867static bool
1868maybe_instrument_call (gimple_stmt_iterator *iter)
1869{
2b2571c9 1870 gimple stmt = gsi_stmt (*iter);
bdcbe80c
DS
1871 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
1872
1873 if (is_builtin && instrument_builtin_call (iter))
2b2571c9 1874 return true;
bdcbe80c 1875
2b2571c9
JJ
1876 if (gimple_call_noreturn_p (stmt))
1877 {
1878 if (is_builtin)
1879 {
1880 tree callee = gimple_call_fndecl (stmt);
1881 switch (DECL_FUNCTION_CODE (callee))
1882 {
1883 case BUILT_IN_UNREACHABLE:
1884 case BUILT_IN_TRAP:
1885 /* Don't instrument these. */
1886 return false;
1887 }
1888 }
1889 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
1890 gimple g = gimple_build_call (decl, 0);
1891 gimple_set_location (g, gimple_location (stmt));
1892 gsi_insert_before (iter, g, GSI_SAME_STMT);
1893 }
25ae5027 1894 return false;
37d6f666
WM
1895}
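/* E.g. (sketch): a statement like "abort ();" becomes

     __asan_handle_no_return ();
     abort ();

   __asan_handle_no_return lets the run-time unpoison the current stack,
   since the frames being abandoned will never execute their own
   unpoisoning code.  __builtin_unreachable and __builtin_trap are left
   alone, as handled above.  */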
1896
bdcbe80c
DS
1897/* Walk each instruction of all basic blocks and instrument those that
1898 represent memory references: loads, stores, or function calls.
1899 In a given basic block, this function avoids instrumenting memory
1900 references that have already been instrumented. */
37d6f666
WM
1901
1902static void
1903transform_statements (void)
1904{
c4bfe8bf 1905 basic_block bb, last_bb = NULL;
37d6f666
WM
1906 gimple_stmt_iterator i;
1907 int saved_last_basic_block = last_basic_block;
37d6f666
WM
1908
1909 FOR_EACH_BB (bb)
1910 {
c4bfe8bf 1911 basic_block prev_bb = bb;
bdcbe80c 1912
37d6f666 1913 if (bb->index >= saved_last_basic_block) continue;
c4bfe8bf
JJ
1914
1915 /* Flush the mem ref hash table, if current bb doesn't have
1916 exactly one predecessor, or if that predecessor (skipping
1917 over asan created basic blocks) isn't the last processed
1918 basic block. Thus we effectively flush on extended basic
1919 block boundaries. */
1920 while (single_pred_p (prev_bb))
1921 {
1922 prev_bb = single_pred (prev_bb);
1923 if (prev_bb->index < saved_last_basic_block)
1924 break;
1925 }
1926 if (prev_bb != last_bb)
1927 empty_mem_ref_hash_table ();
1928 last_bb = bb;
1929
25ae5027 1930 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
497a1c66 1931 {
25ae5027
DS
1932 gimple s = gsi_stmt (i);
1933
bdcbe80c
DS
1934 if (has_stmt_been_instrumented_p (s))
1935 gsi_next (&i);
1936 else if (gimple_assign_single_p (s)
1937 && maybe_instrument_assignment (&i))
1938 /* Nothing to do as maybe_instrument_assignment advanced
1939 the iterator I. */;
1940 else if (is_gimple_call (s) && maybe_instrument_call (&i))
1941 /* Nothing to do as maybe_instrument_call
1942 advanced the iterator I. */;
1943 else
25ae5027 1944 {
bdcbe80c
DS
1945 /* No instrumentation happened.
1946
c4bfe8bf
JJ
1947 If the current instruction is a function call that
1948 might free something, let's forget about the memory
1949 references that got instrumented. Otherwise we might
1950 miss some instrumentation opportunities. */
1951 if (is_gimple_call (s) && !nonfreeing_call_p (s))
bdcbe80c
DS
1952 empty_mem_ref_hash_table ();
1953
1954 gsi_next (&i);
25ae5027 1955 }
497a1c66 1956 }
37d6f666 1957 }
bdcbe80c 1958 free_mem_ref_resources ();
37d6f666
WM
1959}
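/* As an illustration of the hash-table reuse (sketch):

     tmp_1 = *p_2;    // instrumented; *p_2 recorded in the hash table
     tmp_3 = *p_2;    // same reference in the same extended basic block:
                      // has_stmt_been_instrumented_p is true, no new check
     free (q_4);      // possibly-freeing call: the table is emptied
     tmp_5 = *p_2;    // instrumented again

   The second load needs no additional check, but any call that might free
   memory forces re-instrumentation afterwards.  */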
1960
8240018b
JJ
1961/* Build
1962 struct __asan_global
1963 {
1964 const void *__beg;
1965 uptr __size;
1966 uptr __size_with_redzone;
1967 const void *__name;
ef1b3fda 1968 const void *__module_name;
8240018b
JJ
1969 uptr __has_dynamic_init;
1970 } type. */
1971
1972static tree
1973asan_global_struct (void)
1974{
ef1b3fda 1975 static const char *field_names[6]
8240018b 1976 = { "__beg", "__size", "__size_with_redzone",
ef1b3fda
KS
1977 "__name", "__module_name", "__has_dynamic_init" };
1978 tree fields[6], ret;
8240018b
JJ
1979 int i;
1980
1981 ret = make_node (RECORD_TYPE);
ef1b3fda 1982 for (i = 0; i < 6; i++)
8240018b
JJ
1983 {
1984 fields[i]
1985 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
1986 get_identifier (field_names[i]),
1987 (i == 0 || i == 3) ? const_ptr_type_node
de5a5fa1 1988 : pointer_sized_int_node);
8240018b
JJ
1989 DECL_CONTEXT (fields[i]) = ret;
1990 if (i)
1991 DECL_CHAIN (fields[i - 1]) = fields[i];
1992 }
1993 TYPE_FIELDS (ret) = fields[0];
1994 TYPE_NAME (ret) = get_identifier ("__asan_global");
1995 layout_type (ret);
1996 return ret;
1997}
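/* For reference, a sketch of the equivalent C-level declaration the
   run-time library expects ("uptr" is pointer-sized, written here as
   uintptr_t; the authoritative definition lives in
   libsanitizer/asan/asan_interface_internal.h):

     struct __asan_global
     {
       const void *__beg;               // address of the global
       uintptr_t __size;                // size of the global itself
       uintptr_t __size_with_redzone;   // size including the trailing redzone
       const void *__name;              // name string built by asan_add_global
       const void *__module_name;       // main_input_filename
       uintptr_t __has_dynamic_init;    // always 0 as emitted by this file
     };  */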
1998
1999/* Append description of a single global DECL into vector V.
2000 TYPE is __asan_global struct type as returned by asan_global_struct. */
2001
2002static void
9771b263 2003asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
8240018b
JJ
2004{
2005 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2006 unsigned HOST_WIDE_INT size;
ef1b3fda 2007 tree str_cst, module_name_cst, refdecl = decl;
9771b263 2008 vec<constructor_elt, va_gc> *vinner = NULL;
8240018b 2009
ef1b3fda 2010 pretty_printer asan_pp, module_name_pp;
8240018b 2011
8240018b 2012 if (DECL_NAME (decl))
b066401f 2013 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
8240018b
JJ
2014 else
2015 pp_string (&asan_pp, "<unknown>");
11a877b3 2016 str_cst = asan_pp_string (&asan_pp);
8240018b 2017
ef1b3fda
KS
2018 pp_string (&module_name_pp, main_input_filename);
2019 module_name_cst = asan_pp_string (&module_name_pp);
2020
8240018b
JJ
2021 if (asan_needs_local_alias (decl))
2022 {
2023 char buf[20];
9771b263 2024 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
8240018b
JJ
2025 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2026 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2027 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2028 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2029 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2030 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2031 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2032 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2033 TREE_STATIC (refdecl) = 1;
2034 TREE_PUBLIC (refdecl) = 0;
2035 TREE_USED (refdecl) = 1;
2036 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2037 }
2038
2039 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2040 fold_convert (const_ptr_type_node,
2041 build_fold_addr_expr (refdecl)));
ae7e9ddd 2042 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
8240018b
JJ
2043 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2044 size += asan_red_zone_size (size);
2045 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2046 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2047 fold_convert (const_ptr_type_node, str_cst));
ef1b3fda
KS
2048 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2049 fold_convert (const_ptr_type_node, module_name_cst));
8240018b
JJ
2050 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, 0));
2051 init = build_constructor (type, vinner);
2052 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2053}
2054
0e668eaf
JJ
2055/* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2056void
2057initialize_sanitizer_builtins (void)
2058{
2059 tree decl;
2060
2061 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2062 return;
2063
2064 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2065 tree BT_FN_VOID_PTR
2066 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
b906f4ca
MP
2067 tree BT_FN_VOID_PTR_PTR
2068 = build_function_type_list (void_type_node, ptr_type_node,
2069 ptr_type_node, NULL_TREE);
de5a5fa1
MP
2070 tree BT_FN_VOID_PTR_PTR_PTR
2071 = build_function_type_list (void_type_node, ptr_type_node,
2072 ptr_type_node, ptr_type_node, NULL_TREE);
0e668eaf
JJ
2073 tree BT_FN_VOID_PTR_PTRMODE
2074 = build_function_type_list (void_type_node, ptr_type_node,
de5a5fa1 2075 pointer_sized_int_node, NULL_TREE);
c954bddd
JJ
2076 tree BT_FN_VOID_INT
2077 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2078 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2079 tree BT_FN_IX_CONST_VPTR_INT[5];
2080 tree BT_FN_IX_VPTR_IX_INT[5];
2081 tree BT_FN_VOID_VPTR_IX_INT[5];
2082 tree vptr
2083 = build_pointer_type (build_qualified_type (void_type_node,
2084 TYPE_QUAL_VOLATILE));
2085 tree cvptr
2086 = build_pointer_type (build_qualified_type (void_type_node,
2087 TYPE_QUAL_VOLATILE
2088 |TYPE_QUAL_CONST));
2089 tree boolt
2090 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2091 int i;
2092 for (i = 0; i < 5; i++)
2093 {
2094 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2095 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2096 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2097 integer_type_node, integer_type_node,
2098 NULL_TREE);
2099 BT_FN_IX_CONST_VPTR_INT[i]
2100 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2101 BT_FN_IX_VPTR_IX_INT[i]
2102 = build_function_type_list (ix, vptr, ix, integer_type_node,
2103 NULL_TREE);
2104 BT_FN_VOID_VPTR_IX_INT[i]
2105 = build_function_type_list (void_type_node, vptr, ix,
2106 integer_type_node, NULL_TREE);
2107 }
2108#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2109#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2110#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2111#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2112#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2113#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2114#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2115#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2116#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2117#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2118#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2119#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2120#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2121#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2122#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2123#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2124#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2125#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2126#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2127#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
0e668eaf
JJ
2128#undef ATTR_NOTHROW_LEAF_LIST
2129#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
bc77608b
JJ
2130#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2131#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
0e668eaf
JJ
2132#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2133#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
bc77608b
JJ
2134#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2135#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2136 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
de5a5fa1
MP
2137#undef ATTR_COLD_NOTHROW_LEAF_LIST
2138#define ATTR_COLD_NOTHROW_LEAF_LIST \
2139 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2140#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2141#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2142 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
0e668eaf
JJ
2143#undef DEF_SANITIZER_BUILTIN
2144#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2145 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2146 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2147 set_call_expr_flags (decl, ATTRS); \
2148 set_builtin_decl (ENUM, decl, true);
2149
2150#include "sanitizer.def"
2151
2152#undef DEF_SANITIZER_BUILTIN
2153}
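/* To illustrate the macro above (sketch; the entry shown is representative,
   see sanitizer.def for the actual list): an entry such as

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_REPORT_LOAD1, "__asan_report_load1",
			    BT_FN_VOID_PTR, ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST)

   expands into

     decl = add_builtin_function ("__builtin___asan_report_load1",
				  BT_FN_VOID_PTR, BUILT_IN_ASAN_REPORT_LOAD1,
				  BUILT_IN_NORMAL, "__asan_report_load1",
				  NULL_TREE);
     set_call_expr_flags (decl, ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST);
     set_builtin_decl (BUILT_IN_ASAN_REPORT_LOAD1, decl, true);

   so the FE-visible name gets a "__builtin_" prefix while the library
   entry point keeps the plain "__asan_report_load1" name.  */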
2154
94fce891
JJ
2155/* Called via htab_traverse. Count number of emitted
2156 STRING_CSTs in the constant hash table. */
2157
2158static int
2159count_string_csts (void **slot, void *data)
2160{
2161 struct constant_descriptor_tree *desc
2162 = (struct constant_descriptor_tree *) *slot;
2163 if (TREE_CODE (desc->value) == STRING_CST
2164 && TREE_ASM_WRITTEN (desc->value)
2165 && asan_protect_global (desc->value))
2166 ++*((unsigned HOST_WIDE_INT *) data);
2167 return 1;
2168}
2169
2170/* Helper structure to pass two parameters to
2171 add_string_csts. */
2172
2173struct asan_add_string_csts_data
2174{
2175 tree type;
2176 vec<constructor_elt, va_gc> *v;
2177};
2178
2179/* Called via htab_traverse. Call asan_add_global
2180 on emitted STRING_CSTs from the constant hash table. */
2181
2182static int
2183add_string_csts (void **slot, void *data)
2184{
2185 struct constant_descriptor_tree *desc
2186 = (struct constant_descriptor_tree *) *slot;
2187 if (TREE_CODE (desc->value) == STRING_CST
2188 && TREE_ASM_WRITTEN (desc->value)
2189 && asan_protect_global (desc->value))
2190 {
2191 struct asan_add_string_csts_data *aascd
2192 = (struct asan_add_string_csts_data *) data;
2193 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2194 aascd->type, aascd->v);
2195 }
2196 return 1;
2197}
2198
8240018b
JJ
2199/* Needs to be GTY(()), because cgraph_build_static_cdtor may
2200 invoke ggc_collect. */
2201static GTY(()) tree asan_ctor_statements;
2202
37d6f666 2203/* Module-level instrumentation.
ef1b3fda 2204 - Insert __asan_init_vN() into the list of CTORs.
37d6f666
WM
2205 - TODO: insert redzones around globals.
2206 */
2207
2208void
2209asan_finish_file (void)
2210{
8240018b
JJ
2211 struct varpool_node *vnode;
2212 unsigned HOST_WIDE_INT gcount = 0;
2213
94fce891
JJ
2214 if (shadow_ptr_types[0] == NULL_TREE)
2215 asan_init_shadow_ptr_types ();
2216 /* Avoid instrumenting code in the asan ctors/dtors.
2217 We don't need to insert padding after the description strings,
2218 nor after .LASAN* array. */
de5a5fa1 2219 flag_sanitize &= ~SANITIZE_ADDRESS;
0e668eaf
JJ
2220
2221 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2222 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
8240018b 2223 FOR_EACH_DEFINED_VARIABLE (vnode)
67348ccc
DM
2224 if (TREE_ASM_WRITTEN (vnode->decl)
2225 && asan_protect_global (vnode->decl))
8240018b 2226 ++gcount;
94fce891
JJ
2227 htab_t const_desc_htab = constant_pool_htab ();
2228 htab_traverse (const_desc_htab, count_string_csts, &gcount);
8240018b
JJ
2229 if (gcount)
2230 {
0e668eaf 2231 tree type = asan_global_struct (), var, ctor;
8240018b 2232 tree dtor_statements = NULL_TREE;
9771b263 2233 vec<constructor_elt, va_gc> *v;
8240018b
JJ
2234 char buf[20];
2235
2236 type = build_array_type_nelts (type, gcount);
2237 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2238 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2239 type);
2240 TREE_STATIC (var) = 1;
2241 TREE_PUBLIC (var) = 0;
2242 DECL_ARTIFICIAL (var) = 1;
2243 DECL_IGNORED_P (var) = 1;
9771b263 2244 vec_alloc (v, gcount);
8240018b 2245 FOR_EACH_DEFINED_VARIABLE (vnode)
67348ccc
DM
2246 if (TREE_ASM_WRITTEN (vnode->decl)
2247 && asan_protect_global (vnode->decl))
2248 asan_add_global (vnode->decl, TREE_TYPE (type), v);
94fce891
JJ
2249 struct asan_add_string_csts_data aascd;
2250 aascd.type = TREE_TYPE (type);
2251 aascd.v = v;
2252 htab_traverse (const_desc_htab, add_string_csts, &aascd);
8240018b
JJ
2253 ctor = build_constructor (type, v);
2254 TREE_CONSTANT (ctor) = 1;
2255 TREE_STATIC (ctor) = 1;
2256 DECL_INITIAL (var) = ctor;
2257 varpool_assemble_decl (varpool_node_for_decl (var));
2258
0e668eaf 2259 fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
de5a5fa1 2260 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
0e668eaf 2261 append_to_statement_list (build_call_expr (fn, 2,
8240018b 2262 build_fold_addr_expr (var),
de5a5fa1 2263 gcount_tree),
8240018b
JJ
2264 &asan_ctor_statements);
2265
0e668eaf
JJ
2266 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2267 append_to_statement_list (build_call_expr (fn, 2,
8240018b 2268 build_fold_addr_expr (var),
de5a5fa1 2269 gcount_tree),
8240018b
JJ
2270 &dtor_statements);
2271 cgraph_build_static_cdtor ('D', dtor_statements,
2272 MAX_RESERVED_INIT_PRIORITY - 1);
2273 }
2274 cgraph_build_static_cdtor ('I', asan_ctor_statements,
2275 MAX_RESERVED_INIT_PRIORITY - 1);
de5a5fa1 2276 flag_sanitize |= SANITIZE_ADDRESS;
f6d98484
JJ
2277}
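/* Roughly, for a translation unit with GCOUNT protected globals the code
   built above is equivalent to (sketch; symbol names are illustrative):

     static struct __asan_global .LASAN0[GCOUNT] = { ... };

     // constructor, priority MAX_RESERVED_INIT_PRIORITY - 1
     __asan_init_vN ();
     __asan_register_globals (&.LASAN0, GCOUNT);

     // destructor, same priority
     __asan_unregister_globals (&.LASAN0, GCOUNT);

   When GCOUNT is zero, only the constructor with the __asan_init_vN call
   is emitted.  */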
2278
37d6f666
WM
2279/* Instrument the current function. */
2280
2281static unsigned int
2282asan_instrument (void)
2283{
f6d98484 2284 if (shadow_ptr_types[0] == NULL_TREE)
94fce891 2285 asan_init_shadow_ptr_types ();
37d6f666 2286 transform_statements ();
37d6f666
WM
2287 return 0;
2288}
2289
2290static bool
2291gate_asan (void)
2292{
de5a5fa1 2293 return (flag_sanitize & SANITIZE_ADDRESS) != 0
e664c61c 2294 && !lookup_attribute ("no_sanitize_address",
77bc5132 2295 DECL_ATTRIBUTES (current_function_decl));
37d6f666
WM
2296}
2297
27a4cd48
DM
2298namespace {
2299
2300const pass_data pass_data_asan =
37d6f666 2301{
27a4cd48
DM
2302 GIMPLE_PASS, /* type */
2303 "asan", /* name */
2304 OPTGROUP_NONE, /* optinfo_flags */
2305 true, /* has_gate */
2306 true, /* has_execute */
2307 TV_NONE, /* tv_id */
2308 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2309 0, /* properties_provided */
2310 0, /* properties_destroyed */
2311 0, /* todo_flags_start */
2312 ( TODO_verify_flow | TODO_verify_stmts
2313 | TODO_update_ssa ), /* todo_flags_finish */
37d6f666 2314};
f6d98484 2315
27a4cd48
DM
2316class pass_asan : public gimple_opt_pass
2317{
2318public:
c3284718
RS
2319 pass_asan (gcc::context *ctxt)
2320 : gimple_opt_pass (pass_data_asan, ctxt)
27a4cd48
DM
2321 {}
2322
2323 /* opt_pass methods: */
65d3284b 2324 opt_pass * clone () { return new pass_asan (m_ctxt); }
27a4cd48
DM
2325 bool gate () { return gate_asan (); }
2326 unsigned int execute () { return asan_instrument (); }
2327
2328}; // class pass_asan
2329
2330} // anon namespace
2331
2332gimple_opt_pass *
2333make_pass_asan (gcc::context *ctxt)
2334{
2335 return new pass_asan (ctxt);
2336}
2337
dfb9e332
JJ
2338static bool
2339gate_asan_O0 (void)
2340{
77bc5132 2341 return !optimize && gate_asan ();
dfb9e332
JJ
2342}
2343
27a4cd48
DM
2344namespace {
2345
2346const pass_data pass_data_asan_O0 =
dfb9e332 2347{
27a4cd48
DM
2348 GIMPLE_PASS, /* type */
2349 "asan0", /* name */
2350 OPTGROUP_NONE, /* optinfo_flags */
2351 true, /* has_gate */
2352 true, /* has_execute */
2353 TV_NONE, /* tv_id */
2354 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2355 0, /* properties_provided */
2356 0, /* properties_destroyed */
2357 0, /* todo_flags_start */
2358 ( TODO_verify_flow | TODO_verify_stmts
2359 | TODO_update_ssa ), /* todo_flags_finish */
dfb9e332
JJ
2360};
2361
27a4cd48
DM
2362class pass_asan_O0 : public gimple_opt_pass
2363{
2364public:
c3284718
RS
2365 pass_asan_O0 (gcc::context *ctxt)
2366 : gimple_opt_pass (pass_data_asan_O0, ctxt)
27a4cd48
DM
2367 {}
2368
2369 /* opt_pass methods: */
2370 bool gate () { return gate_asan_O0 (); }
2371 unsigned int execute () { return asan_instrument (); }
2372
2373}; // class pass_asan_O0
2374
2375} // anon namespace
2376
2377gimple_opt_pass *
2378make_pass_asan_O0 (gcc::context *ctxt)
2379{
2380 return new pass_asan_O0 (ctxt);
2381}
2382
b9a55b13
MP
2383/* Perform optimization of sanitize functions. */
2384
2385static unsigned int
2386execute_sanopt (void)
2387{
2388 basic_block bb;
2389
2390 FOR_EACH_BB (bb)
2391 {
2392 gimple_stmt_iterator gsi;
2393 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2394 {
2395 gimple stmt = gsi_stmt (gsi);
2396
2397 if (!is_gimple_call (stmt))
2398 continue;
2399
2400 if (gimple_call_internal_p (stmt))
2401 switch (gimple_call_internal_fn (stmt))
2402 {
2403 case IFN_UBSAN_NULL:
2404 ubsan_expand_null_ifn (gsi);
2405 break;
2406 default:
2407 break;
2408 }
2409
2410 if (dump_file && (dump_flags & TDF_DETAILS))
2411 {
2412 fprintf (dump_file, "Optimized\n ");
2413 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2414 fprintf (dump_file, "\n");
2415 }
2416 }
2417 }
2418 return 0;
2419}
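/* For IFN_UBSAN_NULL, ubsan_expand_null_ifn rewrites the internal call
   into an explicit null check; schematically (sketch only):

     UBSAN_NULL (ptr_1, kind);

   becomes

     if (ptr_1 == 0)
       __ubsan_handle_type_mismatch (&data, ptr_1);

   where DATA is a statically built descriptor of the source location and
   the kind of dereference.  */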
2420
2421static bool
2422gate_sanopt (void)
2423{
2424 return flag_sanitize;
2425}
2426
2427namespace {
2428
2429const pass_data pass_data_sanopt =
2430{
2431 GIMPLE_PASS, /* type */
2432 "sanopt", /* name */
2433 OPTGROUP_NONE, /* optinfo_flags */
2434 true, /* has_gate */
2435 true, /* has_execute */
2436 TV_NONE, /* tv_id */
2437 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2438 0, /* properties_provided */
2439 0, /* properties_destroyed */
2440 0, /* todo_flags_start */
2441 ( TODO_verify_flow | TODO_verify_stmts
2442 | TODO_update_ssa ), /* todo_flags_finish */
2443};
2444
2445class pass_sanopt : public gimple_opt_pass
2446{
2447public:
2448 pass_sanopt (gcc::context *ctxt)
2449 : gimple_opt_pass (pass_data_sanopt, ctxt)
2450 {}
2451
2452 /* opt_pass methods: */
2453 bool gate () { return gate_sanopt (); }
2454 unsigned int execute () { return execute_sanopt (); }
2455
2456}; // class pass_sanopt
2457
2458} // anon namespace
2459
2460gimple_opt_pass *
2461make_pass_sanopt (gcc::context *ctxt)
2462{
2463 return new pass_sanopt (ctxt);
2464}
2465
f6d98484 2466#include "gt-asan.h"