]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/asan.c
[AArch64] Add a new CC mode for SVE conditions
[thirdparty/gcc.git] / gcc / asan.c
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2019 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "alloc-pool.h"
32 #include "tree-pass.h"
33 #include "memmodel.h"
34 #include "tm_p.h"
35 #include "ssa.h"
36 #include "stringpool.h"
37 #include "tree-ssanames.h"
38 #include "optabs.h"
39 #include "emit-rtl.h"
40 #include "cgraph.h"
41 #include "gimple-pretty-print.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "cfganal.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "varasm.h"
48 #include "stor-layout.h"
49 #include "tree-iterator.h"
50 #include "stringpool.h"
51 #include "attribs.h"
52 #include "asan.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "expr.h"
56 #include "output.h"
57 #include "langhooks.h"
58 #include "cfgloop.h"
59 #include "gimple-builder.h"
60 #include "gimple-fold.h"
61 #include "ubsan.h"
62 #include "params.h"
63 #include "builtins.h"
64 #include "fnmatch.h"
65 #include "tree-inline.h"
66
67 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
68 with <2x slowdown on average.
69
70 The tool consists of two parts:
71 instrumentation module (this file) and a run-time library.
72 The instrumentation module adds a run-time check before every memory insn.
73 For a 8- or 16- byte load accessing address X:
74 ShadowAddr = (X >> 3) + Offset
75 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
76 if (ShadowValue)
77 __asan_report_load8(X);
78 For a load of N bytes (N=1, 2 or 4) from address X:
79 ShadowAddr = (X >> 3) + Offset
80 ShadowValue = *(char*)ShadowAddr;
81 if (ShadowValue)
82 if ((X & 7) + N - 1 > ShadowValue)
83 __asan_report_loadN(X);
84 Stores are instrumented similarly, but using __asan_report_storeN functions.
85 A call too __asan_init_vN() is inserted to the list of module CTORs.
86 N is the version number of the AddressSanitizer API. The changes between the
87 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
88
89 The run-time library redefines malloc (so that redzone are inserted around
90 the allocated memory) and free (so that reuse of free-ed memory is delayed),
91 provides __asan_report* and __asan_init_vN functions.
92
93 Read more:
94 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
95
96 The current implementation supports detection of out-of-bounds and
97 use-after-free in the heap, on the stack and for global variables.
98
99 [Protection of stack variables]
100
101 To understand how detection of out-of-bounds and use-after-free works
102 for stack variables, lets look at this example on x86_64 where the
103 stack grows downward:
104
105 int
106 foo ()
107 {
108 char a[23] = {0};
109 int b[2] = {0};
110
111 a[5] = 1;
112 b[1] = 2;
113
114 return a[5] + b[1];
115 }
116
117 For this function, the stack protected by asan will be organized as
118 follows, from the top of the stack to the bottom:
119
120 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
121
122 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
123 the next slot be 32 bytes aligned; this one is called Partial
124 Redzone; this 32 bytes alignment is an asan constraint]
125
126 Slot 3/ [24 bytes for variable 'a']
127
128 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
129
130 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
131
132 Slot 6/ [8 bytes for variable 'b']
133
134 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
135 'LEFT RedZone']
136
137 The 32 bytes of LEFT red zone at the bottom of the stack can be
138 decomposed as such:
139
140 1/ The first 8 bytes contain a magical asan number that is always
141 0x41B58AB3.
142
143 2/ The following 8 bytes contains a pointer to a string (to be
144 parsed at runtime by the runtime asan library), which format is
145 the following:
146
147 "<function-name> <space> <num-of-variables-on-the-stack>
148 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
149 <length-of-var-in-bytes> ){n} "
150
151 where '(...){n}' means the content inside the parenthesis occurs 'n'
152 times, with 'n' being the number of variables on the stack.
153
154 3/ The following 8 bytes contain the PC of the current function which
155 will be used by the run-time library to print an error message.
156
157 4/ The following 8 bytes are reserved for internal use by the run-time.
158
159 The shadow memory for that stack layout is going to look like this:
160
161 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
162 The F1 byte pattern is a magic number called
163 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
164 the memory for that shadow byte is part of a the LEFT red zone
165 intended to seat at the bottom of the variables on the stack.
166
167 - content of shadow memory 8 bytes for slots 6 and 5:
168 0xF4F4F400. The F4 byte pattern is a magic number
169 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
170 memory region for this shadow byte is a PARTIAL red zone
171 intended to pad a variable A, so that the slot following
172 {A,padding} is 32 bytes aligned.
173
174 Note that the fact that the least significant byte of this
175 shadow memory content is 00 means that 8 bytes of its
176 corresponding memory (which corresponds to the memory of
177 variable 'b') is addressable.
178
179 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
180 The F2 byte pattern is a magic number called
181 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
182 region for this shadow byte is a MIDDLE red zone intended to
183 seat between two 32 aligned slots of {variable,padding}.
184
185 - content of shadow memory 8 bytes for slot 3 and 2:
186 0xF4000000. This represents is the concatenation of
187 variable 'a' and the partial red zone following it, like what we
188 had for variable 'b'. The least significant 3 bytes being 00
189 means that the 3 bytes of variable 'a' are addressable.
190
191 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
192 The F3 byte pattern is a magic number called
193 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
194 region for this shadow byte is a RIGHT red zone intended to seat
195 at the top of the variables of the stack.
196
197 Note that the real variable layout is done in expand_used_vars in
198 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
199 stack variables as well as the different red zones, emits some
200 prologue code to populate the shadow memory as to poison (mark as
201 non-accessible) the regions of the red zones and mark the regions of
202 stack variables as accessible, and emit some epilogue code to
203 un-poison (mark as accessible) the regions of red zones right before
204 the function exits.
205
206 [Protection of global variables]
207
208 The basic idea is to insert a red zone between two global variables
209 and install a constructor function that calls the asan runtime to do
210 the populating of the relevant shadow memory regions at load time.
211
212 So the global variables are laid out as to insert a red zone between
213 them. The size of the red zones is so that each variable starts on a
214 32 bytes boundary.
215
216 Then a constructor function is installed so that, for each global
217 variable, it calls the runtime asan library function
218 __asan_register_globals_with an instance of this type:
219
220 struct __asan_global
221 {
222 // Address of the beginning of the global variable.
223 const void *__beg;
224
225 // Initial size of the global variable.
226 uptr __size;
227
228 // Size of the global variable + size of the red zone. This
229 // size is 32 bytes aligned.
230 uptr __size_with_redzone;
231
232 // Name of the global variable.
233 const void *__name;
234
235 // Name of the module where the global variable is declared.
236 const void *__module_name;
237
238 // 1 if it has dynamic initialization, 0 otherwise.
239 uptr __has_dynamic_init;
240
241 // A pointer to struct that contains source location, could be NULL.
242 __asan_global_source_location *__location;
243 }
244
245 A destructor function that calls the runtime asan library function
246 _asan_unregister_globals is also installed. */
247
248 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
249 static bool asan_shadow_offset_computed;
250 static vec<char *> sanitized_sections;
251 static tree last_alloca_addr;
252
253 /* Set of variable declarations that are going to be guarded by
254 use-after-scope sanitizer. */
255
256 hash_set<tree> *asan_handled_variables = NULL;
257
258 hash_set <tree> *asan_used_labels = NULL;
259
260 /* Sets shadow offset to value in string VAL. */
261
262 bool
263 set_asan_shadow_offset (const char *val)
264 {
265 char *endp;
266
267 errno = 0;
268 #ifdef HAVE_LONG_LONG
269 asan_shadow_offset_value = strtoull (val, &endp, 0);
270 #else
271 asan_shadow_offset_value = strtoul (val, &endp, 0);
272 #endif
273 if (!(*val != '\0' && *endp == '\0' && errno == 0))
274 return false;
275
276 asan_shadow_offset_computed = true;
277
278 return true;
279 }
280
281 /* Set list of user-defined sections that need to be sanitized. */
282
283 void
284 set_sanitized_sections (const char *sections)
285 {
286 char *pat;
287 unsigned i;
288 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
289 free (pat);
290 sanitized_sections.truncate (0);
291
292 for (const char *s = sections; *s; )
293 {
294 const char *end;
295 for (end = s; *end && *end != ','; ++end);
296 size_t len = end - s;
297 sanitized_sections.safe_push (xstrndup (s, len));
298 s = *end ? end + 1 : end;
299 }
300 }
301
302 bool
303 asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
304 {
305 return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
306 && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
307 }
308
309 bool
310 asan_sanitize_stack_p (void)
311 {
312 return (sanitize_flags_p (SANITIZE_ADDRESS) && ASAN_STACK);
313 }
314
315 bool
316 asan_sanitize_allocas_p (void)
317 {
318 return (asan_sanitize_stack_p () && ASAN_PROTECT_ALLOCAS);
319 }
320
321 /* Checks whether section SEC should be sanitized. */
322
323 static bool
324 section_sanitized_p (const char *sec)
325 {
326 char *pat;
327 unsigned i;
328 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
329 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
330 return true;
331 return false;
332 }
333
334 /* Returns Asan shadow offset. */
335
336 static unsigned HOST_WIDE_INT
337 asan_shadow_offset ()
338 {
339 if (!asan_shadow_offset_computed)
340 {
341 asan_shadow_offset_computed = true;
342 asan_shadow_offset_value = targetm.asan_shadow_offset ();
343 }
344 return asan_shadow_offset_value;
345 }
346
347 alias_set_type asan_shadow_set = -1;
348
349 /* Pointer types to 1, 2 or 4 byte integers in shadow memory. A separate
350 alias set is used for all shadow memory accesses. */
351 static GTY(()) tree shadow_ptr_types[3];
352
353 /* Decl for __asan_option_detect_stack_use_after_return. */
354 static GTY(()) tree asan_detect_stack_use_after_return;
355
356 /* Hashtable support for memory references used by gimple
357 statements. */
358
359 /* This type represents a reference to a memory region. */
360 struct asan_mem_ref
361 {
362 /* The expression of the beginning of the memory region. */
363 tree start;
364
365 /* The size of the access. */
366 HOST_WIDE_INT access_size;
367 };
368
369 object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
370
371 /* Initializes an instance of asan_mem_ref. */
372
373 static void
374 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
375 {
376 ref->start = start;
377 ref->access_size = access_size;
378 }
379
380 /* Allocates memory for an instance of asan_mem_ref into the memory
381 pool returned by asan_mem_ref_get_alloc_pool and initialize it.
382 START is the address of (or the expression pointing to) the
383 beginning of memory reference. ACCESS_SIZE is the size of the
384 access to the referenced memory. */
385
386 static asan_mem_ref*
387 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
388 {
389 asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
390
391 asan_mem_ref_init (ref, start, access_size);
392 return ref;
393 }
394
395 /* This builds and returns a pointer to the end of the memory region
396 that starts at START and of length LEN. */
397
398 tree
399 asan_mem_ref_get_end (tree start, tree len)
400 {
401 if (len == NULL_TREE || integer_zerop (len))
402 return start;
403
404 if (!ptrofftype_p (len))
405 len = convert_to_ptrofftype (len);
406
407 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
408 }
409
410 /* Return a tree expression that represents the end of the referenced
411 memory region. Beware that this function can actually build a new
412 tree expression. */
413
414 tree
415 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
416 {
417 return asan_mem_ref_get_end (ref->start, len);
418 }
419
420 struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
421 {
422 static inline hashval_t hash (const asan_mem_ref *);
423 static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
424 };
425
426 /* Hash a memory reference. */
427
428 inline hashval_t
429 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
430 {
431 return iterative_hash_expr (mem_ref->start, 0);
432 }
433
434 /* Compare two memory references. We accept the length of either
435 memory references to be NULL_TREE. */
436
437 inline bool
438 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
439 const asan_mem_ref *m2)
440 {
441 return operand_equal_p (m1->start, m2->start, 0);
442 }
443
444 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
445
446 /* Returns a reference to the hash table containing memory references.
447 This function ensures that the hash table is created. Note that
448 this hash table is updated by the function
449 update_mem_ref_hash_table. */
450
451 static hash_table<asan_mem_ref_hasher> *
452 get_mem_ref_hash_table ()
453 {
454 if (!asan_mem_ref_ht)
455 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
456
457 return asan_mem_ref_ht;
458 }
459
460 /* Clear all entries from the memory references hash table. */
461
462 static void
463 empty_mem_ref_hash_table ()
464 {
465 if (asan_mem_ref_ht)
466 asan_mem_ref_ht->empty ();
467 }
468
469 /* Free the memory references hash table. */
470
471 static void
472 free_mem_ref_resources ()
473 {
474 delete asan_mem_ref_ht;
475 asan_mem_ref_ht = NULL;
476
477 asan_mem_ref_pool.release ();
478 }
479
480 /* Return true iff the memory reference REF has been instrumented. */
481
482 static bool
483 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
484 {
485 asan_mem_ref r;
486 asan_mem_ref_init (&r, ref, access_size);
487
488 asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
489 return saved_ref && saved_ref->access_size >= access_size;
490 }
491
492 /* Return true iff the memory reference REF has been instrumented. */
493
494 static bool
495 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
496 {
497 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
498 }
499
500 /* Return true iff access to memory region starting at REF and of
501 length LEN has been instrumented. */
502
503 static bool
504 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
505 {
506 HOST_WIDE_INT size_in_bytes
507 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
508
509 return size_in_bytes != -1
510 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
511 }
512
513 /* Set REF to the memory reference present in a gimple assignment
514 ASSIGNMENT. Return true upon successful completion, false
515 otherwise. */
516
517 static bool
518 get_mem_ref_of_assignment (const gassign *assignment,
519 asan_mem_ref *ref,
520 bool *ref_is_store)
521 {
522 gcc_assert (gimple_assign_single_p (assignment));
523
524 if (gimple_store_p (assignment)
525 && !gimple_clobber_p (assignment))
526 {
527 ref->start = gimple_assign_lhs (assignment);
528 *ref_is_store = true;
529 }
530 else if (gimple_assign_load_p (assignment))
531 {
532 ref->start = gimple_assign_rhs1 (assignment);
533 *ref_is_store = false;
534 }
535 else
536 return false;
537
538 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
539 return true;
540 }
541
542 /* Return address of last allocated dynamic alloca. */
543
544 static tree
545 get_last_alloca_addr ()
546 {
547 if (last_alloca_addr)
548 return last_alloca_addr;
549
550 last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr");
551 gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node);
552 edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
553 gsi_insert_on_edge_immediate (e, g);
554 return last_alloca_addr;
555 }
556
557 /* Insert __asan_allocas_unpoison (top, bottom) call before
558 __builtin_stack_restore (new_sp) call.
559 The pseudocode of this routine should look like this:
560 top = last_alloca_addr;
561 bot = new_sp;
562 __asan_allocas_unpoison (top, bot);
563 last_alloca_addr = new_sp;
564 __builtin_stack_restore (new_sp);
565 In general, we can't use new_sp as bot parameter because on some
566 architectures SP has non zero offset from dynamic stack area. Moreover, on
567 some architectures this offset (STACK_DYNAMIC_OFFSET) becomes known for each
568 particular function only after all callees were expanded to rtl.
569 The most noticeable example is PowerPC{,64}, see
570 http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK.
571 To overcome the issue we use following trick: pass new_sp as a second
572 parameter to __asan_allocas_unpoison and rewrite it during expansion with
573 new_sp + (virtual_dynamic_stack_rtx - sp) later in
574 expand_asan_emit_allocas_unpoison function. */
575
576 static void
577 handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
578 {
579 if (!iter || !asan_sanitize_allocas_p ())
580 return;
581
582 tree last_alloca = get_last_alloca_addr ();
583 tree restored_stack = gimple_call_arg (call, 0);
584 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
585 gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack);
586 gsi_insert_before (iter, g, GSI_SAME_STMT);
587 g = gimple_build_assign (last_alloca, restored_stack);
588 gsi_insert_before (iter, g, GSI_SAME_STMT);
589 }
590
591 /* Deploy and poison redzones around __builtin_alloca call. To do this, we
592 should replace this call with another one with changed parameters and
593 replace all its uses with new address, so
594 addr = __builtin_alloca (old_size, align);
595 is replaced by
596 left_redzone_size = max (align, ASAN_RED_ZONE_SIZE);
597 Following two statements are optimized out if we know that
598 old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. alloca doesn't need partial
599 redzone.
600 misalign = old_size & (ASAN_RED_ZONE_SIZE - 1);
601 partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign;
602 right_redzone_size = ASAN_RED_ZONE_SIZE;
603 additional_size = left_redzone_size + partial_redzone_size +
604 right_redzone_size;
605 new_size = old_size + additional_size;
606 new_alloca = __builtin_alloca (new_size, max (align, 32))
607 __asan_alloca_poison (new_alloca, old_size)
608 addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE);
609 last_alloca_addr = new_alloca;
610 ADDITIONAL_SIZE is added to make new memory allocation contain not only
611 requested memory, but also left, partial and right redzones as well as some
612 additional space, required by alignment. */
613
614 static void
615 handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
616 {
617 if (!iter || !asan_sanitize_allocas_p ())
618 return;
619
620 gassign *g;
621 gcall *gg;
622 const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
623
624 tree last_alloca = get_last_alloca_addr ();
625 tree callee = gimple_call_fndecl (call);
626 tree old_size = gimple_call_arg (call, 0);
627 tree ptr_type = gimple_call_lhs (call) ? TREE_TYPE (gimple_call_lhs (call))
628 : ptr_type_node;
629 tree partial_size = NULL_TREE;
630 unsigned int align
631 = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
632 ? 0 : tree_to_uhwi (gimple_call_arg (call, 1));
633
634 /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
635 bytes of allocated space. Otherwise, align alloca to ASAN_RED_ZONE_SIZE
636 manually. */
637 align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
638
639 tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask);
640 tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE);
641
642 /* Extract lower bits from old_size. */
643 wide_int size_nonzero_bits = get_nonzero_bits (old_size);
644 wide_int rz_mask
645 = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits));
646 wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask);
647
648 /* If alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need partial
649 redzone. Otherwise, compute its size here. */
650 if (wi::ne_p (old_size_lower_bits, 0))
651 {
652 /* misalign = size & (ASAN_RED_ZONE_SIZE - 1)
653 partial_size = ASAN_RED_ZONE_SIZE - misalign. */
654 g = gimple_build_assign (make_ssa_name (size_type_node, NULL),
655 BIT_AND_EXPR, old_size, alloca_rz_mask);
656 gsi_insert_before (iter, g, GSI_SAME_STMT);
657 tree misalign = gimple_assign_lhs (g);
658 g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR,
659 redzone_size, misalign);
660 gsi_insert_before (iter, g, GSI_SAME_STMT);
661 partial_size = gimple_assign_lhs (g);
662 }
663
664 /* additional_size = align + ASAN_RED_ZONE_SIZE. */
665 tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT
666 + ASAN_RED_ZONE_SIZE);
667 /* If alloca has partial redzone, include it to additional_size too. */
668 if (partial_size)
669 {
670 /* additional_size += partial_size. */
671 g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
672 partial_size, additional_size);
673 gsi_insert_before (iter, g, GSI_SAME_STMT);
674 additional_size = gimple_assign_lhs (g);
675 }
676
677 /* new_size = old_size + additional_size. */
678 g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size,
679 additional_size);
680 gsi_insert_before (iter, g, GSI_SAME_STMT);
681 tree new_size = gimple_assign_lhs (g);
682
683 /* Build new __builtin_alloca call:
684 new_alloca_with_rz = __builtin_alloca (new_size, align). */
685 tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
686 gg = gimple_build_call (fn, 2, new_size,
687 build_int_cst (size_type_node, align));
688 tree new_alloca_with_rz = make_ssa_name (ptr_type, gg);
689 gimple_call_set_lhs (gg, new_alloca_with_rz);
690 gsi_insert_before (iter, gg, GSI_SAME_STMT);
691
692 /* new_alloca = new_alloca_with_rz + align. */
693 g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR,
694 new_alloca_with_rz,
695 build_int_cst (size_type_node,
696 align / BITS_PER_UNIT));
697 gsi_insert_before (iter, g, GSI_SAME_STMT);
698 tree new_alloca = gimple_assign_lhs (g);
699
700 /* Poison newly created alloca redzones:
701 __asan_alloca_poison (new_alloca, old_size). */
702 fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON);
703 gg = gimple_build_call (fn, 2, new_alloca, old_size);
704 gsi_insert_before (iter, gg, GSI_SAME_STMT);
705
706 /* Save new_alloca_with_rz value into last_alloca to use it during
707 allocas unpoisoning. */
708 g = gimple_build_assign (last_alloca, new_alloca_with_rz);
709 gsi_insert_before (iter, g, GSI_SAME_STMT);
710
711 /* Finally, replace old alloca ptr with NEW_ALLOCA. */
712 replace_call_with_value (iter, new_alloca);
713 }
714
715 /* Return the memory references contained in a gimple statement
716 representing a builtin call that has to do with memory access. */
717
718 static bool
719 get_mem_refs_of_builtin_call (gcall *call,
720 asan_mem_ref *src0,
721 tree *src0_len,
722 bool *src0_is_store,
723 asan_mem_ref *src1,
724 tree *src1_len,
725 bool *src1_is_store,
726 asan_mem_ref *dst,
727 tree *dst_len,
728 bool *dst_is_store,
729 bool *dest_is_deref,
730 bool *intercepted_p,
731 gimple_stmt_iterator *iter = NULL)
732 {
733 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
734
735 tree callee = gimple_call_fndecl (call);
736 tree source0 = NULL_TREE, source1 = NULL_TREE,
737 dest = NULL_TREE, len = NULL_TREE;
738 bool is_store = true, got_reference_p = false;
739 HOST_WIDE_INT access_size = 1;
740
741 *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
742
743 switch (DECL_FUNCTION_CODE (callee))
744 {
745 /* (s, s, n) style memops. */
746 case BUILT_IN_BCMP:
747 case BUILT_IN_MEMCMP:
748 source0 = gimple_call_arg (call, 0);
749 source1 = gimple_call_arg (call, 1);
750 len = gimple_call_arg (call, 2);
751 break;
752
753 /* (src, dest, n) style memops. */
754 case BUILT_IN_BCOPY:
755 source0 = gimple_call_arg (call, 0);
756 dest = gimple_call_arg (call, 1);
757 len = gimple_call_arg (call, 2);
758 break;
759
760 /* (dest, src, n) style memops. */
761 case BUILT_IN_MEMCPY:
762 case BUILT_IN_MEMCPY_CHK:
763 case BUILT_IN_MEMMOVE:
764 case BUILT_IN_MEMMOVE_CHK:
765 case BUILT_IN_MEMPCPY:
766 case BUILT_IN_MEMPCPY_CHK:
767 dest = gimple_call_arg (call, 0);
768 source0 = gimple_call_arg (call, 1);
769 len = gimple_call_arg (call, 2);
770 break;
771
772 /* (dest, n) style memops. */
773 case BUILT_IN_BZERO:
774 dest = gimple_call_arg (call, 0);
775 len = gimple_call_arg (call, 1);
776 break;
777
778 /* (dest, x, n) style memops*/
779 case BUILT_IN_MEMSET:
780 case BUILT_IN_MEMSET_CHK:
781 dest = gimple_call_arg (call, 0);
782 len = gimple_call_arg (call, 2);
783 break;
784
785 case BUILT_IN_STRLEN:
786 source0 = gimple_call_arg (call, 0);
787 len = gimple_call_lhs (call);
788 break;
789
790 case BUILT_IN_STACK_RESTORE:
791 handle_builtin_stack_restore (call, iter);
792 break;
793
794 CASE_BUILT_IN_ALLOCA:
795 handle_builtin_alloca (call, iter);
796 break;
797 /* And now the __atomic* and __sync builtins.
798 These are handled differently from the classical memory memory
799 access builtins above. */
800
801 case BUILT_IN_ATOMIC_LOAD_1:
802 is_store = false;
803 /* FALLTHRU */
804 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
805 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
806 case BUILT_IN_SYNC_FETCH_AND_OR_1:
807 case BUILT_IN_SYNC_FETCH_AND_AND_1:
808 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
809 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
810 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
811 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
812 case BUILT_IN_SYNC_OR_AND_FETCH_1:
813 case BUILT_IN_SYNC_AND_AND_FETCH_1:
814 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
815 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
816 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
817 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
818 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
819 case BUILT_IN_SYNC_LOCK_RELEASE_1:
820 case BUILT_IN_ATOMIC_EXCHANGE_1:
821 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
822 case BUILT_IN_ATOMIC_STORE_1:
823 case BUILT_IN_ATOMIC_ADD_FETCH_1:
824 case BUILT_IN_ATOMIC_SUB_FETCH_1:
825 case BUILT_IN_ATOMIC_AND_FETCH_1:
826 case BUILT_IN_ATOMIC_NAND_FETCH_1:
827 case BUILT_IN_ATOMIC_XOR_FETCH_1:
828 case BUILT_IN_ATOMIC_OR_FETCH_1:
829 case BUILT_IN_ATOMIC_FETCH_ADD_1:
830 case BUILT_IN_ATOMIC_FETCH_SUB_1:
831 case BUILT_IN_ATOMIC_FETCH_AND_1:
832 case BUILT_IN_ATOMIC_FETCH_NAND_1:
833 case BUILT_IN_ATOMIC_FETCH_XOR_1:
834 case BUILT_IN_ATOMIC_FETCH_OR_1:
835 access_size = 1;
836 goto do_atomic;
837
838 case BUILT_IN_ATOMIC_LOAD_2:
839 is_store = false;
840 /* FALLTHRU */
841 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
842 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
843 case BUILT_IN_SYNC_FETCH_AND_OR_2:
844 case BUILT_IN_SYNC_FETCH_AND_AND_2:
845 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
846 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
847 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
848 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
849 case BUILT_IN_SYNC_OR_AND_FETCH_2:
850 case BUILT_IN_SYNC_AND_AND_FETCH_2:
851 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
852 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
853 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
854 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
855 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
856 case BUILT_IN_SYNC_LOCK_RELEASE_2:
857 case BUILT_IN_ATOMIC_EXCHANGE_2:
858 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
859 case BUILT_IN_ATOMIC_STORE_2:
860 case BUILT_IN_ATOMIC_ADD_FETCH_2:
861 case BUILT_IN_ATOMIC_SUB_FETCH_2:
862 case BUILT_IN_ATOMIC_AND_FETCH_2:
863 case BUILT_IN_ATOMIC_NAND_FETCH_2:
864 case BUILT_IN_ATOMIC_XOR_FETCH_2:
865 case BUILT_IN_ATOMIC_OR_FETCH_2:
866 case BUILT_IN_ATOMIC_FETCH_ADD_2:
867 case BUILT_IN_ATOMIC_FETCH_SUB_2:
868 case BUILT_IN_ATOMIC_FETCH_AND_2:
869 case BUILT_IN_ATOMIC_FETCH_NAND_2:
870 case BUILT_IN_ATOMIC_FETCH_XOR_2:
871 case BUILT_IN_ATOMIC_FETCH_OR_2:
872 access_size = 2;
873 goto do_atomic;
874
875 case BUILT_IN_ATOMIC_LOAD_4:
876 is_store = false;
877 /* FALLTHRU */
878 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
879 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
880 case BUILT_IN_SYNC_FETCH_AND_OR_4:
881 case BUILT_IN_SYNC_FETCH_AND_AND_4:
882 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
883 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
884 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
885 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
886 case BUILT_IN_SYNC_OR_AND_FETCH_4:
887 case BUILT_IN_SYNC_AND_AND_FETCH_4:
888 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
889 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
890 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
891 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
892 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
893 case BUILT_IN_SYNC_LOCK_RELEASE_4:
894 case BUILT_IN_ATOMIC_EXCHANGE_4:
895 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
896 case BUILT_IN_ATOMIC_STORE_4:
897 case BUILT_IN_ATOMIC_ADD_FETCH_4:
898 case BUILT_IN_ATOMIC_SUB_FETCH_4:
899 case BUILT_IN_ATOMIC_AND_FETCH_4:
900 case BUILT_IN_ATOMIC_NAND_FETCH_4:
901 case BUILT_IN_ATOMIC_XOR_FETCH_4:
902 case BUILT_IN_ATOMIC_OR_FETCH_4:
903 case BUILT_IN_ATOMIC_FETCH_ADD_4:
904 case BUILT_IN_ATOMIC_FETCH_SUB_4:
905 case BUILT_IN_ATOMIC_FETCH_AND_4:
906 case BUILT_IN_ATOMIC_FETCH_NAND_4:
907 case BUILT_IN_ATOMIC_FETCH_XOR_4:
908 case BUILT_IN_ATOMIC_FETCH_OR_4:
909 access_size = 4;
910 goto do_atomic;
911
912 case BUILT_IN_ATOMIC_LOAD_8:
913 is_store = false;
914 /* FALLTHRU */
915 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
916 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
917 case BUILT_IN_SYNC_FETCH_AND_OR_8:
918 case BUILT_IN_SYNC_FETCH_AND_AND_8:
919 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
920 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
921 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
922 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
923 case BUILT_IN_SYNC_OR_AND_FETCH_8:
924 case BUILT_IN_SYNC_AND_AND_FETCH_8:
925 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
926 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
927 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
928 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
929 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
930 case BUILT_IN_SYNC_LOCK_RELEASE_8:
931 case BUILT_IN_ATOMIC_EXCHANGE_8:
932 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
933 case BUILT_IN_ATOMIC_STORE_8:
934 case BUILT_IN_ATOMIC_ADD_FETCH_8:
935 case BUILT_IN_ATOMIC_SUB_FETCH_8:
936 case BUILT_IN_ATOMIC_AND_FETCH_8:
937 case BUILT_IN_ATOMIC_NAND_FETCH_8:
938 case BUILT_IN_ATOMIC_XOR_FETCH_8:
939 case BUILT_IN_ATOMIC_OR_FETCH_8:
940 case BUILT_IN_ATOMIC_FETCH_ADD_8:
941 case BUILT_IN_ATOMIC_FETCH_SUB_8:
942 case BUILT_IN_ATOMIC_FETCH_AND_8:
943 case BUILT_IN_ATOMIC_FETCH_NAND_8:
944 case BUILT_IN_ATOMIC_FETCH_XOR_8:
945 case BUILT_IN_ATOMIC_FETCH_OR_8:
946 access_size = 8;
947 goto do_atomic;
948
949 case BUILT_IN_ATOMIC_LOAD_16:
950 is_store = false;
951 /* FALLTHRU */
952 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
953 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
954 case BUILT_IN_SYNC_FETCH_AND_OR_16:
955 case BUILT_IN_SYNC_FETCH_AND_AND_16:
956 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
957 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
958 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
959 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
960 case BUILT_IN_SYNC_OR_AND_FETCH_16:
961 case BUILT_IN_SYNC_AND_AND_FETCH_16:
962 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
963 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
964 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
965 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
966 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
967 case BUILT_IN_SYNC_LOCK_RELEASE_16:
968 case BUILT_IN_ATOMIC_EXCHANGE_16:
969 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
970 case BUILT_IN_ATOMIC_STORE_16:
971 case BUILT_IN_ATOMIC_ADD_FETCH_16:
972 case BUILT_IN_ATOMIC_SUB_FETCH_16:
973 case BUILT_IN_ATOMIC_AND_FETCH_16:
974 case BUILT_IN_ATOMIC_NAND_FETCH_16:
975 case BUILT_IN_ATOMIC_XOR_FETCH_16:
976 case BUILT_IN_ATOMIC_OR_FETCH_16:
977 case BUILT_IN_ATOMIC_FETCH_ADD_16:
978 case BUILT_IN_ATOMIC_FETCH_SUB_16:
979 case BUILT_IN_ATOMIC_FETCH_AND_16:
980 case BUILT_IN_ATOMIC_FETCH_NAND_16:
981 case BUILT_IN_ATOMIC_FETCH_XOR_16:
982 case BUILT_IN_ATOMIC_FETCH_OR_16:
983 access_size = 16;
984 /* FALLTHRU */
985 do_atomic:
986 {
987 dest = gimple_call_arg (call, 0);
988 /* DEST represents the address of a memory location.
989 instrument_derefs wants the memory location, so lets
990 dereference the address DEST before handing it to
991 instrument_derefs. */
992 tree type = build_nonstandard_integer_type (access_size
993 * BITS_PER_UNIT, 1);
994 dest = build2 (MEM_REF, type, dest,
995 build_int_cst (build_pointer_type (char_type_node), 0));
996 break;
997 }
998
999 default:
1000 /* The other builtins memory access are not instrumented in this
1001 function because they either don't have any length parameter,
1002 or their length parameter is just a limit. */
1003 break;
1004 }
1005
1006 if (len != NULL_TREE)
1007 {
1008 if (source0 != NULL_TREE)
1009 {
1010 src0->start = source0;
1011 src0->access_size = access_size;
1012 *src0_len = len;
1013 *src0_is_store = false;
1014 }
1015
1016 if (source1 != NULL_TREE)
1017 {
1018 src1->start = source1;
1019 src1->access_size = access_size;
1020 *src1_len = len;
1021 *src1_is_store = false;
1022 }
1023
1024 if (dest != NULL_TREE)
1025 {
1026 dst->start = dest;
1027 dst->access_size = access_size;
1028 *dst_len = len;
1029 *dst_is_store = true;
1030 }
1031
1032 got_reference_p = true;
1033 }
1034 else if (dest)
1035 {
1036 dst->start = dest;
1037 dst->access_size = access_size;
1038 *dst_len = NULL_TREE;
1039 *dst_is_store = is_store;
1040 *dest_is_deref = true;
1041 got_reference_p = true;
1042 }
1043
1044 return got_reference_p;
1045 }
1046
1047 /* Return true iff a given gimple statement has been instrumented.
1048 Note that the statement is "defined" by the memory references it
1049 contains. */
1050
1051 static bool
1052 has_stmt_been_instrumented_p (gimple *stmt)
1053 {
1054 if (gimple_assign_single_p (stmt))
1055 {
1056 bool r_is_store;
1057 asan_mem_ref r;
1058 asan_mem_ref_init (&r, NULL, 1);
1059
1060 if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
1061 &r_is_store))
1062 return has_mem_ref_been_instrumented (&r);
1063 }
1064 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1065 {
1066 asan_mem_ref src0, src1, dest;
1067 asan_mem_ref_init (&src0, NULL, 1);
1068 asan_mem_ref_init (&src1, NULL, 1);
1069 asan_mem_ref_init (&dest, NULL, 1);
1070
1071 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1072 bool src0_is_store = false, src1_is_store = false,
1073 dest_is_store = false, dest_is_deref = false, intercepted_p = true;
1074 if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
1075 &src0, &src0_len, &src0_is_store,
1076 &src1, &src1_len, &src1_is_store,
1077 &dest, &dest_len, &dest_is_store,
1078 &dest_is_deref, &intercepted_p))
1079 {
1080 if (src0.start != NULL_TREE
1081 && !has_mem_ref_been_instrumented (&src0, src0_len))
1082 return false;
1083
1084 if (src1.start != NULL_TREE
1085 && !has_mem_ref_been_instrumented (&src1, src1_len))
1086 return false;
1087
1088 if (dest.start != NULL_TREE
1089 && !has_mem_ref_been_instrumented (&dest, dest_len))
1090 return false;
1091
1092 return true;
1093 }
1094 }
1095 else if (is_gimple_call (stmt) && gimple_store_p (stmt))
1096 {
1097 asan_mem_ref r;
1098 asan_mem_ref_init (&r, NULL, 1);
1099
1100 r.start = gimple_call_lhs (stmt);
1101 r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
1102 return has_mem_ref_been_instrumented (&r);
1103 }
1104
1105 return false;
1106 }
1107
1108 /* Insert a memory reference into the hash table. */
1109
1110 static void
1111 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
1112 {
1113 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
1114
1115 asan_mem_ref r;
1116 asan_mem_ref_init (&r, ref, access_size);
1117
1118 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
1119 if (*slot == NULL || (*slot)->access_size < access_size)
1120 *slot = asan_mem_ref_new (ref, access_size);
1121 }
1122
1123 /* Initialize shadow_ptr_types array. */
1124
1125 static void
1126 asan_init_shadow_ptr_types (void)
1127 {
1128 asan_shadow_set = new_alias_set ();
1129 tree types[3] = { signed_char_type_node, short_integer_type_node,
1130 integer_type_node };
1131
1132 for (unsigned i = 0; i < 3; i++)
1133 {
1134 shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
1135 TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
1136 shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
1137 }
1138
1139 initialize_sanitizer_builtins ();
1140 }
1141
1142 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
1143
1144 static tree
1145 asan_pp_string (pretty_printer *pp)
1146 {
1147 const char *buf = pp_formatted_text (pp);
1148 size_t len = strlen (buf);
1149 tree ret = build_string (len + 1, buf);
1150 TREE_TYPE (ret)
1151 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
1152 build_index_type (size_int (len)));
1153 TREE_READONLY (ret) = 1;
1154 TREE_STATIC (ret) = 1;
1155 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
1156 }
1157
1158 /* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't call a library call here
1159 though. */
1160
1161 static void
1162 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
1163 {
1164 rtx_insn *insn, *insns, *jump;
1165 rtx_code_label *top_label;
1166 rtx end, addr, tmp;
1167
1168 gcc_assert ((len & 3) == 0);
1169 start_sequence ();
1170 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
1171 insns = get_insns ();
1172 end_sequence ();
1173 for (insn = insns; insn; insn = NEXT_INSN (insn))
1174 if (CALL_P (insn))
1175 break;
1176 if (insn == NULL_RTX)
1177 {
1178 emit_insn (insns);
1179 return;
1180 }
1181
1182 top_label = gen_label_rtx ();
1183 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
1184 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1185 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1186 emit_label (top_label);
1187
1188 emit_move_insn (shadow_mem, const0_rtx);
1189 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1190 true, OPTAB_LIB_WIDEN);
1191 if (tmp != addr)
1192 emit_move_insn (addr, tmp);
1193 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1194 jump = get_last_insn ();
1195 gcc_assert (JUMP_P (jump));
1196 add_reg_br_prob_note (jump,
1197 profile_probability::guessed_always ()
1198 .apply_scale (80, 100));
1199 }
1200
1201 void
1202 asan_function_start (void)
1203 {
1204 section *fnsec = function_section (current_function_decl);
1205 switch_to_section (fnsec);
1206 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1207 current_function_funcdef_no);
1208 }
1209
1210 /* Return number of shadow bytes that are occupied by a local variable
1211 of SIZE bytes. */
1212
1213 static unsigned HOST_WIDE_INT
1214 shadow_mem_size (unsigned HOST_WIDE_INT size)
1215 {
1216 /* It must be possible to align stack variables to granularity
1217 of shadow memory. */
1218 gcc_assert (BITS_PER_UNIT
1219 * ASAN_SHADOW_GRANULARITY <= MAX_SUPPORTED_STACK_ALIGNMENT);
1220
1221 return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
1222 }
1223
1224 /* Always emit 4 bytes at a time. */
1225 #define RZ_BUFFER_SIZE 4
1226
1227 /* ASAN redzone buffer container that handles emission of shadow bytes. */
1228 struct asan_redzone_buffer
1229 {
1230 /* Constructor. */
1231 asan_redzone_buffer (rtx shadow_mem, HOST_WIDE_INT prev_offset):
1232 m_shadow_mem (shadow_mem), m_prev_offset (prev_offset),
1233 m_original_offset (prev_offset), m_shadow_bytes (RZ_BUFFER_SIZE)
1234 {}
1235
1236 /* Emit VALUE shadow byte at a given OFFSET. */
1237 void emit_redzone_byte (HOST_WIDE_INT offset, unsigned char value);
1238
1239 /* Emit RTX emission of the content of the buffer. */
1240 void flush_redzone_payload (void);
1241
1242 private:
1243 /* Flush if the content of the buffer is full
1244 (equal to RZ_BUFFER_SIZE). */
1245 void flush_if_full (void);
1246
1247 /* Memory where we last emitted a redzone payload. */
1248 rtx m_shadow_mem;
1249
1250 /* Relative offset where we last emitted a redzone payload. */
1251 HOST_WIDE_INT m_prev_offset;
1252
1253 /* Relative original offset. Used for checking only. */
1254 HOST_WIDE_INT m_original_offset;
1255
1256 public:
1257 /* Buffer with redzone payload. */
1258 auto_vec<unsigned char> m_shadow_bytes;
1259 };
1260
1261 /* Emit VALUE shadow byte at a given OFFSET. */
1262
1263 void
1264 asan_redzone_buffer::emit_redzone_byte (HOST_WIDE_INT offset,
1265 unsigned char value)
1266 {
1267 gcc_assert ((offset & (ASAN_SHADOW_GRANULARITY - 1)) == 0);
1268 gcc_assert (offset >= m_prev_offset);
1269
1270 HOST_WIDE_INT off
1271 = m_prev_offset + ASAN_SHADOW_GRANULARITY * m_shadow_bytes.length ();
1272 if (off == offset)
1273 {
1274 /* Consecutive shadow memory byte. */
1275 m_shadow_bytes.safe_push (value);
1276 flush_if_full ();
1277 }
1278 else
1279 {
1280 if (!m_shadow_bytes.is_empty ())
1281 flush_redzone_payload ();
1282
1283 /* Maybe start earlier in order to use aligned store. */
1284 HOST_WIDE_INT align = (offset - m_prev_offset) % ASAN_RED_ZONE_SIZE;
1285 if (align)
1286 {
1287 offset -= align;
1288 for (unsigned i = 0; i < align / BITS_PER_UNIT; i++)
1289 m_shadow_bytes.safe_push (0);
1290 }
1291
1292 /* Adjust m_prev_offset and m_shadow_mem. */
1293 HOST_WIDE_INT diff = offset - m_prev_offset;
1294 m_shadow_mem = adjust_address (m_shadow_mem, VOIDmode,
1295 diff >> ASAN_SHADOW_SHIFT);
1296 m_prev_offset = offset;
1297 m_shadow_bytes.safe_push (value);
1298 flush_if_full ();
1299 }
1300 }
1301
1302 /* Emit RTX emission of the content of the buffer. */
1303
1304 void
1305 asan_redzone_buffer::flush_redzone_payload (void)
1306 {
1307 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
1308
1309 if (m_shadow_bytes.is_empty ())
1310 return;
1311
1312 /* Be sure we always emit to an aligned address. */
1313 gcc_assert (((m_prev_offset - m_original_offset)
1314 & (ASAN_RED_ZONE_SIZE - 1)) == 0);
1315
1316 /* Fill it to RZ_BUFFER_SIZE bytes with zeros if needed. */
1317 unsigned l = m_shadow_bytes.length ();
1318 for (unsigned i = 0; i <= RZ_BUFFER_SIZE - l; i++)
1319 m_shadow_bytes.safe_push (0);
1320
1321 if (dump_file && (dump_flags & TDF_DETAILS))
1322 fprintf (dump_file,
1323 "Flushing rzbuffer at offset %" PRId64 " with: ", m_prev_offset);
1324
1325 unsigned HOST_WIDE_INT val = 0;
1326 for (unsigned i = 0; i < RZ_BUFFER_SIZE; i++)
1327 {
1328 unsigned char v
1329 = m_shadow_bytes[BYTES_BIG_ENDIAN ? RZ_BUFFER_SIZE - i - 1 : i];
1330 val |= (unsigned HOST_WIDE_INT)v << (BITS_PER_UNIT * i);
1331 if (dump_file && (dump_flags & TDF_DETAILS))
1332 fprintf (dump_file, "%02x ", v);
1333 }
1334
1335 if (dump_file && (dump_flags & TDF_DETAILS))
1336 fprintf (dump_file, "\n");
1337
1338 rtx c = gen_int_mode (val, SImode);
1339 m_shadow_mem = adjust_address (m_shadow_mem, SImode, 0);
1340 emit_move_insn (m_shadow_mem, c);
1341 m_shadow_bytes.truncate (0);
1342 }
1343
1344 /* Flush if the content of the buffer is full
1345 (equal to RZ_BUFFER_SIZE). */
1346
1347 void
1348 asan_redzone_buffer::flush_if_full (void)
1349 {
1350 if (m_shadow_bytes.length () == RZ_BUFFER_SIZE)
1351 flush_redzone_payload ();
1352 }
1353
1354 /* Insert code to protect stack vars. The prologue sequence should be emitted
1355 directly, epilogue sequence returned. BASE is the register holding the
1356 stack base, against which OFFSETS array offsets are relative to, OFFSETS
1357 array contains pairs of offsets in reverse order, always the end offset
1358 of some gap that needs protection followed by starting offset,
1359 and DECLS is an array of representative decls for each var partition.
1360 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1361 elements long (OFFSETS include gap before the first variable as well
1362 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1363 register which stack vars DECL_RTLs are based on. Either BASE should be
1364 assigned to PBASE, when not doing use after return protection, or
1365 corresponding address based on __asan_stack_malloc* return value. */
1366
1367 rtx_insn *
1368 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1369 HOST_WIDE_INT *offsets, tree *decls, int length)
1370 {
1371 rtx shadow_base, shadow_mem, ret, mem, orig_base;
1372 rtx_code_label *lab;
1373 rtx_insn *insns;
1374 char buf[32];
1375 HOST_WIDE_INT base_offset = offsets[length - 1];
1376 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1377 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1378 HOST_WIDE_INT last_offset, last_size, last_size_aligned;
1379 int l;
1380 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1381 tree str_cst, decl, id;
1382 int use_after_return_class = -1;
1383
1384 if (shadow_ptr_types[0] == NULL_TREE)
1385 asan_init_shadow_ptr_types ();
1386
1387 expanded_location cfun_xloc
1388 = expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1389
1390 /* First of all, prepare the description string. */
1391 pretty_printer asan_pp;
1392
1393 pp_decimal_int (&asan_pp, length / 2 - 1);
1394 pp_space (&asan_pp);
1395 for (l = length - 2; l; l -= 2)
1396 {
1397 tree decl = decls[l / 2 - 1];
1398 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1399 pp_space (&asan_pp);
1400 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1401 pp_space (&asan_pp);
1402
1403 expanded_location xloc
1404 = expand_location (DECL_SOURCE_LOCATION (decl));
1405 char location[32];
1406
1407 if (xloc.file == cfun_xloc.file)
1408 sprintf (location, ":%d", xloc.line);
1409 else
1410 location[0] = '\0';
1411
1412 if (DECL_P (decl) && DECL_NAME (decl))
1413 {
1414 unsigned idlen
1415 = IDENTIFIER_LENGTH (DECL_NAME (decl)) + strlen (location);
1416 pp_decimal_int (&asan_pp, idlen);
1417 pp_space (&asan_pp);
1418 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1419 pp_string (&asan_pp, location);
1420 }
1421 else
1422 pp_string (&asan_pp, "9 <unknown>");
1423
1424 if (l > 2)
1425 pp_space (&asan_pp);
1426 }
1427 str_cst = asan_pp_string (&asan_pp);
1428
1429 /* Emit the prologue sequence. */
1430 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1431 && ASAN_USE_AFTER_RETURN)
1432 {
1433 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1434 /* __asan_stack_malloc_N guarantees alignment
1435 N < 6 ? (64 << N) : 4096 bytes. */
1436 if (alignb > (use_after_return_class < 6
1437 ? (64U << use_after_return_class) : 4096U))
1438 use_after_return_class = -1;
1439 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1440 base_align_bias = ((asan_frame_size + alignb - 1)
1441 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1442 }
1443
1444 /* Align base if target is STRICT_ALIGNMENT. */
1445 if (STRICT_ALIGNMENT)
1446 {
1447 const HOST_WIDE_INT align
1448 = (GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT) << ASAN_SHADOW_SHIFT;
1449 base = expand_binop (Pmode, and_optab, base, gen_int_mode (-align, Pmode),
1450 NULL_RTX, 1, OPTAB_DIRECT);
1451 }
1452
1453 if (use_after_return_class == -1 && pbase)
1454 emit_move_insn (pbase, base);
1455
1456 base = expand_binop (Pmode, add_optab, base,
1457 gen_int_mode (base_offset - base_align_bias, Pmode),
1458 NULL_RTX, 1, OPTAB_DIRECT);
1459 orig_base = NULL_RTX;
1460 if (use_after_return_class != -1)
1461 {
1462 if (asan_detect_stack_use_after_return == NULL_TREE)
1463 {
1464 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1465 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1466 integer_type_node);
1467 SET_DECL_ASSEMBLER_NAME (decl, id);
1468 TREE_ADDRESSABLE (decl) = 1;
1469 DECL_ARTIFICIAL (decl) = 1;
1470 DECL_IGNORED_P (decl) = 1;
1471 DECL_EXTERNAL (decl) = 1;
1472 TREE_STATIC (decl) = 1;
1473 TREE_PUBLIC (decl) = 1;
1474 TREE_USED (decl) = 1;
1475 asan_detect_stack_use_after_return = decl;
1476 }
1477 orig_base = gen_reg_rtx (Pmode);
1478 emit_move_insn (orig_base, base);
1479 ret = expand_normal (asan_detect_stack_use_after_return);
1480 lab = gen_label_rtx ();
1481 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1482 VOIDmode, 0, lab,
1483 profile_probability::very_likely ());
1484 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1485 use_after_return_class);
1486 ret = init_one_libfunc (buf);
1487 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
1488 GEN_INT (asan_frame_size
1489 + base_align_bias),
1490 TYPE_MODE (pointer_sized_int_node));
1491 /* __asan_stack_malloc_[n] returns a pointer to fake stack if succeeded
1492 and NULL otherwise. Check RET value is NULL here and jump over the
1493 BASE reassignment in this case. Otherwise, reassign BASE to RET. */
1494 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1495 VOIDmode, 0, lab,
1496 profile_probability:: very_unlikely ());
1497 ret = convert_memory_address (Pmode, ret);
1498 emit_move_insn (base, ret);
1499 emit_label (lab);
1500 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1501 gen_int_mode (base_align_bias
1502 - base_offset, Pmode),
1503 NULL_RTX, 1, OPTAB_DIRECT));
1504 }
1505 mem = gen_rtx_MEM (ptr_mode, base);
1506 mem = adjust_address (mem, VOIDmode, base_align_bias);
1507 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1508 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1509 emit_move_insn (mem, expand_normal (str_cst));
1510 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1511 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1512 id = get_identifier (buf);
1513 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1514 VAR_DECL, id, char_type_node);
1515 SET_DECL_ASSEMBLER_NAME (decl, id);
1516 TREE_ADDRESSABLE (decl) = 1;
1517 TREE_READONLY (decl) = 1;
1518 DECL_ARTIFICIAL (decl) = 1;
1519 DECL_IGNORED_P (decl) = 1;
1520 TREE_STATIC (decl) = 1;
1521 TREE_PUBLIC (decl) = 0;
1522 TREE_USED (decl) = 1;
1523 DECL_INITIAL (decl) = decl;
1524 TREE_ASM_WRITTEN (decl) = 1;
1525 TREE_ASM_WRITTEN (id) = 1;
1526 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1527 shadow_base = expand_binop (Pmode, lshr_optab, base,
1528 gen_int_shift_amount (Pmode, ASAN_SHADOW_SHIFT),
1529 NULL_RTX, 1, OPTAB_DIRECT);
1530 shadow_base
1531 = plus_constant (Pmode, shadow_base,
1532 asan_shadow_offset ()
1533 + (base_align_bias >> ASAN_SHADOW_SHIFT));
1534 gcc_assert (asan_shadow_set != -1
1535 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1536 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1537 set_mem_alias_set (shadow_mem, asan_shadow_set);
1538 if (STRICT_ALIGNMENT)
1539 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1540 prev_offset = base_offset;
1541
1542 asan_redzone_buffer rz_buffer (shadow_mem, prev_offset);
1543 for (l = length; l; l -= 2)
1544 {
1545 if (l == 2)
1546 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1547 offset = offsets[l - 1];
1548
1549 bool extra_byte = (offset - base_offset) & (ASAN_SHADOW_GRANULARITY - 1);
1550 /* If a red-zone is not aligned to ASAN_SHADOW_GRANULARITY then
1551 the previous stack variable has size % ASAN_SHADOW_GRANULARITY != 0.
1552 In that case we have to emit one extra byte that will describe
1553 how many bytes (our of ASAN_SHADOW_GRANULARITY) can be accessed. */
1554 if (extra_byte)
1555 {
1556 HOST_WIDE_INT aoff
1557 = base_offset + ((offset - base_offset)
1558 & ~(ASAN_SHADOW_GRANULARITY - HOST_WIDE_INT_1));
1559 rz_buffer.emit_redzone_byte (aoff, offset - aoff);
1560 offset = aoff + ASAN_SHADOW_GRANULARITY;
1561 }
1562
1563 /* Calculate size of red zone payload. */
1564 while (offset < offsets[l - 2])
1565 {
1566 rz_buffer.emit_redzone_byte (offset, cur_shadow_byte);
1567 offset += ASAN_SHADOW_GRANULARITY;
1568 }
1569
1570 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1571 }
1572
1573 /* As the automatic variables are aligned to
1574 ASAN_RED_ZONE_SIZE / ASAN_SHADOW_GRANULARITY, the buffer should be
1575 flushed here. */
1576 gcc_assert (rz_buffer.m_shadow_bytes.is_empty ());
1577
1578 do_pending_stack_adjust ();
1579
1580 /* Construct epilogue sequence. */
1581 start_sequence ();
1582
1583 lab = NULL;
1584 if (use_after_return_class != -1)
1585 {
1586 rtx_code_label *lab2 = gen_label_rtx ();
1587 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1588 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1589 VOIDmode, 0, lab2,
1590 profile_probability::very_likely ());
1591 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1592 set_mem_alias_set (shadow_mem, asan_shadow_set);
1593 mem = gen_rtx_MEM (ptr_mode, base);
1594 mem = adjust_address (mem, VOIDmode, base_align_bias);
1595 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1596 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1597 if (use_after_return_class < 5
1598 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1599 BITS_PER_UNIT, true))
1600 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1601 BITS_PER_UNIT, true, RETURN_BEGIN);
1602 else if (use_after_return_class >= 5
1603 || !set_storage_via_setmem (shadow_mem,
1604 GEN_INT (sz),
1605 gen_int_mode (c, QImode),
1606 BITS_PER_UNIT, BITS_PER_UNIT,
1607 -1, sz, sz, sz))
1608 {
1609 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1610 use_after_return_class);
1611 ret = init_one_libfunc (buf);
1612 rtx addr = convert_memory_address (ptr_mode, base);
1613 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1614 emit_library_call (ret, LCT_NORMAL, ptr_mode, addr, ptr_mode,
1615 GEN_INT (asan_frame_size + base_align_bias),
1616 TYPE_MODE (pointer_sized_int_node),
1617 orig_addr, ptr_mode);
1618 }
1619 lab = gen_label_rtx ();
1620 emit_jump (lab);
1621 emit_label (lab2);
1622 }
1623
1624 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1625 set_mem_alias_set (shadow_mem, asan_shadow_set);
1626
1627 if (STRICT_ALIGNMENT)
1628 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1629
1630 prev_offset = base_offset;
1631 last_offset = base_offset;
1632 last_size = 0;
1633 last_size_aligned = 0;
1634 for (l = length; l; l -= 2)
1635 {
1636 offset = base_offset + ((offsets[l - 1] - base_offset)
1637 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1638 if (last_offset + last_size_aligned < offset)
1639 {
1640 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1641 (last_offset - prev_offset)
1642 >> ASAN_SHADOW_SHIFT);
1643 prev_offset = last_offset;
1644 asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
1645 last_offset = offset;
1646 last_size = 0;
1647 }
1648 else
1649 last_size = offset - last_offset;
1650 last_size += base_offset + ((offsets[l - 2] - base_offset)
1651 & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1652 - offset;
1653
1654 /* Unpoison shadow memory that corresponds to a variable that is
1655 is subject of use-after-return sanitization. */
1656 if (l > 2)
1657 {
1658 decl = decls[l / 2 - 2];
1659 if (asan_handled_variables != NULL
1660 && asan_handled_variables->contains (decl))
1661 {
1662 HOST_WIDE_INT size = offsets[l - 3] - offsets[l - 2];
1663 if (dump_file && (dump_flags & TDF_DETAILS))
1664 {
1665 const char *n = (DECL_NAME (decl)
1666 ? IDENTIFIER_POINTER (DECL_NAME (decl))
1667 : "<unknown>");
1668 fprintf (dump_file, "Unpoisoning shadow stack for variable: "
1669 "%s (%" PRId64 " B)\n", n, size);
1670 }
1671
1672 last_size += size & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1);
1673 }
1674 }
1675 last_size_aligned
1676 = ((last_size + (ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1677 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1678 }
1679 if (last_size_aligned)
1680 {
1681 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1682 (last_offset - prev_offset)
1683 >> ASAN_SHADOW_SHIFT);
1684 asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
1685 }
1686
1687 /* Clean-up set with instrumented stack variables. */
1688 delete asan_handled_variables;
1689 asan_handled_variables = NULL;
1690 delete asan_used_labels;
1691 asan_used_labels = NULL;
1692
1693 do_pending_stack_adjust ();
1694 if (lab)
1695 emit_label (lab);
1696
1697 insns = get_insns ();
1698 end_sequence ();
1699 return insns;
1700 }
1701
1702 /* Emit __asan_allocas_unpoison (top, bot) call. The BASE parameter corresponds
1703 to BOT argument, for TOP virtual_stack_dynamic_rtx is used. NEW_SEQUENCE
1704 indicates whether we're emitting new instructions sequence or not. */
1705
1706 rtx_insn *
1707 asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before)
1708 {
1709 if (before)
1710 push_to_sequence (before);
1711 else
1712 start_sequence ();
1713 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
1714 top = convert_memory_address (ptr_mode, top);
1715 bot = convert_memory_address (ptr_mode, bot);
1716 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
1717 top, ptr_mode, bot, ptr_mode);
1718
1719 do_pending_stack_adjust ();
1720 rtx_insn *insns = get_insns ();
1721 end_sequence ();
1722 return insns;
1723 }
1724
1725 /* Return true if DECL, a global var, might be overridden and needs
1726 therefore a local alias. */
1727
1728 static bool
1729 asan_needs_local_alias (tree decl)
1730 {
1731 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1732 }
1733
1734 /* Return true if DECL, a global var, is an artificial ODR indicator symbol
1735 therefore doesn't need protection. */
1736
1737 static bool
1738 is_odr_indicator (tree decl)
1739 {
1740 return (DECL_ARTIFICIAL (decl)
1741 && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
1742 }
1743
1744 /* Return true if DECL is a VAR_DECL that should be protected
1745 by Address Sanitizer, by appending a red zone with protected
1746 shadow memory after it and aligning it to at least
1747 ASAN_RED_ZONE_SIZE bytes. */
1748
1749 bool
1750 asan_protect_global (tree decl, bool ignore_decl_rtl_set_p)
1751 {
1752 if (!ASAN_GLOBALS)
1753 return false;
1754
1755 rtx rtl, symbol;
1756
1757 if (TREE_CODE (decl) == STRING_CST)
1758 {
1759 /* Instrument all STRING_CSTs except those created
1760 by asan_pp_string here. */
1761 if (shadow_ptr_types[0] != NULL_TREE
1762 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1763 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1764 return false;
1765 return true;
1766 }
1767 if (!VAR_P (decl)
1768 /* TLS vars aren't statically protectable. */
1769 || DECL_THREAD_LOCAL_P (decl)
1770 /* Externs will be protected elsewhere. */
1771 || DECL_EXTERNAL (decl)
1772 /* PR sanitizer/81697: For architectures that use section anchors first
1773 call to asan_protect_global may occur before DECL_RTL (decl) is set.
1774 We should ignore DECL_RTL_SET_P then, because otherwise the first call
1775 to asan_protect_global will return FALSE and the following calls on the
1776 same decl after setting DECL_RTL (decl) will return TRUE and we'll end
1777 up with inconsistency at runtime. */
1778 || (!DECL_RTL_SET_P (decl) && !ignore_decl_rtl_set_p)
1779 /* Comdat vars pose an ABI problem, we can't know if
1780 the var that is selected by the linker will have
1781 padding or not. */
1782 || DECL_ONE_ONLY (decl)
1783 /* Similarly for common vars. People can use -fno-common.
1784 Note: Linux kernel is built with -fno-common, so we do instrument
1785 globals there even if it is C. */
1786 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1787 /* Don't protect if using user section, often vars placed
1788 into user section from multiple TUs are then assumed
1789 to be an array of such vars, putting padding in there
1790 breaks this assumption. */
1791 || (DECL_SECTION_NAME (decl) != NULL
1792 && !symtab_node::get (decl)->implicit_section
1793 && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1794 || DECL_SIZE (decl) == 0
1795 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1796 || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1797 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1798 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1799 || TREE_TYPE (decl) == ubsan_get_source_location_type ()
1800 || is_odr_indicator (decl))
1801 return false;
1802
1803 if (!ignore_decl_rtl_set_p || DECL_RTL_SET_P (decl))
1804 {
1805
1806 rtl = DECL_RTL (decl);
1807 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1808 return false;
1809 symbol = XEXP (rtl, 0);
1810
1811 if (CONSTANT_POOL_ADDRESS_P (symbol)
1812 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1813 return false;
1814 }
1815
1816 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1817 return false;
1818
1819 if (!TARGET_SUPPORTS_ALIASES && asan_needs_local_alias (decl))
1820 return false;
1821
1822 return true;
1823 }
1824
1825 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1826 IS_STORE is either 1 (for a store) or 0 (for a load). */
1827
1828 static tree
1829 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1830 int *nargs)
1831 {
1832 static enum built_in_function report[2][2][6]
1833 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1834 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1835 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1836 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1837 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1838 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1839 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1840 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1841 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1842 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1843 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1844 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1845 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1846 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1847 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1848 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1849 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1850 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1851 if (size_in_bytes == -1)
1852 {
1853 *nargs = 2;
1854 return builtin_decl_implicit (report[recover_p][is_store][5]);
1855 }
1856 *nargs = 1;
1857 int size_log2 = exact_log2 (size_in_bytes);
1858 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1859 }
1860
1861 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1862 IS_STORE is either 1 (for a store) or 0 (for a load). */
1863
1864 static tree
1865 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1866 int *nargs)
1867 {
1868 static enum built_in_function check[2][2][6]
1869 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1870 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1871 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1872 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1873 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1874 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1875 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1876 BUILT_IN_ASAN_LOAD2_NOABORT,
1877 BUILT_IN_ASAN_LOAD4_NOABORT,
1878 BUILT_IN_ASAN_LOAD8_NOABORT,
1879 BUILT_IN_ASAN_LOAD16_NOABORT,
1880 BUILT_IN_ASAN_LOADN_NOABORT },
1881 { BUILT_IN_ASAN_STORE1_NOABORT,
1882 BUILT_IN_ASAN_STORE2_NOABORT,
1883 BUILT_IN_ASAN_STORE4_NOABORT,
1884 BUILT_IN_ASAN_STORE8_NOABORT,
1885 BUILT_IN_ASAN_STORE16_NOABORT,
1886 BUILT_IN_ASAN_STOREN_NOABORT } } };
1887 if (size_in_bytes == -1)
1888 {
1889 *nargs = 2;
1890 return builtin_decl_implicit (check[recover_p][is_store][5]);
1891 }
1892 *nargs = 1;
1893 int size_log2 = exact_log2 (size_in_bytes);
1894 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1895 }
1896
1897 /* Split the current basic block and create a condition statement
1898 insertion point right before or after the statement pointed to by
1899 ITER. Return an iterator to the point at which the caller might
1900 safely insert the condition statement.
1901
1902 THEN_BLOCK must be set to the address of an uninitialized instance
1903 of basic_block. The function will then set *THEN_BLOCK to the
1904 'then block' of the condition statement to be inserted by the
1905 caller.
1906
1907 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1908 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1909
1910 Similarly, the function will set *FALLTRHOUGH_BLOCK to the 'else
1911 block' of the condition statement to be inserted by the caller.
1912
1913 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1914 statements starting from *ITER, and *THEN_BLOCK is a new empty
1915 block.
1916
1917 *ITER is adjusted to point to always point to the first statement
1918 of the basic block * FALLTHROUGH_BLOCK. That statement is the
1919 same as what ITER was pointing to prior to calling this function,
1920 if BEFORE_P is true; otherwise, it is its following statement. */
1921
1922 gimple_stmt_iterator
1923 create_cond_insert_point (gimple_stmt_iterator *iter,
1924 bool before_p,
1925 bool then_more_likely_p,
1926 bool create_then_fallthru_edge,
1927 basic_block *then_block,
1928 basic_block *fallthrough_block)
1929 {
1930 gimple_stmt_iterator gsi = *iter;
1931
1932 if (!gsi_end_p (gsi) && before_p)
1933 gsi_prev (&gsi);
1934
1935 basic_block cur_bb = gsi_bb (*iter);
1936
1937 edge e = split_block (cur_bb, gsi_stmt (gsi));
1938
1939 /* Get a hold on the 'condition block', the 'then block' and the
1940 'else block'. */
1941 basic_block cond_bb = e->src;
1942 basic_block fallthru_bb = e->dest;
1943 basic_block then_bb = create_empty_bb (cond_bb);
1944 if (current_loops)
1945 {
1946 add_bb_to_loop (then_bb, cond_bb->loop_father);
1947 loops_state_set (LOOPS_NEED_FIXUP);
1948 }
1949
1950 /* Set up the newly created 'then block'. */
1951 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1952 profile_probability fallthrough_probability
1953 = then_more_likely_p
1954 ? profile_probability::very_unlikely ()
1955 : profile_probability::very_likely ();
1956 e->probability = fallthrough_probability.invert ();
1957 then_bb->count = e->count ();
1958 if (create_then_fallthru_edge)
1959 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1960
1961 /* Set up the fallthrough basic block. */
1962 e = find_edge (cond_bb, fallthru_bb);
1963 e->flags = EDGE_FALSE_VALUE;
1964 e->probability = fallthrough_probability;
1965
1966 /* Update dominance info for the newly created then_bb; note that
1967 fallthru_bb's dominance info has already been updated by
1968 split_bock. */
1969 if (dom_info_available_p (CDI_DOMINATORS))
1970 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1971
1972 *then_block = then_bb;
1973 *fallthrough_block = fallthru_bb;
1974 *iter = gsi_start_bb (fallthru_bb);
1975
1976 return gsi_last_bb (cond_bb);
1977 }
1978
1979 /* Insert an if condition followed by a 'then block' right before the
1980 statement pointed to by ITER. The fallthrough block -- which is the
1981 else block of the condition as well as the destination of the
1982 outcoming edge of the 'then block' -- starts with the statement
1983 pointed to by ITER.
1984
1985 COND is the condition of the if.
1986
1987 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1988 'then block' is higher than the probability of the edge to the
1989 fallthrough block.
1990
1991 Upon completion of the function, *THEN_BB is set to the newly
1992 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1993 fallthrough block.
1994
1995 *ITER is adjusted to still point to the same statement it was
1996 pointing to initially. */
1997
1998 static void
1999 insert_if_then_before_iter (gcond *cond,
2000 gimple_stmt_iterator *iter,
2001 bool then_more_likely_p,
2002 basic_block *then_bb,
2003 basic_block *fallthrough_bb)
2004 {
2005 gimple_stmt_iterator cond_insert_point =
2006 create_cond_insert_point (iter,
2007 /*before_p=*/true,
2008 then_more_likely_p,
2009 /*create_then_fallthru_edge=*/true,
2010 then_bb,
2011 fallthrough_bb);
2012 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
2013 }
2014
2015 /* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
2016 If RETURN_ADDRESS is set to true, return memory location instread
2017 of a value in the shadow memory. */
2018
2019 static tree
2020 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
2021 tree base_addr, tree shadow_ptr_type,
2022 bool return_address = false)
2023 {
2024 tree t, uintptr_type = TREE_TYPE (base_addr);
2025 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2026 gimple *g;
2027
2028 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
2029 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
2030 base_addr, t);
2031 gimple_set_location (g, location);
2032 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2033
2034 t = build_int_cst (uintptr_type, asan_shadow_offset ());
2035 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
2036 gimple_assign_lhs (g), t);
2037 gimple_set_location (g, location);
2038 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2039
2040 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
2041 gimple_assign_lhs (g));
2042 gimple_set_location (g, location);
2043 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2044
2045 if (!return_address)
2046 {
2047 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
2048 build_int_cst (shadow_ptr_type, 0));
2049 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
2050 gimple_set_location (g, location);
2051 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2052 }
2053
2054 return gimple_assign_lhs (g);
2055 }
2056
2057 /* BASE can already be an SSA_NAME; in that case, do not create a
2058 new SSA_NAME for it. */
2059
2060 static tree
2061 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
2062 bool before_p)
2063 {
2064 if (TREE_CODE (base) == SSA_NAME)
2065 return base;
2066 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
2067 TREE_CODE (base), base);
2068 gimple_set_location (g, loc);
2069 if (before_p)
2070 gsi_insert_before (iter, g, GSI_SAME_STMT);
2071 else
2072 gsi_insert_after (iter, g, GSI_NEW_STMT);
2073 return gimple_assign_lhs (g);
2074 }
2075
2076 /* LEN can already have necessary size and precision;
2077 in that case, do not create a new variable. */
2078
2079 tree
2080 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
2081 bool before_p)
2082 {
2083 if (ptrofftype_p (len))
2084 return len;
2085 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2086 NOP_EXPR, len);
2087 gimple_set_location (g, loc);
2088 if (before_p)
2089 gsi_insert_before (iter, g, GSI_SAME_STMT);
2090 else
2091 gsi_insert_after (iter, g, GSI_NEW_STMT);
2092 return gimple_assign_lhs (g);
2093 }
2094
2095 /* Instrument the memory access instruction BASE. Insert new
2096 statements before or after ITER.
2097
2098 Note that the memory access represented by BASE can be either an
2099 SSA_NAME, or a non-SSA expression. LOCATION is the source code
2100 location. IS_STORE is TRUE for a store, FALSE for a load.
2101 BEFORE_P is TRUE for inserting the instrumentation code before
2102 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
2103 for a scalar memory access and FALSE for memory region access.
2104 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
2105 length. ALIGN tells alignment of accessed memory object.
2106
2107 START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
2108 memory region have already been instrumented.
2109
2110 If BEFORE_P is TRUE, *ITER is arranged to still point to the
2111 statement it was pointing to prior to calling this function,
2112 otherwise, it points to the statement logically following it. */
2113
2114 static void
2115 build_check_stmt (location_t loc, tree base, tree len,
2116 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
2117 bool is_non_zero_len, bool before_p, bool is_store,
2118 bool is_scalar_access, unsigned int align = 0)
2119 {
2120 gimple_stmt_iterator gsi = *iter;
2121 gimple *g;
2122
2123 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
2124
2125 gsi = *iter;
2126
2127 base = unshare_expr (base);
2128 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
2129
2130 if (len)
2131 {
2132 len = unshare_expr (len);
2133 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
2134 }
2135 else
2136 {
2137 gcc_assert (size_in_bytes != -1);
2138 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
2139 }
2140
2141 if (size_in_bytes > 1)
2142 {
2143 if ((size_in_bytes & (size_in_bytes - 1)) != 0
2144 || size_in_bytes > 16)
2145 is_scalar_access = false;
2146 else if (align && align < size_in_bytes * BITS_PER_UNIT)
2147 {
2148 /* On non-strict alignment targets, if
2149 16-byte access is just 8-byte aligned,
2150 this will result in misaligned shadow
2151 memory 2 byte load, but otherwise can
2152 be handled using one read. */
2153 if (size_in_bytes != 16
2154 || STRICT_ALIGNMENT
2155 || align < 8 * BITS_PER_UNIT)
2156 is_scalar_access = false;
2157 }
2158 }
2159
2160 HOST_WIDE_INT flags = 0;
2161 if (is_store)
2162 flags |= ASAN_CHECK_STORE;
2163 if (is_non_zero_len)
2164 flags |= ASAN_CHECK_NON_ZERO_LEN;
2165 if (is_scalar_access)
2166 flags |= ASAN_CHECK_SCALAR_ACCESS;
2167
2168 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
2169 build_int_cst (integer_type_node, flags),
2170 base, len,
2171 build_int_cst (integer_type_node,
2172 align / BITS_PER_UNIT));
2173 gimple_set_location (g, loc);
2174 if (before_p)
2175 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
2176 else
2177 {
2178 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2179 gsi_next (&gsi);
2180 *iter = gsi;
2181 }
2182 }
2183
2184 /* If T represents a memory access, add instrumentation code before ITER.
2185 LOCATION is source code location.
2186 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
2187
2188 static void
2189 instrument_derefs (gimple_stmt_iterator *iter, tree t,
2190 location_t location, bool is_store)
2191 {
2192 if (is_store && !ASAN_INSTRUMENT_WRITES)
2193 return;
2194 if (!is_store && !ASAN_INSTRUMENT_READS)
2195 return;
2196
2197 tree type, base;
2198 HOST_WIDE_INT size_in_bytes;
2199 if (location == UNKNOWN_LOCATION)
2200 location = EXPR_LOCATION (t);
2201
2202 type = TREE_TYPE (t);
2203 switch (TREE_CODE (t))
2204 {
2205 case ARRAY_REF:
2206 case COMPONENT_REF:
2207 case INDIRECT_REF:
2208 case MEM_REF:
2209 case VAR_DECL:
2210 case BIT_FIELD_REF:
2211 break;
2212 /* FALLTHRU */
2213 default:
2214 return;
2215 }
2216
2217 size_in_bytes = int_size_in_bytes (type);
2218 if (size_in_bytes <= 0)
2219 return;
2220
2221 poly_int64 bitsize, bitpos;
2222 tree offset;
2223 machine_mode mode;
2224 int unsignedp, reversep, volatilep = 0;
2225 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
2226 &unsignedp, &reversep, &volatilep);
2227
2228 if (TREE_CODE (t) == COMPONENT_REF
2229 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
2230 {
2231 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
2232 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
2233 TREE_OPERAND (t, 0), repr,
2234 TREE_OPERAND (t, 2)),
2235 location, is_store);
2236 return;
2237 }
2238
2239 if (!multiple_p (bitpos, BITS_PER_UNIT)
2240 || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
2241 return;
2242
2243 if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
2244 return;
2245
2246 poly_int64 decl_size;
2247 if (VAR_P (inner)
2248 && offset == NULL_TREE
2249 && DECL_SIZE (inner)
2250 && poly_int_tree_p (DECL_SIZE (inner), &decl_size)
2251 && known_subrange_p (bitpos, bitsize, 0, decl_size))
2252 {
2253 if (DECL_THREAD_LOCAL_P (inner))
2254 return;
2255 if (!ASAN_GLOBALS && is_global_var (inner))
2256 return;
2257 if (!TREE_STATIC (inner))
2258 {
2259 /* Automatic vars in the current function will be always
2260 accessible. */
2261 if (decl_function_context (inner) == current_function_decl
2262 && (!asan_sanitize_use_after_scope ()
2263 || !TREE_ADDRESSABLE (inner)))
2264 return;
2265 }
2266 /* Always instrument external vars, they might be dynamically
2267 initialized. */
2268 else if (!DECL_EXTERNAL (inner))
2269 {
2270 /* For static vars if they are known not to be dynamically
2271 initialized, they will be always accessible. */
2272 varpool_node *vnode = varpool_node::get (inner);
2273 if (vnode && !vnode->dynamically_initialized)
2274 return;
2275 }
2276 }
2277
2278 base = build_fold_addr_expr (t);
2279 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
2280 {
2281 unsigned int align = get_object_alignment (t);
2282 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
2283 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
2284 is_store, /*is_scalar_access*/true, align);
2285 update_mem_ref_hash_table (base, size_in_bytes);
2286 update_mem_ref_hash_table (t, size_in_bytes);
2287 }
2288
2289 }
2290
2291 /* Insert a memory reference into the hash table if access length
2292 can be determined in compile time. */
2293
2294 static void
2295 maybe_update_mem_ref_hash_table (tree base, tree len)
2296 {
2297 if (!POINTER_TYPE_P (TREE_TYPE (base))
2298 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
2299 return;
2300
2301 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2302
2303 if (size_in_bytes != -1)
2304 update_mem_ref_hash_table (base, size_in_bytes);
2305 }
2306
2307 /* Instrument an access to a contiguous memory region that starts at
2308 the address pointed to by BASE, over a length of LEN (expressed in
2309 the sizeof (*BASE) bytes). ITER points to the instruction before
2310 which the instrumentation instructions must be inserted. LOCATION
2311 is the source location that the instrumentation instructions must
2312 have. If IS_STORE is true, then the memory access is a store;
2313 otherwise, it's a load. */
2314
2315 static void
2316 instrument_mem_region_access (tree base, tree len,
2317 gimple_stmt_iterator *iter,
2318 location_t location, bool is_store)
2319 {
2320 if (!POINTER_TYPE_P (TREE_TYPE (base))
2321 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
2322 || integer_zerop (len))
2323 return;
2324
2325 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2326
2327 if ((size_in_bytes == -1)
2328 || !has_mem_ref_been_instrumented (base, size_in_bytes))
2329 {
2330 build_check_stmt (location, base, len, size_in_bytes, iter,
2331 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
2332 is_store, /*is_scalar_access*/false, /*align*/0);
2333 }
2334
2335 maybe_update_mem_ref_hash_table (base, len);
2336 *iter = gsi_for_stmt (gsi_stmt (*iter));
2337 }
2338
2339 /* Instrument the call to a built-in memory access function that is
2340 pointed to by the iterator ITER.
2341
2342 Upon completion, return TRUE iff *ITER has been advanced to the
2343 statement following the one it was originally pointing to. */
2344
2345 static bool
2346 instrument_builtin_call (gimple_stmt_iterator *iter)
2347 {
2348 if (!ASAN_MEMINTRIN)
2349 return false;
2350
2351 bool iter_advanced_p = false;
2352 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
2353
2354 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
2355
2356 location_t loc = gimple_location (call);
2357
2358 asan_mem_ref src0, src1, dest;
2359 asan_mem_ref_init (&src0, NULL, 1);
2360 asan_mem_ref_init (&src1, NULL, 1);
2361 asan_mem_ref_init (&dest, NULL, 1);
2362
2363 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
2364 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
2365 dest_is_deref = false, intercepted_p = true;
2366
2367 if (get_mem_refs_of_builtin_call (call,
2368 &src0, &src0_len, &src0_is_store,
2369 &src1, &src1_len, &src1_is_store,
2370 &dest, &dest_len, &dest_is_store,
2371 &dest_is_deref, &intercepted_p, iter))
2372 {
2373 if (dest_is_deref)
2374 {
2375 instrument_derefs (iter, dest.start, loc, dest_is_store);
2376 gsi_next (iter);
2377 iter_advanced_p = true;
2378 }
2379 else if (!intercepted_p
2380 && (src0_len || src1_len || dest_len))
2381 {
2382 if (src0.start != NULL_TREE)
2383 instrument_mem_region_access (src0.start, src0_len,
2384 iter, loc, /*is_store=*/false);
2385 if (src1.start != NULL_TREE)
2386 instrument_mem_region_access (src1.start, src1_len,
2387 iter, loc, /*is_store=*/false);
2388 if (dest.start != NULL_TREE)
2389 instrument_mem_region_access (dest.start, dest_len,
2390 iter, loc, /*is_store=*/true);
2391
2392 *iter = gsi_for_stmt (call);
2393 gsi_next (iter);
2394 iter_advanced_p = true;
2395 }
2396 else
2397 {
2398 if (src0.start != NULL_TREE)
2399 maybe_update_mem_ref_hash_table (src0.start, src0_len);
2400 if (src1.start != NULL_TREE)
2401 maybe_update_mem_ref_hash_table (src1.start, src1_len);
2402 if (dest.start != NULL_TREE)
2403 maybe_update_mem_ref_hash_table (dest.start, dest_len);
2404 }
2405 }
2406 return iter_advanced_p;
2407 }
2408
2409 /* Instrument the assignment statement ITER if it is subject to
2410 instrumentation. Return TRUE iff instrumentation actually
2411 happened. In that case, the iterator ITER is advanced to the next
2412 logical expression following the one initially pointed to by ITER,
2413 and the relevant memory reference that which access has been
2414 instrumented is added to the memory references hash table. */
2415
2416 static bool
2417 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2418 {
2419 gimple *s = gsi_stmt (*iter);
2420
2421 gcc_assert (gimple_assign_single_p (s));
2422
2423 tree ref_expr = NULL_TREE;
2424 bool is_store, is_instrumented = false;
2425
2426 if (gimple_store_p (s))
2427 {
2428 ref_expr = gimple_assign_lhs (s);
2429 is_store = true;
2430 instrument_derefs (iter, ref_expr,
2431 gimple_location (s),
2432 is_store);
2433 is_instrumented = true;
2434 }
2435
2436 if (gimple_assign_load_p (s))
2437 {
2438 ref_expr = gimple_assign_rhs1 (s);
2439 is_store = false;
2440 instrument_derefs (iter, ref_expr,
2441 gimple_location (s),
2442 is_store);
2443 is_instrumented = true;
2444 }
2445
2446 if (is_instrumented)
2447 gsi_next (iter);
2448
2449 return is_instrumented;
2450 }
2451
2452 /* Instrument the function call pointed to by the iterator ITER, if it
2453 is subject to instrumentation. At the moment, the only function
2454 calls that are instrumented are some built-in functions that access
2455 memory. Look at instrument_builtin_call to learn more.
2456
2457 Upon completion return TRUE iff *ITER was advanced to the statement
2458 following the one it was originally pointing to. */
2459
2460 static bool
2461 maybe_instrument_call (gimple_stmt_iterator *iter)
2462 {
2463 gimple *stmt = gsi_stmt (*iter);
2464 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2465
2466 if (is_builtin && instrument_builtin_call (iter))
2467 return true;
2468
2469 if (gimple_call_noreturn_p (stmt))
2470 {
2471 if (is_builtin)
2472 {
2473 tree callee = gimple_call_fndecl (stmt);
2474 switch (DECL_FUNCTION_CODE (callee))
2475 {
2476 case BUILT_IN_UNREACHABLE:
2477 case BUILT_IN_TRAP:
2478 /* Don't instrument these. */
2479 return false;
2480 default:
2481 break;
2482 }
2483 }
2484 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2485 gimple *g = gimple_build_call (decl, 0);
2486 gimple_set_location (g, gimple_location (stmt));
2487 gsi_insert_before (iter, g, GSI_SAME_STMT);
2488 }
2489
2490 bool instrumented = false;
2491 if (gimple_store_p (stmt))
2492 {
2493 tree ref_expr = gimple_call_lhs (stmt);
2494 instrument_derefs (iter, ref_expr,
2495 gimple_location (stmt),
2496 /*is_store=*/true);
2497
2498 instrumented = true;
2499 }
2500
2501 /* Walk through gimple_call arguments and check them id needed. */
2502 unsigned args_num = gimple_call_num_args (stmt);
2503 for (unsigned i = 0; i < args_num; ++i)
2504 {
2505 tree arg = gimple_call_arg (stmt, i);
2506 /* If ARG is not a non-aggregate register variable, compiler in general
2507 creates temporary for it and pass it as argument to gimple call.
2508 But in some cases, e.g. when we pass by value a small structure that
2509 fits to register, compiler can avoid extra overhead by pulling out
2510 these temporaries. In this case, we should check the argument. */
2511 if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
2512 {
2513 instrument_derefs (iter, arg,
2514 gimple_location (stmt),
2515 /*is_store=*/false);
2516 instrumented = true;
2517 }
2518 }
2519 if (instrumented)
2520 gsi_next (iter);
2521 return instrumented;
2522 }
2523
2524 /* Walk each instruction of all basic block and instrument those that
2525 represent memory references: loads, stores, or function calls.
2526 In a given basic block, this function avoids instrumenting memory
2527 references that have already been instrumented. */
2528
2529 static void
2530 transform_statements (void)
2531 {
2532 basic_block bb, last_bb = NULL;
2533 gimple_stmt_iterator i;
2534 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2535
2536 FOR_EACH_BB_FN (bb, cfun)
2537 {
2538 basic_block prev_bb = bb;
2539
2540 if (bb->index >= saved_last_basic_block) continue;
2541
2542 /* Flush the mem ref hash table, if current bb doesn't have
2543 exactly one predecessor, or if that predecessor (skipping
2544 over asan created basic blocks) isn't the last processed
2545 basic block. Thus we effectively flush on extended basic
2546 block boundaries. */
2547 while (single_pred_p (prev_bb))
2548 {
2549 prev_bb = single_pred (prev_bb);
2550 if (prev_bb->index < saved_last_basic_block)
2551 break;
2552 }
2553 if (prev_bb != last_bb)
2554 empty_mem_ref_hash_table ();
2555 last_bb = bb;
2556
2557 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2558 {
2559 gimple *s = gsi_stmt (i);
2560
2561 if (has_stmt_been_instrumented_p (s))
2562 gsi_next (&i);
2563 else if (gimple_assign_single_p (s)
2564 && !gimple_clobber_p (s)
2565 && maybe_instrument_assignment (&i))
2566 /* Nothing to do as maybe_instrument_assignment advanced
2567 the iterator I. */;
2568 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2569 /* Nothing to do as maybe_instrument_call
2570 advanced the iterator I. */;
2571 else
2572 {
2573 /* No instrumentation happened.
2574
2575 If the current instruction is a function call that
2576 might free something, let's forget about the memory
2577 references that got instrumented. Otherwise we might
2578 miss some instrumentation opportunities. Do the same
2579 for a ASAN_MARK poisoning internal function. */
2580 if (is_gimple_call (s)
2581 && (!nonfreeing_call_p (s)
2582 || asan_mark_p (s, ASAN_MARK_POISON)))
2583 empty_mem_ref_hash_table ();
2584
2585 gsi_next (&i);
2586 }
2587 }
2588 }
2589 free_mem_ref_resources ();
2590 }
2591
2592 /* Build
2593 __asan_before_dynamic_init (module_name)
2594 or
2595 __asan_after_dynamic_init ()
2596 call. */
2597
2598 tree
2599 asan_dynamic_init_call (bool after_p)
2600 {
2601 if (shadow_ptr_types[0] == NULL_TREE)
2602 asan_init_shadow_ptr_types ();
2603
2604 tree fn = builtin_decl_implicit (after_p
2605 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2606 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2607 tree module_name_cst = NULL_TREE;
2608 if (!after_p)
2609 {
2610 pretty_printer module_name_pp;
2611 pp_string (&module_name_pp, main_input_filename);
2612
2613 module_name_cst = asan_pp_string (&module_name_pp);
2614 module_name_cst = fold_convert (const_ptr_type_node,
2615 module_name_cst);
2616 }
2617
2618 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2619 }
2620
2621 /* Build
2622 struct __asan_global
2623 {
2624 const void *__beg;
2625 uptr __size;
2626 uptr __size_with_redzone;
2627 const void *__name;
2628 const void *__module_name;
2629 uptr __has_dynamic_init;
2630 __asan_global_source_location *__location;
2631 char *__odr_indicator;
2632 } type. */
2633
2634 static tree
2635 asan_global_struct (void)
2636 {
2637 static const char *field_names[]
2638 = { "__beg", "__size", "__size_with_redzone",
2639 "__name", "__module_name", "__has_dynamic_init", "__location",
2640 "__odr_indicator" };
2641 tree fields[ARRAY_SIZE (field_names)], ret;
2642 unsigned i;
2643
2644 ret = make_node (RECORD_TYPE);
2645 for (i = 0; i < ARRAY_SIZE (field_names); i++)
2646 {
2647 fields[i]
2648 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2649 get_identifier (field_names[i]),
2650 (i == 0 || i == 3) ? const_ptr_type_node
2651 : pointer_sized_int_node);
2652 DECL_CONTEXT (fields[i]) = ret;
2653 if (i)
2654 DECL_CHAIN (fields[i - 1]) = fields[i];
2655 }
2656 tree type_decl = build_decl (input_location, TYPE_DECL,
2657 get_identifier ("__asan_global"), ret);
2658 DECL_IGNORED_P (type_decl) = 1;
2659 DECL_ARTIFICIAL (type_decl) = 1;
2660 TYPE_FIELDS (ret) = fields[0];
2661 TYPE_NAME (ret) = type_decl;
2662 TYPE_STUB_DECL (ret) = type_decl;
2663 layout_type (ret);
2664 return ret;
2665 }
2666
2667 /* Create and return odr indicator symbol for DECL.
2668 TYPE is __asan_global struct type as returned by asan_global_struct. */
2669
2670 static tree
2671 create_odr_indicator (tree decl, tree type)
2672 {
2673 char *name;
2674 tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2675 tree decl_name
2676 = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
2677 : DECL_NAME (decl));
2678 /* DECL_NAME theoretically might be NULL. Bail out with 0 in this case. */
2679 if (decl_name == NULL_TREE)
2680 return build_int_cst (uptr, 0);
2681 const char *dname = IDENTIFIER_POINTER (decl_name);
2682 if (HAS_DECL_ASSEMBLER_NAME_P (decl))
2683 dname = targetm.strip_name_encoding (dname);
2684 size_t len = strlen (dname) + sizeof ("__odr_asan_");
2685 name = XALLOCAVEC (char, len);
2686 snprintf (name, len, "__odr_asan_%s", dname);
2687 #ifndef NO_DOT_IN_LABEL
2688 name[sizeof ("__odr_asan") - 1] = '.';
2689 #elif !defined(NO_DOLLAR_IN_LABEL)
2690 name[sizeof ("__odr_asan") - 1] = '$';
2691 #endif
2692 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
2693 char_type_node);
2694 TREE_ADDRESSABLE (var) = 1;
2695 TREE_READONLY (var) = 0;
2696 TREE_THIS_VOLATILE (var) = 1;
2697 DECL_GIMPLE_REG_P (var) = 0;
2698 DECL_ARTIFICIAL (var) = 1;
2699 DECL_IGNORED_P (var) = 1;
2700 TREE_STATIC (var) = 1;
2701 TREE_PUBLIC (var) = 1;
2702 DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
2703 DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);
2704
2705 TREE_USED (var) = 1;
2706 tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
2707 build_int_cst (unsigned_type_node, 0));
2708 TREE_CONSTANT (ctor) = 1;
2709 TREE_STATIC (ctor) = 1;
2710 DECL_INITIAL (var) = ctor;
2711 DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
2712 NULL, DECL_ATTRIBUTES (var));
2713 make_decl_rtl (var);
2714 varpool_node::finalize_decl (var);
2715 return fold_convert (uptr, build_fold_addr_expr (var));
2716 }
2717
2718 /* Return true if DECL, a global var, might be overridden and needs
2719 an additional odr indicator symbol. */
2720
2721 static bool
2722 asan_needs_odr_indicator_p (tree decl)
2723 {
2724 /* Don't emit ODR indicators for kernel because:
2725 a) Kernel is written in C thus doesn't need ODR indicators.
2726 b) Some kernel code may have assumptions about symbols containing specific
2727 patterns in their names. Since ODR indicators contain original names
2728 of symbols they are emitted for, these assumptions would be broken for
2729 ODR indicator symbols. */
2730 return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
2731 && !DECL_ARTIFICIAL (decl)
2732 && !DECL_WEAK (decl)
2733 && TREE_PUBLIC (decl));
2734 }
2735
2736 /* Append description of a single global DECL into vector V.
2737 TYPE is __asan_global struct type as returned by asan_global_struct. */
2738
2739 static void
2740 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2741 {
2742 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2743 unsigned HOST_WIDE_INT size;
2744 tree str_cst, module_name_cst, refdecl = decl;
2745 vec<constructor_elt, va_gc> *vinner = NULL;
2746
2747 pretty_printer asan_pp, module_name_pp;
2748
2749 if (DECL_NAME (decl))
2750 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2751 else
2752 pp_string (&asan_pp, "<unknown>");
2753 str_cst = asan_pp_string (&asan_pp);
2754
2755 pp_string (&module_name_pp, main_input_filename);
2756 module_name_cst = asan_pp_string (&module_name_pp);
2757
2758 if (asan_needs_local_alias (decl))
2759 {
2760 char buf[20];
2761 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2762 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2763 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2764 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2765 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2766 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2767 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2768 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2769 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2770 TREE_STATIC (refdecl) = 1;
2771 TREE_PUBLIC (refdecl) = 0;
2772 TREE_USED (refdecl) = 1;
2773 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2774 }
2775
2776 tree odr_indicator_ptr
2777 = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
2778 : build_int_cst (uptr, 0));
2779 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2780 fold_convert (const_ptr_type_node,
2781 build_fold_addr_expr (refdecl)));
2782 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2783 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2784 size += asan_red_zone_size (size);
2785 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2786 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2787 fold_convert (const_ptr_type_node, str_cst));
2788 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2789 fold_convert (const_ptr_type_node, module_name_cst));
2790 varpool_node *vnode = varpool_node::get (decl);
2791 int has_dynamic_init = 0;
2792 /* FIXME: Enable initialization order fiasco detection in LTO mode once
2793 proper fix for PR 79061 will be applied. */
2794 if (!in_lto_p)
2795 has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2796 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2797 build_int_cst (uptr, has_dynamic_init));
2798 tree locptr = NULL_TREE;
2799 location_t loc = DECL_SOURCE_LOCATION (decl);
2800 expanded_location xloc = expand_location (loc);
2801 if (xloc.file != NULL)
2802 {
2803 static int lasanloccnt = 0;
2804 char buf[25];
2805 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2806 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2807 ubsan_get_source_location_type ());
2808 TREE_STATIC (var) = 1;
2809 TREE_PUBLIC (var) = 0;
2810 DECL_ARTIFICIAL (var) = 1;
2811 DECL_IGNORED_P (var) = 1;
2812 pretty_printer filename_pp;
2813 pp_string (&filename_pp, xloc.file);
2814 tree str = asan_pp_string (&filename_pp);
2815 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2816 NULL_TREE, str, NULL_TREE,
2817 build_int_cst (unsigned_type_node,
2818 xloc.line), NULL_TREE,
2819 build_int_cst (unsigned_type_node,
2820 xloc.column));
2821 TREE_CONSTANT (ctor) = 1;
2822 TREE_STATIC (ctor) = 1;
2823 DECL_INITIAL (var) = ctor;
2824 varpool_node::finalize_decl (var);
2825 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2826 }
2827 else
2828 locptr = build_int_cst (uptr, 0);
2829 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2830 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
2831 init = build_constructor (type, vinner);
2832 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2833 }
2834
2835 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2836 void
2837 initialize_sanitizer_builtins (void)
2838 {
2839 tree decl;
2840
2841 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2842 return;
2843
2844 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2845 tree BT_FN_VOID_PTR
2846 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2847 tree BT_FN_VOID_CONST_PTR
2848 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2849 tree BT_FN_VOID_PTR_PTR
2850 = build_function_type_list (void_type_node, ptr_type_node,
2851 ptr_type_node, NULL_TREE);
2852 tree BT_FN_VOID_PTR_PTR_PTR
2853 = build_function_type_list (void_type_node, ptr_type_node,
2854 ptr_type_node, ptr_type_node, NULL_TREE);
2855 tree BT_FN_VOID_PTR_PTRMODE
2856 = build_function_type_list (void_type_node, ptr_type_node,
2857 pointer_sized_int_node, NULL_TREE);
2858 tree BT_FN_VOID_INT
2859 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2860 tree BT_FN_SIZE_CONST_PTR_INT
2861 = build_function_type_list (size_type_node, const_ptr_type_node,
2862 integer_type_node, NULL_TREE);
2863
2864 tree BT_FN_VOID_UINT8_UINT8
2865 = build_function_type_list (void_type_node, unsigned_char_type_node,
2866 unsigned_char_type_node, NULL_TREE);
2867 tree BT_FN_VOID_UINT16_UINT16
2868 = build_function_type_list (void_type_node, uint16_type_node,
2869 uint16_type_node, NULL_TREE);
2870 tree BT_FN_VOID_UINT32_UINT32
2871 = build_function_type_list (void_type_node, uint32_type_node,
2872 uint32_type_node, NULL_TREE);
2873 tree BT_FN_VOID_UINT64_UINT64
2874 = build_function_type_list (void_type_node, uint64_type_node,
2875 uint64_type_node, NULL_TREE);
2876 tree BT_FN_VOID_FLOAT_FLOAT
2877 = build_function_type_list (void_type_node, float_type_node,
2878 float_type_node, NULL_TREE);
2879 tree BT_FN_VOID_DOUBLE_DOUBLE
2880 = build_function_type_list (void_type_node, double_type_node,
2881 double_type_node, NULL_TREE);
2882 tree BT_FN_VOID_UINT64_PTR
2883 = build_function_type_list (void_type_node, uint64_type_node,
2884 ptr_type_node, NULL_TREE);
2885
2886 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2887 tree BT_FN_IX_CONST_VPTR_INT[5];
2888 tree BT_FN_IX_VPTR_IX_INT[5];
2889 tree BT_FN_VOID_VPTR_IX_INT[5];
2890 tree vptr
2891 = build_pointer_type (build_qualified_type (void_type_node,
2892 TYPE_QUAL_VOLATILE));
2893 tree cvptr
2894 = build_pointer_type (build_qualified_type (void_type_node,
2895 TYPE_QUAL_VOLATILE
2896 |TYPE_QUAL_CONST));
2897 tree boolt
2898 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2899 int i;
2900 for (i = 0; i < 5; i++)
2901 {
2902 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2903 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2904 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2905 integer_type_node, integer_type_node,
2906 NULL_TREE);
2907 BT_FN_IX_CONST_VPTR_INT[i]
2908 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2909 BT_FN_IX_VPTR_IX_INT[i]
2910 = build_function_type_list (ix, vptr, ix, integer_type_node,
2911 NULL_TREE);
2912 BT_FN_VOID_VPTR_IX_INT[i]
2913 = build_function_type_list (void_type_node, vptr, ix,
2914 integer_type_node, NULL_TREE);
2915 }
2916 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2917 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2918 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2919 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2920 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2921 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2922 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2923 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2924 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2925 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2926 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2927 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2928 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2929 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2930 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2931 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2932 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2933 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2934 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2935 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2936 #undef ATTR_NOTHROW_LEAF_LIST
2937 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2938 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2939 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2940 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2941 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2942 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2943 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2944 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2945 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2946 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2947 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2948 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2949 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2950 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2951 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2952 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2953 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2954 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2955 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2956 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2957 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2958 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2959 #undef DEF_BUILTIN_STUB
2960 #define DEF_BUILTIN_STUB(ENUM, NAME)
2961 #undef DEF_SANITIZER_BUILTIN_1
2962 #define DEF_SANITIZER_BUILTIN_1(ENUM, NAME, TYPE, ATTRS) \
2963 do { \
2964 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2965 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2966 set_call_expr_flags (decl, ATTRS); \
2967 set_builtin_decl (ENUM, decl, true); \
2968 } while (0)
2969 #undef DEF_SANITIZER_BUILTIN
2970 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2971 DEF_SANITIZER_BUILTIN_1 (ENUM, NAME, TYPE, ATTRS);
2972
2973 #include "sanitizer.def"
2974
2975 /* -fsanitize=object-size uses __builtin_object_size, but that might
2976 not be available for e.g. Fortran at this point. We use
2977 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2978 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2979 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2980 DEF_SANITIZER_BUILTIN_1 (BUILT_IN_OBJECT_SIZE, "object_size",
2981 BT_FN_SIZE_CONST_PTR_INT,
2982 ATTR_PURE_NOTHROW_LEAF_LIST);
2983
2984 #undef DEF_SANITIZER_BUILTIN_1
2985 #undef DEF_SANITIZER_BUILTIN
2986 #undef DEF_BUILTIN_STUB
2987 }
2988
2989 /* Called via htab_traverse. Count number of emitted
2990 STRING_CSTs in the constant hash table. */
2991
2992 int
2993 count_string_csts (constant_descriptor_tree **slot,
2994 unsigned HOST_WIDE_INT *data)
2995 {
2996 struct constant_descriptor_tree *desc = *slot;
2997 if (TREE_CODE (desc->value) == STRING_CST
2998 && TREE_ASM_WRITTEN (desc->value)
2999 && asan_protect_global (desc->value))
3000 ++*data;
3001 return 1;
3002 }
3003
3004 /* Helper structure to pass two parameters to
3005 add_string_csts. */
3006
3007 struct asan_add_string_csts_data
3008 {
3009 tree type;
3010 vec<constructor_elt, va_gc> *v;
3011 };
3012
3013 /* Called via hash_table::traverse. Call asan_add_global
3014 on emitted STRING_CSTs from the constant hash table. */
3015
3016 int
3017 add_string_csts (constant_descriptor_tree **slot,
3018 asan_add_string_csts_data *aascd)
3019 {
3020 struct constant_descriptor_tree *desc = *slot;
3021 if (TREE_CODE (desc->value) == STRING_CST
3022 && TREE_ASM_WRITTEN (desc->value)
3023 && asan_protect_global (desc->value))
3024 {
3025 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
3026 aascd->type, aascd->v);
3027 }
3028 return 1;
3029 }
3030
3031 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
3032 invoke ggc_collect. */
3033 static GTY(()) tree asan_ctor_statements;
3034
3035 /* Module-level instrumentation.
3036 - Insert __asan_init_vN() into the list of CTORs.
3037 - TODO: insert redzones around globals.
3038 */
3039
3040 void
3041 asan_finish_file (void)
3042 {
3043 varpool_node *vnode;
3044 unsigned HOST_WIDE_INT gcount = 0;
3045
3046 if (shadow_ptr_types[0] == NULL_TREE)
3047 asan_init_shadow_ptr_types ();
3048 /* Avoid instrumenting code in the asan ctors/dtors.
3049 We don't need to insert padding after the description strings,
3050 nor after .LASAN* array. */
3051 flag_sanitize &= ~SANITIZE_ADDRESS;
3052
3053 /* For user-space we want asan constructors to run first.
3054 Linux kernel does not support priorities other than default, and the only
3055 other user of constructors is coverage. So we run with the default
3056 priority. */
3057 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
3058 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
3059
3060 if (flag_sanitize & SANITIZE_USER_ADDRESS)
3061 {
3062 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
3063 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3064 fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
3065 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3066 }
3067 FOR_EACH_DEFINED_VARIABLE (vnode)
3068 if (TREE_ASM_WRITTEN (vnode->decl)
3069 && asan_protect_global (vnode->decl))
3070 ++gcount;
3071 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
3072 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
3073 (&gcount);
3074 if (gcount)
3075 {
3076 tree type = asan_global_struct (), var, ctor;
3077 tree dtor_statements = NULL_TREE;
3078 vec<constructor_elt, va_gc> *v;
3079 char buf[20];
3080
3081 type = build_array_type_nelts (type, gcount);
3082 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
3083 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
3084 type);
3085 TREE_STATIC (var) = 1;
3086 TREE_PUBLIC (var) = 0;
3087 DECL_ARTIFICIAL (var) = 1;
3088 DECL_IGNORED_P (var) = 1;
3089 vec_alloc (v, gcount);
3090 FOR_EACH_DEFINED_VARIABLE (vnode)
3091 if (TREE_ASM_WRITTEN (vnode->decl)
3092 && asan_protect_global (vnode->decl))
3093 asan_add_global (vnode->decl, TREE_TYPE (type), v);
3094 struct asan_add_string_csts_data aascd;
3095 aascd.type = TREE_TYPE (type);
3096 aascd.v = v;
3097 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
3098 (&aascd);
3099 ctor = build_constructor (type, v);
3100 TREE_CONSTANT (ctor) = 1;
3101 TREE_STATIC (ctor) = 1;
3102 DECL_INITIAL (var) = ctor;
3103 SET_DECL_ALIGN (var, MAX (DECL_ALIGN (var),
3104 ASAN_SHADOW_GRANULARITY * BITS_PER_UNIT));
3105
3106 varpool_node::finalize_decl (var);
3107
3108 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
3109 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
3110 append_to_statement_list (build_call_expr (fn, 2,
3111 build_fold_addr_expr (var),
3112 gcount_tree),
3113 &asan_ctor_statements);
3114
3115 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
3116 append_to_statement_list (build_call_expr (fn, 2,
3117 build_fold_addr_expr (var),
3118 gcount_tree),
3119 &dtor_statements);
3120 cgraph_build_static_cdtor ('D', dtor_statements, priority);
3121 }
3122 if (asan_ctor_statements)
3123 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
3124 flag_sanitize |= SANITIZE_ADDRESS;
3125 }
3126
3127 /* Poison or unpoison (depending on IS_CLOBBER variable) shadow memory based
3128 on SHADOW address. Newly added statements will be added to ITER with
3129 given location LOC. We mark SIZE bytes in shadow memory, where
3130 LAST_CHUNK_SIZE is greater than zero in situation where we are at the
3131 end of a variable. */
3132
3133 static void
3134 asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
3135 tree shadow,
3136 unsigned HOST_WIDE_INT base_addr_offset,
3137 bool is_clobber, unsigned size,
3138 unsigned last_chunk_size)
3139 {
3140 tree shadow_ptr_type;
3141
3142 switch (size)
3143 {
3144 case 1:
3145 shadow_ptr_type = shadow_ptr_types[0];
3146 break;
3147 case 2:
3148 shadow_ptr_type = shadow_ptr_types[1];
3149 break;
3150 case 4:
3151 shadow_ptr_type = shadow_ptr_types[2];
3152 break;
3153 default:
3154 gcc_unreachable ();
3155 }
3156
3157 unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
3158 unsigned HOST_WIDE_INT val = 0;
3159 unsigned last_pos = size;
3160 if (last_chunk_size && !is_clobber)
3161 last_pos = BYTES_BIG_ENDIAN ? 0 : size - 1;
3162 for (unsigned i = 0; i < size; ++i)
3163 {
3164 unsigned char shadow_c = c;
3165 if (i == last_pos)
3166 shadow_c = last_chunk_size;
3167 val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
3168 }
3169
3170 /* Handle last chunk in unpoisoning. */
3171 tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
3172
3173 tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
3174 build_int_cst (shadow_ptr_type, base_addr_offset));
3175
3176 gimple *g = gimple_build_assign (dest, magic);
3177 gimple_set_location (g, loc);
3178 gsi_insert_after (iter, g, GSI_NEW_STMT);
3179 }
3180
3181 /* Expand the ASAN_MARK builtins. */
3182
3183 bool
3184 asan_expand_mark_ifn (gimple_stmt_iterator *iter)
3185 {
3186 gimple *g = gsi_stmt (*iter);
3187 location_t loc = gimple_location (g);
3188 HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
3189 bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
3190
3191 tree base = gimple_call_arg (g, 1);
3192 gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
3193 tree decl = TREE_OPERAND (base, 0);
3194
3195 /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
3196 if (TREE_CODE (decl) == COMPONENT_REF
3197 && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
3198 decl = TREE_OPERAND (decl, 0);
3199
3200 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
3201
3202 if (is_poison)
3203 {
3204 if (asan_handled_variables == NULL)
3205 asan_handled_variables = new hash_set<tree> (16);
3206 asan_handled_variables->add (decl);
3207 }
3208 tree len = gimple_call_arg (g, 2);
3209
3210 gcc_assert (tree_fits_shwi_p (len));
3211 unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
3212 gcc_assert (size_in_bytes);
3213
3214 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3215 NOP_EXPR, base);
3216 gimple_set_location (g, loc);
3217 gsi_replace (iter, g, false);
3218 tree base_addr = gimple_assign_lhs (g);
3219
3220 /* Generate direct emission if size_in_bytes is small. */
3221 if (size_in_bytes <= ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD)
3222 {
3223 const unsigned HOST_WIDE_INT shadow_size
3224 = shadow_mem_size (size_in_bytes);
3225 const unsigned int shadow_align
3226 = (get_pointer_alignment (base) / BITS_PER_UNIT) >> ASAN_SHADOW_SHIFT;
3227
3228 tree shadow = build_shadow_mem_access (iter, loc, base_addr,
3229 shadow_ptr_types[0], true);
3230
3231 for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
3232 {
3233 unsigned size = 1;
3234 if (shadow_size - offset >= 4
3235 && (!STRICT_ALIGNMENT || shadow_align >= 4))
3236 size = 4;
3237 else if (shadow_size - offset >= 2
3238 && (!STRICT_ALIGNMENT || shadow_align >= 2))
3239 size = 2;
3240
3241 unsigned HOST_WIDE_INT last_chunk_size = 0;
3242 unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
3243 if (s > size_in_bytes)
3244 last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
3245
3246 asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
3247 size, last_chunk_size);
3248 offset += size;
3249 }
3250 }
3251 else
3252 {
3253 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3254 NOP_EXPR, len);
3255 gimple_set_location (g, loc);
3256 gsi_insert_before (iter, g, GSI_SAME_STMT);
3257 tree sz_arg = gimple_assign_lhs (g);
3258
3259 tree fun
3260 = builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
3261 : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
3262 g = gimple_build_call (fun, 2, base_addr, sz_arg);
3263 gimple_set_location (g, loc);
3264 gsi_insert_after (iter, g, GSI_NEW_STMT);
3265 }
3266
3267 return false;
3268 }
3269
3270 /* Expand the ASAN_{LOAD,STORE} builtins. */
3271
3272 bool
3273 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
3274 {
3275 gimple *g = gsi_stmt (*iter);
3276 location_t loc = gimple_location (g);
3277 bool recover_p;
3278 if (flag_sanitize & SANITIZE_USER_ADDRESS)
3279 recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
3280 else
3281 recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
3282
3283 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
3284 gcc_assert (flags < ASAN_CHECK_LAST);
3285 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
3286 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
3287 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
3288
3289 tree base = gimple_call_arg (g, 1);
3290 tree len = gimple_call_arg (g, 2);
3291 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
3292
3293 HOST_WIDE_INT size_in_bytes
3294 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
3295
3296 if (use_calls)
3297 {
3298 /* Instrument using callbacks. */
3299 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3300 NOP_EXPR, base);
3301 gimple_set_location (g, loc);
3302 gsi_insert_before (iter, g, GSI_SAME_STMT);
3303 tree base_addr = gimple_assign_lhs (g);
3304
3305 int nargs;
3306 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
3307 if (nargs == 1)
3308 g = gimple_build_call (fun, 1, base_addr);
3309 else
3310 {
3311 gcc_assert (nargs == 2);
3312 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3313 NOP_EXPR, len);
3314 gimple_set_location (g, loc);
3315 gsi_insert_before (iter, g, GSI_SAME_STMT);
3316 tree sz_arg = gimple_assign_lhs (g);
3317 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
3318 }
3319 gimple_set_location (g, loc);
3320 gsi_replace (iter, g, false);
3321 return false;
3322 }
3323
3324 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
3325
3326 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
3327 tree shadow_type = TREE_TYPE (shadow_ptr_type);
3328
3329 gimple_stmt_iterator gsi = *iter;
3330
3331 if (!is_non_zero_len)
3332 {
3333 /* So, the length of the memory area to asan-protect is
3334 non-constant. Let's guard the generated instrumentation code
3335 like:
3336
3337 if (len != 0)
3338 {
3339 //asan instrumentation code goes here.
3340 }
3341 // falltrough instructions, starting with *ITER. */
3342
3343 g = gimple_build_cond (NE_EXPR,
3344 len,
3345 build_int_cst (TREE_TYPE (len), 0),
3346 NULL_TREE, NULL_TREE);
3347 gimple_set_location (g, loc);
3348
3349 basic_block then_bb, fallthrough_bb;
3350 insert_if_then_before_iter (as_a <gcond *> (g), iter,
3351 /*then_more_likely_p=*/true,
3352 &then_bb, &fallthrough_bb);
3353 /* Note that fallthrough_bb starts with the statement that was
3354 pointed to by ITER. */
3355
3356 /* The 'then block' of the 'if (len != 0) condition is where
3357 we'll generate the asan instrumentation code now. */
3358 gsi = gsi_last_bb (then_bb);
3359 }
3360
3361 /* Get an iterator on the point where we can add the condition
3362 statement for the instrumentation. */
3363 basic_block then_bb, else_bb;
3364 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
3365 /*then_more_likely_p=*/false,
3366 /*create_then_fallthru_edge*/recover_p,
3367 &then_bb,
3368 &else_bb);
3369
3370 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3371 NOP_EXPR, base);
3372 gimple_set_location (g, loc);
3373 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
3374 tree base_addr = gimple_assign_lhs (g);
3375
3376 tree t = NULL_TREE;
3377 if (real_size_in_bytes >= 8)
3378 {
3379 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
3380 shadow_ptr_type);
3381 t = shadow;
3382 }
3383 else
3384 {
3385 /* Slow path for 1, 2 and 4 byte accesses. */
3386 /* Test (shadow != 0)
3387 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
3388 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
3389 shadow_ptr_type);
3390 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
3391 gimple_seq seq = NULL;
3392 gimple_seq_add_stmt (&seq, shadow_test);
3393 /* Aligned (>= 8 bytes) can test just
3394 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
3395 to be 0. */
3396 if (align < 8)
3397 {
3398 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
3399 base_addr, 7));
3400 gimple_seq_add_stmt (&seq,
3401 build_type_cast (shadow_type,
3402 gimple_seq_last (seq)));
3403 if (real_size_in_bytes > 1)
3404 gimple_seq_add_stmt (&seq,
3405 build_assign (PLUS_EXPR,
3406 gimple_seq_last (seq),
3407 real_size_in_bytes - 1));
3408 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
3409 }
3410 else
3411 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
3412 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
3413 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3414 gimple_seq_last (seq)));
3415 t = gimple_assign_lhs (gimple_seq_last (seq));
3416 gimple_seq_set_location (seq, loc);
3417 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3418
3419 /* For non-constant, misaligned or otherwise weird access sizes,
3420 check first and last byte. */
3421 if (size_in_bytes == -1)
3422 {
3423 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3424 MINUS_EXPR, len,
3425 build_int_cst (pointer_sized_int_node, 1));
3426 gimple_set_location (g, loc);
3427 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3428 tree last = gimple_assign_lhs (g);
3429 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3430 PLUS_EXPR, base_addr, last);
3431 gimple_set_location (g, loc);
3432 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3433 tree base_end_addr = gimple_assign_lhs (g);
3434
3435 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
3436 shadow_ptr_type);
3437 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
3438 gimple_seq seq = NULL;
3439 gimple_seq_add_stmt (&seq, shadow_test);
3440 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
3441 base_end_addr, 7));
3442 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
3443 gimple_seq_last (seq)));
3444 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
3445 gimple_seq_last (seq),
3446 shadow));
3447 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3448 gimple_seq_last (seq)));
3449 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
3450 gimple_seq_last (seq)));
3451 t = gimple_assign_lhs (gimple_seq_last (seq));
3452 gimple_seq_set_location (seq, loc);
3453 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3454 }
3455 }
3456
3457 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
3458 NULL_TREE, NULL_TREE);
3459 gimple_set_location (g, loc);
3460 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3461
3462 /* Generate call to the run-time library (e.g. __asan_report_load8). */
3463 gsi = gsi_start_bb (then_bb);
3464 int nargs;
3465 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
3466 g = gimple_build_call (fun, nargs, base_addr, len);
3467 gimple_set_location (g, loc);
3468 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3469
3470 gsi_remove (iter, true);
3471 *iter = gsi_start_bb (else_bb);
3472
3473 return true;
3474 }
3475
3476 /* Create ASAN shadow variable for a VAR_DECL which has been rewritten
3477 into SSA. Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING. */
3478
3479 static tree
3480 create_asan_shadow_var (tree var_decl,
3481 hash_map<tree, tree> &shadow_vars_mapping)
3482 {
3483 tree *slot = shadow_vars_mapping.get (var_decl);
3484 if (slot == NULL)
3485 {
3486 tree shadow_var = copy_node (var_decl);
3487
3488 copy_body_data id;
3489 memset (&id, 0, sizeof (copy_body_data));
3490 id.src_fn = id.dst_fn = current_function_decl;
3491 copy_decl_for_dup_finish (&id, var_decl, shadow_var);
3492
3493 DECL_ARTIFICIAL (shadow_var) = 1;
3494 DECL_IGNORED_P (shadow_var) = 1;
3495 DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
3496 gimple_add_tmp_var (shadow_var);
3497
3498 shadow_vars_mapping.put (var_decl, shadow_var);
3499 return shadow_var;
3500 }
3501 else
3502 return *slot;
3503 }
3504
3505 /* Expand ASAN_POISON ifn. */
3506
3507 bool
3508 asan_expand_poison_ifn (gimple_stmt_iterator *iter,
3509 bool *need_commit_edge_insert,
3510 hash_map<tree, tree> &shadow_vars_mapping)
3511 {
3512 gimple *g = gsi_stmt (*iter);
3513 tree poisoned_var = gimple_call_lhs (g);
3514 if (!poisoned_var || has_zero_uses (poisoned_var))
3515 {
3516 gsi_remove (iter, true);
3517 return true;
3518 }
3519
3520 if (SSA_NAME_VAR (poisoned_var) == NULL_TREE)
3521 SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var,
3522 create_tmp_var (TREE_TYPE (poisoned_var)));
3523
3524 tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
3525 shadow_vars_mapping);
3526
3527 bool recover_p;
3528 if (flag_sanitize & SANITIZE_USER_ADDRESS)
3529 recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
3530 else
3531 recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
3532 tree size = DECL_SIZE_UNIT (shadow_var);
3533 gimple *poison_call
3534 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
3535 build_int_cst (integer_type_node,
3536 ASAN_MARK_POISON),
3537 build_fold_addr_expr (shadow_var), size);
3538
3539 gimple *use;
3540 imm_use_iterator imm_iter;
3541 FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var)
3542 {
3543 if (is_gimple_debug (use))
3544 continue;
3545
3546 int nargs;
3547 bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
3548 tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
3549 &nargs);
3550
3551 gcall *call = gimple_build_call (fun, 1,
3552 build_fold_addr_expr (shadow_var));
3553 gimple_set_location (call, gimple_location (use));
3554 gimple *call_to_insert = call;
3555
3556 /* The USE can be a gimple PHI node. If so, insert the call on
3557 all edges leading to the PHI node. */
3558 if (is_a <gphi *> (use))
3559 {
3560 gphi *phi = dyn_cast<gphi *> (use);
3561 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
3562 if (gimple_phi_arg_def (phi, i) == poisoned_var)
3563 {
3564 edge e = gimple_phi_arg_edge (phi, i);
3565
3566 /* Do not insert on an edge we can't split. */
3567 if (e->flags & EDGE_ABNORMAL)
3568 continue;
3569
3570 if (call_to_insert == NULL)
3571 call_to_insert = gimple_copy (call);
3572
3573 gsi_insert_seq_on_edge (e, call_to_insert);
3574 *need_commit_edge_insert = true;
3575 call_to_insert = NULL;
3576 }
3577 }
3578 else
3579 {
3580 gimple_stmt_iterator gsi = gsi_for_stmt (use);
3581 if (store_p)
3582 gsi_replace (&gsi, call, true);
3583 else
3584 gsi_insert_before (&gsi, call, GSI_NEW_STMT);
3585 }
3586 }
3587
3588 SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
3589 SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
3590 gsi_replace (iter, poison_call, false);
3591
3592 return true;
3593 }
3594
3595 /* Instrument the current function. */
3596
3597 static unsigned int
3598 asan_instrument (void)
3599 {
3600 if (shadow_ptr_types[0] == NULL_TREE)
3601 asan_init_shadow_ptr_types ();
3602 transform_statements ();
3603 last_alloca_addr = NULL_TREE;
3604 return 0;
3605 }
3606
3607 static bool
3608 gate_asan (void)
3609 {
3610 return sanitize_flags_p (SANITIZE_ADDRESS);
3611 }
3612
3613 namespace {
3614
3615 const pass_data pass_data_asan =
3616 {
3617 GIMPLE_PASS, /* type */
3618 "asan", /* name */
3619 OPTGROUP_NONE, /* optinfo_flags */
3620 TV_NONE, /* tv_id */
3621 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
3622 0, /* properties_provided */
3623 0, /* properties_destroyed */
3624 0, /* todo_flags_start */
3625 TODO_update_ssa, /* todo_flags_finish */
3626 };
3627
3628 class pass_asan : public gimple_opt_pass
3629 {
3630 public:
3631 pass_asan (gcc::context *ctxt)
3632 : gimple_opt_pass (pass_data_asan, ctxt)
3633 {}
3634
3635 /* opt_pass methods: */
3636 opt_pass * clone () { return new pass_asan (m_ctxt); }
3637 virtual bool gate (function *) { return gate_asan (); }
3638 virtual unsigned int execute (function *) { return asan_instrument (); }
3639
3640 }; // class pass_asan
3641
3642 } // anon namespace
3643
3644 gimple_opt_pass *
3645 make_pass_asan (gcc::context *ctxt)
3646 {
3647 return new pass_asan (ctxt);
3648 }
3649
3650 namespace {
3651
3652 const pass_data pass_data_asan_O0 =
3653 {
3654 GIMPLE_PASS, /* type */
3655 "asan0", /* name */
3656 OPTGROUP_NONE, /* optinfo_flags */
3657 TV_NONE, /* tv_id */
3658 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
3659 0, /* properties_provided */
3660 0, /* properties_destroyed */
3661 0, /* todo_flags_start */
3662 TODO_update_ssa, /* todo_flags_finish */
3663 };
3664
3665 class pass_asan_O0 : public gimple_opt_pass
3666 {
3667 public:
3668 pass_asan_O0 (gcc::context *ctxt)
3669 : gimple_opt_pass (pass_data_asan_O0, ctxt)
3670 {}
3671
3672 /* opt_pass methods: */
3673 virtual bool gate (function *) { return !optimize && gate_asan (); }
3674 virtual unsigned int execute (function *) { return asan_instrument (); }
3675
3676 }; // class pass_asan_O0
3677
3678 } // anon namespace
3679
3680 gimple_opt_pass *
3681 make_pass_asan_O0 (gcc::context *ctxt)
3682 {
3683 return new pass_asan_O0 (ctxt);
3684 }
3685
3686 #include "gt-asan.h"