/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2020 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "varasm.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dojump.h"
#include "explow.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "gimple-builder.h"
#include "gimple-fold.h"
#include "ubsan.h"
#include "builtins.h"
#include "fnmatch.h"
#include "tree-inline.h"
#include "tree-ssa.h"

/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
	 __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
	 if ((X & 7) + N - 1 >= ShadowValue)
	   __asan_report_loadN(X);
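     For instance (an illustrative walk through the check above): a
     2-byte load from X = 0x1006 whose shadow byte holds 7, meaning only
     the first 7 bytes of that 8-byte granule are addressable, gives
     (X & 7) + N - 1 = 6 + 2 - 1 = 7 >= 7, so __asan_report_load2(X)
     fires on the one-byte overflow.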
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted to the list of module CTORs.
   N is the version number of the AddressSanitizer API. The changes between the
   API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   provides __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
	   the next slot be 32 bytes aligned; this one is called Partial
	   Redzone; this 32 bytes alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
	   'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

     1/ The first 8 bytes contain a magical asan number that is always
     0x41B58AB3.

     2/ The following 8 bytes contain a pointer to a string (to be
     parsed at runtime by the runtime asan library), whose format is
     the following:

      "<function-name> <space> <num-of-variables-on-the-stack>
      (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
      <length-of-var-in-bytes> ){n} "

	where '(...){n}' means the content inside the parenthesis occurs 'n'
	times, with 'n' being the number of variables on the stack.
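
	Following that format, a possible instance of the string for the
	foo () example above would be (offsets and ordering purely
	illustrative):

	  "foo 2 32 8 96 24 "

	i.e. two variables on the stack, 'b' at the 32-byte-aligned
	offset 32 with length 8, and 'a' at offset 96 with length 24.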

     3/ The following 8 bytes contain the PC of the current function which
     will be used by the run-time library to print an error message.

     4/ The following 8 bytes are reserved for internal use by the run-time.

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slot 3 and 2:
       0xF4000000.  This is the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' (3 shadow bytes worth of
       memory) are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to do
   the populating of the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone between
   them.  The size of the red zones is chosen so that each variable starts
   on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to struct that contains source location, could be NULL.
       __asan_global_source_location *__location;
     }

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
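
/* As a minimal, purely illustrative sketch (not code GCC emits verbatim;
   the names g_info and register_g are made up), the constructor installed
   for a global variable G conceptually amounts to:

     static struct __asan_global g_info
       = { &G, sizeof (G), (sizeof (G) + 31) & ~(uptr) 31,
	   "G", "module.c", 0, NULL };

     __attribute__ ((constructor)) static void
     register_g (void)
     {
       __asan_register_globals (&g_info, 1);
     }

   with a matching destructor passing the same array to
   __asan_unregister_globals.  */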

static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;
static tree last_alloca_addr;

/* Set of variable declarations that are going to be guarded by
   use-after-scope sanitizer.  */

hash_set<tree> *asan_handled_variables = NULL;

hash_set <tree> *asan_used_labels = NULL;

/* Sets shadow offset to value in string VAL.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}
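
/* For example (illustrative values), -fasan-shadow-offset=0x7fff8000
   reaches this function with VAL == "0x7fff8000"; since strtoull is
   called with base 0, decimal, octal and hex spellings are all
   accepted, while trailing junk or an out-of-range value makes the
   function return false.  */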

/* Set list of user-defined sections that need to be sanitized.  */

void
set_sanitized_sections (const char *sections)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    free (pat);
  sanitized_sections.truncate (0);

  for (const char *s = sections; *s; )
    {
      const char *end;
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      sanitized_sections.safe_push (xstrndup (s, len));
      s = *end ? end + 1 : end;
    }
}
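
/* E.g. (illustrative) -fsanitize-sections=.mysec.*,.other stores the two
   patterns ".mysec.*" and ".other", which section_sanitized_p below
   matches against section names via fnmatch.  */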

bool
asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
{
  return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
	  && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
}

bool
asan_sanitize_stack_p (void)
{
  return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_stack);
}

bool
asan_sanitize_allocas_p (void)
{
  return (asan_sanitize_stack_p () && param_asan_protect_allocas);
}

/* Checks whether section SEC should be sanitized.  */

static bool
section_sanitized_p (const char *sec)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    if (fnmatch (pat, sec, FNM_PERIOD) == 0)
      return true;
  return false;
}

/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}

alias_set_type asan_shadow_set = -1;

/* Pointer types to 1, 2 or 4 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[3];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");

/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref from the
   asan_mem_ref_pool memory pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref = asan_mem_ref_pool.allocate ();

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}

/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}

struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}

static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}

/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  asan_mem_ref_pool.release ();
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
    && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}

/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}

/* Return address of last allocated dynamic alloca.  */

static tree
get_last_alloca_addr ()
{
  if (last_alloca_addr)
    return last_alloca_addr;

  last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr");
  gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node);
  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_on_edge_immediate (e, g);
  return last_alloca_addr;
}

/* Insert __asan_allocas_unpoison (top, bottom) call before
   __builtin_stack_restore (new_sp) call.
   The pseudocode of this routine should look like this:
     top = last_alloca_addr;
     bot = new_sp;
     __asan_allocas_unpoison (top, bot);
     last_alloca_addr = new_sp;
     __builtin_stack_restore (new_sp);
   In general, we can't use new_sp as bot parameter because on some
   architectures SP has a non-zero offset from the dynamic stack area.
   Moreover, on some architectures this offset (STACK_DYNAMIC_OFFSET) becomes
   known for each particular function only after all callees were expanded to
   rtl.  The most noticeable example is PowerPC{,64}, see
   http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK.
   To overcome the issue we use the following trick: pass new_sp as a second
   parameter to __asan_allocas_unpoison and rewrite it during expansion with
   new_sp + (virtual_dynamic_stack_rtx - sp) later in
   expand_asan_emit_allocas_unpoison function.  */

static void
handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
{
  if (!iter || !asan_sanitize_allocas_p ())
    return;

  tree last_alloca = get_last_alloca_addr ();
  tree restored_stack = gimple_call_arg (call, 0);
  tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
  gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack);
  gsi_insert_before (iter, g, GSI_SAME_STMT);
  g = gimple_build_assign (last_alloca, restored_stack);
  gsi_insert_before (iter, g, GSI_SAME_STMT);
}

/* Deploy and poison redzones around a __builtin_alloca call.  To do this, we
   should replace this call with another one with changed parameters and
   replace all its uses with the new address, so
       addr = __builtin_alloca (old_size, align);
   is replaced by
       left_redzone_size = max (align, ASAN_RED_ZONE_SIZE);
   The following two statements are optimized out if we know that
   old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. the alloca doesn't need a
   partial redzone.
       misalign = old_size & (ASAN_RED_ZONE_SIZE - 1);
       partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign;
       right_redzone_size = ASAN_RED_ZONE_SIZE;
       additional_size = left_redzone_size + partial_redzone_size +
			 right_redzone_size;
       new_size = old_size + additional_size;
       new_alloca = __builtin_alloca (new_size, max (align, 32))
       __asan_alloca_poison (new_alloca, old_size)
       addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE);
       last_alloca_addr = new_alloca;
   ADDITIONAL_SIZE is added to make the new memory allocation contain not only
   the requested memory, but also left, partial and right redzones as well as
   some additional space, required by alignment.  */
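
/* A worked instance of the pseudocode above (illustrative numbers,
   assuming ASAN_RED_ZONE_SIZE == 32): for addr = __builtin_alloca (40)
   with a 32-byte alignment request, misalign = 40 & 31 = 8,
   partial_redzone_size = 24 and additional_size = 32 + 24 + 32 = 88,
   so 128 bytes are actually allocated and ADDR ends up 32 bytes past
   the start of the new allocation, leaving 40 usable bytes surrounded
   by redzones.  */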

static void
handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
{
  if (!iter || !asan_sanitize_allocas_p ())
    return;

  gassign *g;
  gcall *gg;
  const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;

  tree last_alloca = get_last_alloca_addr ();
  tree callee = gimple_call_fndecl (call);
  tree old_size = gimple_call_arg (call, 0);
  tree ptr_type = gimple_call_lhs (call) ? TREE_TYPE (gimple_call_lhs (call))
					 : ptr_type_node;
  tree partial_size = NULL_TREE;
  unsigned int align
    = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
      ? 0 : tree_to_uhwi (gimple_call_arg (call, 1));

  /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
     bytes of allocated space.  Otherwise, align alloca to ASAN_RED_ZONE_SIZE
     manually.  */
  align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);

  tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask);
  tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE);

  /* Extract lower bits from old_size.  */
  wide_int size_nonzero_bits = get_nonzero_bits (old_size);
  wide_int rz_mask
    = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits));
  wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask);

  /* If alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need partial
     redzone.  Otherwise, compute its size here.  */
  if (wi::ne_p (old_size_lower_bits, 0))
    {
      /* misalign = size & (ASAN_RED_ZONE_SIZE - 1)
	 partial_size = ASAN_RED_ZONE_SIZE - misalign.  */
      g = gimple_build_assign (make_ssa_name (size_type_node, NULL),
			       BIT_AND_EXPR, old_size, alloca_rz_mask);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree misalign = gimple_assign_lhs (g);
      g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR,
			       redzone_size, misalign);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      partial_size = gimple_assign_lhs (g);
    }

  /* additional_size = align + ASAN_RED_ZONE_SIZE.  */
  tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT
						+ ASAN_RED_ZONE_SIZE);
  /* If alloca has a partial redzone, include it in additional_size too.  */
  if (partial_size)
    {
      /* additional_size += partial_size.  */
      g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
			       partial_size, additional_size);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      additional_size = gimple_assign_lhs (g);
    }

  /* new_size = old_size + additional_size.  */
  g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size,
			   additional_size);
  gsi_insert_before (iter, g, GSI_SAME_STMT);
  tree new_size = gimple_assign_lhs (g);

  /* Build new __builtin_alloca call:
       new_alloca_with_rz = __builtin_alloca (new_size, align).  */
  tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  gg = gimple_build_call (fn, 2, new_size,
			  build_int_cst (size_type_node, align));
  tree new_alloca_with_rz = make_ssa_name (ptr_type, gg);
  gimple_call_set_lhs (gg, new_alloca_with_rz);
  gsi_insert_before (iter, gg, GSI_SAME_STMT);

  /* new_alloca = new_alloca_with_rz + align.  */
  g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR,
			   new_alloca_with_rz,
			   build_int_cst (size_type_node,
					  align / BITS_PER_UNIT));
  gsi_insert_before (iter, g, GSI_SAME_STMT);
  tree new_alloca = gimple_assign_lhs (g);

  /* Poison newly created alloca redzones:
      __asan_alloca_poison (new_alloca, old_size).  */
  fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON);
  gg = gimple_build_call (fn, 2, new_alloca, old_size);
  gsi_insert_before (iter, gg, GSI_SAME_STMT);

  /* Save new_alloca_with_rz value into last_alloca to use it during
     allocas unpoisoning.  */
  g = gimple_build_assign (last_alloca, new_alloca_with_rz);
  gsi_insert_before (iter, g, GSI_SAME_STMT);

  /* Finally, replace old alloca ptr with NEW_ALLOCA.  */
  replace_call_with_value (iter, new_alloca);
}

/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (gcall *call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref,
			      bool *intercepted_p,
			      gimple_stmt_iterator *iter = NULL)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

    case BUILT_IN_STACK_RESTORE:
      handle_builtin_stack_restore (call, iter);
      break;

    CASE_BUILT_IN_ALLOCA:
      handle_builtin_alloca (call, iter);
      break;
    /* And now the __atomic* and __sync builtins.
       These are handled differently from the classical memory
       access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_1:
      access_size = 1;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_2:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
      access_size = 2;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_4:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
      access_size = 4;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_8:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
      access_size = 8;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
    case BUILT_IN_ATOMIC_STORE_16:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      access_size = 16;
      /* FALLTHRU */
    do_atomic:
      {
	dest = gimple_call_arg (call, 0);
	/* DEST represents the address of a memory location.
	   instrument_derefs wants the memory location, so let's
	   dereference the address DEST before handing it to
	   instrument_derefs.  */
	tree type = build_nonstandard_integer_type (access_size
						    * BITS_PER_UNIT, 1);
	dest = build2 (MEM_REF, type, dest,
		       build_int_cst (build_pointer_type (char_type_node), 0));
	break;
      }

    default:
      /* The other builtins' memory accesses are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}

/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple *stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  else if (is_gimple_call (stmt) && gimple_store_p (stmt))
    {
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      r.start = gimple_call_lhs (stmt);
      r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
      return has_mem_ref_been_instrumented (&r);
    }

  return false;
}

/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}

/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  tree types[3] = { signed_char_type_node, short_integer_type_node,
		    integer_type_node };

  for (unsigned i = 0; i < 3; i++)
    {
      shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
      TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
      shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
    }

  initialize_sanitizer_builtins ();
}

/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}

/* Clear shadow memory at SHADOW_MEM, LEN bytes.  We can't use a library
   call here, though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  gcc_assert ((len & 3) == 0);
  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_reg_br_prob_note (jump,
			profile_probability::guessed_always ()
			.apply_scale (80, 100));
}

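/* Emit the "LASANPC" debug label at the start of the current function;
   asan_emit_stack_protection below takes this label's address into the
   stack frame description, so the runtime can report the function's PC
   (this descriptive comment is an addition; the label name comes from
   the code itself).  */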
void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
}

/* Return number of shadow bytes that are occupied by a local variable
   of SIZE bytes.  */

static unsigned HOST_WIDE_INT
shadow_mem_size (unsigned HOST_WIDE_INT size)
{
  /* It must be possible to align stack variables to granularity
     of shadow memory.  */
  gcc_assert (BITS_PER_UNIT
	      * ASAN_SHADOW_GRANULARITY <= MAX_SUPPORTED_STACK_ALIGNMENT);

  return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
}
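
/* E.g. with ASAN_SHADOW_GRANULARITY == 8, a 23-byte local variable
   occupies ROUND_UP (23, 8) / 8 == 3 shadow bytes (illustrative
   numbers).  */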

/* Always emit 4 bytes at a time.  */
#define RZ_BUFFER_SIZE 4

/* ASAN redzone buffer container that handles emission of shadow bytes.  */
class asan_redzone_buffer
{
public:
  /* Constructor.  */
  asan_redzone_buffer (rtx shadow_mem, HOST_WIDE_INT prev_offset):
    m_shadow_mem (shadow_mem), m_prev_offset (prev_offset),
    m_original_offset (prev_offset), m_shadow_bytes (RZ_BUFFER_SIZE)
  {}

  /* Emit VALUE shadow byte at a given OFFSET.  */
  void emit_redzone_byte (HOST_WIDE_INT offset, unsigned char value);

  /* Emit the content of the buffer as RTX.  */
  void flush_redzone_payload (void);

private:
  /* Flush if the content of the buffer is full
     (equal to RZ_BUFFER_SIZE).  */
  void flush_if_full (void);

  /* Memory where we last emitted a redzone payload.  */
  rtx m_shadow_mem;

  /* Relative offset where we last emitted a redzone payload.  */
  HOST_WIDE_INT m_prev_offset;

  /* Relative original offset.  Used for checking only.  */
  HOST_WIDE_INT m_original_offset;

public:
  /* Buffer with redzone payload.  */
  auto_vec<unsigned char> m_shadow_bytes;
};
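
/* Usage sketch (illustrative): four consecutive calls such as
     rz_buffer.emit_redzone_byte (off, 0xf1) for off = 0, 8, 16, 24
   relative to the construction offset fill the internal buffer, and the
   fourth one triggers flush_redzone_payload via flush_if_full, which
   stores the single SImode constant 0xf1f1f1f1 into shadow memory
   instead of four separate byte stores.  */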

/* Emit VALUE shadow byte at a given OFFSET.  */

void
asan_redzone_buffer::emit_redzone_byte (HOST_WIDE_INT offset,
					unsigned char value)
{
  gcc_assert ((offset & (ASAN_SHADOW_GRANULARITY - 1)) == 0);
  gcc_assert (offset >= m_prev_offset);

  HOST_WIDE_INT off
    = m_prev_offset + ASAN_SHADOW_GRANULARITY * m_shadow_bytes.length ();
  if (off == offset)
    {
      /* Consecutive shadow memory byte.  */
      m_shadow_bytes.safe_push (value);
      flush_if_full ();
    }
  else
    {
      if (!m_shadow_bytes.is_empty ())
	flush_redzone_payload ();

      /* Maybe start earlier in order to use an aligned store.  */
      HOST_WIDE_INT align = (offset - m_prev_offset) % ASAN_RED_ZONE_SIZE;
      if (align)
	{
	  offset -= align;
	  for (unsigned i = 0; i < align / BITS_PER_UNIT; i++)
	    m_shadow_bytes.safe_push (0);
	}

      /* Adjust m_prev_offset and m_shadow_mem.  */
      HOST_WIDE_INT diff = offset - m_prev_offset;
      m_shadow_mem = adjust_address (m_shadow_mem, VOIDmode,
				     diff >> ASAN_SHADOW_SHIFT);
      m_prev_offset = offset;
      m_shadow_bytes.safe_push (value);
      flush_if_full ();
    }
}

/* Emit the content of the buffer as RTX.  */

void
asan_redzone_buffer::flush_redzone_payload (void)
{
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);

  if (m_shadow_bytes.is_empty ())
    return;

  /* Be sure we always emit to an aligned address.  */
  gcc_assert (((m_prev_offset - m_original_offset)
	       & (ASAN_RED_ZONE_SIZE - 1)) == 0);

  /* Fill it to RZ_BUFFER_SIZE bytes with zeros if needed.  */
  unsigned l = m_shadow_bytes.length ();
  for (unsigned i = 0; i <= RZ_BUFFER_SIZE - l; i++)
    m_shadow_bytes.safe_push (0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
	     "Flushing rzbuffer at offset %" PRId64 " with: ", m_prev_offset);

  unsigned HOST_WIDE_INT val = 0;
  for (unsigned i = 0; i < RZ_BUFFER_SIZE; i++)
    {
      unsigned char v
	= m_shadow_bytes[BYTES_BIG_ENDIAN ? RZ_BUFFER_SIZE - i - 1 : i];
      val |= (unsigned HOST_WIDE_INT)v << (BITS_PER_UNIT * i);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "%02x ", v);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");

  rtx c = gen_int_mode (val, SImode);
  m_shadow_mem = adjust_address (m_shadow_mem, SImode, 0);
  emit_move_insn (m_shadow_mem, c);
  m_shadow_bytes.truncate (0);
}

/* Flush if the content of the buffer is full
   (equal to RZ_BUFFER_SIZE).  */

void
asan_redzone_buffer::flush_if_full (void)
{
  if (m_shadow_bytes.length () == RZ_BUFFER_SIZE)
    flush_redzone_payload ();
}

/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, the epilogue sequence returned.  BASE is the register holding the
   stack base, relative to which the OFFSETS array offsets are.  The OFFSETS
   array contains pairs of offsets in reverse order, always the end offset
   of some gap that needs protection followed by the starting offset,
   and DECLS is an array of representative decls for each var partition.
   LENGTH is the length of the OFFSETS array, the DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS include the gap before the first
   variable as well as gaps after each stack variable).  PBASE is, if
   non-NULL, some pseudo register which stack vars DECL_RTLs are based on.
   Either BASE should be assigned to PBASE, when not doing use after return
   protection, or the corresponding address based on the
   __asan_stack_malloc* return value.  */

rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[32];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size, last_size_aligned;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  expanded_location cfun_xloc
    = expand_location (DECL_SOURCE_LOCATION (current_function_decl));

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);

      expanded_location xloc
	= expand_location (DECL_SOURCE_LOCATION (decl));
      char location[32];

      if (xloc.file == cfun_xloc.file)
	sprintf (location, ":%d", xloc.line);
      else
	location[0] = '\0';

      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  unsigned idlen
	    = IDENTIFIER_LENGTH (DECL_NAME (decl)) + strlen (location);
	  pp_decimal_int (&asan_pp, idlen);
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	  pp_string (&asan_pp, location);
	}
      else
	pp_string (&asan_pp, "9 <unknown>");

      if (l > 2)
	pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);

  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && param_asan_use_after_return)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
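
  /* For instance (illustrative): a 96-byte frame gives
     use_after_return_class = floor_log2 (95) - 5 = 1, so
     __asan_stack_malloc_1 is called below, which per the comment above
     guarantees 64 << 1 == 128 byte alignment.  */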
1444
1445 /* Align base if target is STRICT_ALIGNMENT. */
1446 if (STRICT_ALIGNMENT)
1447 {
1448 const HOST_WIDE_INT align
1449 = (GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT) << ASAN_SHADOW_SHIFT;
1450 base = expand_binop (Pmode, and_optab, base, gen_int_mode (-align, Pmode),
1451 NULL_RTX, 1, OPTAB_DIRECT);
1452 }
1453
1454 if (use_after_return_class == -1 && pbase)
1455 emit_move_insn (pbase, base);
1456
1457 base = expand_binop (Pmode, add_optab, base,
1458 gen_int_mode (base_offset - base_align_bias, Pmode),
1459 NULL_RTX, 1, OPTAB_DIRECT);
1460 orig_base = NULL_RTX;
1461 if (use_after_return_class != -1)
1462 {
1463 if (asan_detect_stack_use_after_return == NULL_TREE)
1464 {
1465 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1466 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1467 integer_type_node);
1468 SET_DECL_ASSEMBLER_NAME (decl, id);
1469 TREE_ADDRESSABLE (decl) = 1;
1470 DECL_ARTIFICIAL (decl) = 1;
1471 DECL_IGNORED_P (decl) = 1;
1472 DECL_EXTERNAL (decl) = 1;
1473 TREE_STATIC (decl) = 1;
1474 TREE_PUBLIC (decl) = 1;
1475 TREE_USED (decl) = 1;
1476 asan_detect_stack_use_after_return = decl;
1477 }
1478 orig_base = gen_reg_rtx (Pmode);
1479 emit_move_insn (orig_base, base);
1480 ret = expand_normal (asan_detect_stack_use_after_return);
1481 lab = gen_label_rtx ();
1482 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1483 VOIDmode, 0, lab,
1484 profile_probability::very_likely ());
1485 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1486 use_after_return_class);
1487 ret = init_one_libfunc (buf);
1488 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
1489 GEN_INT (asan_frame_size
1490 + base_align_bias),
1491 TYPE_MODE (pointer_sized_int_node));
1492 /* __asan_stack_malloc_[n] returns a pointer to fake stack if succeeded
1493 and NULL otherwise. Check RET value is NULL here and jump over the
1494 BASE reassignment in this case. Otherwise, reassign BASE to RET. */
1495 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1496 VOIDmode, 0, lab,
1497 profile_probability:: very_unlikely ());
1498 ret = convert_memory_address (Pmode, ret);
1499 emit_move_insn (base, ret);
1500 emit_label (lab);
1501 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1502 gen_int_mode (base_align_bias
1503 - base_offset, Pmode),
1504 NULL_RTX, 1, OPTAB_DIRECT));
1505 }
1506 mem = gen_rtx_MEM (ptr_mode, base);
1507 mem = adjust_address (mem, VOIDmode, base_align_bias);
1508 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1509 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1510 emit_move_insn (mem, expand_normal (str_cst));
1511 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1512 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1513 id = get_identifier (buf);
1514 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1515 VAR_DECL, id, char_type_node);
1516 SET_DECL_ASSEMBLER_NAME (decl, id);
1517 TREE_ADDRESSABLE (decl) = 1;
1518 TREE_READONLY (decl) = 1;
1519 DECL_ARTIFICIAL (decl) = 1;
1520 DECL_IGNORED_P (decl) = 1;
1521 TREE_STATIC (decl) = 1;
1522 TREE_PUBLIC (decl) = 0;
1523 TREE_USED (decl) = 1;
1524 DECL_INITIAL (decl) = decl;
1525 TREE_ASM_WRITTEN (decl) = 1;
1526 TREE_ASM_WRITTEN (id) = 1;
1527 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1528 shadow_base = expand_binop (Pmode, lshr_optab, base,
1529 gen_int_shift_amount (Pmode, ASAN_SHADOW_SHIFT),
1530 NULL_RTX, 1, OPTAB_DIRECT);
1531 shadow_base
1532 = plus_constant (Pmode, shadow_base,
1533 asan_shadow_offset ()
1534 + (base_align_bias >> ASAN_SHADOW_SHIFT));
1535 gcc_assert (asan_shadow_set != -1
1536 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1537 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1538 set_mem_alias_set (shadow_mem, asan_shadow_set);
1539 if (STRICT_ALIGNMENT)
1540 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1541 prev_offset = base_offset;
1542
1543 asan_redzone_buffer rz_buffer (shadow_mem, prev_offset);
1544 for (l = length; l; l -= 2)
1545 {
1546 if (l == 2)
1547 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1548 offset = offsets[l - 1];
1549
1550 bool extra_byte = (offset - base_offset) & (ASAN_SHADOW_GRANULARITY - 1);
1551 /* If a red-zone is not aligned to ASAN_SHADOW_GRANULARITY then
1552 the previous stack variable has size % ASAN_SHADOW_GRANULARITY != 0.
1553 In that case we have to emit one extra byte that will describe
1554 how many bytes (our of ASAN_SHADOW_GRANULARITY) can be accessed. */
1555 if (extra_byte)
1556 {
1557 HOST_WIDE_INT aoff
1558 = base_offset + ((offset - base_offset)
1559 & ~(ASAN_SHADOW_GRANULARITY - HOST_WIDE_INT_1));
1560 rz_buffer.emit_redzone_byte (aoff, offset - aoff);
1561 offset = aoff + ASAN_SHADOW_GRANULARITY;
1562 }
1563
1564 /* Calculate size of red zone payload. */
1565 while (offset < offsets[l - 2])
1566 {
1567 rz_buffer.emit_redzone_byte (offset, cur_shadow_byte);
1568 offset += ASAN_SHADOW_GRANULARITY;
1569 }
1570
1571 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1572 }
1573
1574 /* As the automatic variables are aligned to
1575 ASAN_RED_ZONE_SIZE / ASAN_SHADOW_GRANULARITY, the buffer should be
1576 flushed here. */
1577 gcc_assert (rz_buffer.m_shadow_bytes.is_empty ());
1578
1579 do_pending_stack_adjust ();
1580
1581 /* Construct epilogue sequence. */
1582 start_sequence ();
1583
1584 lab = NULL;
1585 if (use_after_return_class != -1)
1586 {
1587 rtx_code_label *lab2 = gen_label_rtx ();
1588 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1589 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1590 VOIDmode, 0, lab2,
1591 profile_probability::very_likely ());
1592 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1593 set_mem_alias_set (shadow_mem, asan_shadow_set);
1594 mem = gen_rtx_MEM (ptr_mode, base);
1595 mem = adjust_address (mem, VOIDmode, base_align_bias);
1596 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1597 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1598 if (use_after_return_class < 5
1599 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1600 BITS_PER_UNIT, true))
1601 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1602 BITS_PER_UNIT, true, RETURN_BEGIN);
1603 else if (use_after_return_class >= 5
1604 || !set_storage_via_setmem (shadow_mem,
1605 GEN_INT (sz),
1606 gen_int_mode (c, QImode),
1607 BITS_PER_UNIT, BITS_PER_UNIT,
1608 -1, sz, sz, sz))
1609 {
1610 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1611 use_after_return_class);
1612 ret = init_one_libfunc (buf);
1613 rtx addr = convert_memory_address (ptr_mode, base);
1614 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1615 emit_library_call (ret, LCT_NORMAL, ptr_mode, addr, ptr_mode,
1616 GEN_INT (asan_frame_size + base_align_bias),
1617 TYPE_MODE (pointer_sized_int_node),
1618 orig_addr, ptr_mode);
1619 }
1620 lab = gen_label_rtx ();
1621 emit_jump (lab);
1622 emit_label (lab2);
1623 }
1624
1625 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1626 set_mem_alias_set (shadow_mem, asan_shadow_set);
1627
1628 if (STRICT_ALIGNMENT)
1629 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1630
1631 prev_offset = base_offset;
1632 last_offset = base_offset;
1633 last_size = 0;
1634 last_size_aligned = 0;
1635 for (l = length; l; l -= 2)
1636 {
1637 offset = base_offset + ((offsets[l - 1] - base_offset)
1638 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1639 if (last_offset + last_size_aligned < offset)
1640 {
1641 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1642 (last_offset - prev_offset)
1643 >> ASAN_SHADOW_SHIFT);
1644 prev_offset = last_offset;
1645 asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
1646 last_offset = offset;
1647 last_size = 0;
1648 }
1649 else
1650 last_size = offset - last_offset;
1651 last_size += base_offset + ((offsets[l - 2] - base_offset)
1652 & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1653 - offset;
1654
1655 /* Unpoison shadow memory that corresponds to a variable that is
1656 is subject of use-after-return sanitization. */
1657 if (l > 2)
1658 {
1659 decl = decls[l / 2 - 2];
1660 if (asan_handled_variables != NULL
1661 && asan_handled_variables->contains (decl))
1662 {
1663 HOST_WIDE_INT size = offsets[l - 3] - offsets[l - 2];
1664 if (dump_file && (dump_flags & TDF_DETAILS))
1665 {
1666 const char *n = (DECL_NAME (decl)
1667 ? IDENTIFIER_POINTER (DECL_NAME (decl))
1668 : "<unknown>");
1669 fprintf (dump_file, "Unpoisoning shadow stack for variable: "
1670 "%s (%" PRId64 " B)\n", n, size);
1671 }
1672
1673 last_size += size & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1);
1674 }
1675 }
1676 last_size_aligned
1677 = ((last_size + (ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1678 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1679 }
1680 if (last_size_aligned)
1681 {
1682 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1683 (last_offset - prev_offset)
1684 >> ASAN_SHADOW_SHIFT);
1685 asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
1686 }
1687
1688 /* Clean up the set of instrumented stack variables and used labels. */
1689 delete asan_handled_variables;
1690 asan_handled_variables = NULL;
1691 delete asan_used_labels;
1692 asan_used_labels = NULL;
1693
1694 do_pending_stack_adjust ();
1695 if (lab)
1696 emit_label (lab);
1697
1698 insns = get_insns ();
1699 end_sequence ();
1700 return insns;
1701 }
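
/* For illustration only, a pseudo-code sketch of the epilogue sequence
   constructed above (names refer to the local variables of this function):

     if (orig_base == base)              // frame lived on the real stack
       goto ordinary;
     word at base = ASAN_STACK_RETIRED_MAGIC;
     if (frame shadow is small enough)
       fill it with ASAN_STACK_MAGIC_USE_AFTER_RET;    // keep it poisoned
     else
       __asan_stack_free_<class> (base, frame size, orig_base);
     goto done;
   ordinary:
     clear the shadow of the whole frame, red zones included;
   done:;

   The use-after-return path deliberately leaves the fake frame's shadow
   poisoned so that later accesses through dangling pointers are
   reported.  */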
1702
1703 /* Emit a __asan_allocas_unpoison (top, bot) call. The caller passes the
1704 frame base for BOT and virtual_stack_dynamic_rtx for TOP. BEFORE, if
1705 non-NULL, is the sequence to append to; otherwise a new one is started. */
1706
1707 rtx_insn *
1708 asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before)
1709 {
1710 if (before)
1711 push_to_sequence (before);
1712 else
1713 start_sequence ();
1714 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
1715 top = convert_memory_address (ptr_mode, top);
1716 bot = convert_memory_address (ptr_mode, bot);
1717 emit_library_call (ret, LCT_NORMAL, ptr_mode,
1718 top, ptr_mode, bot, ptr_mode);
1719
1720 do_pending_stack_adjust ();
1721 rtx_insn *insns = get_insns ();
1722 end_sequence ();
1723 return insns;
1724 }
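
/* For illustration, the sequence produced above consists of just the
   run-time call

     __asan_allocas_unpoison (top, bot);

   which asks the run-time library to unpoison the red zones of the
   allocas located between the two addresses.  */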
1725
1726 /* Return true if DECL, a global var, might be overridden and therefore
1727 needs a local alias. */
1728
1729 static bool
1730 asan_needs_local_alias (tree decl)
1731 {
1732 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1733 }
1734
1735 /* Return true if DECL, a global var, is an artificial ODR indicator symbol
1736 and therefore doesn't need protection. */
1737
1738 static bool
1739 is_odr_indicator (tree decl)
1740 {
1741 return (DECL_ARTIFICIAL (decl)
1742 && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
1743 }
1744
1745 /* Return true if DECL is a VAR_DECL that should be protected
1746 by Address Sanitizer, by appending a red zone with protected
1747 shadow memory after it and aligning it to at least
1748 ASAN_RED_ZONE_SIZE bytes. */
1749
1750 bool
1751 asan_protect_global (tree decl, bool ignore_decl_rtl_set_p)
1752 {
1753 if (!param_asan_globals)
1754 return false;
1755
1756 rtx rtl, symbol;
1757
1758 if (TREE_CODE (decl) == STRING_CST)
1759 {
1760 /* Instrument all STRING_CSTs except those created
1761 by asan_pp_string here. */
1762 if (shadow_ptr_types[0] != NULL_TREE
1763 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1764 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1765 return false;
1766 return true;
1767 }
1768 if (!VAR_P (decl)
1769 /* TLS vars aren't statically protectable. */
1770 || DECL_THREAD_LOCAL_P (decl)
1771 /* Externs will be protected elsewhere. */
1772 || DECL_EXTERNAL (decl)
1773 /* PR sanitizer/81697: For architectures that use section anchors, the
1774 first call to asan_protect_global may occur before DECL_RTL (decl) is
1775 set. We should ignore DECL_RTL_SET_P then, because otherwise the first
1776 call to asan_protect_global will return FALSE and subsequent calls on
1777 the same decl, made after DECL_RTL (decl) is set, will return TRUE,
1778 leaving us with an inconsistency at runtime. */
1779 || (!DECL_RTL_SET_P (decl) && !ignore_decl_rtl_set_p)
1780 /* Comdat vars pose an ABI problem, we can't know if
1781 the var that is selected by the linker will have
1782 padding or not. */
1783 || DECL_ONE_ONLY (decl)
1784 /* Similarly for common vars. People can use -fno-common.
1785 Note: the Linux kernel is built with -fno-common, so we do instrument
1786 globals there even in C. */
1787 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1788 /* Don't protect vars placed in a user-specified section: vars put
1789 into such a section from multiple TUs are often assumed to form
1790 an array of such vars, and inserting padding there would break
1791 that assumption. */
1792 || (DECL_SECTION_NAME (decl) != NULL
1793 && !symtab_node::get (decl)->implicit_section
1794 && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1795 || DECL_SIZE (decl) == 0
1796 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1797 || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1798 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1799 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1800 || TREE_TYPE (decl) == ubsan_get_source_location_type ()
1801 || is_odr_indicator (decl))
1802 return false;
1803
1804 if (!ignore_decl_rtl_set_p || DECL_RTL_SET_P (decl))
1805 {
1806
1807 rtl = DECL_RTL (decl);
1808 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1809 return false;
1810 symbol = XEXP (rtl, 0);
1811
1812 if (CONSTANT_POOL_ADDRESS_P (symbol)
1813 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1814 return false;
1815 }
1816
1817 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1818 return false;
1819
1820 if (!TARGET_SUPPORTS_ALIASES && asan_needs_local_alias (decl))
1821 return false;
1822
1823 return true;
1824 }
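
/* For illustration: a plain file-scope definition such as

     int buf[100];

   passes all of the checks above and gets a red zone, while e.g. a
   __thread variable, an extern declaration, a comdat or common symbol,
   or a var in a user-specified section is rejected for the reasons
   given inline.  */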
1825
1826 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1827 IS_STORE is either 1 (for a store) or 0 (for a load). */
1828
1829 static tree
1830 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1831 int *nargs)
1832 {
1833 static enum built_in_function report[2][2][6]
1834 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1835 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1836 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1837 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1838 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1839 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1840 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1841 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1842 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1843 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1844 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1845 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1846 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1847 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1848 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1849 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1850 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1851 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1852 if (size_in_bytes == -1)
1853 {
1854 *nargs = 2;
1855 return builtin_decl_implicit (report[recover_p][is_store][5]);
1856 }
1857 *nargs = 1;
1858 int size_log2 = exact_log2 (size_in_bytes);
1859 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1860 }
1861
1862 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1863 IS_STORE is either 1 (for a store) or 0 (for a load). */
1864
1865 static tree
1866 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1867 int *nargs)
1868 {
1869 static enum built_in_function check[2][2][6]
1870 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1871 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1872 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1873 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1874 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1875 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1876 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1877 BUILT_IN_ASAN_LOAD2_NOABORT,
1878 BUILT_IN_ASAN_LOAD4_NOABORT,
1879 BUILT_IN_ASAN_LOAD8_NOABORT,
1880 BUILT_IN_ASAN_LOAD16_NOABORT,
1881 BUILT_IN_ASAN_LOADN_NOABORT },
1882 { BUILT_IN_ASAN_STORE1_NOABORT,
1883 BUILT_IN_ASAN_STORE2_NOABORT,
1884 BUILT_IN_ASAN_STORE4_NOABORT,
1885 BUILT_IN_ASAN_STORE8_NOABORT,
1886 BUILT_IN_ASAN_STORE16_NOABORT,
1887 BUILT_IN_ASAN_STOREN_NOABORT } } };
1888 if (size_in_bytes == -1)
1889 {
1890 *nargs = 2;
1891 return builtin_decl_implicit (check[recover_p][is_store][5]);
1892 }
1893 *nargs = 1;
1894 int size_log2 = exact_log2 (size_in_bytes);
1895 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1896 }
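
/* A worked example of the table lookups above: for a known access size
   the last index is exact_log2 of the size, so a recovering 8-byte store
   maps to

     check[recover_p = 1][is_store = 1][exact_log2 (8) = 3]
       = BUILT_IN_ASAN_STORE8_NOABORT

   while an unknown size (-1) selects the _N variant and sets *NARGS to 2
   so that the length is passed as an extra argument.  */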
1897
1898 /* Split the current basic block and create a condition statement
1899 insertion point right before or after the statement pointed to by
1900 ITER. Return an iterator to the point at which the caller might
1901 safely insert the condition statement.
1902
1903 THEN_BLOCK must be set to the address of an uninitialized instance
1904 of basic_block. The function will then set *THEN_BLOCK to the
1905 'then block' of the condition statement to be inserted by the
1906 caller.
1907
1908 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1909 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1910
1911 Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1912 block' of the condition statement to be inserted by the caller.
1913
1914 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1915 statements starting from *ITER, and *THEN_BLOCK is a new empty
1916 block.
1917
1918 *ITER is adjusted to always point to the first statement
1919 of the basic block *FALLTHROUGH_BLOCK. That statement is the
1920 same as what ITER was pointing to prior to calling this function,
1921 if BEFORE_P is true; otherwise, it is its following statement. */
1922
1923 gimple_stmt_iterator
1924 create_cond_insert_point (gimple_stmt_iterator *iter,
1925 bool before_p,
1926 bool then_more_likely_p,
1927 bool create_then_fallthru_edge,
1928 basic_block *then_block,
1929 basic_block *fallthrough_block)
1930 {
1931 gimple_stmt_iterator gsi = *iter;
1932
1933 if (!gsi_end_p (gsi) && before_p)
1934 gsi_prev (&gsi);
1935
1936 basic_block cur_bb = gsi_bb (*iter);
1937
1938 edge e = split_block (cur_bb, gsi_stmt (gsi));
1939
1940 /* Get a hold on the 'condition block', the 'then block' and the
1941 'else block'. */
1942 basic_block cond_bb = e->src;
1943 basic_block fallthru_bb = e->dest;
1944 basic_block then_bb = create_empty_bb (cond_bb);
1945 if (current_loops)
1946 {
1947 add_bb_to_loop (then_bb, cond_bb->loop_father);
1948 loops_state_set (LOOPS_NEED_FIXUP);
1949 }
1950
1951 /* Set up the newly created 'then block'. */
1952 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1953 profile_probability fallthrough_probability
1954 = then_more_likely_p
1955 ? profile_probability::very_unlikely ()
1956 : profile_probability::very_likely ();
1957 e->probability = fallthrough_probability.invert ();
1958 then_bb->count = e->count ();
1959 if (create_then_fallthru_edge)
1960 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1961
1962 /* Set up the fallthrough basic block. */
1963 e = find_edge (cond_bb, fallthru_bb);
1964 e->flags = EDGE_FALSE_VALUE;
1965 e->probability = fallthrough_probability;
1966
1967 /* Update dominance info for the newly created then_bb; note that
1968 fallthru_bb's dominance info has already been updated by
1969 split_block. */
1970 if (dom_info_available_p (CDI_DOMINATORS))
1971 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1972
1973 *then_block = then_bb;
1974 *fallthrough_block = fallthru_bb;
1975 *iter = gsi_start_bb (fallthru_bb);
1976
1977 return gsi_last_bb (cond_bb);
1978 }
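
/* For illustration, the CFG shape created above looks like this
   (assuming CREATE_THEN_FALLTHRU_EDGE is true):

               cond_bb
          true /      \ false
              v        v
        then_bb -----> fallthru_bb

   cond_bb ends at the returned insertion point, then_bb is a new empty
   block ready for the caller's instrumentation code, and fallthru_bb
   starts with the statement *ITER now points to.  */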
1979
1980 /* Insert an if condition followed by a 'then block' right before the
1981 statement pointed to by ITER. The fallthrough block -- which is the
1982 else block of the condition as well as the destination of the
1983 outgoing edge of the 'then block' -- starts with the statement
1984 pointed to by ITER.
1985
1986 COND is the condition of the if.
1987
1988 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1989 'then block' is higher than the probability of the edge to the
1990 fallthrough block.
1991
1992 Upon completion of the function, *THEN_BB is set to the newly
1993 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1994 fallthrough block.
1995
1996 *ITER is adjusted to still point to the same statement it was
1997 pointing to initially. */
1998
1999 static void
2000 insert_if_then_before_iter (gcond *cond,
2001 gimple_stmt_iterator *iter,
2002 bool then_more_likely_p,
2003 basic_block *then_bb,
2004 basic_block *fallthrough_bb)
2005 {
2006 gimple_stmt_iterator cond_insert_point =
2007 create_cond_insert_point (iter,
2008 /*before_p=*/true,
2009 then_more_likely_p,
2010 /*create_then_fallthru_edge=*/true,
2011 then_bb,
2012 fallthrough_bb);
2013 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
2014 }
2015
2016 /* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
2017 If RETURN_ADDRESS is set to true, return the memory location instead
2018 of the value in the shadow memory. */
2019
2020 static tree
2021 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
2022 tree base_addr, tree shadow_ptr_type,
2023 bool return_address = false)
2024 {
2025 tree t, uintptr_type = TREE_TYPE (base_addr);
2026 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2027 gimple *g;
2028
2029 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
2030 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
2031 base_addr, t);
2032 gimple_set_location (g, location);
2033 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2034
2035 t = build_int_cst (uintptr_type, asan_shadow_offset ());
2036 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
2037 gimple_assign_lhs (g), t);
2038 gimple_set_location (g, location);
2039 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2040
2041 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
2042 gimple_assign_lhs (g));
2043 gimple_set_location (g, location);
2044 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2045
2046 if (!return_address)
2047 {
2048 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
2049 build_int_cst (shadow_ptr_type, 0));
2050 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
2051 gimple_set_location (g, location);
2052 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2053 }
2054
2055 return gimple_assign_lhs (g);
2056 }
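
/* A minimal sketch of the address computation emitted above, assuming
   the typical x86_64 values ASAN_SHADOW_SHIFT == 3 and
   asan_shadow_offset () == 0x7fff8000 (both are target-dependent):

     uintptr_t shadow_addr = (base_addr >> 3) + 0x7fff8000;
     signed char shadow_val = *(signed char *) shadow_addr;

   i.e. one shadow byte describes one 8-byte granule of application
   memory.  */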
2057
2058 /* BASE can already be an SSA_NAME; in that case, do not create a
2059 new SSA_NAME for it. */
2060
2061 static tree
2062 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
2063 bool before_p)
2064 {
2065 STRIP_USELESS_TYPE_CONVERSION (base);
2066 if (TREE_CODE (base) == SSA_NAME)
2067 return base;
2068 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)), base);
2069 gimple_set_location (g, loc);
2070 if (before_p)
2071 gsi_insert_before (iter, g, GSI_SAME_STMT);
2072 else
2073 gsi_insert_after (iter, g, GSI_NEW_STMT);
2074 return gimple_assign_lhs (g);
2075 }
2076
2077 /* LEN can already have the necessary size and precision;
2078 in that case, do not create a new variable. */
2079
2080 tree
2081 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
2082 bool before_p)
2083 {
2084 if (ptrofftype_p (len))
2085 return len;
2086 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2087 NOP_EXPR, len);
2088 gimple_set_location (g, loc);
2089 if (before_p)
2090 gsi_insert_before (iter, g, GSI_SAME_STMT);
2091 else
2092 gsi_insert_after (iter, g, GSI_NEW_STMT);
2093 return gimple_assign_lhs (g);
2094 }
2095
2096 /* Instrument the memory access whose base address is BASE. Insert new
2097 statements before or after ITER.
2098
2099 Note that the memory access represented by BASE can be either an
2100 SSA_NAME, or a non-SSA expression. LOC is the source code
2101 location. SIZE_IN_BYTES is the access size if it is known at compile
2102 time, and -1 otherwise; in the latter case LEN must be non-NULL and
2103 give the access length. IS_STORE is TRUE for a store, FALSE for a
2104 load. BEFORE_P is TRUE for inserting the instrumentation code before
2105 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
2106 for a scalar memory access and FALSE for a memory region access.
2107 IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have a
2108 non-zero length. ALIGN gives the alignment of the accessed memory
2109 object.
2110
2111 If BEFORE_P is TRUE, *ITER is arranged to still point to the
2112 statement it was pointing to prior to calling this function,
2113 otherwise, it points to the statement logically following it. */
2114
2115 static void
2116 build_check_stmt (location_t loc, tree base, tree len,
2117 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
2118 bool is_non_zero_len, bool before_p, bool is_store,
2119 bool is_scalar_access, unsigned int align = 0)
2120 {
2121 gimple_stmt_iterator gsi = *iter;
2122 gimple *g;
2123
2124 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
2125
2128 base = unshare_expr (base);
2129 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
2130
2131 if (len)
2132 {
2133 len = unshare_expr (len);
2134 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
2135 }
2136 else
2137 {
2138 gcc_assert (size_in_bytes != -1);
2139 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
2140 }
2141
2142 if (size_in_bytes > 1)
2143 {
2144 if ((size_in_bytes & (size_in_bytes - 1)) != 0
2145 || size_in_bytes > 16)
2146 is_scalar_access = false;
2147 else if (align && align < size_in_bytes * BITS_PER_UNIT)
2148 {
2149 /* On non-strict-alignment targets, a
2150 16-byte access that is only 8-byte aligned
2151 results in a misaligned 2-byte shadow
2152 memory load, but otherwise the check can
2153 be handled using one read. */
2154 if (size_in_bytes != 16
2155 || STRICT_ALIGNMENT
2156 || align < 8 * BITS_PER_UNIT)
2157 is_scalar_access = false;
2158 }
2159 }
2160
2161 HOST_WIDE_INT flags = 0;
2162 if (is_store)
2163 flags |= ASAN_CHECK_STORE;
2164 if (is_non_zero_len)
2165 flags |= ASAN_CHECK_NON_ZERO_LEN;
2166 if (is_scalar_access)
2167 flags |= ASAN_CHECK_SCALAR_ACCESS;
2168
2169 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
2170 build_int_cst (integer_type_node, flags),
2171 base, len,
2172 build_int_cst (integer_type_node,
2173 align / BITS_PER_UNIT));
2174 gimple_set_location (g, loc);
2175 if (before_p)
2176 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
2177 else
2178 {
2179 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2180 gsi_next (&gsi);
2181 *iter = gsi;
2182 }
2183 }
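
/* For illustration, an aligned 4-byte store to p_2 results in a single
   internal call roughly like

     ASAN_CHECK (7, p_2, 4, 4);

   where 7 == ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
   | ASAN_CHECK_SCALAR_ACCESS, and the trailing arguments are the length
   and the byte alignment.  The call is expanded later by
   asan_expand_check_ifn, either inline or into an __asan_store4
   callback.  */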
2184
2185 /* If T represents a memory access, add instrumentation code before ITER.
2186 LOCATION is source code location.
2187 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
2188
2189 static void
2190 instrument_derefs (gimple_stmt_iterator *iter, tree t,
2191 location_t location, bool is_store)
2192 {
2193 if (is_store && !param_asan_instrument_writes)
2194 return;
2195 if (!is_store && !param_asan_instrument_reads)
2196 return;
2197
2198 tree type, base;
2199 HOST_WIDE_INT size_in_bytes;
2200 if (location == UNKNOWN_LOCATION)
2201 location = EXPR_LOCATION (t);
2202
2203 type = TREE_TYPE (t);
2204 switch (TREE_CODE (t))
2205 {
2206 case ARRAY_REF:
2207 case COMPONENT_REF:
2208 case INDIRECT_REF:
2209 case MEM_REF:
2210 case VAR_DECL:
2211 case BIT_FIELD_REF:
2212 break;
2214 default:
2215 return;
2216 }
2217
2218 size_in_bytes = int_size_in_bytes (type);
2219 if (size_in_bytes <= 0)
2220 return;
2221
2222 poly_int64 bitsize, bitpos;
2223 tree offset;
2224 machine_mode mode;
2225 int unsignedp, reversep, volatilep = 0;
2226 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
2227 &unsignedp, &reversep, &volatilep);
2228
2229 if (TREE_CODE (t) == COMPONENT_REF
2230 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
2231 {
2232 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
2233 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
2234 TREE_OPERAND (t, 0), repr,
2235 TREE_OPERAND (t, 2)),
2236 location, is_store);
2237 return;
2238 }
2239
2240 if (!multiple_p (bitpos, BITS_PER_UNIT)
2241 || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
2242 return;
2243
2244 if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
2245 return;
2246
2247 poly_int64 decl_size;
2248 if (VAR_P (inner)
2249 && offset == NULL_TREE
2250 && DECL_SIZE (inner)
2251 && poly_int_tree_p (DECL_SIZE (inner), &decl_size)
2252 && known_subrange_p (bitpos, bitsize, 0, decl_size))
2253 {
2254 if (DECL_THREAD_LOCAL_P (inner))
2255 return;
2256 if (!param_asan_globals && is_global_var (inner))
2257 return;
2258 if (!TREE_STATIC (inner))
2259 {
2260 /* Automatic vars in the current function will always be
2261 accessible. */
2262 if (decl_function_context (inner) == current_function_decl
2263 && (!asan_sanitize_use_after_scope ()
2264 || !TREE_ADDRESSABLE (inner)))
2265 return;
2266 }
2267 /* Always instrument external vars, they might be dynamically
2268 initialized. */
2269 else if (!DECL_EXTERNAL (inner))
2270 {
2271 /* For static vars, if they are known not to be dynamically
2272 initialized, they will always be accessible. */
2273 varpool_node *vnode = varpool_node::get (inner);
2274 if (vnode && !vnode->dynamically_initialized)
2275 return;
2276 }
2277 }
2278
2279 base = build_fold_addr_expr (t);
2280 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
2281 {
2282 unsigned int align = get_object_alignment (t);
2283 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
2284 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
2285 is_store, /*is_scalar_access*/true, align);
2286 update_mem_ref_hash_table (base, size_in_bytes);
2287 update_mem_ref_hash_table (t, size_in_bytes);
2288 }
2289
2290 }
2291
2292 /* Insert a memory reference into the hash table if the access length
2293 can be determined at compile time. */
2294
2295 static void
2296 maybe_update_mem_ref_hash_table (tree base, tree len)
2297 {
2298 if (!POINTER_TYPE_P (TREE_TYPE (base))
2299 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
2300 return;
2301
2302 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2303
2304 if (size_in_bytes != -1)
2305 update_mem_ref_hash_table (base, size_in_bytes);
2306 }
2307
2308 /* Instrument an access to a contiguous memory region that starts at
2309 the address pointed to by BASE, over a length of LEN (expressed in
2310 units of sizeof (*BASE) bytes). ITER points to the instruction before
2311 which the instrumentation instructions must be inserted. LOCATION
2312 is the source location that the instrumentation instructions must
2313 have. If IS_STORE is true, then the memory access is a store;
2314 otherwise, it's a load. */
2315
2316 static void
2317 instrument_mem_region_access (tree base, tree len,
2318 gimple_stmt_iterator *iter,
2319 location_t location, bool is_store)
2320 {
2321 if (!POINTER_TYPE_P (TREE_TYPE (base))
2322 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
2323 || integer_zerop (len))
2324 return;
2325
2326 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2327
2328 if ((size_in_bytes == -1)
2329 || !has_mem_ref_been_instrumented (base, size_in_bytes))
2330 {
2331 build_check_stmt (location, base, len, size_in_bytes, iter,
2332 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
2333 is_store, /*is_scalar_access*/false, /*align*/0);
2334 }
2335
2336 maybe_update_mem_ref_hash_table (base, len);
2337 *iter = gsi_for_stmt (gsi_stmt (*iter));
2338 }
2339
2340 /* Instrument the call to a built-in memory access function that is
2341 pointed to by the iterator ITER.
2342
2343 Upon completion, return TRUE iff *ITER has been advanced to the
2344 statement following the one it was originally pointing to. */
2345
2346 static bool
2347 instrument_builtin_call (gimple_stmt_iterator *iter)
2348 {
2349 if (!param_asan_memintrin)
2350 return false;
2351
2352 bool iter_advanced_p = false;
2353 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
2354
2355 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
2356
2357 location_t loc = gimple_location (call);
2358
2359 asan_mem_ref src0, src1, dest;
2360 asan_mem_ref_init (&src0, NULL, 1);
2361 asan_mem_ref_init (&src1, NULL, 1);
2362 asan_mem_ref_init (&dest, NULL, 1);
2363
2364 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
2365 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
2366 dest_is_deref = false, intercepted_p = true;
2367
2368 if (get_mem_refs_of_builtin_call (call,
2369 &src0, &src0_len, &src0_is_store,
2370 &src1, &src1_len, &src1_is_store,
2371 &dest, &dest_len, &dest_is_store,
2372 &dest_is_deref, &intercepted_p, iter))
2373 {
2374 if (dest_is_deref)
2375 {
2376 instrument_derefs (iter, dest.start, loc, dest_is_store);
2377 gsi_next (iter);
2378 iter_advanced_p = true;
2379 }
2380 else if (!intercepted_p
2381 && (src0_len || src1_len || dest_len))
2382 {
2383 if (src0.start != NULL_TREE)
2384 instrument_mem_region_access (src0.start, src0_len,
2385 iter, loc, /*is_store=*/false);
2386 if (src1.start != NULL_TREE)
2387 instrument_mem_region_access (src1.start, src1_len,
2388 iter, loc, /*is_store=*/false);
2389 if (dest.start != NULL_TREE)
2390 instrument_mem_region_access (dest.start, dest_len,
2391 iter, loc, /*is_store=*/true);
2392
2393 *iter = gsi_for_stmt (call);
2394 gsi_next (iter);
2395 iter_advanced_p = true;
2396 }
2397 else
2398 {
2399 if (src0.start != NULL_TREE)
2400 maybe_update_mem_ref_hash_table (src0.start, src0_len);
2401 if (src1.start != NULL_TREE)
2402 maybe_update_mem_ref_hash_table (src1.start, src1_len);
2403 if (dest.start != NULL_TREE)
2404 maybe_update_mem_ref_hash_table (dest.start, dest_len);
2405 }
2406 }
2407 return iter_advanced_p;
2408 }
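
/* For illustration: for a builtin that reads or writes a memory region
   and is not intercepted by the run-time library, the code above emits a
   read check over [src, src + len) and/or a write check over
   [dest, dest + len) before the call.  For an intercepted builtin such
   as memcpy the checking happens inside libasan itself, so we only
   record the references in the local hash table.  */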
2409
2410 /* Instrument the assignment statement pointed to by ITER if it is
2411 subject to instrumentation. Return TRUE iff instrumentation actually
2412 happened. In that case, the iterator ITER is advanced to the next
2413 logical expression following the one initially pointed to by ITER,
2414 and the memory reference whose access has been instrumented is
2415 added to the memory references hash table. */
2416
2417 static bool
2418 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2419 {
2420 gimple *s = gsi_stmt (*iter);
2421
2422 gcc_assert (gimple_assign_single_p (s));
2423
2424 tree ref_expr = NULL_TREE;
2425 bool is_store, is_instrumented = false;
2426
2427 if (gimple_store_p (s))
2428 {
2429 ref_expr = gimple_assign_lhs (s);
2430 is_store = true;
2431 instrument_derefs (iter, ref_expr,
2432 gimple_location (s),
2433 is_store);
2434 is_instrumented = true;
2435 }
2436
2437 if (gimple_assign_load_p (s))
2438 {
2439 ref_expr = gimple_assign_rhs1 (s);
2440 is_store = false;
2441 instrument_derefs (iter, ref_expr,
2442 gimple_location (s),
2443 is_store);
2444 is_instrumented = true;
2445 }
2446
2447 if (is_instrumented)
2448 gsi_next (iter);
2449
2450 return is_instrumented;
2451 }
2452
2453 /* Instrument the function call pointed to by the iterator ITER, if it
2454 is subject to instrumentation. At the moment, the only function
2455 calls that are instrumented are some built-in functions that access
2456 memory. Look at instrument_builtin_call to learn more.
2457
2458 Upon completion return TRUE iff *ITER was advanced to the statement
2459 following the one it was originally pointing to. */
2460
2461 static bool
2462 maybe_instrument_call (gimple_stmt_iterator *iter)
2463 {
2464 gimple *stmt = gsi_stmt (*iter);
2465 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2466
2467 if (is_builtin && instrument_builtin_call (iter))
2468 return true;
2469
2470 if (gimple_call_noreturn_p (stmt))
2471 {
2472 if (is_builtin)
2473 {
2474 tree callee = gimple_call_fndecl (stmt);
2475 switch (DECL_FUNCTION_CODE (callee))
2476 {
2477 case BUILT_IN_UNREACHABLE:
2478 case BUILT_IN_TRAP:
2479 /* Don't instrument these. */
2480 return false;
2481 default:
2482 break;
2483 }
2484 }
2485 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2486 gimple *g = gimple_build_call (decl, 0);
2487 gimple_set_location (g, gimple_location (stmt));
2488 gsi_insert_before (iter, g, GSI_SAME_STMT);
2489 }
2490
2491 bool instrumented = false;
2492 if (gimple_store_p (stmt))
2493 {
2494 tree ref_expr = gimple_call_lhs (stmt);
2495 instrument_derefs (iter, ref_expr,
2496 gimple_location (stmt),
2497 /*is_store=*/true);
2498
2499 instrumented = true;
2500 }
2501
2502 /* Walk through the gimple_call arguments and check them if needed. */
2503 unsigned args_num = gimple_call_num_args (stmt);
2504 for (unsigned i = 0; i < args_num; ++i)
2505 {
2506 tree arg = gimple_call_arg (stmt, i);
2507 /* If ARG is not a non-aggregate register variable, the compiler in general
2508 creates a temporary for it and passes that as an argument to the gimple
2509 call. But in some cases, e.g. when we pass a small structure by value
2510 that fits in a register, the compiler can avoid that overhead by eliding
2511 the temporaries. In that case, we should check the argument. */
2512 if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
2513 {
2514 instrument_derefs (iter, arg,
2515 gimple_location (stmt),
2516 /*is_store=*/false);
2517 instrumented = true;
2518 }
2519 }
2520 if (instrumented)
2521 gsi_next (iter);
2522 return instrumented;
2523 }
2524
2525 /* Walk each instruction of all basic blocks and instrument those that
2526 represent memory references: loads, stores, or function calls.
2527 In a given basic block, this function avoids instrumenting memory
2528 references that have already been instrumented. */
2529
2530 static void
2531 transform_statements (void)
2532 {
2533 basic_block bb, last_bb = NULL;
2534 gimple_stmt_iterator i;
2535 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2536
2537 FOR_EACH_BB_FN (bb, cfun)
2538 {
2539 basic_block prev_bb = bb;
2540
2541 if (bb->index >= saved_last_basic_block) continue;
2542
2543 /* Flush the mem ref hash table if the current bb doesn't have
2544 exactly one predecessor, or if that predecessor (skipping
2545 over asan-created basic blocks) isn't the last processed
2546 basic block. Thus we effectively flush on extended basic
2547 block boundaries. */
2548 while (single_pred_p (prev_bb))
2549 {
2550 prev_bb = single_pred (prev_bb);
2551 if (prev_bb->index < saved_last_basic_block)
2552 break;
2553 }
2554 if (prev_bb != last_bb)
2555 empty_mem_ref_hash_table ();
2556 last_bb = bb;
2557
2558 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2559 {
2560 gimple *s = gsi_stmt (i);
2561
2562 if (has_stmt_been_instrumented_p (s))
2563 gsi_next (&i);
2564 else if (gimple_assign_single_p (s)
2565 && !gimple_clobber_p (s)
2566 && maybe_instrument_assignment (&i))
2567 /* Nothing to do as maybe_instrument_assignment advanced
2568 the iterator I. */;
2569 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2570 /* Nothing to do as maybe_instrument_call
2571 advanced the iterator I. */;
2572 else
2573 {
2574 /* No instrumentation happened.
2575
2576 If the current instruction is a function call that
2577 might free something, let's forget about the memory
2578 references that got instrumented. Otherwise we might
2579 miss some instrumentation opportunities. Do the same
2580 for an ASAN_MARK poisoning internal function. */
2581 if (is_gimple_call (s)
2582 && (!nonfreeing_call_p (s)
2583 || asan_mark_p (s, ASAN_MARK_POISON)))
2584 empty_mem_ref_hash_table ();
2585
2586 gsi_next (&i);
2587 }
2588 }
2589 }
2590 free_mem_ref_resources ();
2591 }
2592
2593 /* Build
2594 __asan_before_dynamic_init (module_name)
2595 or
2596 __asan_after_dynamic_init ()
2597 call. */
2598
2599 tree
2600 asan_dynamic_init_call (bool after_p)
2601 {
2602 if (shadow_ptr_types[0] == NULL_TREE)
2603 asan_init_shadow_ptr_types ();
2604
2605 tree fn = builtin_decl_implicit (after_p
2606 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2607 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2608 tree module_name_cst = NULL_TREE;
2609 if (!after_p)
2610 {
2611 pretty_printer module_name_pp;
2612 pp_string (&module_name_pp, main_input_filename);
2613
2614 module_name_cst = asan_pp_string (&module_name_pp);
2615 module_name_cst = fold_convert (const_ptr_type_node,
2616 module_name_cst);
2617 }
2618
2619 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2620 }
2621
2622 /* Build
2623 struct __asan_global
2624 {
2625 const void *__beg;
2626 uptr __size;
2627 uptr __size_with_redzone;
2628 const void *__name;
2629 const void *__module_name;
2630 uptr __has_dynamic_init;
2631 __asan_global_source_location *__location;
2632 char *__odr_indicator;
2633 } type. */
2634
2635 static tree
2636 asan_global_struct (void)
2637 {
2638 static const char *field_names[]
2639 = { "__beg", "__size", "__size_with_redzone",
2640 "__name", "__module_name", "__has_dynamic_init", "__location",
2641 "__odr_indicator" };
2642 tree fields[ARRAY_SIZE (field_names)], ret;
2643 unsigned i;
2644
2645 ret = make_node (RECORD_TYPE);
2646 for (i = 0; i < ARRAY_SIZE (field_names); i++)
2647 {
2648 fields[i]
2649 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2650 get_identifier (field_names[i]),
2651 (i == 0 || i == 3) ? const_ptr_type_node
2652 : pointer_sized_int_node);
2653 DECL_CONTEXT (fields[i]) = ret;
2654 if (i)
2655 DECL_CHAIN (fields[i - 1]) = fields[i];
2656 }
2657 tree type_decl = build_decl (input_location, TYPE_DECL,
2658 get_identifier ("__asan_global"), ret);
2659 DECL_IGNORED_P (type_decl) = 1;
2660 DECL_ARTIFICIAL (type_decl) = 1;
2661 TYPE_FIELDS (ret) = fields[0];
2662 TYPE_NAME (ret) = type_decl;
2663 TYPE_STUB_DECL (ret) = type_decl;
2664 TYPE_ARTIFICIAL (ret) = 1;
2665 layout_type (ret);
2666 return ret;
2667 }
2668
2669 /* Create and return an ODR indicator symbol for DECL.
2670 TYPE is __asan_global struct type as returned by asan_global_struct. */
2671
2672 static tree
2673 create_odr_indicator (tree decl, tree type)
2674 {
2675 char *name;
2676 tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2677 tree decl_name
2678 = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
2679 : DECL_NAME (decl));
2680 /* DECL_NAME theoretically might be NULL. Bail out with 0 in this case. */
2681 if (decl_name == NULL_TREE)
2682 return build_int_cst (uptr, 0);
2683 const char *dname = IDENTIFIER_POINTER (decl_name);
2684 if (HAS_DECL_ASSEMBLER_NAME_P (decl))
2685 dname = targetm.strip_name_encoding (dname);
2686 size_t len = strlen (dname) + sizeof ("__odr_asan_");
2687 name = XALLOCAVEC (char, len);
2688 snprintf (name, len, "__odr_asan_%s", dname);
2689 #ifndef NO_DOT_IN_LABEL
2690 name[sizeof ("__odr_asan") - 1] = '.';
2691 #elif !defined(NO_DOLLAR_IN_LABEL)
2692 name[sizeof ("__odr_asan") - 1] = '$';
2693 #endif
2694 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
2695 char_type_node);
2696 TREE_ADDRESSABLE (var) = 1;
2697 TREE_READONLY (var) = 0;
2698 TREE_THIS_VOLATILE (var) = 1;
2699 DECL_ARTIFICIAL (var) = 1;
2700 DECL_IGNORED_P (var) = 1;
2701 TREE_STATIC (var) = 1;
2702 TREE_PUBLIC (var) = 1;
2703 DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
2704 DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);
2705
2706 TREE_USED (var) = 1;
2707 tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
2708 build_int_cst (unsigned_type_node, 0));
2709 TREE_CONSTANT (ctor) = 1;
2710 TREE_STATIC (ctor) = 1;
2711 DECL_INITIAL (var) = ctor;
2712 DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
2713 NULL, DECL_ATTRIBUTES (var));
2714 make_decl_rtl (var);
2715 varpool_node::finalize_decl (var);
2716 return fold_convert (uptr, build_fold_addr_expr (var));
2717 }
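
/* For illustration: for a public variable "foo" this emits a one-byte
   volatile global named "__odr_asan.foo" (with '$' or '_' substituted
   for the '.' on targets that disallow dots in labels); the run-time
   library compares the registered indicator addresses to detect
   one-definition-rule violations between modules.  */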
2718
2719 /* Return true if DECL, a global var, might be overridden and needs
2720 an additional odr indicator symbol. */
2721
2722 static bool
2723 asan_needs_odr_indicator_p (tree decl)
2724 {
2725 /* Don't emit ODR indicators for the kernel because:
2726 a) The kernel is written in C and thus doesn't need ODR indicators.
2727 b) Some kernel code may have assumptions about symbols containing specific
2728 patterns in their names. Since ODR indicators contain original names
2729 of symbols they are emitted for, these assumptions would be broken for
2730 ODR indicator symbols. */
2731 return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
2732 && !DECL_ARTIFICIAL (decl)
2733 && !DECL_WEAK (decl)
2734 && TREE_PUBLIC (decl));
2735 }
2736
2737 /* Append description of a single global DECL into vector V.
2738 TYPE is __asan_global struct type as returned by asan_global_struct. */
2739
2740 static void
2741 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2742 {
2743 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2744 unsigned HOST_WIDE_INT size;
2745 tree str_cst, module_name_cst, refdecl = decl;
2746 vec<constructor_elt, va_gc> *vinner = NULL;
2747
2748 pretty_printer asan_pp, module_name_pp;
2749
2750 if (DECL_NAME (decl))
2751 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2752 else
2753 pp_string (&asan_pp, "<unknown>");
2754 str_cst = asan_pp_string (&asan_pp);
2755
2756 pp_string (&module_name_pp, main_input_filename);
2757 module_name_cst = asan_pp_string (&module_name_pp);
2758
2759 if (asan_needs_local_alias (decl))
2760 {
2761 char buf[20];
2762 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2763 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2764 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2765 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2766 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2767 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2768 DECL_NOT_GIMPLE_REG_P (refdecl) = DECL_NOT_GIMPLE_REG_P (decl);
2769 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2770 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2771 TREE_STATIC (refdecl) = 1;
2772 TREE_PUBLIC (refdecl) = 0;
2773 TREE_USED (refdecl) = 1;
2774 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2775 }
2776
2777 tree odr_indicator_ptr
2778 = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
2779 : build_int_cst (uptr, 0));
2780 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2781 fold_convert (const_ptr_type_node,
2782 build_fold_addr_expr (refdecl)));
2783 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2784 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2785 size += asan_red_zone_size (size);
2786 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2787 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2788 fold_convert (const_ptr_type_node, str_cst));
2789 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2790 fold_convert (const_ptr_type_node, module_name_cst));
2791 varpool_node *vnode = varpool_node::get (decl);
2792 int has_dynamic_init = 0;
2793 /* FIXME: Enable initialization order fiasco detection in LTO mode once
2794 a proper fix for PR 79061 is applied. */
2795 if (!in_lto_p)
2796 has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2797 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2798 build_int_cst (uptr, has_dynamic_init));
2799 tree locptr = NULL_TREE;
2800 location_t loc = DECL_SOURCE_LOCATION (decl);
2801 expanded_location xloc = expand_location (loc);
2802 if (xloc.file != NULL)
2803 {
2804 static int lasanloccnt = 0;
2805 char buf[25];
2806 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2807 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2808 ubsan_get_source_location_type ());
2809 TREE_STATIC (var) = 1;
2810 TREE_PUBLIC (var) = 0;
2811 DECL_ARTIFICIAL (var) = 1;
2812 DECL_IGNORED_P (var) = 1;
2813 pretty_printer filename_pp;
2814 pp_string (&filename_pp, xloc.file);
2815 tree str = asan_pp_string (&filename_pp);
2816 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2817 NULL_TREE, str, NULL_TREE,
2818 build_int_cst (unsigned_type_node,
2819 xloc.line), NULL_TREE,
2820 build_int_cst (unsigned_type_node,
2821 xloc.column));
2822 TREE_CONSTANT (ctor) = 1;
2823 TREE_STATIC (ctor) = 1;
2824 DECL_INITIAL (var) = ctor;
2825 varpool_node::finalize_decl (var);
2826 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2827 }
2828 else
2829 locptr = build_int_cst (uptr, 0);
2830 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2831 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
2832 init = build_constructor (type, vinner);
2833 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2834 }
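
/* For illustration, the record built above for a definition like
   "int g = 1;" in foo.c would look roughly like

     { &g, 4, 4 + asan_red_zone_size (4), "g", "foo.c",
       has_dynamic_init, &.LASANLOC1, odr_indicator }

   following the __asan_global layout shown before asan_global_struct
   (assuming a 4-byte int; the .LASANLOC1 symbol holds the source
   location).  */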
2835
2836 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2837 void
2838 initialize_sanitizer_builtins (void)
2839 {
2840 tree decl;
2841
2842 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2843 return;
2844
2845 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2846 tree BT_FN_VOID_PTR
2847 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2848 tree BT_FN_VOID_CONST_PTR
2849 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2850 tree BT_FN_VOID_PTR_PTR
2851 = build_function_type_list (void_type_node, ptr_type_node,
2852 ptr_type_node, NULL_TREE);
2853 tree BT_FN_VOID_PTR_PTR_PTR
2854 = build_function_type_list (void_type_node, ptr_type_node,
2855 ptr_type_node, ptr_type_node, NULL_TREE);
2856 tree BT_FN_VOID_PTR_PTRMODE
2857 = build_function_type_list (void_type_node, ptr_type_node,
2858 pointer_sized_int_node, NULL_TREE);
2859 tree BT_FN_VOID_INT
2860 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2861 tree BT_FN_SIZE_CONST_PTR_INT
2862 = build_function_type_list (size_type_node, const_ptr_type_node,
2863 integer_type_node, NULL_TREE);
2864
2865 tree BT_FN_VOID_UINT8_UINT8
2866 = build_function_type_list (void_type_node, unsigned_char_type_node,
2867 unsigned_char_type_node, NULL_TREE);
2868 tree BT_FN_VOID_UINT16_UINT16
2869 = build_function_type_list (void_type_node, uint16_type_node,
2870 uint16_type_node, NULL_TREE);
2871 tree BT_FN_VOID_UINT32_UINT32
2872 = build_function_type_list (void_type_node, uint32_type_node,
2873 uint32_type_node, NULL_TREE);
2874 tree BT_FN_VOID_UINT64_UINT64
2875 = build_function_type_list (void_type_node, uint64_type_node,
2876 uint64_type_node, NULL_TREE);
2877 tree BT_FN_VOID_FLOAT_FLOAT
2878 = build_function_type_list (void_type_node, float_type_node,
2879 float_type_node, NULL_TREE);
2880 tree BT_FN_VOID_DOUBLE_DOUBLE
2881 = build_function_type_list (void_type_node, double_type_node,
2882 double_type_node, NULL_TREE);
2883 tree BT_FN_VOID_UINT64_PTR
2884 = build_function_type_list (void_type_node, uint64_type_node,
2885 ptr_type_node, NULL_TREE);
2886
2887 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2888 tree BT_FN_IX_CONST_VPTR_INT[5];
2889 tree BT_FN_IX_VPTR_IX_INT[5];
2890 tree BT_FN_VOID_VPTR_IX_INT[5];
2891 tree vptr
2892 = build_pointer_type (build_qualified_type (void_type_node,
2893 TYPE_QUAL_VOLATILE));
2894 tree cvptr
2895 = build_pointer_type (build_qualified_type (void_type_node,
2896 TYPE_QUAL_VOLATILE
2897 |TYPE_QUAL_CONST));
2898 tree boolt
2899 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2900 int i;
2901 for (i = 0; i < 5; i++)
2902 {
2903 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2904 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2905 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2906 integer_type_node, integer_type_node,
2907 NULL_TREE);
2908 BT_FN_IX_CONST_VPTR_INT[i]
2909 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2910 BT_FN_IX_VPTR_IX_INT[i]
2911 = build_function_type_list (ix, vptr, ix, integer_type_node,
2912 NULL_TREE);
2913 BT_FN_VOID_VPTR_IX_INT[i]
2914 = build_function_type_list (void_type_node, vptr, ix,
2915 integer_type_node, NULL_TREE);
2916 }
2917 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2918 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2919 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2920 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2921 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2922 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2923 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2924 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2925 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2926 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2927 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2928 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2929 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2930 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2931 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2932 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2933 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2934 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2935 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2936 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2937 #undef ATTR_NOTHROW_LEAF_LIST
2938 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2939 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2940 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2941 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2942 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2943 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2944 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2945 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2946 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2947 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2948 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2949 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2950 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2951 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2952 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2953 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2954 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2955 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2956 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2957 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2958 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2959 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2960 #undef DEF_BUILTIN_STUB
2961 #define DEF_BUILTIN_STUB(ENUM, NAME)
2962 #undef DEF_SANITIZER_BUILTIN_1
2963 #define DEF_SANITIZER_BUILTIN_1(ENUM, NAME, TYPE, ATTRS) \
2964 do { \
2965 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2966 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2967 set_call_expr_flags (decl, ATTRS); \
2968 set_builtin_decl (ENUM, decl, true); \
2969 } while (0)
2970 #undef DEF_SANITIZER_BUILTIN
2971 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2972 DEF_SANITIZER_BUILTIN_1 (ENUM, NAME, TYPE, ATTRS);
2973
2974 #include "sanitizer.def"
2975
2976 /* -fsanitize=object-size uses __builtin_object_size, but that might
2977 not be available for e.g. Fortran at this point. We use
2978 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2979 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2980 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2981 DEF_SANITIZER_BUILTIN_1 (BUILT_IN_OBJECT_SIZE, "object_size",
2982 BT_FN_SIZE_CONST_PTR_INT,
2983 ATTR_PURE_NOTHROW_LEAF_LIST);
2984
2985 #undef DEF_SANITIZER_BUILTIN_1
2986 #undef DEF_SANITIZER_BUILTIN
2987 #undef DEF_BUILTIN_STUB
2988 }
2989
2990 /* Called via hash_table::traverse. Count the number of emitted
2991 STRING_CSTs in the constant hash table. */
2992
2993 int
2994 count_string_csts (constant_descriptor_tree **slot,
2995 unsigned HOST_WIDE_INT *data)
2996 {
2997 struct constant_descriptor_tree *desc = *slot;
2998 if (TREE_CODE (desc->value) == STRING_CST
2999 && TREE_ASM_WRITTEN (desc->value)
3000 && asan_protect_global (desc->value))
3001 ++*data;
3002 return 1;
3003 }
3004
3005 /* Helper structure to pass two parameters to
3006 add_string_csts. */
3007
3008 struct asan_add_string_csts_data
3009 {
3010 tree type;
3011 vec<constructor_elt, va_gc> *v;
3012 };
3013
3014 /* Called via hash_table::traverse. Call asan_add_global
3015 on emitted STRING_CSTs from the constant hash table. */
3016
3017 int
3018 add_string_csts (constant_descriptor_tree **slot,
3019 asan_add_string_csts_data *aascd)
3020 {
3021 struct constant_descriptor_tree *desc = *slot;
3022 if (TREE_CODE (desc->value) == STRING_CST
3023 && TREE_ASM_WRITTEN (desc->value)
3024 && asan_protect_global (desc->value))
3025 {
3026 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
3027 aascd->type, aascd->v);
3028 }
3029 return 1;
3030 }
3031
3032 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
3033 invoke ggc_collect. */
3034 static GTY(()) tree asan_ctor_statements;
3035
3036 /* Module-level instrumentation.
3037 - Insert __asan_init_vN() into the list of CTORs.
3038 - TODO: insert redzones around globals.
3039 */
3040
3041 void
3042 asan_finish_file (void)
3043 {
3044 varpool_node *vnode;
3045 unsigned HOST_WIDE_INT gcount = 0;
3046
3047 if (shadow_ptr_types[0] == NULL_TREE)
3048 asan_init_shadow_ptr_types ();
3049 /* Avoid instrumenting code in the asan ctors/dtors.
3050 We don't need to insert padding after the description strings,
3051 nor after the .LASAN* array. */
3052 flag_sanitize &= ~SANITIZE_ADDRESS;
3053
3054 /* For user-space we want the asan constructors to run first.
3055 The Linux kernel does not support priorities other than the default, and
3056 the only other user of constructors is coverage. So we run with the
3057 default priority. */
3058 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
3059 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
3060
3061 if (flag_sanitize & SANITIZE_USER_ADDRESS)
3062 {
3063 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
3064 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3065 fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
3066 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3067 }
3068 FOR_EACH_DEFINED_VARIABLE (vnode)
3069 if (TREE_ASM_WRITTEN (vnode->decl)
3070 && asan_protect_global (vnode->decl))
3071 ++gcount;
3072 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
3073 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
3074 (&gcount);
3075 if (gcount)
3076 {
3077 tree type = asan_global_struct (), var, ctor;
3078 tree dtor_statements = NULL_TREE;
3079 vec<constructor_elt, va_gc> *v;
3080 char buf[20];
3081
3082 type = build_array_type_nelts (type, gcount);
3083 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
3084 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
3085 type);
3086 TREE_STATIC (var) = 1;
3087 TREE_PUBLIC (var) = 0;
3088 DECL_ARTIFICIAL (var) = 1;
3089 DECL_IGNORED_P (var) = 1;
3090 vec_alloc (v, gcount);
3091 FOR_EACH_DEFINED_VARIABLE (vnode)
3092 if (TREE_ASM_WRITTEN (vnode->decl)
3093 && asan_protect_global (vnode->decl))
3094 asan_add_global (vnode->decl, TREE_TYPE (type), v);
3095 struct asan_add_string_csts_data aascd;
3096 aascd.type = TREE_TYPE (type);
3097 aascd.v = v;
3098 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
3099 (&aascd);
3100 ctor = build_constructor (type, v);
3101 TREE_CONSTANT (ctor) = 1;
3102 TREE_STATIC (ctor) = 1;
3103 DECL_INITIAL (var) = ctor;
3104 SET_DECL_ALIGN (var, MAX (DECL_ALIGN (var),
3105 ASAN_SHADOW_GRANULARITY * BITS_PER_UNIT));
3106
3107 varpool_node::finalize_decl (var);
3108
3109 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
3110 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
3111 append_to_statement_list (build_call_expr (fn, 2,
3112 build_fold_addr_expr (var),
3113 gcount_tree),
3114 &asan_ctor_statements);
3115
3116 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
3117 append_to_statement_list (build_call_expr (fn, 2,
3118 build_fold_addr_expr (var),
3119 gcount_tree),
3120 &dtor_statements);
3121 cgraph_build_static_cdtor ('D', dtor_statements, priority);
3122 }
3123 if (asan_ctor_statements)
3124 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
3125 flag_sanitize |= SANITIZE_ADDRESS;
3126 }
3127
3128 /* Poison or unpoison (depending on IS_CLOBBER) shadow memory based
3129 on the SHADOW address. Newly added statements are inserted at ITER with
3130 the given location LOC. We mark SIZE bytes in shadow memory, where
3131 LAST_CHUNK_SIZE is greater than zero when we are at the
3132 end of a variable. */
3133
3134 static void
3135 asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
3136 tree shadow,
3137 unsigned HOST_WIDE_INT base_addr_offset,
3138 bool is_clobber, unsigned size,
3139 unsigned last_chunk_size)
3140 {
3141 tree shadow_ptr_type;
3142
3143 switch (size)
3144 {
3145 case 1:
3146 shadow_ptr_type = shadow_ptr_types[0];
3147 break;
3148 case 2:
3149 shadow_ptr_type = shadow_ptr_types[1];
3150 break;
3151 case 4:
3152 shadow_ptr_type = shadow_ptr_types[2];
3153 break;
3154 default:
3155 gcc_unreachable ();
3156 }
3157
3158 unsigned char c = is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
3159 unsigned HOST_WIDE_INT val = 0;
3160 unsigned last_pos = size;
3161 if (last_chunk_size && !is_clobber)
3162 last_pos = BYTES_BIG_ENDIAN ? 0 : size - 1;
3163 for (unsigned i = 0; i < size; ++i)
3164 {
3165 unsigned char shadow_c = c;
3166 if (i == last_pos)
3167 shadow_c = last_chunk_size;
3168 val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
3169 }
3170
3171 /* Handle last chunk in unpoisoning. */
3172 tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
3173
3174 tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
3175 build_int_cst (shadow_ptr_type, base_addr_offset));
3176
3177 gimple *g = gimple_build_assign (dest, magic);
3178 gimple_set_location (g, loc);
3179 gsi_insert_after (iter, g, GSI_NEW_STMT);
3180 }
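
/* A worked example of the byte packing above: unpoisoning (IS_CLOBBER
   false) with SIZE == 4 and LAST_CHUNK_SIZE == 5 on a little-endian
   target produces the shadow bytes { 0, 0, 0, 5 }, i.e.
   VAL == 0x05000000: three fully addressable 8-byte granules followed by
   a granule whose first 5 bytes are valid.  */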
3181
3182 /* Expand the ASAN_MARK internal function calls. */
3183
3184 bool
3185 asan_expand_mark_ifn (gimple_stmt_iterator *iter)
3186 {
3187 gimple *g = gsi_stmt (*iter);
3188 location_t loc = gimple_location (g);
3189 HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
3190 bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
3191
3192 tree base = gimple_call_arg (g, 1);
3193 gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
3194 tree decl = TREE_OPERAND (base, 0);
3195
3196 /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
3197 if (TREE_CODE (decl) == COMPONENT_REF
3198 && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
3199 decl = TREE_OPERAND (decl, 0);
3200
3201 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
3202
3203 if (is_poison)
3204 {
3205 if (asan_handled_variables == NULL)
3206 asan_handled_variables = new hash_set<tree> (16);
3207 asan_handled_variables->add (decl);
3208 }
3209 tree len = gimple_call_arg (g, 2);
3210
3211 gcc_assert (tree_fits_shwi_p (len));
3212 unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
3213 gcc_assert (size_in_bytes);
3214
3215 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3216 NOP_EXPR, base);
3217 gimple_set_location (g, loc);
3218 gsi_replace (iter, g, false);
3219 tree base_addr = gimple_assign_lhs (g);
3220
3221 /* Emit the shadow stores directly if size_in_bytes is small. */
3222 if (size_in_bytes
3223 <= (unsigned)param_use_after_scope_direct_emission_threshold)
3224 {
3225 const unsigned HOST_WIDE_INT shadow_size
3226 = shadow_mem_size (size_in_bytes);
3227 const unsigned int shadow_align
3228 = (get_pointer_alignment (base) / BITS_PER_UNIT) >> ASAN_SHADOW_SHIFT;
3229
3230 tree shadow = build_shadow_mem_access (iter, loc, base_addr,
3231 shadow_ptr_types[0], true);
3232
3233 for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
3234 {
3235 unsigned size = 1;
3236 if (shadow_size - offset >= 4
3237 && (!STRICT_ALIGNMENT || shadow_align >= 4))
3238 size = 4;
3239 else if (shadow_size - offset >= 2
3240 && (!STRICT_ALIGNMENT || shadow_align >= 2))
3241 size = 2;
3242
3243 unsigned HOST_WIDE_INT last_chunk_size = 0;
3244 unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
3245 if (s > size_in_bytes)
3246 last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
3247
3248 asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
3249 size, last_chunk_size);
3250 offset += size;
3251 }
3252 }
3253 else
3254 {
3255 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3256 NOP_EXPR, len);
3257 gimple_set_location (g, loc);
3258 gsi_insert_before (iter, g, GSI_SAME_STMT);
3259 tree sz_arg = gimple_assign_lhs (g);
3260
3261 tree fun
3262 = builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
3263 : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
3264 g = gimple_build_call (fun, 2, base_addr, sz_arg);
3265 gimple_set_location (g, loc);
3266 gsi_insert_after (iter, g, GSI_NEW_STMT);
3267 }
3268
3269 return false;
3270 }
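
/* For illustration, assuming ASAN_SHADOW_GRANULARITY == 8 and a
   sufficiently aligned base: poisoning a 10-byte variable covers
   shadow_mem_size (10) == 2 shadow bytes, which the direct-emission path
   above writes with a single 2-byte store of the use-after-scope magic.
   Unpoisoning the same variable stores { 0, 2 } instead, the trailing 2
   recording that only 2 bytes of the last granule are addressable.  */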
3271
3272 /* Expand the ASAN_CHECK internal function emitted for loads and stores. */
3273
bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;

  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);
  HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));

  HOST_WIDE_INT size_in_bytes
    = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (use_calls)
    {
      /* Instrument using callbacks.  */
      gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                       NOP_EXPR, base);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree base_addr = gimple_assign_lhs (g);

      int nargs;
      tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
      if (nargs == 1)
        g = gimple_build_call (fun, 1, base_addr);
      else
        {
          gcc_assert (nargs == 2);
          g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   NOP_EXPR, len);
          gimple_set_location (g, loc);
          gsi_insert_before (iter, g, GSI_SAME_STMT);
          tree sz_arg = gimple_assign_lhs (g);
          g = gimple_build_call (fun, nargs, base_addr, sz_arg);
        }
      gimple_set_location (g, loc);
      gsi_replace (iter, g, false);
      return false;
    }
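
  /* E.g. (illustrative): a known 8-byte store becomes
     __asan_store8 (base_addr); a variable-length access takes the
     two-argument form __asan_storeN (base_addr, sz_arg).  When RECOVER_P,
     check_func is expected to select the corresponding _noabort
     variant.  */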

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* The length of the memory area to asan-protect is not known at
         compile time to be non-zero.  Guard the generated
         instrumentation code like:

         if (len != 0)
           {
             // asan instrumentation code goes here.
           }
         // fallthrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
                             len,
                             build_int_cst (TREE_TYPE (len), 0),
                             NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
                                  /*then_more_likely_p=*/true,
                                  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
         pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
         we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, /*before_p*/false,
                                  /*then_more_likely_p=*/false,
                                  /*create_then_fallthru_edge*/recover_p,
                                  &then_bb,
                                  &else_bb);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                           NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
                                             shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1-, 2- and 4-byte accesses.  */
      /* Test (shadow != 0)
         & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
                                             shadow_ptr_type);
      gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq seq = NULL;
      gimple_seq_add_stmt (&seq, shadow_test);
      /* Accesses aligned to at least 8 bytes can test just
         (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
         to be 0.  */
      if (align < 8)
        {
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
                                                   base_addr, 7));
          gimple_seq_add_stmt (&seq,
                               build_type_cast (shadow_type,
                                                gimple_seq_last (seq)));
          if (real_size_in_bytes > 1)
            gimple_seq_add_stmt (&seq,
                                 build_assign (PLUS_EXPR,
                                               gimple_seq_last (seq),
                                               real_size_in_bytes - 1));
          t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
        }
      else
        t = build_int_cst (shadow_type, real_size_in_bytes - 1);
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
                                               gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, loc);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

      /* For non-constant, misaligned or otherwise weird access sizes,
         check first and last byte.  */
      if (size_in_bytes == -1)
        {
          g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   MINUS_EXPR, len,
                                   build_int_cst (pointer_sized_int_node, 1));
          gimple_set_location (g, loc);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);
          tree last = gimple_assign_lhs (g);
          g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   PLUS_EXPR, base_addr, last);
          gimple_set_location (g, loc);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);
          tree base_end_addr = gimple_assign_lhs (g);

          tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
                                                 shadow_ptr_type);
          gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
          gimple_seq seq = NULL;
          gimple_seq_add_stmt (&seq, shadow_test);
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
                                                   base_end_addr, 7));
          gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
                                                      gimple_seq_last (seq)));
          gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
                                                   gimple_seq_last (seq),
                                                   shadow));
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
                                                   gimple_seq_last (seq)));
          gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
                                                   gimple_seq_last (seq)));
          t = gimple_assign_lhs (gimple_seq_last (seq));
          gimple_seq_set_location (seq, loc);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
        }
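
      /* When SIZE_IN_BYTES == -1, T now ORs the check of the first byte
         at BASE_ADDR with that of the last byte at BASE_END_ADDR
         == BASE_ADDR + LEN - 1; the bytes in between are not tested
         separately.  */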
    }

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
                         NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}

/* Create an ASAN shadow variable for a VAR_DECL which has been rewritten
   into SSA.  Already-seen VAR_DECLs are stored in SHADOW_VARS_MAPPING.  */

static tree
create_asan_shadow_var (tree var_decl,
                        hash_map<tree, tree> &shadow_vars_mapping)
{
  tree *slot = shadow_vars_mapping.get (var_decl);
  if (slot == NULL)
    {
      tree shadow_var = copy_node (var_decl);

      copy_body_data id;
      memset (&id, 0, sizeof (copy_body_data));
      id.src_fn = id.dst_fn = current_function_decl;
      copy_decl_for_dup_finish (&id, var_decl, shadow_var);

      DECL_ARTIFICIAL (shadow_var) = 1;
      DECL_IGNORED_P (shadow_var) = 1;
      DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
      gimple_add_tmp_var (shadow_var);

      shadow_vars_mapping.put (var_decl, shadow_var);
      return shadow_var;
    }
  else
    return *slot;
}
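
/* An illustrative note (our reading of the code above): since the shadow
   variable is copied from VAR_DECL, it keeps the original name and type,
   so a report triggered through it can still identify which source
   variable was used after its scope ended.  */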

/* Expand ASAN_POISON ifn.  */

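/* A sketch of the intended transformation (hypothetical GIMPLE):

     x_5 = ASAN_POISON ();
     ...
     ... = x_5;          // some use of x_5

   becomes

     ASAN_MARK (POISON, &shadow-of-x, size-of-x);
     ...
     __asan_report_load<size> (&shadow-of-x);  // emitted before the use
     ... = x_5;

   so a read of a variable whose scope has ended is reported at
   run time.  */
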
bool
asan_expand_poison_ifn (gimple_stmt_iterator *iter,
                        bool *need_commit_edge_insert,
                        hash_map<tree, tree> &shadow_vars_mapping)
{
  gimple *g = gsi_stmt (*iter);
  tree poisoned_var = gimple_call_lhs (g);
  if (!poisoned_var || has_zero_uses (poisoned_var))
    {
      gsi_remove (iter, true);
      return true;
    }

  if (SSA_NAME_VAR (poisoned_var) == NULL_TREE)
    SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var,
                                    create_tmp_var (TREE_TYPE (poisoned_var)));

  tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
                                            shadow_vars_mapping);

  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
  tree size = DECL_SIZE_UNIT (shadow_var);
  gimple *poison_call
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
                                  build_int_cst (integer_type_node,
                                                 ASAN_MARK_POISON),
                                  build_fold_addr_expr (shadow_var), size);

  gimple *use;
  imm_use_iterator imm_iter;
  FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var)
    {
      if (is_gimple_debug (use))
        continue;

      int nargs;
      bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
      tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
                                    &nargs);

      gcall *call = gimple_build_call (fun, 1,
                                       build_fold_addr_expr (shadow_var));
      gimple_set_location (call, gimple_location (use));
      gimple *call_to_insert = call;

      /* The USE can be a gimple PHI node.  If so, insert the call on
         all edges leading to the PHI node.  */
      if (is_a <gphi *> (use))
        {
          gphi *phi = dyn_cast<gphi *> (use);
          for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
            if (gimple_phi_arg_def (phi, i) == poisoned_var)
              {
                edge e = gimple_phi_arg_edge (phi, i);

                /* Do not insert on an edge we can't split.  */
                if (e->flags & EDGE_ABNORMAL)
                  continue;

                if (call_to_insert == NULL)
                  call_to_insert = gimple_copy (call);

                gsi_insert_seq_on_edge (e, call_to_insert);
                *need_commit_edge_insert = true;
                call_to_insert = NULL;
              }
        }
      else
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use);
          if (store_p)
            gsi_replace (&gsi, call, true);
          else
            gsi_insert_before (&gsi, call, GSI_NEW_STMT);
        }
    }

  SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
  SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
  gsi_replace (iter, poison_call, false);

  return true;
}

/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  last_alloca_addr = NULL_TREE;
  return 0;
}

static bool
gate_asan (void)
{
  return sanitize_flags_p (SANITIZE_ADDRESS);
}
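
/* Note: SANITIZE_ADDRESS covers both the user and the kernel flavor, so
   the passes below run for -fsanitize=address as well as
   -fsanitize=kernel-address.  */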

namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}

namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}

#include "gt-asan.h"