/* Source: gcc/tree-mudflap.c, from thirdparty/gcc.git
   (commit e2ba26f869d2d66835c86a6c1912e84127b86817).  */
1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2012
3 Free Software Foundation, Inc.
4 Contributed by Frank Ch. Eigler <fche@redhat.com>
5 and Graydon Hoare <graydon@redhat.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "basic-block.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "tree-inline.h"
34 #include "gimple.h"
35 #include "tree-iterator.h"
36 #include "tree-flow.h"
37 #include "tree-mudflap.h"
38 #include "tree-pass.h"
39 #include "hashtab.h"
40 #include "diagnostic.h"
41 #include "demangle.h"
42 #include "langhooks.h"
43 #include "ggc.h"
44 #include "cgraph.h"
45 #include "gimple.h"
46
/* Declared in cfgloop.h; redeclared here directly, presumably to avoid
   pulling in that header -- TODO confirm rationale.  */
extern void add_bb_to_loop (basic_block, struct loop *);

/* Internal function decls */


/* Options.  */

/* Nonzero when mudflap must assume a multi-threaded runtime
   (flag_mudflap == 2); in that mode, per-function shadow copies of the
   lookup-cache parameters are not used.  */
#define flag_mudflap_threads (flag_mudflap == 2)

/* Helpers.  */
static tree mf_build_string (const char *string);
static tree mf_varname_tree (tree);
static tree mf_file_function_line_tree (location_t);

/* Indirection-related instrumentation (the "mudflap2" pass).  */
static void mf_decl_cache_locals (void);
static void mf_decl_clear_locals (void);
static void mf_xform_statements (void);
static unsigned int execute_mudflap_function_ops (void);

/* Addressable variables instrumentation (the "mudflap1" pass).  */
static void mf_xform_decls (gimple_seq, tree);
static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
                                struct walk_stmt_info *);
static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
static unsigned int execute_mudflap_function_decls (void);
72
73 /* Return true if DECL is artificial stub that shouldn't be instrumented by
74 mf. We should instrument clones of non-artificial functions. */
75 static inline bool
76 mf_artificial (const_tree decl)
77 {
78 return DECL_ARTIFICIAL (DECL_ORIGIN (decl));
79 }
80
81 /* ------------------------------------------------------------------------ */
82 /* Some generally helpful functions for mudflap instrumentation. */
83
84 /* Build a reference to a literal string. */
85 static tree
86 mf_build_string (const char *string)
87 {
88 size_t len = strlen (string);
89 tree result = mf_mark (build_string (len + 1, string));
90
91 TREE_TYPE (result) = build_array_type
92 (char_type_node, build_index_type (size_int (len)));
93 TREE_CONSTANT (result) = 1;
94 TREE_READONLY (result) = 1;
95 TREE_STATIC (result) = 1;
96
97 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
98
99 return mf_mark (result);
100 }
101
/* Create a properly typed STRING_CST node that describes the given
   declaration.  It will be used as an argument for __mf_register().
   Try to construct a helpful string of the form
   FILE[:LINE[:COLUMN]] [(FUNCTION)] VARIABLE.  */

static tree
mf_varname_tree (tree decl)
{
  /* One pretty-printer is constructed on first use and reused across
     calls; only its output area is cleared between calls.  */
  static pretty_printer buf_rec;
  static int initialized = 0;
  pretty_printer *buf = & buf_rec;
  const char *buf_contents;
  tree result;

  gcc_assert (decl);

  if (!initialized)
    {
      pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
      initialized = 1;
    }
  pp_clear_output_area (buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  {
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = 0;
    sourcecolumn = xloc.column;
    sourcefile = xloc.file;
    /* Fall back to the enclosing function's file, then to a
       placeholder, when the decl carries no file name.  */
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (buf, sourcefile);

    /* Line 0 means "no location"; likewise column 0 within a line.  */
    if (sourceline != 0)
      {
        pp_string (buf, ":");
        pp_decimal_int (buf, sourceline);

        if (sourcecolumn != 0)
          {
            pp_string (buf, ":");
            pp_decimal_int (buf, sourcecolumn);
          }
      }
  }

  if (current_function_decl != NULL_TREE)
    {
      /* Add (FUNCTION) */
      pp_string (buf, " (");
      {
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";

        pp_string (buf, funcname);
      }
      pp_string (buf, ") ");
    }
  else
    pp_string (buf, " ");

  /* Add <variable-declaration>, possibly demangled.  */
  {
    const char *declname = NULL;

    if (DECL_NAME (decl) != NULL)
      {
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
          {
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
          }
        if (declname == NULL)
          declname = lang_hooks.decl_printable_name (decl, 3);
      }
    if (declname == NULL)
      declname = "<unnamed variable>";

    pp_string (buf, declname);
  }

  /* Return the lot as a new STRING_CST.  */
  buf_contents = pp_base_formatted_text (buf);
  result = mf_build_string (buf_contents);
  pp_clear_output_area (buf);

  return result;
}
200
201
202 /* And another friend, for producing a simpler message. */
203
204 static tree
205 mf_file_function_line_tree (location_t location)
206 {
207 expanded_location xloc = expand_location (location);
208 const char *file = NULL, *colon, *line, *op, *name, *cp;
209 char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */
210 char *string;
211 tree result;
212
213 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
214 file = xloc.file;
215 if (file == NULL && current_function_decl != NULL_TREE)
216 file = DECL_SOURCE_FILE (current_function_decl);
217 if (file == NULL)
218 file = "<unknown file>";
219
220 if (xloc.line > 0)
221 {
222 if (xloc.column > 0)
223 sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
224 else
225 sprintf (linecolbuf, "%d", xloc.line);
226 colon = ":";
227 line = linecolbuf;
228 }
229 else
230 colon = line = "";
231
232 /* Add (FUNCTION). */
233 name = lang_hooks.decl_printable_name (current_function_decl, 1);
234 if (name)
235 {
236 op = " (";
237 cp = ")";
238 }
239 else
240 op = name = cp = "";
241
242 string = concat (file, colon, line, op, name, cp, NULL);
243 result = mf_build_string (string);
244 free (string);
245
246 return result;
247 }
248
249
/* global tree nodes */

/* Global tree objects for global variables and functions exported by
   the mudflap runtime library.  mudflap_init must be called before
   using these.  All are GC-rooted via GTY.  */

/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only;
   set up by mf_decl_cache_locals and reset by mf_decl_clear_locals.  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;
296
297
298 /* Helper for mudflap_init: construct a decl with the given category,
299 name, and type, mark it an external reference, and pushdecl it. */
300 static inline tree
301 mf_make_builtin (enum tree_code category, const char *name, tree type)
302 {
303 tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
304 category, get_identifier (name), type));
305 TREE_PUBLIC (decl) = 1;
306 DECL_EXTERNAL (decl) = 1;
307 lang_hooks.decls.pushdecl (decl);
308 /* The decl was declared by the compiler. */
309 DECL_ARTIFICIAL (decl) = 1;
310 /* And we don't want debug info for it. */
311 DECL_IGNORED_P (decl) = 1;
312 return decl;
313 }
314
315 /* Helper for mudflap_init: construct a tree corresponding to the type
316 struct __mf_cache { uintptr_t low; uintptr_t high; };
317 where uintptr_t is the FIELD_TYPE argument. */
318 static inline tree
319 mf_make_mf_cache_struct_type (tree field_type)
320 {
321 /* There is, abominably, no language-independent way to construct a
322 RECORD_TYPE. So we have to call the basic type construction
323 primitives by hand. */
324 tree fieldlo = build_decl (UNKNOWN_LOCATION,
325 FIELD_DECL, get_identifier ("low"), field_type);
326 tree fieldhi = build_decl (UNKNOWN_LOCATION,
327 FIELD_DECL, get_identifier ("high"), field_type);
328
329 tree struct_type = make_node (RECORD_TYPE);
330 DECL_CONTEXT (fieldlo) = struct_type;
331 DECL_CONTEXT (fieldhi) = struct_type;
332 DECL_CHAIN (fieldlo) = fieldhi;
333 TYPE_FIELDS (struct_type) = fieldlo;
334 TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
335 layout_type (struct_type);
336
337 return struct_type;
338 }
339
/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  Idempotent: only the first call does any work.  */
void
mudflap_init (void)
{
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_init_fntype;
  tree mf_set_options_fntype;

  if (done)
    return;
  done = true;

  /* uintptr_t: the unsigned integer type with pointer width.  */
  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
                                                    /*unsignedp=*/true);
  /* const char *  */
  mf_const_string_type
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
  /* __mf_check and __mf_register share this signature:
     void (void *, size_t, int, const char *).  */
  mf_check_register_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
			      integer_type_node, mf_const_string_type, NULL_TREE);
  mf_unregister_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
			      integer_type_node, NULL_TREE);
  mf_init_fntype =
    build_function_type_list (void_type_node, NULL_TREE);
  mf_set_options_fntype =
    build_function_type_list (integer_type_node, mf_const_string_type, NULL_TREE);

  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
                                        mf_uintptr_type);
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
                                    mf_init_fntype);
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
}
399
400
401 /* ------------------------------------------------------------------------ */
402 /* This is the second part of the mudflap instrumentation. It works on
403 low-level GIMPLE using the CFG, because we want to run this pass after
404 tree optimizations have been performed, but we have to preserve the CFG
405 for expansion from trees to RTL.
406 Below is the list of transformations performed on statements in the
407 current function.
408
409 1) Memory reference transforms: Perform the mudflap indirection-related
410 tree transforms on memory references.
411
412 2) Mark BUILTIN_ALLOCA calls not inlineable.
413
414 */
415
416 static unsigned int
417 execute_mudflap_function_ops (void)
418 {
419 struct gimplify_ctx gctx;
420
421 /* Don't instrument functions such as the synthetic constructor
422 built during mudflap_finish_file. */
423 if (mf_marked_p (current_function_decl)
424 || mf_artificial (current_function_decl))
425 return 0;
426
427 push_gimplify_context (&gctx);
428
429 /* In multithreaded mode, don't cache the lookup cache parameters. */
430 if (! flag_mudflap_threads)
431 mf_decl_cache_locals ();
432
433 mf_xform_statements ();
434
435 if (! flag_mudflap_threads)
436 mf_decl_clear_locals ();
437
438 pop_gimplify_context (NULL);
439 return 0;
440 }
441
/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
   if BB has more than one (non-abnormal) outgoing edge, SEQ will be
   replicated for each edge; the final eligible edge receives SEQ itself
   rather than a copy.  Abnormal edges are ignored.  */

static void
insert_edge_copies_seq (gimple_seq seq, basic_block bb)
{
  edge e;
  edge_iterator ei;
  /* Starts at (unsigned)-1 so that after the counting loop it holds the
     number of *copies* needed: one less than the number of eligible
     edges (the last edge reuses SEQ directly).  */
  unsigned n_copies = -1;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      n_copies++;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      /* While copies remain, insert a fresh copy; when n_copies reaches
	 zero (the last eligible edge), insert the original SEQ.  */
      gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
}
461
462 /* Create and initialize local shadow variables for the lookup cache
463 globals. Put their decls in the *_l globals for use by
464 mf_build_check_statement_for. */
465
466 static void
467 mf_decl_cache_locals (void)
468 {
469 gimple g;
470 gimple_seq seq = NULL;
471
472 /* Build the cache vars. */
473 mf_cache_shift_decl_l
474 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
475 "__mf_lookup_shift_l"));
476
477 mf_cache_mask_decl_l
478 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
479 "__mf_lookup_mask_l"));
480
481 /* Build initialization nodes for the cache vars. We just load the
482 globals into the cache variables. */
483 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
484 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
485 gimple_seq_add_stmt (&seq, g);
486
487 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
488 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
489 gimple_seq_add_stmt (&seq, g);
490
491 insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
492
493 gsi_commit_edge_inserts ();
494 }
495
496
497 static void
498 mf_decl_clear_locals (void)
499 {
500 /* Unset local shadows. */
501 mf_cache_shift_decl_l = NULL_TREE;
502 mf_cache_mask_decl_l = NULL_TREE;
503 }
504
/* Emit, just before the statement at *INSTR_GSI, a lookup-cache guard
   for the byte range [BASE, LIMIT]: on a cache miss, call
   __mf_check (base, limit - base + 1, DIRFLAG, <LOCATION string>) and,
   in single-threaded mode, refresh the local cache-parameter shadows.
   Splits the enclosing basic block; on return *INSTR_GSI points at the
   start of the join block holding the original statement.  */
static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  /* Split before the instrumented statement (or after the labels if it
     is the first statement of its block).  */
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (then_bb, cond_bb->loop_father);

  /* Build our local variables.  */
  mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
  mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
  mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = NULL;
  t = fold_convert_loc (location, mf_uintptr_type,
			unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert_loc (location, mf_uintptr_type,
			unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].
     In single-threaded mode the local shadow copies of shift/mask are
     used instead of the globals.  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
               : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
               : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_l = ...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low  > __mf_base'.  The "low" field is
     the first field of the cache struct (TYPE_FIELDS).  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
        1) u <--  '__mf_elem->high'  (second field, via DECL_CHAIN)
        2) v <--  '__mf_limit'.

     Then build 'u <-- (u < v).  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
                         NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))

     The lowered GIMPLE tree representing this code is in the statement
     list starting at 'head'.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /* Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  seq = NULL;
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check,
     as a size: limit - base + 1.  */
  v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
		       fold_build2_loc (location,
					MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
		       build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
      /* If the __mf_check call can end a basic block (e.g. it may
	 throw), flush what we have into then_bb, split after the call,
	 and continue accumulating in the new block.  */
      if (stmt_ends_bb_p (g))
	{
	  gsi = gsi_start_bb (then_bb);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	  e = split_block (then_bb, g);
	  then_bb = e->dest;
	  seq = NULL;
	}

      /* Reload the shadow copies, since __mf_check may have adjusted
	 the global lookup-cache parameters.  */
      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}
718
719
720 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
721 eligible for instrumentation. For the mudflap1 pass, this implies
722 that it should be registered with the libmudflap runtime. For the
723 mudflap2 pass this means instrumenting an indirection operation with
724 respect to the object.
725 */
726 static int
727 mf_decl_eligible_p (tree decl)
728 {
729 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
730 /* The decl must have its address taken. In the case of
731 arrays, this flag is also set if the indexes are not
732 compile-time known valid constants. */
733 /* XXX: not sufficient: return-by-value structs! */
734 && TREE_ADDRESSABLE (decl)
735 /* The type of the variable must be complete. */
736 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
737 /* The decl hasn't been decomposed somehow. */
738 && !DECL_HAS_VALUE_EXPR_P (decl));
739 }
740
741
/* Instrument the memory reference *TP, which appears in the statement
   at *ITER with source location LOCATION.  DIRFLAG is integer_zero_node
   for a read and integer_one_node for a write.  Compute the [base,
   limit] byte range covered by the reference and emit a guard via
   mf_build_check_statement_for; return without instrumenting when the
   reference is marked, provably safe, or of an unsupported form.  */
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  /* Default access size: the full size of the referenced type; some
     cases below override this (bitfields).  */
  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
	/* If we have a bitfield component reference, we must note the
	   innermost addressable object in ELT, from which we will
	   construct the byte-addressable bounds of the bitfield.  */
	tree elt = NULL_TREE;
	int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
			      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
	    if (bitfield_ref_p && elt == NULL_TREE
		&& (TREE_CODE (var) == ARRAY_REF
		    || TREE_CODE (var) == COMPONENT_REF))
	      elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
		/* An array index anywhere in the chain means this is
		   not a pure field-selection expression.  */
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var)
		     || TREE_CODE (var) == MEM_REF)
              {
		/* Reached a pointer dereference: the checked base is
		   the pointer being dereferenced.  */
		base = TREE_OPERAND (var, 0);
                break;
              }
	    else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	      {
		var = TREE_OPERAND (var, 0);
		/* A view-converted constant (other than a string)
		   cannot be out of bounds; skip it.  */
		if (CONSTANT_CLASS_P (var)
		    && TREE_CODE (var) != STRING_CST)
		  return;
	      }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
		  {
		    base = build1 (ADDR_EXPR,
				   build_pointer_type (TREE_TYPE (var)), var);
		    break;
		  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

	    /* Use the byte position of the field within the innermost
	       addressable object (ELT) when one was recorded.  */
	    if (elt)
	      elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
			    elt);
            addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
	    addr = fold_build_pointer_plus_loc (location,
						addr, byte_position (field));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

	/* limit = addr + size - 1, computed in uintptr arithmetic.  */
        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                             fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
					  fold_convert (mf_uintptr_type, addr),
					  size),
                             integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
	(location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case MEM_REF:
      /* Fold the constant byte offset into the base address.  */
      addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1));
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc (location,
			   fold_build_pointer_plus_loc (location,
							base, size), -1);
      break;

    case TARGET_MEM_REF:
      /* Reconstruct the effective address from the TMR components.  */
      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc (location,
			   fold_build_pointer_plus_loc (location,
							base, size), -1);
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
	       "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above? */
      {
        tree ofs, rem, bpu;

	/* If we're not dereferencing something, then the access
	   must be ok.  */
	if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
	  return;

        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
	/* Split the bit offset into a whole-byte part (ofs) and a
	   remainder (rem) of bits within the first byte.  */
        rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
	ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);

        size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
	/* Round the bit size up to whole bytes, accounting for the
	   leading in-byte remainder.  */
        size = size_binop_loc (location, PLUS_EXPR, size, rem);
	size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
        size = fold_convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = fold_convert (ptr_type_node, addr);
        addr = fold_build_pointer_plus_loc (location, addr, ofs);

        base = addr;
	limit = fold_build_pointer_plus_hwi_loc (location,
			     fold_build_pointer_plus_loc (location,
							  base, size), -1);
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
/* Transform
   1) Memory references.
   Walks every pre-existing basic block of the current function and
   instruments each memory-touching operand via mf_xform_derefs_1.
*/
static void
mf_xform_statements (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  /* Snapshot the block count before instrumenting: the checks inserted
     by mf_build_check_statement_for split blocks, and the newly created
     blocks (with larger indices) must not themselves be re-scanned.  */
  int saved_last_basic_block = last_basic_block;
  enum gimple_rhs_class grhs_class;

  bb = ENTRY_BLOCK_PTR ->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple s = gsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
	      /* The LHS is a write (dirflag = integer_one_node), the
		 RHS operands are reads (integer_zero_node).  */
	      mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
		  		 gimple_location (s), integer_one_node);
	      mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
		  		 gimple_location (s), integer_zero_node);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
				   gimple_location (s), integer_zero_node);
              break;

            case GIMPLE_RETURN:
              if (gimple_return_retval (s) != NULL_TREE)
                {
                  mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
				     gimple_location (s),
				     integer_zero_node);
                }
              break;

            default:
              ;
            }
        }
      bb = next;
    }
  while (bb && bb->index <= saved_last_basic_block);
}
988
989 /* ------------------------------------------------------------------------ */
990 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
991 transforms on the current function.
992
993 This is the first part of the mudflap instrumentation. It works on
994 high-level GIMPLE because after lowering, all variables are moved out
995 of their BIND_EXPR binding context, and we lose liveness information
996 for the declarations we wish to instrument. */
997
998 static unsigned int
999 execute_mudflap_function_decls (void)
1000 {
1001 struct gimplify_ctx gctx;
1002
1003 /* Don't instrument functions such as the synthetic constructor
1004 built during mudflap_finish_file. */
1005 if (mf_marked_p (current_function_decl)
1006 || mf_artificial (current_function_decl))
1007 return 0;
1008
1009 push_gimplify_context (&gctx);
1010
1011 mf_xform_decls (gimple_body (current_function_decl),
1012 DECL_ARGUMENTS (current_function_decl));
1013
1014 pop_gimplify_context (NULL);
1015 return 0;
1016 }
1017
/* State threaded through the mf_xform_decls statement walk while
   searching for objects that have their addresses taken.  */
struct mf_xform_decls_data
{
  /* Function parameters still awaiting registration; cleared after the
     first GIMPLE_BIND processes them.  */
  tree param_decls;
};
1025
1026
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.

   DECL is the head of a DECL_CHAIN of declarations; SEQ is the bind
   body they belong to; LOCATION is used for the synthesized calls.  */
gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      /* Only instrument eligible automatic variables.  */
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have sizes already been
             gimplified when we got here.  */
          size = fold_convert (size_type_node,
                               TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));


          /* Both calls take the variable's address; mf_mark prevents
             the address-taking itself from being instrumented later.  */
          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 integer_three_node);


          variable_name = mf_varname_tree (decl);
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
             "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               integer_three_node,
                                               variable_name);


          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.
             An empty body means there is nowhere to anchor the
             registration (e.g. a stub), so warn instead.  */
          if (gsi_end_p (initially_stmts))
            {
              if (!mf_artificial (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          mf_mark (decl);
        }

      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      /* Wrap the original body so the unregister calls run on every
         exit path, including exceptional ones.  */
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = NULL;

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}
1120
1121
1122 /* Process every variable mentioned in BIND_EXPRs. */
1123 static tree
1124 mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
1125 bool *handled_operands_p ATTRIBUTE_UNUSED,
1126 struct walk_stmt_info *wi)
1127 {
1128 struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
1129 gimple stmt = gsi_stmt (*gsi);
1130
1131 switch (gimple_code (stmt))
1132 {
1133 case GIMPLE_BIND:
1134 {
1135 /* Process function parameters now (but only once). */
1136 if (d->param_decls)
1137 {
1138 gimple_bind_set_body (stmt,
1139 mx_register_decls (d->param_decls,
1140 gimple_bind_body (stmt),
1141 gimple_location (stmt)));
1142 d->param_decls = NULL_TREE;
1143 }
1144
1145 gimple_bind_set_body (stmt,
1146 mx_register_decls (gimple_bind_vars (stmt),
1147 gimple_bind_body (stmt),
1148 gimple_location (stmt)));
1149 }
1150 break;
1151
1152 default:
1153 break;
1154 }
1155
1156 return NULL_TREE;
1157 }
1158
1159 /* Perform the object lifetime tracking mudflap transform on the given function
1160 tree. The tree is mutated in place, with possibly copied subtree nodes.
1161
1162 For every auto variable declared, if its address is ever taken
1163 within the function, then supply its lifetime to the mudflap
1164 runtime with the __mf_register and __mf_unregister calls.
1165 */
1166
1167 static void
1168 mf_xform_decls (gimple_seq fnbody, tree fnparams)
1169 {
1170 struct mf_xform_decls_data d;
1171 struct walk_stmt_info wi;
1172 struct pointer_set_t *pset = pointer_set_create ();
1173
1174 d.param_decls = fnparams;
1175 memset (&wi, 0, sizeof (wi));
1176 wi.info = (void*) &d;
1177 wi.pset = pset;
1178 walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
1179 pointer_set_destroy (pset);
1180 }
1181
1182
1183 /* ------------------------------------------------------------------------ */
1184 /* Externally visible mudflap functions. */
1185
1186
1187 /* Mark and return the given tree node to prevent further mudflap
1188 transforms. */
1189 static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
1190
1191 tree
1192 mf_mark (tree t)
1193 {
1194 void **slot;
1195
1196 if (marked_trees == NULL)
1197 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
1198 NULL);
1199
1200 slot = htab_find_slot (marked_trees, t, INSERT);
1201 *slot = t;
1202 return t;
1203 }
1204
1205 int
1206 mf_marked_p (tree t)
1207 {
1208 void *entry;
1209
1210 if (marked_trees == NULL)
1211 return 0;
1212
1213 entry = htab_find (marked_trees, t);
1214 return (entry != NULL);
1215 }
1216
/* Remember given node as a static of some kind: global data,
   function-scope static, or an anonymous constant.  Its assembler
   label is given.  */

/* A list of globals whose incomplete declarations we encountered.
   Instead of emitting the __mf_register call for them here, it's
   delayed until program finish time (mudflap_finish_file).  If they're
   still incomplete by then, warnings are emitted.  */

static GTY (()) VEC(tree,gc) *deferred_static_decls;

/* A list of statements for calling __mf_register() at startup time;
   flushed into the synthetic constructor by mudflap_finish_file.  */
static GTY (()) tree enqueued_call_stmt_chain;
1230
1231 static void
1232 mudflap_register_call (tree obj, tree object_size, tree varname)
1233 {
1234 tree arg, call_stmt;
1235
1236 arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
1237 arg = fold_convert (ptr_type_node, arg);
1238
1239 call_stmt = build_call_expr (mf_register_fndecl, 4,
1240 arg,
1241 fold_convert (size_type_node, object_size),
1242 /* __MF_TYPE_STATIC */
1243 build_int_cst (integer_type_node, 4),
1244 varname);
1245
1246 append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
1247 }
1248
1249 void
1250 mudflap_enqueue_decl (tree obj)
1251 {
1252 if (mf_marked_p (obj))
1253 return;
1254
1255 /* We don't need to process variable decls that are internally
1256 generated extern. If we did, we'd end up with warnings for them
1257 during mudflap_finish_file (). That would confuse the user,
1258 since the text would refer to variables that don't show up in the
1259 user's source code. */
1260 if (DECL_P (obj) && DECL_EXTERNAL (obj) && mf_artificial (obj))
1261 return;
1262
1263 VEC_safe_push (tree, gc, deferred_static_decls, obj);
1264 }
1265
1266
1267 void
1268 mudflap_enqueue_constant (tree obj)
1269 {
1270 tree object_size, varname;
1271
1272 if (mf_marked_p (obj))
1273 return;
1274
1275 if (TREE_CODE (obj) == STRING_CST)
1276 object_size = size_int (TREE_STRING_LENGTH (obj));
1277 else
1278 object_size = size_in_bytes (TREE_TYPE (obj));
1279
1280 if (TREE_CODE (obj) == STRING_CST)
1281 varname = mf_build_string ("string literal");
1282 else
1283 varname = mf_build_string ("constant");
1284
1285 mudflap_register_call (obj, object_size, varname);
1286 }
1287
1288
/* Emit any file-wide instrumentation: build a static constructor that
   initializes the mudflap runtime and registers all deferred static
   objects and enqueued constants.  */
void
mudflap_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  /* No need to continue when there were errors.  */
  if (seen_error ())
    return;

  /* Insert a call to __mf_init.  It is appended first so the runtime
     is initialized before any registration call runs.  */
  {
    tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
    append_to_statement_list (call2_stmt, &ctor_statements);
  }

  /* If appropriate, call __mf_set_options to pass along read-ignore mode.  */
  if (flag_mudflap_ignore_reads)
    {
      tree arg = mf_build_string ("-ignore-reads");
      tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
      append_to_statement_list (call_stmt, &ctor_statements);
    }

  /* Process all enqueued object decls.  */
  if (deferred_static_decls)
    {
      size_t i;
      tree obj;
      FOR_EACH_VEC_ELT (tree, deferred_static_decls, i, obj)
        {
          gcc_assert (DECL_P (obj));

          if (mf_marked_p (obj))
            continue;

          /* Omit registration for static unaddressed objects.  NB:
             Perform registration for non-static objects regardless of
             TREE_USED or TREE_ADDRESSABLE, because they may be used
             from other compilation units.  */
          if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
            continue;

          /* A still-incomplete type has no knowable size, so it cannot
             be registered; warn instead.  */
          if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
            {
              warning (OPT_Wmudflap,
                       "mudflap cannot track unknown size extern %qE",
                       DECL_NAME (obj));
              continue;
            }

          mudflap_register_call (obj,
                                 size_in_bytes (TREE_TYPE (obj)),
                                 mf_varname_tree (obj));
        }

      VEC_truncate (tree, deferred_static_decls, 0);
    }

  /* Append all the enqueued registration calls.  */
  if (enqueued_call_stmt_chain)
    {
      append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
      enqueued_call_stmt_chain = NULL_TREE;
    }

  /* Emit the constructor ('I' category) with near-maximal reserved
     priority -- NOTE(review): presumably so it runs before ordinary
     user constructors; confirm against cgraph_build_static_cdtor.  */
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY-1);
}
1358
1359
1360 static bool
1361 gate_mudflap (void)
1362 {
1363 return flag_mudflap != 0;
1364 }
1365
/* Descriptor for the first mudflap pass, run on high-level GIMPLE
   (PROP_gimple_any) before lowering: instruments declaration
   lifetimes via execute_mudflap_function_decls.  */
struct gimple_opt_pass pass_mudflap_1 =
{
 {
  GIMPLE_PASS,
  "mudflap1",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_decls,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
1384
/* Descriptor for the second mudflap pass, run late on SSA-form GIMPLE
   with a CFG (PROP_ssa | PROP_cfg | PROP_gimple_leh): instruments
   memory-referencing operations via execute_mudflap_function_ops and
   re-verifies/updates SSA afterwards.  */
struct gimple_opt_pass pass_mudflap_2 =
{
 {
  GIMPLE_PASS,
  "mudflap2",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_ops,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_update_ssa                     /* todo_flags_finish */
 }
};
1404
1405 #include "gt-tree-mudflap.h"