/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "expr.h"
#include "intl.h"
#include "tm.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "output.h"
#include "options.h"
#include "target.h"
#include "diagnostic.h"
#include "tree-ssa-propagate.h"
#include "tsan.h"
#include "asan.h"

/* Returns the decl of the builtin
     void __tsan_read/writeX (void *addr)
   matching IS_WRITE and SIZE.  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

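  /* Select the access hook whose width is the largest power of two
     not exceeding SIZE, capped at 16 bytes.  */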
  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}

/* If EXPR is a store to a vptr (a write of a DECL_VIRTUAL_P field),
   return the stored value; otherwise return NULL.  */

static tree
is_vptr_store (gimple stmt, tree expr, bool is_write)
{
  if (is_write == true
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}

/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple stmt, g;
  gimple_seq seq;
  location_t loc;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size == -1)
    return false;

  /* For now just avoid instrumenting bit-field accesses.
     TODO: handle bit-fields as if touching the whole field.  */
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                              &mode, &unsignedp, &volatilep, false);

  /* No need to instrument accesses to decls that don't escape;
     such decls cannot be reached from other threads.  */
  if (DECL_P (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      pt.nonlocal = 1;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!is_global_var (base) && !may_be_aliased (base))
        return false;
    }

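  /* Read-only bases cannot be written, and hard registers are not
     memory, so neither can take part in a data race.  */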
  if (TREE_READONLY (base)
      || (TREE_CODE (base) == VAR_DECL
          && DECL_HARD_REGISTER (base)))
    return false;

  if (size == 0
      || bitpos % (size * BITS_PER_UNIT)
      || bitsize != size * BITS_PER_UNIT)
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);
  gcc_checking_assert (rhs != NULL || is_gimple_addressable (expr));
  expr_ptr = build_fold_addr_expr (unshare_expr (expr));
  seq = NULL;
  if (!is_gimple_val (expr_ptr))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expr_ptr), NULL),
                               expr_ptr);
      expr_ptr = gimple_assign_lhs (g);
      gimple_set_location (g, loc);
      gimple_seq_add_stmt_without_update (&seq, g);
    }
  if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 1, expr_ptr);
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst
};
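
/* How instrument_builtin_call below interprets these actions, in
   brief:
   - check_last: verify that the trailing memory-model argument is a
     valid constant, then just swap in the tsan decl;
   - add_seq_cst / add_acquire: append an explicit memory-model
     argument (seq-cst resp. acquire) for the legacy __sync builtins;
   - weak_cas / strong_cas: map the six-argument
     __atomic_compare_exchange_N onto the five-argument tsan CAS hook;
   - bool_cas / val_cas: map the __sync compare-and-swap forms,
     spilling the expected value into addressable memory;
   - lock_release: model __sync_lock_release as an atomic store of 0
     with release ordering;
   - fetch_op / fetch_op_seq_cst: map op-and-fetch builtins onto the
     fetch-and-op hooks, re-applying the operation to the result.  */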

/* Table of how to map sync/atomic builtins to their corresponding
   tsan equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
              TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
              TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
              TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
              TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
            TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
            TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
            TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
            TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
            TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
           TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE)
};

/* Instrument an atomic builtin.  */
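/* For instance, __sync_fetch_and_add_4 (&x, 1) is rewritten into a
   call to __tsan_atomic32_fetch_add with an explicit seq-cst
   memory-model argument appended, since the legacy __sync builtins
   imply sequential consistency (see the ADD_SEQ_CST table entries
   above).  */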

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi), g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            last_arg = gimple_call_arg (stmt, num - 1);
            if (!tree_fits_uhwi_p (last_arg)
                || tree_to_uhwi (last_arg) > MEMMODEL_SEQ_CST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
              adjust_result:
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs), NULL);
                    g = gimple_build_assign_with_ops (NOP_EXPR, var,
                                                      args[1], NULL_TREE);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt,
                                     make_ssa_name (TREE_TYPE (lhs), NULL));
                /* BIT_NOT_EXPR stands for NAND.  */
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs), NULL);
                    g = gimple_build_assign_with_ops (BIT_AND_EXPR, var,
                                                      gimple_call_lhs (stmt),
                                                      args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign_with_ops (BIT_NOT_EXPR, lhs, var,
                                                      NULL_TREE);
                  }
                else
                  g = gimple_build_assign_with_ops (tsan_atomic_table[i].code,
                                                    lhs,
                                                    gimple_call_lhs (stmt),
                                                    args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
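          /* Use the weak-CAS hook only when the weak flag (argument 3
             of __atomic_compare_exchange_N) is a nonzero constant;
             otherwise keep scanning the table so the strong_cas entry
             for the same builtin matches instead.  */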
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            if (!tree_fits_uhwi_p (args[4])
                || tree_to_uhwi (args[4]) > MEMMODEL_SEQ_CST)
              return;
            if (!tree_fits_uhwi_p (args[5])
                || tree_to_uhwi (args[5]) > MEMMODEL_SEQ_CST)
              return;
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            return;
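          /* The __sync compare-and-swap builtins take the expected
             value by value, whereas the tsan hook expects a pointer
             to it, so spill the value into an addressable temporary
             below.  */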
          case bool_cas:
          case val_cas:
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t, NULL);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign_with_ops (NOP_EXPR,
                                                  make_ssa_name (TREE_TYPE (t),
                                                                 NULL),
                                                  args[1], NULL_TREE);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t), NULL),
                                         t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign_with_ops (COND_EXPR, lhs, cond,
                                                  args[1],
                                                  gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
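          /* __sync_lock_release (addr) is modelled as an atomic store
             of 0 to *addr with release ordering.  */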
          case lock_release:
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            return;
          default:
            continue;
          }
      }
}

/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented = instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      fentry_exit_instrument |= instrument_gimple (&gsi);
  return fentry_exit_instrument;
}

/* Instruments function entry.  */
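/* The inserted prologue is, in effect:
     ret_addr = __builtin_return_address (0);
     __tsan_func_entry (ret_addr);  */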

static void
instrument_func_entry (void)
{
  basic_block succ_bb;
  gimple_stmt_iterator gsi;
  tree ret_addr, builtin_decl;
  gimple g;

  succ_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi = gsi_after_labels (succ_bb);

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node, NULL);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
}

/* Instruments function exits.  */
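/* A call to __tsan_func_exit () is inserted immediately before each
   return statement (and each __builtin_return call).  */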

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple stmt, g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    {
      instrument_func_entry ();
      instrument_func_exit ();
    }
  return 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */
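/* The constructor is registered with priority
   MAX_RESERVED_INIT_PRIORITY - 1 so that the tsan runtime is
   initialized before ordinary static constructors run.  */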

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return (flag_sanitize & SANITIZE_THREAD) != 0;
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (flag_sanitize & SANITIZE_THREAD) != 0 && !optimize;
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}