/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2013 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "intl.h"
#include "tm.h"
#include "basic-block.h"
#include "gimple.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "output.h"
#include "options.h"
#include "target.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-ssa-propagate.h"
#include "tsan.h"
#include "asan.h"

/* Number of instrumented memory accesses in the current function.  */

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}
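
/* For illustration, with the size-to-builtin mapping above a 4-byte
   access is reported through the 4-byte entry points, presumably the
   __tsan_read4/__tsan_write4 runtime routines:

     __tsan_read4 (&x);
     __tsan_write4 (&x);

   Sizes that are not an exact power of two fall back to the next
   smaller entry point, e.g. a 3-byte access is reported via the
   2-byte builtins.  */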

/* If EXPR is a store to the vptr, return the value being stored;
   otherwise return NULL.  */

static tree
is_vptr_store (gimple stmt, tree expr, bool is_write)
{
  if (is_write == true
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}

/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple stmt, g;
  gimple_seq seq;
  location_t loc;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size == -1)
    return false;

  /* For now just avoid instrumenting bit-field accesses.
     TODO: handle bit-fields as if touching the whole field.  */
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                              &mode, &unsignedp, &volatilep, false);

  /* No need to instrument accesses to decls that don't escape;
     they cannot be accessed by other threads.  */
  if (DECL_P (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      pt.nonlocal = 1;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!is_global_var (base) && !may_be_aliased (base))
        return false;
    }

  if (TREE_READONLY (base))
    return false;

  if (size == 0
      || bitpos % (size * BITS_PER_UNIT)
      || bitsize != size * BITS_PER_UNIT)
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);
  gcc_checking_assert (rhs != NULL || is_gimple_addressable (expr));
  expr_ptr = build_fold_addr_expr (unshare_expr (expr));
  seq = NULL;
  if (!is_gimple_val (expr_ptr))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expr_ptr), NULL),
                               expr_ptr);
      expr_ptr = gimple_assign_lhs (g);
      gimple_set_location (g, loc);
      gimple_seq_add_stmt_without_update (&seq, g);
    }
  if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 1, expr_ptr);
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
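
/* A rough sketch of what instrument_expr produces: for an escaping,
   non-read-only 4-byte store such as

     p->x = v;

   a call to the matching access builtin is queued and inserted just
   before the statement (or after it, when the store is the result of
   a call, since the call itself may synchronize):

     __tsan_write4 (&p->x);
     p->x = v;

   Stores to the vtable pointer are reported through the
   BUILT_IN_TSAN_VPTR_UPDATE builtin instead of a plain write.  */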

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst
};

/* Table of how sync/atomic builtins map to their corresponding
   tsan equivalents.  */
static struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
              TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
              TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
              TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
              TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
            TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
            TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
            TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
            TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
            TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
           TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE)
};
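
/* Two illustrative rows from the table above, assuming the usual
   __atomic_ and __sync_ user-level spellings of the builtins: a
   check_last entry such as ATOMIC_LOAD_4 simply redirects

     __atomic_load_4 (ptr, model)

   to the TSAN_ATOMIC32_LOAD builtin with unchanged arguments, while
   an add_seq_cst entry such as SYNC_FETCH_AND_ADD_4 rewrites

     __sync_fetch_and_add_4 (ptr, val)

   into the TSAN_ATOMIC32_FETCH_ADD builtin with a MEMMODEL_SEQ_CST
   argument appended.  */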

/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi), g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            last_arg = gimple_call_arg (stmt, num - 1);
            if (!host_integerp (last_arg, 1)
                || (unsigned HOST_WIDE_INT) tree_low_cst (last_arg, 1)
                   > MEMMODEL_SEQ_CST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
              adjust_result:
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs), NULL);
                    g = gimple_build_assign_with_ops (NOP_EXPR, var,
                                                      args[1], NULL_TREE);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt,
                                     make_ssa_name (TREE_TYPE (lhs), NULL));
                /* BIT_NOT_EXPR stands for NAND.  */
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs), NULL);
                    g = gimple_build_assign_with_ops (BIT_AND_EXPR, var,
                                                      gimple_call_lhs (stmt),
                                                      args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign_with_ops (BIT_NOT_EXPR, lhs, var,
                                                      NULL_TREE);
                  }
                else
                  g = gimple_build_assign_with_ops (tsan_atomic_table[i].code,
                                                    lhs,
                                                    gimple_call_lhs (stmt),
                                                    args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            if (!host_integerp (args[4], 1)
                || (unsigned HOST_WIDE_INT) tree_low_cst (args[4], 1)
                   > MEMMODEL_SEQ_CST)
              return;
            if (!host_integerp (args[5], 1)
                || (unsigned HOST_WIDE_INT) tree_low_cst (args[5], 1)
                   > MEMMODEL_SEQ_CST)
              return;
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            return;
          case bool_cas:
          case val_cas:
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t, NULL);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign_with_ops (NOP_EXPR,
                                                  make_ssa_name (TREE_TYPE (t),
                                                                 NULL),
                                                  args[1], NULL_TREE);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t), NULL),
                                         t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign_with_ops (COND_EXPR, lhs, cond,
                                                  args[1],
                                                  gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case lock_release:
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            return;
          default:
            continue;
          }
      }
}
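
/* For instance, for a fetch_op entry the tsan builtin returns the old
   value, so a call like

     lhs = __atomic_add_fetch_4 (ptr, val, model);

   is rewritten, roughly, into a call to the TSAN_ATOMIC32_FETCH_ADD
   builtin followed by

     lhs = tmp + val;

   where tmp is the old value returned by that builtin.  NAND is
   handled specially, since BIT_NOT_EXPR in the table stands for the
   combined not-of-and operation.  */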

/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      if (is_gimple_builtin_call (stmt))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented = instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      fentry_exit_instrument |= instrument_gimple (&gsi);
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  basic_block succ_bb;
  gimple_stmt_iterator gsi;
  tree ret_addr, builtin_decl;
  gimple g;

  succ_bb = single_succ (ENTRY_BLOCK_PTR);
  gsi = gsi_after_labels (succ_bb);

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node, NULL);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
}
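
/* The entry instrumentation amounts to inserting, at the start of the
   function, something along the lines of

     ret_addr = __builtin_return_address (0);
     __tsan_func_entry (ret_addr);

   assuming the TSAN_FUNC_ENTRY builtin corresponds to the
   __tsan_func_entry runtime routine.  */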

/* Instruments function exits.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple stmt, g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR;
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}
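
/* Correspondingly, every return (and __builtin_return) ends up
   preceded by a call to the TSAN_FUNC_EXIT builtin, roughly

     __tsan_func_exit ();
     return ...;

   presumably so the runtime can reconstruct call stacks in its
   reports.  */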

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    {
      instrument_func_entry ();
      instrument_func_exit ();
    }
  return 0;
}

/* The pass's gate.  */

static bool
tsan_gate (void)
{
  return flag_tsan != 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}
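
/* In effect this emits a static constructor roughly equivalent to

     static void ctor (void) { __tsan_init (); }

   (the name "ctor" is only for illustration) registered with priority
   MAX_RESERVED_INIT_PRIORITY - 1, so it runs before ordinary user
   constructors.  */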

/* The pass descriptor.  */

struct gimple_opt_pass pass_tsan =
{
 {
  GIMPLE_PASS,
  "tsan",                               /* name  */
  OPTGROUP_NONE,                        /* optinfo_flags */
  tsan_gate,                            /* gate  */
  tsan_pass,                            /* execute  */
  NULL,                                 /* sub  */
  NULL,                                 /* next  */
  0,                                    /* static_pass_number  */
  TV_NONE,                              /* tv_id  */
  PROP_ssa | PROP_cfg,                  /* properties_required  */
  0,                                    /* properties_provided  */
  0,                                    /* properties_destroyed  */
  0,                                    /* todo_flags_start  */
  TODO_verify_all | TODO_update_ssa     /* todo_flags_finish  */
 }
};

/* The gate for the -O0 variant of the pass.  */

static bool
tsan_gate_O0 (void)
{
  return flag_tsan != 0 && !optimize;
}

/* The pass descriptor for the -O0 variant.  */

struct gimple_opt_pass pass_tsan_O0 =
{
 {
  GIMPLE_PASS,
  "tsan0",                              /* name  */
  OPTGROUP_NONE,                        /* optinfo_flags */
  tsan_gate_O0,                         /* gate  */
  tsan_pass,                            /* execute  */
  NULL,                                 /* sub  */
  NULL,                                 /* next  */
  0,                                    /* static_pass_number  */
  TV_NONE,                              /* tv_id  */
  PROP_ssa | PROP_cfg,                  /* properties_required  */
  0,                                    /* properties_provided  */
  0,                                    /* properties_destroyed  */
  0,                                    /* todo_flags_start  */
  TODO_verify_all | TODO_update_ssa     /* todo_flags_finish  */
 }
};