/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2015 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "tree.h"
#include "fold-const.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "intl.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "output.h"
#include "target.h"
#include "diagnostic.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-loop-ivopts.h"
#include "tsan.h"
#include "asan.h"
#include "builtins.h"

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}
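
/* A rough illustrative sketch, not part of the pass: for plain accesses
   the size classes above pick the runtime entry points like so
   (assuming the usual type sizes):

     char c;    c = 1;      -> __tsan_write1 (&c);
     short s;   s = 1;      -> __tsan_write2 (&s);
     int i;     tmp = i;    -> __tsan_read4 (&i);
     double d;  tmp = d;    -> __tsan_read8 (&d);

   Non-power-of-two or over-16-byte sizes never reach this function;
   instrument_expr routes them to __tsan_read_range/__tsan_write_range.  */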

/* Check whether EXPR refers to a store to a vptr.  If so, return the
   value being stored, otherwise return NULL.  */

static tree
is_vptr_store (gimple stmt, tree expr, bool is_write)
{
  if (is_write
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
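
/* A rough illustrative sketch, not part of the pass (assumes C++ input):
   vptr stores are emitted by the compiler in constructors and
   destructors, e.g. for

     struct A { virtual void f (); };
     A::A () {}

   the constructor body contains something like this->_vptr.A = &vtable-slot.
   is_vptr_store recognizes that store so instrument_expr can emit
   __tsan_vptr_update instead of a plain __tsan_writeN, letting the
   runtime suppress benign vptr races during object destruction.  */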

/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple stmt, g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                              &mode, &unsignedp, &volatilep, false);

  /* No need to instrument accesses to decls that don't escape;
     they cannot be touched by other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!may_be_aliased (base))
        return false;
    }

  if (TREE_READONLY (base)
      || (TREE_CODE (base) == VAR_DECL
          && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          expr = TREE_OPERAND (expr, 1);
          if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
            expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
          if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_SIZE (expr)))
            return false;
          bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
                   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
          bitsize = tree_to_uhwi (DECL_SIZE (expr));
        }
      else
        {
          if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
              || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
            return false;
          bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
          bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
        }
      if (bitpos < 0 || bitsize <= 0)
        return false;
      /* Instrument the whole byte range covering the bit-field.  */
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
             / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
        return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
        return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
        {
          /* The byte offset is not a multiple of the object alignment;
             keep only the largest power of two that still divides it.  */
          align = (align - 1) & bitpos;
          align = align & -align;
        }
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
                     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
        return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
        return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
                                            ? BUILT_IN_TSAN_WRITE_RANGE
                                            : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
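
/* A rough illustrative sketch, not part of the pass: for

     int x;
     void f (void) { x = 42; }

   instrument_expr emits roughly

     __tsan_write4 (&x);
     x = 42;

   whereas for an assignment of a call result (lhs = foo ();) the
   __tsan_write call is placed after the call, on the fallthru edge if
   the call can throw, because the callee may contain synchronization.  */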

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst
};

/* Table mapping sync/atomic builtins to their corresponding tsan
   equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
              TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
              TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
              TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
              TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
            TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
            TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
            TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
            TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
            TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
           TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE)
};
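
/* A rough illustrative sketch, not part of the pass: a CHECK_LAST entry
   rewrites only the callee, keeping the trailing memory model argument:

     __atomic_load_4 (&x, __ATOMIC_ACQUIRE)
       -> __tsan_atomic32_load (&x, __ATOMIC_ACQUIRE)

   while ADD_SEQ_CST/ADD_ACQUIRE entries serve the legacy __sync_*
   builtins, which lack a memory model argument, by appending one.  */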

/* Instrument an atomic builtin: replace it with the corresponding
   tsan runtime call from tsan_atomic_table.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi), g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            /* The memory model is already the last argument; just verify
               it is a valid constant and swap in the tsan callee.  */
            last_arg = gimple_call_arg (stmt, num - 1);
            if (!tree_fits_uhwi_p (last_arg)
                || memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            /* Legacy __sync_* builtins carry no memory model; append one.  */
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
              adjust_result:
                /* The tsan runtime only provides fetch_op entry points;
                   recompute the op_fetch result from the fetched value.  */
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, NOP_EXPR, args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
                /* BIT_NOT_EXPR stands for NAND.  */
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, BIT_AND_EXPR,
                                             gimple_call_lhs (stmt), args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
                  }
                else
                  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
                                           gimple_call_lhs (stmt), args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            /* Drop the weak flag (arg 3); validate both memory models.  */
            if (!tree_fits_uhwi_p (args[4])
                || memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
              return;
            if (!tree_fits_uhwi_p (args[5])
                || memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
              return;
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            return;
          case bool_cas:
          case val_cas:
            /* __sync_*_compare_and_swap passes the expected value by
               value; the tsan interface wants its address, so spill it
               to an addressable temporary.  */
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
                                         NOP_EXPR, args[1]);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                /* Reconstruct the old value for __sync_val_compare_and_swap:
                   on success it is the expected value, on failure the value
                   written back into the temporary.  */
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
                                         gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case lock_release:
            /* __sync_lock_release is a release-ordered store of zero.  */
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            return;
          default:
            continue;
          }
      }
}
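
/* A rough illustrative sketch, not part of the pass:

     __sync_fetch_and_add (&x, 1)
       -> __tsan_atomic32_fetch_add (&x, 1, __ATOMIC_SEQ_CST)

   and for the op_and_fetch flavor the result is recomputed after the
   call, since the tsan runtime only provides fetch_op entry points:

     res = __sync_add_and_fetch (&x, 1)
       -> tmp = __tsan_atomic32_fetch_add (&x, 1, __ATOMIC_SEQ_CST);
          res = tmp + 1;  */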

/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Any function that makes a call will have its exit instrumented,
         so no tail call other than __tsan_func_exit may remain; clear
         the tail-call flag.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented = instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when the function has no
   TSAN_FUNC_EXIT internal calls.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple stmt, g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        if (is_gimple_call (stmt)
            && gimple_call_internal_p (stmt)
            && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
          {
            if (fentry_exit_instrument)
              replace_func_exit (stmt);
            else
              tsan_func_exits.safe_push (stmt);
            func_exit_seen = true;
          }
        else
          fentry_exit_instrument |= instrument_gimple (&gsi);
      }
  /* Process TSAN_FUNC_EXIT calls seen before the first instrumented
     access: replace them if instrumentation is needed, drop them
     otherwise.  */
  unsigned int i;
  gimple stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
        gsi = gsi_for_stmt (stmt);
        gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}
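
/* A rough illustrative sketch, not part of the pass: together with the
   exit instrumentation, an instrumented function looks roughly like

     void f (void)
     {
       __tsan_func_entry (__builtin_return_address (0));
       ...body with __tsan_readN/__tsan_writeN calls...
       __tsan_func_exit ();
     }

   The caller's return address identifies the frame in the runtime's
   shadow call stack used for race reports.  */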

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    instrument_func_entry ();
  return 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}
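
/* A rough illustrative sketch, not part of the pass: the static ctor
   built above behaves approximately like compiling

     __attribute__ ((constructor))
     static void tsan_ctor (void) { __tsan_init (); }

   except that cgraph_build_static_cdtor registers it with the reserved
   priority MAX_RESERVED_INIT_PRIORITY - 1 so it runs before user
   constructors.  */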

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0
              && !lookup_attribute ("no_sanitize_thread",
                                    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0 && !optimize
              && !lookup_attribute ("no_sanitize_thread",
                                    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}