/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2016 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-iterator.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-loop-ivopts.h"
#include "tsan.h"
#include "asan.h"
#include "builtins.h"

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

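  /* Only power-of-two sizes up to 16 reach this point; instrument_expr
     routes odd-sized, oversized and underaligned accesses through the
     *_range entry points instead.  Each bucket therefore selects the
     entry point of the matching width.  */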
  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}

/* Check whether EXPR is a store to a vptr.  If so, return the
   stored value, otherwise return NULL.  */

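/* For instance (hypothetical C++ source), the compiler-generated
   store in a constructor
     this->_vptr.C = &_ZTV1C + 16;
   is a COMPONENT_REF whose field is DECL_VIRTUAL_P, so the stored
   value is later reported through __tsan_vptr_update rather than a
   plain __tsan_writeN call.  */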
static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}

/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode,
                              &unsignedp, &reversep, &volatilep, false);

  /* No need to instrument accesses to decls that don't escape:
     they cannot be reached from other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!may_be_aliased (base))
        return false;
    }

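  /* Read-only memory and hard registers cannot take part in a data
     race, so accesses to them need no instrumentation.  */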
  if (TREE_READONLY (base)
      || (TREE_CODE (base) == VAR_DECL
          && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

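  /* A bit-field cannot be passed to the runtime by address; widen the
     access to the containing byte range, rounding the bit position
     down and the size up to whole bytes.  */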
  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          expr = TREE_OPERAND (expr, 1);
          if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
            expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
          if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_SIZE (expr)))
            return false;
          bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
                   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
          bitsize = tree_to_uhwi (DECL_SIZE (expr));
        }
      else
        {
          if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
              || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
            return false;
          bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
          bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
        }
      if (bitpos < 0 || bitsize <= 0)
        return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
             / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
        return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
        return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
        {
          align = (align - 1) & bitpos;
          align = align & -align;
        }
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
                     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
        return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
        return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
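  /* Pick the runtime entry point: odd-sized, over-wide (> 16 bytes) or
     underaligned accesses go through __tsan_read_range and
     __tsan_write_range, vptr stores through __tsan_vptr_update, and
     everything else through the sized __tsan_readN/__tsan_writeN
     entry points.  */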
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
                                            ? BUILT_IN_TSAN_WRITE_RANGE
                                            : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst
};
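
/* check_last: validate the trailing memory model argument and swap in
     the tsan entry point.
   add_seq_cst, add_acquire: append the named memory model as an extra
     trailing argument.
   weak_cas, strong_cas, bool_cas, val_cas: map the builtin onto the
     corresponding tsan compare_exchange entry point.
   lock_release: rewrite as an atomic store of 0 with MEMMODEL_RELEASE.
   fetch_op, fetch_op_seq_cst: the builtin returns the new value while
     the tsan entry point returns the old one, so the operation is
     re-applied to the call result afterwards.  */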

/* Table mapping sync/atomic builtins to their corresponding tsan
   equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
              TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
              TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
              TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
              TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
            TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
            TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
            TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
            TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
            TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
           TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE)
};

/* Instrument an atomic builtin.  */

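/* For example (illustration only): __atomic_load_4 (&x, model) becomes
   a call to __tsan_atomic32_load (&x, model), and
   __sync_fetch_and_add_4 (&x, v) becomes
   __tsan_atomic32_fetch_add (&x, v, __ATOMIC_SEQ_CST).  */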
static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            last_arg = gimple_call_arg (stmt, num - 1);
            if (!tree_fits_uhwi_p (last_arg)
                || memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
              adjust_result:
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, NOP_EXPR, args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
                /* BIT_NOT_EXPR stands for NAND.  */
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, BIT_AND_EXPR,
                                             gimple_call_lhs (stmt), args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
                  }
                else
                  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
                                           gimple_call_lhs (stmt), args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
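          /* The tsan compare_exchange entry points take no weak/strong
             flag; a weak CAS is therefore only rewritten when the flag
             is a nonzero constant, otherwise the table lookup falls
             through to the strong variant.  */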
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            if (!tree_fits_uhwi_p (args[4])
                || memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
              return;
            if (!tree_fits_uhwi_p (args[5])
                || memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
              return;
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            return;
          case bool_cas:
          case val_cas:
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
                                         NOP_EXPR, args[1]);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
                                         gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case lock_release:
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            return;
          default:
            continue;
          }
      }
}

/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Any function that contains a call will have its exit
         instrumented, so no call other than __tsan_func_exit
         may remain a tail call.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented = instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        if (is_gimple_call (stmt)
            && gimple_call_internal_p (stmt)
            && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
          {
            if (fentry_exit_instrument)
              replace_func_exit (stmt);
            else
              tsan_func_exits.safe_push (stmt);
            func_exit_seen = true;
          }
        else
          fentry_exit_instrument |= instrument_gimple (&gsi);
      }
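  /* TSAN_FUNC_EXIT markers seen before the first instrumented access
     were queued above; now that it is known whether entry/exit
     instrumentation is wanted, expand the queued markers or drop
     them.  */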
  unsigned int i;
  gimple *stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
        gsi = gsi_for_stmt (stmt);
        gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

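  /* The emitted prologue is equivalent to
       __tsan_func_entry (__builtin_return_address (0));
     inserted on the single edge out of the entry block.  */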
  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    instrument_func_entry ();
  return 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
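  /* MAX_RESERVED_INIT_PRIORITY - 1 lies in the reserved priority
     range, so the generated constructor runs before ordinary
     user-level constructors.  */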
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0
              && !lookup_attribute ("no_sanitize_thread",
                                    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0 && !optimize
              && !lookup_attribute ("no_sanitize_thread",
                                    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}