/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2020 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-iterator.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-loop-ivopts.h"
#include "tree-eh.h"
#include "tsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "builtins.h"
#include "target.h"

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size, bool volatilep)
{
  enum built_in_function fcode;
  int pos;

  if (size <= 1)
    pos = 0;
  else if (size <= 3)
    pos = 1;
  else if (size <= 7)
    pos = 2;
  else if (size <= 15)
    pos = 3;
  else
    pos = 4;

  if (param_tsan_distinguish_volatile && volatilep)
    fcode = is_write ? BUILT_IN_TSAN_VOLATILE_WRITE1
                     : BUILT_IN_TSAN_VOLATILE_READ1;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  fcode = (built_in_function) (fcode + pos);

  return builtin_decl_implicit (fcode);
}
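
/* Example (illustrative): in this file the caller guarantees a
   power-of-two SIZE of at most 16 (see the check in instrument_expr),
   so e.g. a 4-byte volatile write selects BUILT_IN_TSAN_VOLATILE_WRITE4,
   i.e. the runtime entry void __tsan_volatile_write4 (void *addr).  */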

/* If EXPR is a store to a vptr, return the stored value,
   otherwise return NULL.  */

static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write == true
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
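
/* Example (illustrative): for a vptr initialization in a C++
   constructor, roughly
     this->_vptr.C = &vtable_for_C;
   EXPR is a COMPONENT_REF whose field is DECL_VIRTUAL_P, so the
   stored vtable address is returned.  (The names above are made up
   for illustration.)  */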

/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  poly_int64 unused_bitsize, unused_bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  base = get_inner_reference (expr, &unused_bitsize, &unused_bitpos, &offset,
                              &mode, &unsignedp, &reversep, &volatilep);

  /* No need to instrument accesses to decls that don't escape;
     such decls cannot be accessed by other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!may_be_aliased (base))
        return false;
    }

  if (TREE_READONLY (base) || (VAR_P (base) && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      HOST_WIDE_INT bitpos, bitsize;
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          expr = TREE_OPERAND (expr, 1);
          if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
            expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
          if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_SIZE (expr)))
            return false;
          bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
                   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
          bitsize = tree_to_uhwi (DECL_SIZE (expr));
        }
      else
        {
          if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
              || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
            return false;
          bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
          bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
        }
      if (bitpos < 0 || bitsize <= 0)
        return false;
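      /* Round the access out to whole bytes: e.g. a bit-field at bit
         position 13 with width 7 occupies (13 % 8 + 7 + 7) / 8 == 2
         bytes.  */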
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
             / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
        return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
        return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
        {
          align = (align - 1) & bitpos;
          align = least_bit_hwi (align);
        }
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
                     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
        return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
        return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
                                            ? BUILT_IN_TSAN_WRITE_RANGE
                                            : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size,
                                                   TREE_THIS_VOLATILE (expr)),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
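
/* Example (illustrative): for a plain 4-byte store to a global
     g = 1;
   the pass emits
     __tsan_write4 (&g);
     g = 1;
   while a non-power-of-two or under-aligned access falls back to
   __tsan_read_range/__tsan_write_range (addr, size).  */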

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst,
  bool_clear, bool_test_and_set
};

/* Table mapping sync/atomic builtins to their corresponding tsan
   equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
#define BOOL_CLEAR(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_clear, ERROR_MARK)
#define BOOL_TEST_AND_SET(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_test_and_set, ERROR_MARK)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
              TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
              TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
              TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
              TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
            TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
            TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
            TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
            TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
            TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
           TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE),

  BOOL_CLEAR (ATOMIC_CLEAR, TSAN_ATOMIC8_STORE),

  BOOL_TEST_AND_SET (ATOMIC_TEST_AND_SET, TSAN_ATOMIC8_EXCHANGE)
};
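
/* Example (illustrative): via the CHECK_LAST entry for
   ATOMIC_FETCH_ADD_4, a call such as
     __atomic_fetch_add (&x, 1, __ATOMIC_SEQ_CST)
   is redirected to the corresponding runtime entry
     __tsan_atomic32_fetch_add (&x, 1, __ATOMIC_SEQ_CST)
   keeping the original arguments, including the memory model.  */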

/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            last_arg = gimple_call_arg (stmt, num - 1);
            if (tree_fits_uhwi_p (last_arg)
                && memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
            maybe_clean_eh_stmt (stmt);
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
              adjust_result:
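                /* The *_FETCH builtins return the new value while the
                   tsan fetch_* entry points return the old one, so the
                   result is recomputed here: illustratively,
                     lhs = __atomic_add_fetch (p, v, m);
                   becomes
                     tmp = __tsan_atomic32_fetch_add (p, v, m);
                     lhs = tmp + v;  */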
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, NOP_EXPR, args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
                /* BIT_NOT_EXPR stands for NAND.  */
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, BIT_AND_EXPR,
                                             gimple_call_lhs (stmt), args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
                  }
                else
                  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
                                           gimple_call_lhs (stmt), args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            if (tree_fits_uhwi_p (args[4])
                && memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
              return;
            if (tree_fits_uhwi_p (args[5])
                && memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
              return;
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            return;
          case bool_cas:
          case val_cas:
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
                                         NOP_EXPR, args[1]);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
                                         gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case lock_release:
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            return;
          case bool_clear:
          case bool_test_and_set:
            if (BOOL_TYPE_SIZE != 8)
              {
                decl = NULL_TREE;
                for (j = 1; j < 5; j++)
                  if (BOOL_TYPE_SIZE == (8 << j))
                    {
                      enum built_in_function tsan_fcode
                        = (enum built_in_function)
                          (tsan_atomic_table[i].tsan_fcode + j);
                      decl = builtin_decl_implicit (tsan_fcode);
                      break;
                    }
                if (decl == NULL_TREE)
                  return;
              }
            last_arg = gimple_call_arg (stmt, num - 1);
            if (tree_fits_uhwi_p (last_arg)
                && memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            if (tsan_atomic_table[i].action == bool_clear)
              {
                update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                    build_int_cst (t, 0), last_arg);
                maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
                return;
              }
            t = build_int_cst (t, targetm.atomic_test_and_set_trueval);
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                t, last_arg);
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            stmt = gsi_stmt (*gsi);
            lhs = gimple_call_lhs (stmt);
            if (lhs == NULL_TREE)
              return;
            if (targetm.atomic_test_and_set_trueval != 1
                || !useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (t)))
              {
                tree new_lhs = make_ssa_name (TREE_TYPE (t));
                gimple_call_set_lhs (stmt, new_lhs);
                if (targetm.atomic_test_and_set_trueval != 1)
                  g = gimple_build_assign (lhs, NE_EXPR, new_lhs,
                                           build_int_cst (TREE_TYPE (t), 0));
                else
                  g = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                update_stmt (stmt);
              }
            return;
          default:
            continue;
          }
      }
}

/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* All functions that contain a call will have their exits
         instrumented, therefore no call other than __tsan_func_exit
         may remain a tail call.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented = instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}
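
/* Example (illustrative): each
     return val;
   becomes
     __tsan_func_exit ();
     return val;  */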

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (bool *cfg_changed)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          if (gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
            {
              if (fentry_exit_instrument)
                replace_func_exit (stmt);
              else
                tsan_func_exits.safe_push (stmt);
              func_exit_seen = true;
            }
          else
            fentry_exit_instrument |= instrument_gimple (&gsi);
        }
      if (gimple_purge_dead_eh_edges (bb))
        *cfg_changed = true;
    }
  unsigned int i;
  gimple *stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
        gsi = gsi_for_stmt (stmt);
        gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}
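
/* Example (illustrative): the sequence inserted on the entry edge is
   roughly
     ret_addr_1 = __builtin_return_address (0);
     __tsan_func_entry (ret_addr_1);  */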

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  bool cfg_changed = false;
  if (instrument_memory_accesses (&cfg_changed))
    instrument_func_entry ();
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}
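
/* Example (illustrative): this emits a static constructor roughly
   equivalent to
     static void ctor (void) { __tsan_init (); }
   registered at priority MAX_RESERVED_INIT_PRIORITY - 1, i.e. ahead
   of default-priority constructors.  The name "ctor" is made up; the
   real symbol is generated by cgraph_build_static_cdtor.  */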

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return sanitize_flags_p (SANITIZE_THREAD);
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (sanitize_flags_p (SANITIZE_THREAD) && !optimize);
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}