/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2017 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-iterator.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-loop-ivopts.h"
#include "tsan.h"
#include "asan.h"
#include "builtins.h"
#include "target.h"

/* Number of instrumented memory accesses in the current function.  */

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}
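
/* E.g. a 4-byte load is announced to the runtime with

     __tsan_read4 (addr);

   and an 8-byte store with __tsan_write8 (addr).  Sizes that are not
   a power of two, and under-aligned accesses, never reach this
   helper; instrument_expr below routes them to the
   __tsan_read_range/__tsan_write_range entry points instead.  */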

/* Check whether EXPR refers to a store to a vptr.  If it does, return
   the value being stored, otherwise return NULL.  */

static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write == true
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
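
/* E.g. for the vtable pointer store emitted in a C++ constructor,
   schematically

     this->_vptr.C = &vtable_for_C;

   the value returned is the new vtable pointer; instrument_expr then
   emits __tsan_vptr_update with it, letting the runtime suppress
   benign vptr races during construction/destruction.  */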

/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode,
                              &unsignedp, &reversep, &volatilep);

  /* No need to instrument accesses to decls that don't escape,
     they can't escape to other threads then.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!may_be_aliased (base))
        return false;
    }

  if (TREE_READONLY (base) || (VAR_P (base) && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          expr = TREE_OPERAND (expr, 1);
          if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
            expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
          if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_SIZE (expr)))
            return false;
          bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
                   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
          bitsize = tree_to_uhwi (DECL_SIZE (expr));
        }
      else
        {
          if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
              || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
            return false;
          bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
          bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
        }
      if (bitpos < 0 || bitsize <= 0)
        return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
             / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
        return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
        return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
        {
          align = (align - 1) & bitpos;
          align = least_bit_hwi (align);
        }
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
                     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
        return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
        return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
                                            ? BUILT_IN_TSAN_WRITE_RANGE
                                            : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
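
/* Putting it together: a store to an escaping 4-byte global, say

     x = 0;

   is rewritten, roughly, into

     __tsan_write4 (&x);
     x = 0;

   while for a write of a call's return value the check is placed
   after the call, as explained in the comment above.  */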

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst,
  bool_clear, bool_test_and_set
};

/* Table mapping sync/atomic builtins to their corresponding
   tsan equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
#define BOOL_CLEAR(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_clear, ERROR_MARK)
#define BOOL_TEST_AND_SET(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_test_and_set, ERROR_MARK)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
	      TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
	      TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
	      TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
	      TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
	    TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
	    TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
	    TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
	    TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
	    TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
	   TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE),

  BOOL_CLEAR (ATOMIC_CLEAR, TSAN_ATOMIC8_STORE),

  BOOL_TEST_AND_SET (ATOMIC_TEST_AND_SET, TSAN_ATOMIC8_EXCHANGE)
};
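
/* As an example of this mapping, a call such as

     __atomic_load_4 (p, __ATOMIC_ACQUIRE)

   matches the CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD) entry
   and is redirected to

     __tsan_atomic32_load (p, __ATOMIC_ACQUIRE)

   once instrument_builtin_call below has verified that the trailing
   memory-model argument is a valid constant.  */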

/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
	tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
	if (decl == NULL_TREE)
	  return;
	switch (tsan_atomic_table[i].action)
	  {
	  case check_last:
	  case fetch_op:
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (!tree_fits_uhwi_p (last_arg)
		|| memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    gimple_call_set_fndecl (stmt, decl);
	    update_stmt (stmt);
	    if (tsan_atomic_table[i].action == fetch_op)
	      {
		args[1] = gimple_call_arg (stmt, 1);
		goto adjust_result;
	      }
	    return;
	  case add_seq_cst:
	  case add_acquire:
	  case fetch_op_seq_cst:
	    gcc_assert (num <= 2);
	    for (j = 0; j < num; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    for (; j < 2; j++)
	      args[j] = NULL_TREE;
	    args[num] = build_int_cst (NULL_TREE,
				       tsan_atomic_table[i].action
				       != add_acquire
				       ? MEMMODEL_SEQ_CST
				       : MEMMODEL_ACQUIRE);
	    update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
	    stmt = gsi_stmt (*gsi);
	    if (tsan_atomic_table[i].action == fetch_op_seq_cst)
	      {
	      adjust_result:
		lhs = gimple_call_lhs (stmt);
		if (lhs == NULL_TREE)
		  return;
		if (!useless_type_conversion_p (TREE_TYPE (lhs),
						TREE_TYPE (args[1])))
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, NOP_EXPR, args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    args[1] = var;
		  }
		gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
		/* BIT_NOT_EXPR stands for NAND.  */
		if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, BIT_AND_EXPR,
					     gimple_call_lhs (stmt), args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
		  }
		else
		  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
					   gimple_call_lhs (stmt), args[1]);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case weak_cas:
	    if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
	      continue;
	    /* FALLTHRU */
	  case strong_cas:
	    gcc_assert (num == 6);
	    for (j = 0; j < 6; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    if (!tree_fits_uhwi_p (args[4])
		|| memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
	      return;
	    if (!tree_fits_uhwi_p (args[5])
		|| memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
	      return;
	    update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
				args[4], args[5]);
	    return;
	  case bool_cas:
	  case val_cas:
	    gcc_assert (num == 3);
	    for (j = 0; j < 3; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
	    t = create_tmp_var (t);
	    mark_addressable (t);
	    if (!useless_type_conversion_p (TREE_TYPE (t),
					    TREE_TYPE (args[1])))
	      {
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
					 NOP_EXPR, args[1]);
		gsi_insert_before (gsi, g, GSI_SAME_STMT);
		args[1] = gimple_assign_lhs (g);
	      }
	    g = gimple_build_assign (t, args[1]);
	    gsi_insert_before (gsi, g, GSI_SAME_STMT);
	    lhs = gimple_call_lhs (stmt);
	    update_gimple_call (gsi, decl, 5, args[0],
				build_fold_addr_expr (t), args[2],
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST),
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST));
	    if (tsan_atomic_table[i].action == val_cas && lhs)
	      {
		tree cond;
		stmt = gsi_stmt (*gsi);
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
		cond = build2 (NE_EXPR, boolean_type_node, t,
			       build_int_cst (TREE_TYPE (t), 0));
		g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
					 gimple_assign_lhs (g));
		gimple_call_set_lhs (stmt, t);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case lock_release:
	    gcc_assert (num == 1);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				build_int_cst (t, 0),
				build_int_cst (NULL_TREE,
					       MEMMODEL_RELEASE));
	    return;
	  case bool_clear:
	  case bool_test_and_set:
	    if (BOOL_TYPE_SIZE != 8)
	      {
		decl = NULL_TREE;
		for (j = 1; j < 5; j++)
		  if (BOOL_TYPE_SIZE == (8 << j))
		    {
		      enum built_in_function tsan_fcode
			= (enum built_in_function)
			  (tsan_atomic_table[i].tsan_fcode + j);
		      decl = builtin_decl_implicit (tsan_fcode);
		      break;
		    }
		if (decl == NULL_TREE)
		  return;
	      }
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (!tree_fits_uhwi_p (last_arg)
		|| memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    if (tsan_atomic_table[i].action == bool_clear)
	      {
		update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				    build_int_cst (t, 0), last_arg);
		return;
	      }
	    t = build_int_cst (t, targetm.atomic_test_and_set_trueval);
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				t, last_arg);
	    stmt = gsi_stmt (*gsi);
	    lhs = gimple_call_lhs (stmt);
	    if (lhs == NULL_TREE)
	      return;
	    if (targetm.atomic_test_and_set_trueval != 1
		|| !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (t)))
	      {
		tree new_lhs = make_ssa_name (TREE_TYPE (t));
		gimple_call_set_lhs (stmt, new_lhs);
		if (targetm.atomic_test_and_set_trueval != 1)
		  g = gimple_build_assign (lhs, NE_EXPR, new_lhs,
					   build_int_cst (TREE_TYPE (t), 0));
		else
		  g = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		update_stmt (stmt);
	      }
	    return;
	  default:
	    continue;
	  }
      }
}
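
/* For instance, for the fetch_op action the add-and-fetch builtin

     lhs = __atomic_add_fetch_4 (p, n, mo);

   becomes, roughly,

     tmp = __tsan_atomic32_fetch_add (p, n, mo);
     lhs = tmp + n;

   and the NAND variants additionally negate the result, which is why
   BIT_NOT_EXPR stands for NAND in the table.  */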

/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
	  != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Any function that contains a call will have its exit
	 instrumented, therefore no call other than __tsan_func_exit
	 may remain a tail call.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
	{
	  lhs = gimple_assign_lhs (stmt);
	  instrumented = instrument_expr (*gsi, lhs, true);
	}
      if (gimple_assign_load_p (stmt))
	{
	  rhs = gimple_assign_rhs1 (stmt);
	  instrumented = instrument_expr (*gsi, rhs, false);
	}
    }
  return instrumented;
}

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
		  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple *stmt = gsi_stmt (gsi);
	if (gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
	  {
	    if (fentry_exit_instrument)
	      replace_func_exit (stmt);
	    else
	      tsan_func_exits.safe_push (stmt);
	    func_exit_seen = true;
	  }
	else
	  fentry_exit_instrument |= instrument_gimple (&gsi);
      }
  unsigned int i;
  gimple *stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
	gsi = gsi_for_stmt (stmt);
	gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}
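
/* Any IFN_TSAN_FUNC_EXIT markers found above thus either become real
   __tsan_func_exit calls (when something in the body was
   instrumented) or are removed again (when nothing was).  */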

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}
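
/* Together with instrument_func_exit/replace_func_exit above, the net
   effect on an instrumented function is, schematically,

     void foo (void)
     {
       __tsan_func_entry (__builtin_return_address (0));
       ...
       __tsan_func_exit ();
     }

   giving the runtime the call stack it shows in race reports.  */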

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    instrument_func_entry ();
  return 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
			    &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
}
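
/* The generated constructor is roughly equivalent to having compiled

     __attribute__((constructor))
     static void tsan_ctor (void) { __tsan_init (); }

   (with a high reserved init priority), so the runtime is initialized
   before instrumented code runs.  */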

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0
	      && !lookup_attribute ("no_sanitize_thread",
				    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0 && !optimize
	      && !lookup_attribute ("no_sanitize_thread",
				    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}