/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2013 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "intl.h"
#include "tm.h"
#include "basic-block.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "function.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "output.h"
#include "options.h"
#include "target.h"
#include "diagnostic.h"
#include "tree-ssa-propagate.h"
#include "tsan.h"
#include "asan.h"

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
		     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
		     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
		     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
		     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
		     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}
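
/* For illustration only (a hypothetical example, not code emitted by
   this function itself): an aligned four-byte store such as
       p->x = 1;
   is instrumented by inserting
       __tsan_write4 (&p->x);
   next to it.  Note that the size ranges above round an odd size down
   to the next power of two, e.g. a 3-byte access is reported through
   the 2-byte builtin.  */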

/* Check whether EXPR is a store to a vptr.  If so, return the value
   being stored, otherwise return NULL.  */

static tree
is_vptr_store (gimple stmt, tree expr, bool is_write)
{
  if (is_write
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
	  && DECL_VIRTUAL_P (field))
	return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
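
/* A sketch of what this matches (hypothetical C++ source and mangled
   name): in a constructor of class C the compiler stores the vtable
   pointer,
       this->_vptr.C = &_ZTV1C + offset;
   The COMPONENT_REF is recognized here via DECL_VIRTUAL_P, and
   instrument_expr below then emits __tsan_vptr_update for it instead
   of a plain __tsan_writeN call.  */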

/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple stmt, g;
  gimple_seq seq;
  location_t loc;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size == -1)
    return false;

  /* For now just avoid instrumenting bit-field accesses.
     TODO: handle bit-fields as if touching the whole field.  */
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
			      &mode, &unsignedp, &volatilep, false);

  /* No need to instrument accesses to decls that don't escape;
     they cannot be accessed from other threads.  */
  if (DECL_P (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      pt.nonlocal = 1;
      if (!pt_solution_includes (&pt, base))
	return false;
      if (!is_global_var (base) && !may_be_aliased (base))
	return false;
    }

  if (TREE_READONLY (base)
      || (TREE_CODE (base) == VAR_DECL
	  && DECL_HARD_REGISTER (base)))
    return false;

  if (size == 0
      || bitpos % (size * BITS_PER_UNIT)
      || bitsize != size * BITS_PER_UNIT)
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);
  gcc_checking_assert (rhs != NULL || is_gimple_addressable (expr));
  expr_ptr = build_fold_addr_expr (unshare_expr (expr));
  seq = NULL;
  if (!is_gimple_val (expr_ptr))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expr_ptr), NULL),
			       expr_ptr);
      expr_ptr = gimple_assign_lhs (g);
      gimple_set_location (g, loc);
      gimple_seq_add_stmt_without_update (&seq, g);
    }
  if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
			   1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 1, expr_ptr);
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
	 a basic block, so the instrumented stmts need to be
	 inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
	{
	  edge e;

	  bb = gsi_bb (gsi);
	  e = find_fallthru_edge (bb->succs);
	  if (e)
	    gsi_insert_seq_on_edge_immediate (e, seq);
	}
      else
	gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
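
/* Illustrative sketch of the placement rules above (hypothetical
   code, assuming a four-byte int x):
       x = foo ();
   has the write of x instrumented after the call,
       x = foo ();
       __tsan_write4 (&x);
   while a read feeding a call argument is instrumented before the
   call, because the callee may contain synchronization that orders
   the access.  */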

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst
};
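
/* In brief (a descriptive summary of the switch in
   instrument_builtin_call below): check_last validates the trailing
   memory model argument and swaps in the tsan fndecl; add_seq_cst and
   add_acquire rewrite a __sync_* builtin, appending the corresponding
   memory model argument; weak_cas, strong_cas, bool_cas and val_cas
   handle the compare-and-swap flavors; lock_release becomes a release
   store of 0; fetch_op and fetch_op_seq_cst map an op_fetch builtin
   to the matching fetch_op and re-apply the operation to the result.  */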

/* Table of how to map sync/atomic builtins to their corresponding
   tsan equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
	      TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
	      TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
	      TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
	      TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
	    TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
	    TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
	    TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
	    TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
	    TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
	   TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE)
};
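
/* For example (illustrative only), the CHECK_LAST entry for
   ATOMIC_LOAD_4 rewrites
       x = __atomic_load_4 (p, __ATOMIC_ACQUIRE);
   into
       x = __tsan_atomic32_load (p, __ATOMIC_ACQUIRE);
   once the memory model argument has been verified to be a valid
   constant.  */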

/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi), g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
	tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
	if (decl == NULL_TREE)
	  return;
	switch (tsan_atomic_table[i].action)
	  {
	  case check_last:
	  case fetch_op:
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (!host_integerp (last_arg, 1)
		|| (unsigned HOST_WIDE_INT) tree_low_cst (last_arg, 1)
		   > MEMMODEL_SEQ_CST)
	      return;
	    gimple_call_set_fndecl (stmt, decl);
	    update_stmt (stmt);
	    if (tsan_atomic_table[i].action == fetch_op)
	      {
		args[1] = gimple_call_arg (stmt, 1);
		goto adjust_result;
	      }
	    return;
	  case add_seq_cst:
	  case add_acquire:
	  case fetch_op_seq_cst:
	    gcc_assert (num <= 2);
	    for (j = 0; j < num; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    for (; j < 2; j++)
	      args[j] = NULL_TREE;
	    args[num] = build_int_cst (NULL_TREE,
				       tsan_atomic_table[i].action
				       != add_acquire
				       ? MEMMODEL_SEQ_CST
				       : MEMMODEL_ACQUIRE);
	    update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
	    stmt = gsi_stmt (*gsi);
	    if (tsan_atomic_table[i].action == fetch_op_seq_cst)
	      {
	      adjust_result:
		lhs = gimple_call_lhs (stmt);
		if (lhs == NULL_TREE)
		  return;
		if (!useless_type_conversion_p (TREE_TYPE (lhs),
						TREE_TYPE (args[1])))
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs), NULL);
		    g = gimple_build_assign_with_ops (NOP_EXPR, var,
						      args[1], NULL_TREE);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    args[1] = var;
		  }
		gimple_call_set_lhs (stmt,
				     make_ssa_name (TREE_TYPE (lhs), NULL));
		/* BIT_NOT_EXPR stands for NAND.  */
		if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs), NULL);
		    g = gimple_build_assign_with_ops (BIT_AND_EXPR, var,
						      gimple_call_lhs (stmt),
						      args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    g = gimple_build_assign_with_ops (BIT_NOT_EXPR, lhs, var,
						      NULL_TREE);
		  }
		else
		  g = gimple_build_assign_with_ops (tsan_atomic_table[i].code,
						    lhs,
						    gimple_call_lhs (stmt),
						    args[1]);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case weak_cas:
	    if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
	      continue;
	    /* FALLTHRU */
	  case strong_cas:
	    gcc_assert (num == 6);
	    for (j = 0; j < 6; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    if (!host_integerp (args[4], 1)
		|| (unsigned HOST_WIDE_INT) tree_low_cst (args[4], 1)
		   > MEMMODEL_SEQ_CST)
	      return;
	    if (!host_integerp (args[5], 1)
		|| (unsigned HOST_WIDE_INT) tree_low_cst (args[5], 1)
		   > MEMMODEL_SEQ_CST)
	      return;
	    update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
				args[4], args[5]);
	    return;
	  case bool_cas:
	  case val_cas:
	    gcc_assert (num == 3);
	    for (j = 0; j < 3; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
	    t = create_tmp_var (t, NULL);
	    mark_addressable (t);
	    if (!useless_type_conversion_p (TREE_TYPE (t),
					    TREE_TYPE (args[1])))
	      {
		g = gimple_build_assign_with_ops (NOP_EXPR,
						  make_ssa_name (TREE_TYPE (t),
								 NULL),
						  args[1], NULL_TREE);
		gsi_insert_before (gsi, g, GSI_SAME_STMT);
		args[1] = gimple_assign_lhs (g);
	      }
	    g = gimple_build_assign (t, args[1]);
	    gsi_insert_before (gsi, g, GSI_SAME_STMT);
	    lhs = gimple_call_lhs (stmt);
	    update_gimple_call (gsi, decl, 5, args[0],
				build_fold_addr_expr (t), args[2],
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST),
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST));
	    if (tsan_atomic_table[i].action == val_cas && lhs)
	      {
		tree cond;
		stmt = gsi_stmt (*gsi);
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t), NULL),
					 t);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
		cond = build2 (NE_EXPR, boolean_type_node, t,
			       build_int_cst (TREE_TYPE (t), 0));
		g = gimple_build_assign_with_ops (COND_EXPR, lhs, cond,
						  args[1],
						  gimple_assign_lhs (g));
		gimple_call_set_lhs (stmt, t);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case lock_release:
	    gcc_assert (num == 1);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				build_int_cst (t, 0),
				build_int_cst (NULL_TREE,
					       MEMMODEL_RELEASE));
	    return;
	  default:
	    continue;
	  }
      }
}
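
/* Worked example of the fetch_op/adjust_result path above
   (illustrative pseudo-source, not emitted literally):
       r = __atomic_add_fetch_4 (p, v, __ATOMIC_SEQ_CST);
   becomes
       t = __tsan_atomic32_fetch_add (p, v, __ATOMIC_SEQ_CST);
       r = t + v;
   and for NAND, where BIT_NOT_EXPR stands in for the operation,
       r = ~(t & v);  */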

/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
	  != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      if (is_gimple_builtin_call (stmt))
	instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
	{
	  lhs = gimple_assign_lhs (stmt);
	  instrumented = instrument_expr (*gsi, lhs, true);
	}
      if (gimple_assign_load_p (stmt))
	{
	  rhs = gimple_assign_rhs1 (stmt);
	  instrumented |= instrument_expr (*gsi, rhs, false);
	}
    }
  return instrumented;
}

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      fentry_exit_instrument |= instrument_gimple (&gsi);
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  basic_block succ_bb;
  gimple_stmt_iterator gsi;
  tree ret_addr, builtin_decl;
  gimple g;

  succ_bb = single_succ (ENTRY_BLOCK_PTR);
  gsi = gsi_after_labels (succ_bb);

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node, NULL);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
}

/* Instruments function exits.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple stmt, g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR;
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
		  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}
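
/* Taken together, entry/exit instrumentation has roughly this effect
   (an illustrative sketch):
       void foo (void)
       {
	 __tsan_func_entry (__builtin_return_address (0));
	 ...
	 __tsan_func_exit ();
	 return;
       }
   These calls are only added when at least one access or builtin in
   the function was instrumented; see tsan_pass below.  */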

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    {
      instrument_func_entry ();
      instrument_func_exit ();
    }
  return 0;
}

/* The pass's gate.  */

static bool
tsan_gate (void)
{
  return (flag_sanitize & SANITIZE_THREAD) != 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
			    &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
}
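
/* The constructor built above behaves roughly like compiling
   (an illustrative sketch only; the real function is synthesized via
   the cgraph machinery and its name is implementation-defined):
       static void tsan_ctor (void) { __tsan_init (); }
   registered as an 'I'-kind static constructor with priority
   MAX_RESERVED_INIT_PRIORITY - 1, so the tsan runtime is initialized
   before user constructors run.  */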

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_all | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  bool gate () { return tsan_gate (); }
  unsigned int execute () { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

static bool
tsan_gate_O0 (void)
{
  return (flag_sanitize & SANITIZE_THREAD) != 0 && !optimize;
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_all | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return tsan_gate_O0 (); }
  unsigned int execute () { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}