/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2015 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "intl.h"
#include "internal-fn.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "output.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-loop-ivopts.h"
#include "tsan.h"
#include "asan.h"
#include "builtins.h"

/* Returns the decl of the builtin
     void __tsan_readX/__tsan_writeX (void *addr)
   that matches an access of SIZE bytes, reading or writing according
   to IS_WRITE.  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}
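
/* For example, SIZE == 4 selects __tsan_read4/__tsan_write4.  The only
   caller, instrument_expr, reaches this function with power-of-two
   sizes of at most 16 bytes (everything else goes through
   __tsan_read_range/__tsan_write_range), so the "size <= 3",
   "size <= 7", ... tests never truncate an access; they are merely a
   compact way of picking the matching builtin.  */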

/* If STMT is a store of EXPR and EXPR writes a vptr field, return the
   stored value; otherwise return NULL.  */

static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
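
/* A vptr store is what C++ constructors and destructors emit to set up
   the virtual table pointer; as a sketch (the exact GIMPLE varies by
   target and class layout):

     this_1->_vptr.C = &_ZTV1C + 16;

   Reporting these through __tsan_vptr_update instead of a plain write
   lets the runtime treat racing vptr updates specially.  */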

/* Instruments EXPR if needed.  Returns true if any instrumentation
   was inserted.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                              &mode, &unsignedp, &volatilep, false);

  /* No need to instrument accesses to decls that don't escape;
     they cannot be touched by other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!may_be_aliased (base))
        return false;
    }
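
  /* For instance, a local whose address is never taken stays private
     to this thread and is skipped above, while a local whose address
     was passed to pthread_create (and therefore is in the ESCAPED
     points-to solution) is still instrumented.  Illustrative only; the
     exact set is whatever the points-to analysis computed.  */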

  if (TREE_READONLY (base)
      || (TREE_CODE (base) == VAR_DECL
          && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          expr = TREE_OPERAND (expr, 1);
          if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
            expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
          if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_SIZE (expr)))
            return false;
          bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
                   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
          bitsize = tree_to_uhwi (DECL_SIZE (expr));
        }
      else
        {
          if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
              || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
            return false;
          bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
          bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
        }
      if (bitpos < 0 || bitsize <= 0)
        return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
             / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
        return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
        return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
        {
          align = (align - 1) & bitpos;
          align = align & -align;
        }
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
                     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
        return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
        return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
                                            ? BUILT_IN_TSAN_WRITE_RANGE
                                            : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
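
/* A sketch of the result: a naturally aligned 4-byte store "x = 1;"
   becomes

     __tsan_write4 (&x);
     x = 1;

   a misaligned or non-power-of-two-sized access is reported through
   __tsan_read_range/__tsan_write_range (addr, size), and a bit-field
   access is widened to the byte-aligned region containing it before
   being reported.  */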

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst
};

/* Table mapping sync/atomic builtins to their corresponding
   tsan equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
              TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
              TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
              TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
              TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
            TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
            TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
            TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
            TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
            TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
           TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE)
};
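
/* For example, __atomic_load_4 is rewritten to call
   __tsan_atomic32_load, __sync_fetch_and_add_8 becomes
   __tsan_atomic64_fetch_add with a seq-cst memory model appended, and
   __sync_lock_release_1 becomes a release-ordered __tsan_atomic8_store
   of zero.  Note that the tsan entry points are named by size in bits,
   the GCC builtins by size in bytes.  */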

/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            last_arg = gimple_call_arg (stmt, num - 1);
            if (!tree_fits_uhwi_p (last_arg)
                || memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
              adjust_result:
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, NOP_EXPR, args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
                /* BIT_NOT_EXPR stands for NAND.  */
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, BIT_AND_EXPR,
                                             gimple_call_lhs (stmt), args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
                  }
                else
                  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
                                           gimple_call_lhs (stmt), args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            if (!tree_fits_uhwi_p (args[4])
                || memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
              return;
            if (!tree_fits_uhwi_p (args[5])
                || memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
              return;
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            return;
          case bool_cas:
          case val_cas:
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
                                         NOP_EXPR, args[1]);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
                                         gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case lock_release:
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            return;
          default:
            continue;
          }
      }
}
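
/* A sketch of the fetch_op adjustment above: the __atomic_*_fetch
   builtins return the value *after* the operation, while the tsan
   entry points return the value *before* it, so e.g.

     r_1 = __atomic_add_fetch_4 (p_2, v_3, __ATOMIC_SEQ_CST);

   is rewritten to roughly

     t_4 = __tsan_atomic32_fetch_add (p_2, v_3, __ATOMIC_SEQ_CST);
     r_1 = t_4 + v_3;

   For NAND the result is recomputed as ~(t_4 & v_3), which is what the
   BIT_NOT_EXPR special case implements.  */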

/* Instruments the gimple pointed to by GSI.  Returns true if func
   entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Any function containing a call will have its exit
         instrumented, so no tail call other than __tsan_func_exit
         may remain in the function.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented |= instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}
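
/* For example, "tmp_1 = *p_2;" gets a __tsan_read call inserted in
   front of it, while any real call statement may have its atomic
   builtin rewritten and, via the "return true" above, forces
   __tsan_func_entry/__tsan_func_exit instrumentation for the whole
   function.  (Keeping entry/exit around calls is what maintains the
   runtime's shadow call stack; that rationale is not spelled out in
   this file.)  */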

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}
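
/* The effect, sketched at the source level: every

     return x;

   in an instrumented function is preceded by

     __tsan_func_exit ();

   either by rewriting an IFN_TSAN_FUNC_EXIT marker left by an earlier
   stage of compilation or, failing that, by walking the predecessors
   of the exit block here.  */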

/* Instruments all interesting memory accesses in the current function.
   Returns true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        if (is_gimple_call (stmt)
            && gimple_call_internal_p (stmt)
            && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
          {
            if (fentry_exit_instrument)
              replace_func_exit (stmt);
            else
              tsan_func_exits.safe_push (stmt);
            func_exit_seen = true;
          }
        else
          fentry_exit_instrument |= instrument_gimple (&gsi);
      }
  unsigned int i;
  gimple *stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
        gsi = gsi_for_stmt (stmt);
        gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}
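
/* The entry instrumentation is roughly

     ret_addr_1 = __builtin_return_address (0);
     __tsan_func_entry (ret_addr_1);

   inserted on the edge leaving the entry block, so the runtime can
   attribute this frame to its caller.  */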

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    instrument_func_entry ();
  return 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}
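
/* In other words, every translation unit compiled with tsan gets a
   high-priority static constructor morally equivalent to

     __attribute__ ((constructor))
     static void tsan_ctor (void) { __tsan_init (); }

   so the runtime is initialized before constructors of default
   priority run.  */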

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0
              && !lookup_attribute ("no_sanitize_thread",
                                    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0 && !optimize
              && !lookup_attribute ("no_sanitize_thread",
                                    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}
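
/* Both passes are gated on -fsanitize=thread (e.g. "gcc -O2
   -fsanitize=thread foo.c") and skip functions marked with the
   no_sanitize_thread attribute:

     __attribute__ ((no_sanitize_thread))
     void not_instrumented (void) { ... }

   The "tsan0" variant additionally requires !optimize; it exists
   because the part of the pipeline that runs "tsan" is skipped
   at -O0.  */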