1 /* This file contains routines to construct OpenACC and OpenMP constructs,
2 called from parsing in the C and C++ front ends.
4 Copyright (C) 2005-2024 Free Software Foundation, Inc.
5 Contributed by Richard Henderson <rth@redhat.com>,
6 Diego Novillo <dnovillo@redhat.com>.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
29 #include "gimple-expr.h"
31 #include "stringpool.h"
32 #include "omp-general.h"
33 #include "gomp-constants.h"
37 #include "langhooks.h"
39 #include "tree-iterator.h"
42 /* Complete a #pragma oacc wait construct. LOC is the location of
46 c_finish_oacc_wait (location_t loc
, tree parms
, tree clauses
)
48 const int nparms
= list_length (parms
);
50 vec
<tree
, va_gc
> *args
;
52 vec_alloc (args
, nparms
+ 2);
53 stmt
= builtin_decl_explicit (BUILT_IN_GOACC_WAIT
);
55 if (omp_find_clause (clauses
, OMP_CLAUSE_ASYNC
))
56 t
= OMP_CLAUSE_ASYNC_EXPR (clauses
);
58 t
= build_int_cst (integer_type_node
, GOMP_ASYNC_SYNC
);
61 args
->quick_push (build_int_cst (integer_type_node
, nparms
));
63 for (t
= parms
; t
; t
= TREE_CHAIN (t
))
65 if (TREE_CODE (OMP_CLAUSE_WAIT_EXPR (t
)) == INTEGER_CST
)
66 args
->quick_push (build_int_cst (integer_type_node
,
67 TREE_INT_CST_LOW (OMP_CLAUSE_WAIT_EXPR (t
))));
69 args
->quick_push (OMP_CLAUSE_WAIT_EXPR (t
));
72 stmt
= build_call_expr_loc_vec (loc
, stmt
, args
);
79 /* Complete a #pragma omp master construct. STMT is the structured-block
80 that follows the pragma. LOC is the location of the #pragma. */
83 c_finish_omp_master (location_t loc
, tree stmt
)
85 tree t
= add_stmt (build1 (OMP_MASTER
, void_type_node
, stmt
));
86 SET_EXPR_LOCATION (t
, loc
);
90 /* Complete a #pragma omp masked construct. BODY is the structured-block
91 that follows the pragma. LOC is the location of the #pragma. */
94 c_finish_omp_masked (location_t loc
, tree body
, tree clauses
)
96 tree stmt
= make_node (OMP_MASKED
);
97 TREE_TYPE (stmt
) = void_type_node
;
98 OMP_MASKED_BODY (stmt
) = body
;
99 OMP_MASKED_CLAUSES (stmt
) = clauses
;
100 SET_EXPR_LOCATION (stmt
, loc
);
101 return add_stmt (stmt
);
104 /* Complete a #pragma omp taskgroup construct. BODY is the structured-block
105 that follows the pragma. LOC is the location of the #pragma. */
108 c_finish_omp_taskgroup (location_t loc
, tree body
, tree clauses
)
110 tree stmt
= make_node (OMP_TASKGROUP
);
111 TREE_TYPE (stmt
) = void_type_node
;
112 OMP_TASKGROUP_BODY (stmt
) = body
;
113 OMP_TASKGROUP_CLAUSES (stmt
) = clauses
;
114 SET_EXPR_LOCATION (stmt
, loc
);
115 return add_stmt (stmt
);
118 /* Complete a #pragma omp critical construct. BODY is the structured-block
119 that follows the pragma, NAME is the identifier in the pragma, or null
120 if it was omitted. LOC is the location of the #pragma. */
123 c_finish_omp_critical (location_t loc
, tree body
, tree name
, tree clauses
)
125 gcc_assert (!clauses
|| OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_HINT
);
126 if (name
== NULL_TREE
127 && clauses
!= NULL_TREE
128 && integer_nonzerop (OMP_CLAUSE_HINT_EXPR (clauses
)))
130 error_at (OMP_CLAUSE_LOCATION (clauses
),
131 "%<#pragma omp critical%> with %<hint%> clause requires "
132 "a name, except when %<omp_sync_hint_none%> is used");
133 return error_mark_node
;
136 tree stmt
= make_node (OMP_CRITICAL
);
137 TREE_TYPE (stmt
) = void_type_node
;
138 OMP_CRITICAL_BODY (stmt
) = body
;
139 OMP_CRITICAL_NAME (stmt
) = name
;
140 OMP_CRITICAL_CLAUSES (stmt
) = clauses
;
141 SET_EXPR_LOCATION (stmt
, loc
);
142 return add_stmt (stmt
);
145 /* Complete a #pragma omp ordered construct. STMT is the structured-block
146 that follows the pragma. LOC is the location of the #pragma. */
149 c_finish_omp_ordered (location_t loc
, tree clauses
, tree stmt
)
151 tree t
= make_node (OMP_ORDERED
);
152 TREE_TYPE (t
) = void_type_node
;
153 OMP_ORDERED_BODY (t
) = stmt
;
154 if (!flag_openmp
/* flag_openmp_simd */
155 && (OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_SIMD
156 || OMP_CLAUSE_CHAIN (clauses
)))
157 clauses
= build_omp_clause (loc
, OMP_CLAUSE_SIMD
);
158 OMP_ORDERED_CLAUSES (t
) = clauses
;
159 SET_EXPR_LOCATION (t
, loc
);
164 /* Complete a #pragma omp barrier construct. LOC is the location of
168 c_finish_omp_barrier (location_t loc
)
172 x
= builtin_decl_explicit (BUILT_IN_GOMP_BARRIER
);
173 x
= build_call_expr_loc (loc
, x
, 0);
178 /* Complete a #pragma omp taskwait construct. LOC is the location of the
182 c_finish_omp_taskwait (location_t loc
)
186 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT
);
187 x
= build_call_expr_loc (loc
, x
, 0);
192 /* Complete a #pragma omp taskyield construct. LOC is the location of the
196 c_finish_omp_taskyield (location_t loc
)
200 x
= builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD
);
201 x
= build_call_expr_loc (loc
, x
, 0);
206 /* Complete a #pragma omp atomic construct. For CODE OMP_ATOMIC
207 the expression to be implemented atomically is LHS opcode= RHS.
208 For OMP_ATOMIC_READ V = LHS, for OMP_ATOMIC_CAPTURE_{NEW,OLD} LHS
209 opcode= RHS with the new or old content of LHS returned.
210 LOC is the location of the atomic statement. The value returned
211 is either error_mark_node (if the construct was erroneous) or an
212 OMP_ATOMIC* node which should be added to the current statement
213 tree with add_stmt. If TEST is set, avoid calling save_expr
214 or create_tmp_var*. */
/* NOTE(review): this extraction appears to be missing source lines --
   e.g. the declarations of BLHS, ORIG_LHS, NEW_LHS and TEST referenced
   below, several braces and else arms.  Confirm against the original
   file before editing further.  */
217 c_finish_omp_atomic (location_t loc
, enum tree_code code
,
218 enum tree_code opcode
, tree lhs
, tree rhs
,
219 tree v
, tree lhs1
, tree rhs1
, tree r
, bool swapped
,
220 enum omp_memory_order memory_order
, bool weak
,
223 tree x
, type
, addr
, pre
= NULL_TREE
, rtmp
= NULL_TREE
, vtmp
= NULL_TREE
;
224 HOST_WIDE_INT bitpos
= 0, bitsize
= 0;
225 enum tree_code orig_opcode
= opcode
;
/* Bail out early if any operand already failed to parse.  */
227 if (lhs
== error_mark_node
|| rhs
== error_mark_node
228 || v
== error_mark_node
|| lhs1
== error_mark_node
229 || rhs1
== error_mark_node
|| r
== error_mark_node
)
230 return error_mark_node
;
232 /* ??? According to one reading of the OpenMP spec, complex type are
233 supported, but there are no atomic stores for any architecture.
234 But at least icc 9.0 doesn't support complex types here either.
235 And lets not even talk about vector types... */
236 type
= TREE_TYPE (lhs
);
237 if (!INTEGRAL_TYPE_P (type
)
238 && !POINTER_TYPE_P (type
)
239 && !SCALAR_FLOAT_TYPE_P (type
))
241 error_at (loc
, "invalid expression type for %<#pragma omp atomic%>");
242 return error_mark_node
;
244 if (TYPE_ATOMIC (type
))
246 error_at (loc
, "%<_Atomic%> expression in %<#pragma omp atomic%>");
247 return error_mark_node
;
/* R (the compare result capture var, when used) must be integral.  */
249 if (r
&& r
!= void_list_node
&& !INTEGRAL_TYPE_P (TREE_TYPE (r
)))
251 error_at (loc
, "%<#pragma omp atomic compare capture%> with non-integral "
252 "comparison result");
253 return error_mark_node
;
256 if (opcode
== RDIV_EXPR
)
257 opcode
= TRUNC_DIV_EXPR
;
259 /* ??? Validate that rhs does not overlap lhs. */
/* A bit-field lhs is rewritten to operate on its
   DECL_BIT_FIELD_REPRESENTATIVE; BITPOS/BITSIZE record where the
   field lives inside that representative.  */
261 if (TREE_CODE (lhs
) == COMPONENT_REF
262 && TREE_CODE (TREE_OPERAND (lhs
, 1)) == FIELD_DECL
263 && DECL_C_BIT_FIELD (TREE_OPERAND (lhs
, 1))
264 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (lhs
, 1)))
266 tree field
= TREE_OPERAND (lhs
, 1);
267 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
268 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field
))
269 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr
)))
270 bitpos
= (tree_to_uhwi (DECL_FIELD_OFFSET (field
))
271 - tree_to_uhwi (DECL_FIELD_OFFSET (repr
))) * BITS_PER_UNIT
;
274 bitpos
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
275 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
276 gcc_assert (tree_fits_shwi_p (DECL_SIZE (field
)));
277 bitsize
= tree_to_shwi (DECL_SIZE (field
));
279 type
= TREE_TYPE (repr
);
280 lhs
= build3 (COMPONENT_REF
, TREE_TYPE (repr
), TREE_OPERAND (lhs
, 0),
281 repr
, TREE_OPERAND (lhs
, 2));
284 /* Take and save the address of the lhs. From then on we'll reference it
286 addr
= build_unary_op (loc
, ADDR_EXPR
, lhs
, false);
287 if (addr
== error_mark_node
)
288 return error_mark_node
;
290 addr
= save_expr (addr
);
292 && TREE_CODE (addr
) != SAVE_EXPR
293 && (TREE_CODE (addr
) != ADDR_EXPR
294 || !VAR_P (TREE_OPERAND (addr
, 0))))
296 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
297 it even after unsharing function body. */
298 tree var
= create_tmp_var_raw (TREE_TYPE (addr
));
299 DECL_CONTEXT (var
) = current_function_decl
;
300 addr
= build4 (TARGET_EXPR
, TREE_TYPE (addr
), var
, addr
, NULL
, NULL
);
303 lhs
= build_indirect_ref (loc
, addr
, RO_NULL
);
/* Atomic read: build V = <OMP_ATOMIC_READ of *addr>, with a
   BIT_FIELD_REF extracting the field when the lhs was a bit-field
   (BLHS -- declared in lines missing from this extraction).  */
306 if (code
== OMP_ATOMIC_READ
)
308 x
= build1 (OMP_ATOMIC_READ
, type
, addr
);
309 SET_EXPR_LOCATION (x
, loc
);
310 OMP_ATOMIC_MEMORY_ORDER (x
) = memory_order
;
313 x
= build3_loc (loc
, BIT_FIELD_REF
, TREE_TYPE (blhs
), x
,
314 bitsize_int (bitsize
), bitsize_int (bitpos
));
315 return build_modify_expr (loc
, v
, NULL_TREE
, NOP_EXPR
,
319 /* There are lots of warnings, errors, and conversions that need to happen
320 in the course of interpreting a statement. Use the normal mechanisms
321 to do this, and then take it apart again. */
324 lhs
= build3_loc (loc
, BIT_FIELD_REF
, TREE_TYPE (blhs
), lhs
,
325 bitsize_int (bitsize
), bitsize_int (bitpos
));
/* Build the new rhs.  For atomic compare (COND_EXPR opcode) the rhs
   becomes an EQ_EXPR condition; otherwise it is lhs OPCODE rhs, with
   operand order controlled by SWAPPED (the selecting conditions are in
   lines missing from this extraction).  */
326 if (opcode
== COND_EXPR
)
328 bool save
= in_late_binary_op
;
329 in_late_binary_op
= true;
330 std::swap (rhs
, rhs1
);
331 rhs1
= build_binary_op (loc
, EQ_EXPR
, lhs
, rhs1
, true);
332 in_late_binary_op
= save
;
335 rhs
= build_binary_op (loc
, opcode
, rhs
, lhs
, true);
336 else if (opcode
!= NOP_EXPR
)
337 rhs
= build_binary_op (loc
, opcode
, lhs
, rhs
, true);
340 else if (opcode
== COND_EXPR
)
342 bool save
= in_late_binary_op
;
343 in_late_binary_op
= true;
344 std::swap (rhs
, rhs1
);
345 rhs1
= build_binary_op (loc
, EQ_EXPR
, lhs
, rhs1
, true);
346 in_late_binary_op
= save
;
351 rhs
= build_binary_op (loc
, opcode
, rhs
, lhs
, true);
354 bool save
= in_late_binary_op
;
355 in_late_binary_op
= true;
/* min/max: reject operand combinations the relational op rejects.  */
356 if ((opcode
== MIN_EXPR
|| opcode
== MAX_EXPR
)
357 && build_binary_op (loc
, LT_EXPR
, blhs
? blhs
: lhs
, rhs
,
358 true) == error_mark_node
)
361 x
= build_modify_expr (loc
, blhs
? blhs
: lhs
, NULL_TREE
, opcode
,
362 loc
, rhs
, NULL_TREE
);
363 in_late_binary_op
= save
;
364 if (x
== error_mark_node
)
365 return error_mark_node
;
/* build_modify_expr may wrap the store in a COMPOUND_EXPR whose first
   operand pre-evaluates the rhs; peel it off into PRE.  */
366 if (TREE_CODE (x
) == COMPOUND_EXPR
)
368 pre
= TREE_OPERAND (x
, 0);
369 gcc_assert (TREE_CODE (pre
) == SAVE_EXPR
|| tree_invariant_p (pre
));
370 x
= TREE_OPERAND (x
, 1);
372 gcc_assert (TREE_CODE (x
) == MODIFY_EXPR
);
373 rhs
= TREE_OPERAND (x
, 1);
/* Bit-field store: re-insert the computed value into the
   representative word at BITPOS.  */
376 rhs
= build3_loc (loc
, BIT_INSERT_EXPR
, type
, new_lhs
,
377 rhs
, bitsize_int (bitpos
));
/* Atomic compare: massage the EQ_EXPR comparison.  For floating point
   types this may become a memcmp (optionally with
   __builtin_clear_padding) so padding bits do not break the compare;
   the result is captured in the RTMP temporary.  */
378 if (orig_opcode
== COND_EXPR
)
380 if (error_operand_p (rhs1
))
381 return error_mark_node
;
382 gcc_assert (TREE_CODE (rhs1
) == EQ_EXPR
);
383 tree cmptype
= TREE_TYPE (TREE_OPERAND (rhs1
, 0));
384 if (SCALAR_FLOAT_TYPE_P (cmptype
) && !test
)
386 bool clear_padding
= false;
387 HOST_WIDE_INT non_padding_start
= 0;
388 HOST_WIDE_INT non_padding_end
= 0;
389 if (BITS_PER_UNIT
== 8
391 && clear_padding_type_may_have_padding_p (cmptype
))
393 HOST_WIDE_INT sz
= int_size_in_bytes (cmptype
), i
;
395 unsigned char *buf
= XALLOCAVEC (unsigned char, sz
);
396 memset (buf
, ~0, sz
);
397 clear_type_padding_in_mask (cmptype
, buf
);
398 for (i
= 0; i
< sz
; i
++)
399 if (buf
[i
] != (unsigned char) ~0)
401 clear_padding
= true;
404 if (clear_padding
&& buf
[i
] == 0)
406 /* Try to optimize. In the common case where
407 non-padding bits are all continuous and start
408 and end at a byte boundary, we can just adjust
409 the memcmp call arguments and don't need to
410 emit __builtin_clear_padding calls. */
413 for (i
= 0; i
< sz
; i
++)
416 if (i
< sz
&& buf
[i
] == (unsigned char) ~0)
418 non_padding_start
= i
;
420 if (buf
[i
] != (unsigned char) ~0)
432 non_padding_start
= 0;
439 tree inttype
= NULL_TREE
;
440 if (!clear_padding
&& tree_fits_uhwi_p (TYPE_SIZE (cmptype
)))
/* Prefer comparing as a same-sized integer when one exists.  */
442 HOST_WIDE_INT prec
= tree_to_uhwi (TYPE_SIZE (cmptype
));
443 inttype
= c_common_type_for_size (prec
, 1);
445 && (!tree_int_cst_equal (TYPE_SIZE (cmptype
),
447 || TYPE_PRECISION (inttype
) != prec
))
452 TREE_OPERAND (rhs1
, 0)
453 = build1_loc (loc
, VIEW_CONVERT_EXPR
, inttype
,
454 TREE_OPERAND (rhs1
, 0));
455 TREE_OPERAND (rhs1
, 1)
456 = build1_loc (loc
, VIEW_CONVERT_EXPR
, inttype
,
457 TREE_OPERAND (rhs1
, 1));
/* Otherwise spill both operands to addressable temporaries and
   compare their bytes with memcmp.  */
461 tree pcmptype
= build_pointer_type (cmptype
);
462 tree tmp1
= create_tmp_var_raw (cmptype
);
463 TREE_ADDRESSABLE (tmp1
) = 1;
464 DECL_CONTEXT (tmp1
) = current_function_decl
;
465 tmp1
= build4 (TARGET_EXPR
, cmptype
, tmp1
,
466 TREE_OPERAND (rhs1
, 0), NULL
, NULL
);
467 tmp1
= build1 (ADDR_EXPR
, pcmptype
, tmp1
);
468 tree tmp2
= create_tmp_var_raw (cmptype
);
469 TREE_ADDRESSABLE (tmp2
) = 1;
470 DECL_CONTEXT (tmp2
) = current_function_decl
;
471 tmp2
= build4 (TARGET_EXPR
, cmptype
, tmp2
,
472 TREE_OPERAND (rhs1
, 1), NULL
, NULL
);
473 tmp2
= build1 (ADDR_EXPR
, pcmptype
, tmp2
);
474 if (non_padding_start
)
476 tmp1
= build2 (POINTER_PLUS_EXPR
, pcmptype
, tmp1
,
477 size_int (non_padding_start
));
478 tmp2
= build2 (POINTER_PLUS_EXPR
, pcmptype
, tmp2
,
479 size_int (non_padding_start
));
481 tree fndecl
= builtin_decl_explicit (BUILT_IN_MEMCMP
);
482 rhs1
= build_call_expr_loc (loc
, fndecl
, 3, tmp1
, tmp2
,
484 ? size_int (non_padding_end
486 : TYPE_SIZE_UNIT (cmptype
));
487 rhs1
= build2 (EQ_EXPR
, boolean_type_node
, rhs1
,
489 if (clear_padding
&& non_padding_end
== 0)
/* Zero the padding in both copies before memcmp.  */
491 fndecl
= builtin_decl_explicit (BUILT_IN_CLEAR_PADDING
);
492 tree cp1
= build_call_expr_loc (loc
, fndecl
, 1, tmp1
);
493 tree cp2
= build_call_expr_loc (loc
, fndecl
, 1, tmp2
);
494 rhs1
= omit_two_operands_loc (loc
, boolean_type_node
,
/* Capture the comparison outcome in the RTMP boolean temporary.  */
503 tree var
= create_tmp_var_raw (boolean_type_node
);
504 DECL_CONTEXT (var
) = current_function_decl
;
505 rtmp
= build4 (TARGET_EXPR
, boolean_type_node
, var
,
506 boolean_false_node
, NULL
, NULL
);
507 save
= in_late_binary_op
;
508 in_late_binary_op
= true;
509 x
= build_modify_expr (loc
, var
, NULL_TREE
, NOP_EXPR
,
510 loc
, rhs1
, NULL_TREE
);
511 in_late_binary_op
= save
;
512 if (x
== error_mark_node
)
513 return error_mark_node
;
514 gcc_assert (TREE_CODE (x
) == MODIFY_EXPR
515 && TREE_OPERAND (x
, 0) == var
);
516 TREE_OPERAND (x
, 0) = rtmp
;
517 rhs1
= omit_one_operand_loc (loc
, boolean_type_node
, x
, rtmp
);
/* Final compare-exchange value: cond ? new value : old value.  */
519 rhs
= build3_loc (loc
, COND_EXPR
, type
, rhs1
, rhs
, new_lhs
);
523 /* Punt the actual generation of atomic operations to common code. */
524 if (code
== OMP_ATOMIC
)
525 type
= void_type_node
;
526 x
= build2 (code
, type
, addr
, rhs
);
527 SET_EXPR_LOCATION (x
, loc
);
528 OMP_ATOMIC_MEMORY_ORDER (x
) = memory_order
;
529 OMP_ATOMIC_WEAK (x
) = weak
;
531 /* Generally it is hard to prove lhs1 and lhs are the same memory
532 location, just diagnose different variables. */
539 if (code
== OMP_ATOMIC
)
540 error_at (loc
, "%<#pragma omp atomic update%> uses two different "
541 "variables for memory");
543 error_at (loc
, "%<#pragma omp atomic capture%> uses two different "
544 "variables for memory");
545 return error_mark_node
;
/* Apply the same bit-field representative rewrite to lhs1/rhs1 so the
   later variable-identity checks compare like with like.  */
550 && TREE_CODE (lhs1
) == COMPONENT_REF
551 && TREE_CODE (TREE_OPERAND (lhs1
, 1)) == FIELD_DECL
552 && DECL_C_BIT_FIELD (TREE_OPERAND (lhs1
, 1))
553 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (lhs1
, 1)))
555 tree field
= TREE_OPERAND (lhs1
, 1);
556 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
557 lhs1
= build3 (COMPONENT_REF
, TREE_TYPE (repr
), TREE_OPERAND (lhs1
, 0),
558 repr
, TREE_OPERAND (lhs1
, 2));
562 && TREE_CODE (rhs1
) == COMPONENT_REF
563 && TREE_CODE (TREE_OPERAND (rhs1
, 1)) == FIELD_DECL
564 && DECL_C_BIT_FIELD (TREE_OPERAND (rhs1
, 1))
565 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (rhs1
, 1)))
567 tree field
= TREE_OPERAND (rhs1
, 1);
568 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
569 rhs1
= build3 (COMPONENT_REF
, TREE_TYPE (repr
), TREE_OPERAND (rhs1
, 0),
570 repr
, TREE_OPERAND (rhs1
, 2));
/* Capture forms: build V = <atomic result> (through the VTMP
   temporary), again extracting the bit-field when needed.  */
573 if (code
!= OMP_ATOMIC
)
575 /* Generally it is hard to prove lhs1 and lhs are the same memory
576 location, just diagnose different variables. */
577 if (lhs1
&& VAR_P (lhs1
) && VAR_P (orig_lhs
))
579 if (lhs1
!= orig_lhs
&& !test
)
581 error_at (loc
, "%<#pragma omp atomic capture%> uses two "
582 "different variables for memory");
583 return error_mark_node
;
587 x
= build3_loc (loc
, BIT_FIELD_REF
, TREE_TYPE (blhs
), x
,
588 bitsize_int (bitsize
), bitsize_int (bitpos
));
591 vtmp
= create_tmp_var_raw (TREE_TYPE (x
));
592 DECL_CONTEXT (vtmp
) = current_function_decl
;
596 x
= build_modify_expr (loc
, vtmp
, NULL_TREE
, NOP_EXPR
,
598 if (x
== error_mark_node
)
599 return error_mark_node
;
600 type
= TREE_TYPE (x
);
603 vtmp
= build4 (TARGET_EXPR
, TREE_TYPE (vtmp
), vtmp
,
604 build_zero_cst (TREE_TYPE (vtmp
)), NULL
, NULL
);
605 gcc_assert (TREE_CODE (x
) == MODIFY_EXPR
606 && TREE_OPERAND (x
, 0) == TARGET_EXPR_SLOT (vtmp
));
607 TREE_OPERAND (x
, 0) = vtmp
;
/* Force evaluation of lhs1/rhs1 addresses for their side effects even
   though the values themselves are discarded.  */
609 if (rhs1
&& rhs1
!= orig_lhs
)
611 tree rhs1addr
= build_unary_op (loc
, ADDR_EXPR
, rhs1
, false);
612 if (rhs1addr
== error_mark_node
)
613 return error_mark_node
;
614 x
= omit_one_operand_loc (loc
, type
, x
, rhs1addr
);
616 if (lhs1
&& lhs1
!= orig_lhs
)
618 tree lhs1addr
= build_unary_op (loc
, ADDR_EXPR
, lhs1
, false);
619 if (lhs1addr
== error_mark_node
)
620 return error_mark_node
;
621 if (code
== OMP_ATOMIC_CAPTURE_OLD
)
622 x
= omit_one_operand_loc (loc
, type
, x
, lhs1addr
);
627 x
= omit_two_operands_loc (loc
, type
, x
, x
, lhs1addr
);
631 else if (rhs1
&& rhs1
!= orig_lhs
)
633 tree rhs1addr
= build_unary_op (loc
, ADDR_EXPR
, rhs1
, false);
634 if (rhs1addr
== error_mark_node
)
635 return error_mark_node
;
636 x
= omit_one_operand_loc (loc
, type
, x
, rhs1addr
);
/* Prepend the PRE side effects peeled off earlier.  */
640 x
= omit_one_operand_loc (loc
, type
, x
, pre
);
/* Compare-capture: store the comparison result into R and, when V is
   present, conditionally store the old value via VTMP.  */
641 if (r
&& r
!= void_list_node
)
643 in_late_binary_op
= true;
644 tree x2
= build_modify_expr (loc
, r
, NULL_TREE
, NOP_EXPR
,
645 loc
, rtmp
, NULL_TREE
);
646 in_late_binary_op
= save
;
647 if (x2
== error_mark_node
)
648 return error_mark_node
;
649 x
= omit_one_operand_loc (loc
, TREE_TYPE (x2
), x2
, x
);
653 in_late_binary_op
= true;
654 tree x2
= build_modify_expr (loc
, v
, NULL_TREE
, NOP_EXPR
,
655 loc
, vtmp
, NULL_TREE
);
656 in_late_binary_op
= save
;
657 if (x2
== error_mark_node
)
658 return error_mark_node
;
659 x2
= build3_loc (loc
, COND_EXPR
, void_type_node
, rtmp
,
661 x
= omit_one_operand_loc (loc
, TREE_TYPE (x2
), x2
, x
);
667 /* Return true if TYPE is the implementation's omp_depend_t. */
670 c_omp_depend_t_p (tree type
)
672 type
= TYPE_MAIN_VARIANT (type
);
673 return (TREE_CODE (type
) == RECORD_TYPE
675 && ((TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
676 ? DECL_NAME (TYPE_NAME (type
)) : TYPE_NAME (type
))
677 == get_identifier ("omp_depend_t"))
678 && TYPE_FILE_SCOPE_P (type
)
679 && COMPLETE_TYPE_P (type
)
680 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
681 && !compare_tree_int (TYPE_SIZE (type
),
682 2 * tree_to_uhwi (TYPE_SIZE (ptr_type_node
))));
686 /* Complete a #pragma omp depobj construct. LOC is the location of the
690 c_finish_omp_depobj (location_t loc
, tree depobj
,
691 enum omp_clause_depend_kind kind
, tree clause
)
694 if (!error_operand_p (depobj
))
696 if (!c_omp_depend_t_p (TREE_TYPE (depobj
)))
698 error_at (EXPR_LOC_OR_LOC (depobj
, loc
),
699 "type of %<depobj%> expression is not %<omp_depend_t%>");
700 depobj
= error_mark_node
;
702 else if (TYPE_READONLY (TREE_TYPE (depobj
)))
704 error_at (EXPR_LOC_OR_LOC (depobj
, loc
),
705 "%<const%> qualified %<depobj%> expression");
706 depobj
= error_mark_node
;
710 depobj
= error_mark_node
;
712 if (clause
== error_mark_node
)
717 gcc_assert (TREE_CODE (clause
) == OMP_CLAUSE
);
718 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_DOACROSS
)
720 error_at (OMP_CLAUSE_LOCATION (clause
),
721 "%<depend(%s)%> is only allowed in %<omp ordered%>",
722 OMP_CLAUSE_DOACROSS_KIND (clause
)
723 == OMP_CLAUSE_DOACROSS_SOURCE
724 ? "source" : "sink");
727 gcc_assert (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_DEPEND
);
728 if (OMP_CLAUSE_CHAIN (clause
))
729 error_at (OMP_CLAUSE_LOCATION (clause
),
730 "more than one locator in %<depend%> clause on %<depobj%> "
732 switch (OMP_CLAUSE_DEPEND_KIND (clause
))
734 case OMP_CLAUSE_DEPEND_DEPOBJ
:
735 error_at (OMP_CLAUSE_LOCATION (clause
),
736 "%<depobj%> dependence type specified in %<depend%> "
737 "clause on %<depobj%> construct");
739 case OMP_CLAUSE_DEPEND_IN
:
740 case OMP_CLAUSE_DEPEND_OUT
:
741 case OMP_CLAUSE_DEPEND_INOUT
:
742 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
743 case OMP_CLAUSE_DEPEND_INOUTSET
:
744 kind
= OMP_CLAUSE_DEPEND_KIND (clause
);
745 t
= OMP_CLAUSE_DECL (clause
);
747 if (TREE_CODE (t
) == TREE_LIST
749 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
751 error_at (OMP_CLAUSE_LOCATION (clause
),
752 "%<iterator%> modifier may not be specified on "
753 "%<depobj%> construct");
756 if (TREE_CODE (t
) == COMPOUND_EXPR
)
758 tree t1
= build_fold_addr_expr (TREE_OPERAND (t
, 1));
759 t
= build2 (COMPOUND_EXPR
, TREE_TYPE (t1
), TREE_OPERAND (t
, 0),
762 else if (t
!= null_pointer_node
)
763 t
= build_fold_addr_expr (t
);
770 gcc_assert (kind
!= OMP_CLAUSE_DEPEND_INVALID
);
772 if (depobj
== error_mark_node
)
775 depobj
= build_fold_addr_expr_loc (EXPR_LOC_OR_LOC (depobj
, loc
), depobj
);
777 = build_pointer_type_for_mode (ptr_type_node
, TYPE_MODE (ptr_type_node
),
779 depobj
= fold_convert (dtype
, depobj
);
783 depobj
= save_expr (depobj
);
784 r
= build_indirect_ref (loc
, depobj
, RO_UNARY_STAR
);
785 add_stmt (build2 (MODIFY_EXPR
, void_type_node
, r
, t
));
790 case OMP_CLAUSE_DEPEND_IN
:
793 case OMP_CLAUSE_DEPEND_OUT
:
796 case OMP_CLAUSE_DEPEND_INOUT
:
797 k
= GOMP_DEPEND_INOUT
;
799 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
800 k
= GOMP_DEPEND_MUTEXINOUTSET
;
802 case OMP_CLAUSE_DEPEND_INOUTSET
:
803 k
= GOMP_DEPEND_INOUTSET
;
805 case OMP_CLAUSE_DEPEND_LAST
:
811 t
= build_int_cst (ptr_type_node
, k
);
812 depobj
= build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (depobj
), depobj
,
813 TYPE_SIZE_UNIT (ptr_type_node
));
814 r
= build_indirect_ref (loc
, depobj
, RO_UNARY_STAR
);
815 add_stmt (build2 (MODIFY_EXPR
, void_type_node
, r
, t
));
819 /* Complete a #pragma omp flush construct. We don't do anything with
820 the variable list that the syntax allows. LOC is the location of
824 c_finish_omp_flush (location_t loc
, int mo
)
828 if (mo
== MEMMODEL_LAST
|| mo
== MEMMODEL_SEQ_CST
)
830 x
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
831 x
= build_call_expr_loc (loc
, x
, 0);
835 x
= builtin_decl_explicit (BUILT_IN_ATOMIC_THREAD_FENCE
);
836 x
= build_call_expr_loc (loc
, x
, 1,
837 build_int_cst (integer_type_node
, mo
));
843 /* Check and canonicalize OMP_FOR increment expression.
844 Helper function for c_finish_omp_for. */
847 check_omp_for_incr_expr (location_t loc
, tree exp
, tree decl
)
851 if (!INTEGRAL_TYPE_P (TREE_TYPE (exp
))
852 || TYPE_PRECISION (TREE_TYPE (exp
)) < TYPE_PRECISION (TREE_TYPE (decl
)))
853 return error_mark_node
;
856 return build_int_cst (TREE_TYPE (exp
), 0);
858 switch (TREE_CODE (exp
))
861 t
= check_omp_for_incr_expr (loc
, TREE_OPERAND (exp
, 0), decl
);
862 if (t
!= error_mark_node
)
863 return fold_convert_loc (loc
, TREE_TYPE (exp
), t
);
866 t
= check_omp_for_incr_expr (loc
, TREE_OPERAND (exp
, 0), decl
);
867 if (t
!= error_mark_node
)
868 return fold_build2_loc (loc
, MINUS_EXPR
,
869 TREE_TYPE (exp
), t
, TREE_OPERAND (exp
, 1));
872 t
= check_omp_for_incr_expr (loc
, TREE_OPERAND (exp
, 0), decl
);
873 if (t
!= error_mark_node
)
874 return fold_build2_loc (loc
, PLUS_EXPR
,
875 TREE_TYPE (exp
), t
, TREE_OPERAND (exp
, 1));
876 t
= check_omp_for_incr_expr (loc
, TREE_OPERAND (exp
, 1), decl
);
877 if (t
!= error_mark_node
)
878 return fold_build2_loc (loc
, PLUS_EXPR
,
879 TREE_TYPE (exp
), TREE_OPERAND (exp
, 0), t
);
883 /* cp_build_modify_expr forces preevaluation of the RHS to make
884 sure that it is evaluated before the lvalue-rvalue conversion
885 is applied to the LHS. Reconstruct the original expression. */
886 tree op0
= TREE_OPERAND (exp
, 0);
887 if (TREE_CODE (op0
) == TARGET_EXPR
888 && !VOID_TYPE_P (TREE_TYPE (op0
)))
890 tree op1
= TREE_OPERAND (exp
, 1);
891 tree temp
= TARGET_EXPR_SLOT (op0
);
892 if (BINARY_CLASS_P (op1
)
893 && TREE_OPERAND (op1
, 1) == temp
)
895 op1
= copy_node (op1
);
896 TREE_OPERAND (op1
, 1) = TARGET_EXPR_INITIAL (op0
);
897 return check_omp_for_incr_expr (loc
, op1
, decl
);
906 return error_mark_node
;
909 /* If the OMP_FOR increment expression in INCR is of pointer type,
910 canonicalize it into an expression handled by gimplify_omp_for()
911 and return it. DECL is the iteration variable. */
914 c_omp_for_incr_canonicalize_ptr (location_t loc
, tree decl
, tree incr
)
916 if (POINTER_TYPE_P (TREE_TYPE (decl
))
917 && TREE_OPERAND (incr
, 1))
919 tree t
= fold_convert_loc (loc
,
920 sizetype
, TREE_OPERAND (incr
, 1));
922 if (TREE_CODE (incr
) == POSTDECREMENT_EXPR
923 || TREE_CODE (incr
) == PREDECREMENT_EXPR
)
924 t
= fold_build1_loc (loc
, NEGATE_EXPR
, sizetype
, t
);
925 t
= fold_build_pointer_plus (decl
, t
);
926 incr
= build2 (MODIFY_EXPR
, void_type_node
, decl
, t
);
931 /* Validate and generate OMP_FOR.
932 DECLV is a vector of iteration variables, for each collapsed loop.
934 ORIG_DECLV, if non-NULL, is a vector with the original iteration
935 variables (prior to any transformations, by say, C++ iterators).
937 INITV, CONDV and INCRV are vectors containing initialization
938 expressions, controlling predicates and increment expressions.
939 BODY is the body of the loop and PRE_BODY statements that go before
943 c_finish_omp_for (location_t locus
, enum tree_code code
, tree declv
,
944 tree orig_declv
, tree initv
, tree condv
, tree incrv
,
945 tree body
, tree pre_body
, bool final_p
)
951 gcc_assert (TREE_VEC_LENGTH (declv
) == TREE_VEC_LENGTH (initv
));
952 gcc_assert (TREE_VEC_LENGTH (declv
) == TREE_VEC_LENGTH (condv
));
953 gcc_assert (TREE_VEC_LENGTH (declv
) == TREE_VEC_LENGTH (incrv
));
954 for (i
= 0; i
< TREE_VEC_LENGTH (declv
); i
++)
956 tree decl
= TREE_VEC_ELT (declv
, i
);
957 tree init
= TREE_VEC_ELT (initv
, i
);
958 tree cond
= TREE_VEC_ELT (condv
, i
);
959 tree incr
= TREE_VEC_ELT (incrv
, i
);
962 if (EXPR_HAS_LOCATION (init
))
963 elocus
= EXPR_LOCATION (init
);
965 /* Validate the iteration variable. */
966 if (!INTEGRAL_TYPE_P (TREE_TYPE (decl
))
967 && TREE_CODE (TREE_TYPE (decl
)) != POINTER_TYPE
)
969 error_at (elocus
, "invalid type for iteration variable %qE", decl
);
972 else if (TYPE_ATOMIC (TREE_TYPE (decl
)))
974 error_at (elocus
, "%<_Atomic%> iteration variable %qE", decl
);
976 /* _Atomic iterator confuses stuff too much, so we risk ICE
977 trying to diagnose it further. */
981 /* In the case of "for (int i = 0...)", init will be a decl. It should
982 have a DECL_INITIAL that we can turn into an assignment. */
985 elocus
= DECL_SOURCE_LOCATION (decl
);
987 init
= DECL_INITIAL (decl
);
990 error_at (elocus
, "%qE is not initialized", decl
);
991 init
= integer_zero_node
;
994 DECL_INITIAL (decl
) = NULL_TREE
;
996 init
= build_modify_expr (elocus
, decl
, NULL_TREE
, NOP_EXPR
,
997 /* FIXME diagnostics: This should
998 be the location of the INIT. */
1003 if (init
!= error_mark_node
)
1005 gcc_assert (TREE_CODE (init
) == MODIFY_EXPR
);
1006 gcc_assert (TREE_OPERAND (init
, 0) == decl
);
1009 if (cond
== NULL_TREE
)
1011 error_at (elocus
, "missing controlling predicate");
1016 bool cond_ok
= false;
1018 /* E.g. C sizeof (vla) could add COMPOUND_EXPRs with
1019 evaluation of the vla VAR_DECL. We need to readd
1020 them to the non-decl operand. See PR45784. */
1021 while (TREE_CODE (cond
) == COMPOUND_EXPR
)
1022 cond
= TREE_OPERAND (cond
, 1);
1024 if (EXPR_HAS_LOCATION (cond
))
1025 elocus
= EXPR_LOCATION (cond
);
1027 if (TREE_CODE (cond
) == LT_EXPR
1028 || TREE_CODE (cond
) == LE_EXPR
1029 || TREE_CODE (cond
) == GT_EXPR
1030 || TREE_CODE (cond
) == GE_EXPR
1031 || TREE_CODE (cond
) == NE_EXPR
1032 || TREE_CODE (cond
) == EQ_EXPR
)
1034 tree op0
= TREE_OPERAND (cond
, 0);
1035 tree op1
= TREE_OPERAND (cond
, 1);
1037 /* 2.5.1. The comparison in the condition is computed in
1038 the type of DECL, otherwise the behavior is undefined.
1044 according to ISO will be evaluated as:
1049 if (TREE_CODE (op0
) == NOP_EXPR
1050 && decl
== TREE_OPERAND (op0
, 0))
1052 TREE_OPERAND (cond
, 0) = TREE_OPERAND (op0
, 0);
1053 TREE_OPERAND (cond
, 1)
1054 = fold_build1_loc (elocus
, NOP_EXPR
, TREE_TYPE (decl
),
1055 TREE_OPERAND (cond
, 1));
1057 else if (TREE_CODE (op1
) == NOP_EXPR
1058 && decl
== TREE_OPERAND (op1
, 0))
1060 TREE_OPERAND (cond
, 1) = TREE_OPERAND (op1
, 0);
1061 TREE_OPERAND (cond
, 0)
1062 = fold_build1_loc (elocus
, NOP_EXPR
, TREE_TYPE (decl
),
1063 TREE_OPERAND (cond
, 0));
1066 if (decl
== TREE_OPERAND (cond
, 0))
1068 else if (decl
== TREE_OPERAND (cond
, 1))
1070 TREE_SET_CODE (cond
,
1071 swap_tree_comparison (TREE_CODE (cond
)));
1072 TREE_OPERAND (cond
, 1) = TREE_OPERAND (cond
, 0);
1073 TREE_OPERAND (cond
, 0) = decl
;
1077 if (TREE_CODE (cond
) == NE_EXPR
1078 || TREE_CODE (cond
) == EQ_EXPR
)
1080 if (!INTEGRAL_TYPE_P (TREE_TYPE (decl
)))
1082 if (code
== OACC_LOOP
|| TREE_CODE (cond
) == EQ_EXPR
)
1085 else if (operand_equal_p (TREE_OPERAND (cond
, 1),
1086 TYPE_MIN_VALUE (TREE_TYPE (decl
)),
1088 TREE_SET_CODE (cond
, TREE_CODE (cond
) == NE_EXPR
1089 ? GT_EXPR
: LE_EXPR
);
1090 else if (operand_equal_p (TREE_OPERAND (cond
, 1),
1091 TYPE_MAX_VALUE (TREE_TYPE (decl
)),
1093 TREE_SET_CODE (cond
, TREE_CODE (cond
) == NE_EXPR
1094 ? LT_EXPR
: GE_EXPR
);
1095 else if (code
== OACC_LOOP
|| TREE_CODE (cond
) == EQ_EXPR
)
1099 if (cond_ok
&& TREE_VEC_ELT (condv
, i
) != cond
)
1101 tree ce
= NULL_TREE
, *pce
= &ce
;
1102 tree type
= TREE_TYPE (TREE_OPERAND (cond
, 1));
1103 for (tree c
= TREE_VEC_ELT (condv
, i
); c
!= cond
;
1104 c
= TREE_OPERAND (c
, 1))
1106 *pce
= build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (c
, 0),
1107 TREE_OPERAND (cond
, 1));
1108 pce
= &TREE_OPERAND (*pce
, 1);
1110 TREE_OPERAND (cond
, 1) = ce
;
1111 TREE_VEC_ELT (condv
, i
) = cond
;
1117 error_at (elocus
, "invalid controlling predicate");
1122 if (incr
== NULL_TREE
)
1124 error_at (elocus
, "missing increment expression");
1129 bool incr_ok
= false;
1131 if (EXPR_HAS_LOCATION (incr
))
1132 elocus
= EXPR_LOCATION (incr
);
1134 /* Check all the valid increment expressions: v++, v--, ++v, --v,
1135 v = v + incr, v = incr + v and v = v - incr. */
1136 switch (TREE_CODE (incr
))
1138 case POSTINCREMENT_EXPR
:
1139 case PREINCREMENT_EXPR
:
1140 case POSTDECREMENT_EXPR
:
1141 case PREDECREMENT_EXPR
:
1142 if (TREE_OPERAND (incr
, 0) != decl
)
1147 && TREE_CODE (cond
) == NE_EXPR
1148 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
1149 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
1150 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
1153 /* For pointer to VLA, transform != into < or >
1154 depending on whether incr is increment or decrement. */
1155 if (TREE_CODE (incr
) == PREINCREMENT_EXPR
1156 || TREE_CODE (incr
) == POSTINCREMENT_EXPR
)
1157 TREE_SET_CODE (cond
, LT_EXPR
);
1159 TREE_SET_CODE (cond
, GT_EXPR
);
1161 incr
= c_omp_for_incr_canonicalize_ptr (elocus
, decl
, incr
);
1165 if (TREE_CODE (TREE_OPERAND (incr
, 0)) != SAVE_EXPR
1166 || TREE_CODE (TREE_OPERAND (incr
, 1)) != MODIFY_EXPR
)
1168 incr
= TREE_OPERAND (incr
, 1);
1171 if (TREE_OPERAND (incr
, 0) != decl
)
1173 if (TREE_OPERAND (incr
, 1) == decl
)
1175 if (TREE_CODE (TREE_OPERAND (incr
, 1)) == PLUS_EXPR
1176 && (TREE_OPERAND (TREE_OPERAND (incr
, 1), 0) == decl
1177 || TREE_OPERAND (TREE_OPERAND (incr
, 1), 1) == decl
))
1179 else if ((TREE_CODE (TREE_OPERAND (incr
, 1)) == MINUS_EXPR
1180 || (TREE_CODE (TREE_OPERAND (incr
, 1))
1181 == POINTER_PLUS_EXPR
))
1182 && TREE_OPERAND (TREE_OPERAND (incr
, 1), 0) == decl
)
1186 tree t
= check_omp_for_incr_expr (elocus
,
1187 TREE_OPERAND (incr
, 1),
1189 if (t
!= error_mark_node
)
1192 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), decl
, t
);
1193 incr
= build2 (MODIFY_EXPR
, void_type_node
, decl
, t
);
1198 && TREE_CODE (cond
) == NE_EXPR
)
1200 tree i
= TREE_OPERAND (incr
, 1);
1201 i
= TREE_OPERAND (i
, TREE_OPERAND (i
, 0) == decl
);
1202 i
= c_fully_fold (i
, false, NULL
);
1204 && TREE_CODE (i
) != INTEGER_CST
)
1206 else if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
1209 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
1212 enum tree_code ccode
= GT_EXPR
;
1213 unit
= c_fully_fold (unit
, false, NULL
);
1214 i
= fold_convert (TREE_TYPE (unit
), i
);
1215 if (operand_equal_p (unit
, i
, 0))
1217 if (ccode
== GT_EXPR
)
1219 i
= fold_unary (NEGATE_EXPR
, TREE_TYPE (i
), i
);
1221 || !operand_equal_p (unit
, i
, 0))
1224 "increment is not constant 1 or "
1225 "-1 for %<!=%> condition");
1229 if (TREE_CODE (unit
) != INTEGER_CST
)
1230 /* For pointer to VLA, transform != into < or >
1231 depending on whether the pointer is
1232 incremented or decremented in each
1234 TREE_SET_CODE (cond
, ccode
);
1239 if (!integer_onep (i
) && !integer_minus_onep (i
))
1242 "increment is not constant 1 or -1 for"
1243 " %<!=%> condition");
1255 error_at (elocus
, "invalid increment expression");
1260 TREE_VEC_ELT (initv
, i
) = init
;
1261 TREE_VEC_ELT (incrv
, i
) = incr
;
1268 tree t
= make_node (code
);
1270 TREE_TYPE (t
) = void_type_node
;
1271 OMP_FOR_INIT (t
) = initv
;
1272 OMP_FOR_COND (t
) = condv
;
1273 OMP_FOR_INCR (t
) = incrv
;
1274 OMP_FOR_BODY (t
) = body
;
1275 OMP_FOR_PRE_BODY (t
) = pre_body
;
1276 OMP_FOR_ORIG_DECLS (t
) = orig_declv
;
1278 SET_EXPR_LOCATION (t
, locus
);
1283 /* Type for passing data in between c_omp_check_loop_iv and
1284 c_omp_check_loop_iv_r. */
1286 struct c_omp_check_loop_iv_data
1291 location_t stmt_loc
;
1292 location_t expr_loc
;
1296 hash_set
<tree
> *ppset
;
1299 /* Return -1 if DECL is not a loop iterator in loop nest D, otherwise
1300 return the index of the loop in which it is an iterator.
1301 Return TREE_VEC_LENGTH (d->declv) if it is a C++ range for iterator. */
1304 c_omp_is_loop_iterator (tree decl
, struct c_omp_check_loop_iv_data
*d
)
1306 for (int i
= 0; i
< TREE_VEC_LENGTH (d
->declv
); i
++)
1307 if (decl
== TREE_VEC_ELT (d
->declv
, i
)
1308 || (TREE_CODE (TREE_VEC_ELT (d
->declv
, i
)) == TREE_LIST
1309 && decl
== TREE_PURPOSE (TREE_VEC_ELT (d
->declv
, i
))))
1311 else if (TREE_CODE (TREE_VEC_ELT (d
->declv
, i
)) == TREE_LIST
1312 && TREE_CHAIN (TREE_VEC_ELT (d
->declv
, i
))
1313 && (TREE_CODE (TREE_CHAIN (TREE_VEC_ELT (d
->declv
, i
)))
1316 j
< TREE_VEC_LENGTH (TREE_CHAIN (TREE_VEC_ELT (d
->declv
, i
))); j
++)
1317 if (decl
== TREE_VEC_ELT (TREE_CHAIN (TREE_VEC_ELT (d
->declv
, i
)), j
))
1318 return TREE_VEC_LENGTH (d
->declv
);
1322 /* Helper function called via walk_tree, to diagnose uses
1323 of associated loop IVs inside of lb, b and incr expressions
1327 c_omp_check_loop_iv_r (tree
*tp
, int *walk_subtrees
, void *data
)
1329 struct c_omp_check_loop_iv_data
*d
1330 = (struct c_omp_check_loop_iv_data
*) data
;
1333 int idx
= c_omp_is_loop_iterator (*tp
, d
);
1337 if ((d
->kind
& 4) && idx
< d
->idx
)
1339 d
->maybe_nonrect
= true;
1343 if (d
->ppset
->add (*tp
))
1346 location_t loc
= d
->expr_loc
;
1347 if (loc
== UNKNOWN_LOCATION
)
1350 switch (d
->kind
& 3)
1353 error_at (loc
, "initializer expression refers to "
1354 "iteration variable %qD", *tp
);
1357 error_at (loc
, "condition expression refers to "
1358 "iteration variable %qD", *tp
);
1361 error_at (loc
, "increment expression refers to "
1362 "iteration variable %qD", *tp
);
1367 else if ((d
->kind
& 4)
1368 && TREE_CODE (*tp
) != TREE_VEC
1369 && TREE_CODE (*tp
) != PLUS_EXPR
1370 && TREE_CODE (*tp
) != MINUS_EXPR
1371 && TREE_CODE (*tp
) != MULT_EXPR
1372 && TREE_CODE (*tp
) != POINTER_PLUS_EXPR
1373 && !CONVERT_EXPR_P (*tp
))
1377 walk_tree_1 (tp
, c_omp_check_loop_iv_r
, data
, NULL
, d
->lh
);
1381 else if (d
->ppset
->add (*tp
))
1383 /* Don't walk dtors added by C++ wrap_cleanups_r. */
1384 else if (TREE_CODE (*tp
) == TRY_CATCH_EXPR
1385 && TRY_CATCH_IS_CLEANUP (*tp
))
1388 return walk_tree_1 (&TREE_OPERAND (*tp
, 0), c_omp_check_loop_iv_r
, data
,
1395 /* Check the allowed expressions for non-rectangular loop nest lb and b
1396 expressions. Return the outer var decl referenced in the expression. */
1399 c_omp_check_nonrect_loop_iv (tree
*tp
, struct c_omp_check_loop_iv_data
*d
,
1402 d
->maybe_nonrect
= false;
1406 hash_set
<tree
> pset
;
1407 hash_set
<tree
> *ppset
= d
->ppset
;
1411 if (TREE_CODE (t
) == TREE_VEC
1412 && TREE_VEC_LENGTH (t
) == 3
1413 && DECL_P (TREE_VEC_ELT (t
, 0))
1414 && c_omp_is_loop_iterator (TREE_VEC_ELT (t
, 0), d
) >= 0)
1417 walk_tree_1 (&TREE_VEC_ELT (t
, 1), c_omp_check_loop_iv_r
, d
, NULL
, lh
);
1418 walk_tree_1 (&TREE_VEC_ELT (t
, 1), c_omp_check_loop_iv_r
, d
, NULL
, lh
);
1420 return d
->fail
? NULL_TREE
: TREE_VEC_ELT (t
, 0);
1423 while (CONVERT_EXPR_P (t
))
1424 t
= TREE_OPERAND (t
, 0);
1426 tree a1
= t
, a2
= integer_zero_node
;
1427 bool neg_a1
= false, neg_a2
= false;
1428 switch (TREE_CODE (t
))
1432 a1
= TREE_OPERAND (t
, 0);
1433 a2
= TREE_OPERAND (t
, 1);
1434 while (CONVERT_EXPR_P (a1
))
1435 a1
= TREE_OPERAND (a1
, 0);
1436 while (CONVERT_EXPR_P (a2
))
1437 a2
= TREE_OPERAND (a2
, 0);
1438 if (DECL_P (a1
) && c_omp_is_loop_iterator (a1
, d
) >= 0)
1440 a2
= TREE_OPERAND (t
, 1);
1441 if (TREE_CODE (t
) == MINUS_EXPR
)
1446 if (DECL_P (a2
) && c_omp_is_loop_iterator (a2
, d
) >= 0)
1448 a1
= TREE_OPERAND (t
, 0);
1449 if (TREE_CODE (t
) == MINUS_EXPR
)
1455 if (TREE_CODE (a1
) == MULT_EXPR
&& TREE_CODE (a2
) == MULT_EXPR
)
1457 tree o1
= TREE_OPERAND (a1
, 0);
1458 tree o2
= TREE_OPERAND (a1
, 1);
1459 while (CONVERT_EXPR_P (o1
))
1460 o1
= TREE_OPERAND (o1
, 0);
1461 while (CONVERT_EXPR_P (o2
))
1462 o2
= TREE_OPERAND (o2
, 0);
1463 if ((DECL_P (o1
) && c_omp_is_loop_iterator (o1
, d
) >= 0)
1464 || (DECL_P (o2
) && c_omp_is_loop_iterator (o2
, d
) >= 0))
1466 a2
= TREE_OPERAND (t
, 1);
1467 if (TREE_CODE (t
) == MINUS_EXPR
)
1473 if (TREE_CODE (a2
) == MULT_EXPR
)
1475 a1
= TREE_OPERAND (t
, 0);
1476 if (TREE_CODE (t
) == MINUS_EXPR
)
1482 if (TREE_CODE (a1
) == MULT_EXPR
)
1484 a2
= TREE_OPERAND (t
, 1);
1485 if (TREE_CODE (t
) == MINUS_EXPR
)
1490 a2
= integer_zero_node
;
1492 case POINTER_PLUS_EXPR
:
1493 a1
= TREE_OPERAND (t
, 0);
1494 a2
= TREE_OPERAND (t
, 1);
1495 while (CONVERT_EXPR_P (a1
))
1496 a1
= TREE_OPERAND (a1
, 0);
1497 if (DECL_P (a1
) && c_omp_is_loop_iterator (a1
, d
) >= 0)
1499 a2
= TREE_OPERAND (t
, 1);
1508 a1
= integer_one_node
;
1509 if (TREE_CODE (t
) == MULT_EXPR
)
1511 tree o1
= TREE_OPERAND (t
, 0);
1512 tree o2
= TREE_OPERAND (t
, 1);
1513 while (CONVERT_EXPR_P (o1
))
1514 o1
= TREE_OPERAND (o1
, 0);
1515 while (CONVERT_EXPR_P (o2
))
1516 o2
= TREE_OPERAND (o2
, 0);
1517 if (DECL_P (o1
) && c_omp_is_loop_iterator (o1
, d
) >= 0)
1519 a1
= TREE_OPERAND (t
, 1);
1522 else if (DECL_P (o2
) && c_omp_is_loop_iterator (o2
, d
) >= 0)
1524 a1
= TREE_OPERAND (t
, 0);
1530 tree ret
= NULL_TREE
;
1531 if (DECL_P (t
) && c_omp_is_loop_iterator (t
, d
) >= 0)
1533 location_t loc
= d
->expr_loc
;
1534 if (loc
== UNKNOWN_LOCATION
)
1536 if (!lang_hooks
.types_compatible_p (TREE_TYPE (*tp
), TREE_TYPE (t
)))
1539 error_at (loc
, "outer iteration variable %qD used in initializer"
1540 " expression has type other than %qT",
1541 t
, TREE_TYPE (*tp
));
1543 error_at (loc
, "outer iteration variable %qD used in condition"
1544 " expression has type other than %qT",
1545 t
, TREE_TYPE (*tp
));
1548 else if (!INTEGRAL_TYPE_P (TREE_TYPE (a1
)))
1550 error_at (loc
, "outer iteration variable %qD multiplier expression"
1551 " %qE is not integral", t
, a1
);
1554 else if (!INTEGRAL_TYPE_P (TREE_TYPE (a2
)))
1556 error_at (loc
, "outer iteration variable %qD addend expression"
1557 " %qE is not integral", t
, a2
);
1562 walk_tree_1 (&a1
, c_omp_check_loop_iv_r
, d
, NULL
, lh
);
1563 walk_tree_1 (&a2
, c_omp_check_loop_iv_r
, d
, NULL
, lh
);
1567 a1
= fold_convert (TREE_TYPE (*tp
), a1
);
1568 a2
= fold_convert (TREE_TYPE (*tp
), a2
);
1570 a1
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (a1
), a1
);
1572 a2
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (a2
), a2
);
1574 *tp
= make_tree_vec (3);
1575 TREE_VEC_ELT (*tp
, 0) = t
;
1576 TREE_VEC_ELT (*tp
, 1) = a1
;
1577 TREE_VEC_ELT (*tp
, 2) = a2
;
1581 walk_tree_1 (&t
, c_omp_check_loop_iv_r
, d
, NULL
, lh
);
1587 /* Diagnose invalid references to loop iterators in lb, b and incr
1591 c_omp_check_loop_iv (tree stmt
, tree declv
, walk_tree_lh lh
)
1593 hash_set
<tree
> pset
;
1594 struct c_omp_check_loop_iv_data data
;
1599 data
.maybe_nonrect
= false;
1600 data
.stmt_loc
= EXPR_LOCATION (stmt
);
1603 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (stmt
)); i
++)
1605 tree init
= TREE_VEC_ELT (OMP_FOR_INIT (stmt
), i
);
1606 gcc_assert (TREE_CODE (init
) == MODIFY_EXPR
);
1607 tree decl
= TREE_OPERAND (init
, 0);
1608 tree cond
= TREE_VEC_ELT (OMP_FOR_COND (stmt
), i
);
1609 gcc_assert (COMPARISON_CLASS_P (cond
));
1610 gcc_assert (TREE_OPERAND (cond
, 0) == decl
);
1611 tree incr
= TREE_VEC_ELT (OMP_FOR_INCR (stmt
), i
);
1612 data
.expr_loc
= EXPR_LOCATION (TREE_OPERAND (init
, 1));
1613 tree vec_outer1
= NULL_TREE
, vec_outer2
= NULL_TREE
;
1616 && (unsigned) c_omp_is_loop_iterator (decl
, &data
) < (unsigned) i
)
1618 location_t loc
= data
.expr_loc
;
1619 if (loc
== UNKNOWN_LOCATION
)
1620 loc
= data
.stmt_loc
;
1621 error_at (loc
, "the same loop iteration variables %qD used in "
1622 "multiple associated loops", decl
);
1625 /* Handle non-rectangular loop nests. */
1626 if (TREE_CODE (stmt
) != OACC_LOOP
&& i
> 0)
1630 walk_tree_1 (&TREE_OPERAND (init
, 1),
1631 c_omp_check_loop_iv_r
, &data
, NULL
, lh
);
1632 if (data
.maybe_nonrect
)
1633 vec_outer1
= c_omp_check_nonrect_loop_iv (&TREE_OPERAND (init
, 1),
1635 /* Don't warn for C++ random access iterators here, the
1636 expression then involves the subtraction and always refers
1637 to the original value. The C++ FE needs to warn on those
1639 if (decl
== TREE_VEC_ELT (declv
, i
)
1640 || (TREE_CODE (TREE_VEC_ELT (declv
, i
)) == TREE_LIST
1641 && decl
== TREE_PURPOSE (TREE_VEC_ELT (declv
, i
))))
1643 data
.expr_loc
= EXPR_LOCATION (cond
);
1644 data
.kind
= kind
| 1;
1645 walk_tree_1 (&TREE_OPERAND (cond
, 1),
1646 c_omp_check_loop_iv_r
, &data
, NULL
, lh
);
1647 if (data
.maybe_nonrect
)
1648 vec_outer2
= c_omp_check_nonrect_loop_iv (&TREE_OPERAND (cond
, 1),
1651 if (vec_outer1
&& vec_outer2
&& vec_outer1
!= vec_outer2
)
1653 location_t loc
= data
.expr_loc
;
1654 if (loc
== UNKNOWN_LOCATION
)
1655 loc
= data
.stmt_loc
;
1656 error_at (loc
, "two different outer iteration variables %qD and %qD"
1657 " used in a single loop", vec_outer1
, vec_outer2
);
1660 if (vec_outer1
|| vec_outer2
)
1661 OMP_FOR_NON_RECTANGULAR (stmt
) = 1;
1662 if (TREE_CODE (incr
) == MODIFY_EXPR
)
1664 gcc_assert (TREE_OPERAND (incr
, 0) == decl
);
1665 incr
= TREE_OPERAND (incr
, 1);
1667 if (TREE_CODE (incr
) == PLUS_EXPR
1668 && TREE_OPERAND (incr
, 1) == decl
)
1670 data
.expr_loc
= EXPR_LOCATION (TREE_OPERAND (incr
, 0));
1671 walk_tree_1 (&TREE_OPERAND (incr
, 0),
1672 c_omp_check_loop_iv_r
, &data
, NULL
, lh
);
1676 data
.expr_loc
= EXPR_LOCATION (TREE_OPERAND (incr
, 1));
1677 walk_tree_1 (&TREE_OPERAND (incr
, 1),
1678 c_omp_check_loop_iv_r
, &data
, NULL
, lh
);
1685 /* Similar, but allows to check the init or cond expressions individually. */
1688 c_omp_check_loop_iv_exprs (location_t stmt_loc
, enum tree_code code
,
1689 tree declv
, int i
, tree decl
, tree init
, tree cond
,
1692 hash_set
<tree
> pset
;
1693 struct c_omp_check_loop_iv_data data
;
1694 int kind
= (code
!= OACC_LOOP
&& i
> 0) ? 4 : 0;
1698 data
.maybe_nonrect
= false;
1699 data
.stmt_loc
= stmt_loc
;
1704 && (unsigned) c_omp_is_loop_iterator (decl
, &data
) < (unsigned) i
)
1706 error_at (stmt_loc
, "the same loop iteration variables %qD used in "
1707 "multiple associated loops", decl
);
1712 data
.expr_loc
= EXPR_LOCATION (init
);
1715 c_omp_check_loop_iv_r
, &data
, NULL
, lh
);
1719 gcc_assert (COMPARISON_CLASS_P (cond
));
1720 data
.expr_loc
= EXPR_LOCATION (init
);
1721 data
.kind
= kind
| 1;
1722 if (TREE_OPERAND (cond
, 0) == decl
)
1723 walk_tree_1 (&TREE_OPERAND (cond
, 1),
1724 c_omp_check_loop_iv_r
, &data
, NULL
, lh
);
1726 walk_tree_1 (&TREE_OPERAND (cond
, 0),
1727 c_omp_check_loop_iv_r
, &data
, NULL
, lh
);
1733 /* Helper function for c_omp_check_loop_binding_exprs: look for a binding
1734 of DECL in BODY. Only traverse things that might be containers for
1735 intervening code in an OMP loop. Returns the BIND_EXPR or DECL_EXPR
1736 if found, otherwise null. */
1739 find_binding_in_body (tree decl
, tree body
)
1744 switch (TREE_CODE (body
))
1747 for (tree b
= BIND_EXPR_VARS (body
); b
; b
= DECL_CHAIN (b
))
1750 return find_binding_in_body (decl
, BIND_EXPR_BODY (body
));
1753 if (DECL_EXPR_DECL (body
) == decl
)
1757 case STATEMENT_LIST
:
1758 for (tree_stmt_iterator si
= tsi_start (body
); !tsi_end_p (si
);
1761 tree b
= find_binding_in_body (decl
, tsi_stmt (si
));
1767 case OMP_STRUCTURED_BLOCK
:
1768 return find_binding_in_body (decl
, OMP_BODY (body
));
1775 /* Traversal function for check_loop_binding_expr, to diagnose
1776 errors when a binding made in intervening code is referenced outside
1777 of the loop. Returns non-null if such a reference is found. DATA points
1778 to the tree containing the loop body. */
1781 check_loop_binding_expr_r (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
,
1784 tree body
= *(tree
*)data
;
1786 if (DECL_P (*tp
) && find_binding_in_body (*tp
, body
))
1791 /* Helper macro used below. */
1793 #define LOCATION_OR(loc1, loc2) \
1794 ((loc1) != UNKNOWN_LOCATION ? (loc1) : (loc2))
1796 /* Check a single expression EXPR for references to variables bound in
1797 intervening code in BODY. Return true if ok, otherwise give an error
1798 referencing CONTEXT and return false. Use LOC for the error message
1799 if EXPR doesn't have one. */
1801 check_loop_binding_expr (tree expr
, tree body
, const char *context
,
1804 tree bad
= walk_tree (&expr
, check_loop_binding_expr_r
, (void *)&body
, NULL
);
1808 location_t eloc
= EXPR_LOCATION (expr
);
1809 error_at (LOCATION_OR (eloc
, loc
),
1810 "variable %qD used %s is bound "
1811 "in intervening code", bad
, context
);
1817 /* STMT is an OMP_FOR construct. Check all of the iteration variable,
1818 initializer, end condition, and increment for bindings inside the
1819 loop body. If ORIG_INITS is provided, check those elements too.
1820 Return true if OK, false otherwise. */
1822 c_omp_check_loop_binding_exprs (tree stmt
, vec
<tree
> *orig_inits
)
1825 location_t loc
= EXPR_LOCATION (stmt
);
1826 tree body
= OMP_FOR_BODY (stmt
);
1827 int orig_init_length
= orig_inits
? orig_inits
->length () : 0;
1829 for (int i
= 1; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (stmt
)); i
++)
1831 tree init
= TREE_VEC_ELT (OMP_FOR_INIT (stmt
), i
);
1832 tree cond
= TREE_VEC_ELT (OMP_FOR_COND (stmt
), i
);
1833 tree incr
= TREE_VEC_ELT (OMP_FOR_INCR (stmt
), i
);
1834 gcc_assert (TREE_CODE (init
) == MODIFY_EXPR
);
1835 tree decl
= TREE_OPERAND (init
, 0);
1836 tree orig_init
= i
< orig_init_length
? (*orig_inits
)[i
] : NULL_TREE
;
1840 e
= TREE_OPERAND (init
, 1);
1841 eloc
= LOCATION_OR (EXPR_LOCATION (init
), loc
);
1842 if (!check_loop_binding_expr (decl
, body
, "as loop variable", eloc
))
1844 if (!check_loop_binding_expr (e
, body
, "in initializer", eloc
))
1847 && !check_loop_binding_expr (orig_init
, body
,
1848 "in initializer", eloc
))
1851 /* INCR and/or COND may be null if this is a template with a
1855 eloc
= LOCATION_OR (EXPR_LOCATION (cond
), loc
);
1856 if (COMPARISON_CLASS_P (cond
) && TREE_OPERAND (cond
, 0) == decl
)
1857 e
= TREE_OPERAND (cond
, 1);
1858 else if (COMPARISON_CLASS_P (cond
) && TREE_OPERAND (cond
, 1) == decl
)
1859 e
= TREE_OPERAND (cond
, 0);
1862 if (!check_loop_binding_expr (e
, body
, "in end test", eloc
))
1868 eloc
= LOCATION_OR (EXPR_LOCATION (incr
), loc
);
1869 /* INCR should be either a MODIFY_EXPR or pre/post
1870 increment/decrement. We don't have to check the latter
1871 since there are no operands besides the iteration variable. */
1872 if (TREE_CODE (incr
) == MODIFY_EXPR
1873 && !check_loop_binding_expr (TREE_OPERAND (incr
, 1), body
,
1874 "in increment expression", eloc
))
1882 /* This function splits clauses for OpenACC combined loop
1883 constructs. OpenACC combined loop constructs are:
1884 #pragma acc kernels loop
1885 #pragma acc parallel loop */
1888 c_oacc_split_loop_clauses (tree clauses
, tree
*not_loop_clauses
,
1891 tree next
, loop_clauses
, nc
;
1893 loop_clauses
= *not_loop_clauses
= NULL_TREE
;
1894 for (; clauses
; clauses
= next
)
1896 next
= OMP_CLAUSE_CHAIN (clauses
);
1898 switch (OMP_CLAUSE_CODE (clauses
))
1901 case OMP_CLAUSE_COLLAPSE
:
1902 case OMP_CLAUSE_TILE
:
1903 case OMP_CLAUSE_GANG
:
1904 case OMP_CLAUSE_WORKER
:
1905 case OMP_CLAUSE_VECTOR
:
1906 case OMP_CLAUSE_AUTO
:
1907 case OMP_CLAUSE_SEQ
:
1908 case OMP_CLAUSE_INDEPENDENT
:
1909 case OMP_CLAUSE_PRIVATE
:
1910 OMP_CLAUSE_CHAIN (clauses
) = loop_clauses
;
1911 loop_clauses
= clauses
;
1914 /* Reductions must be duplicated on both constructs. */
1915 case OMP_CLAUSE_REDUCTION
:
1918 nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
1919 OMP_CLAUSE_REDUCTION
);
1920 OMP_CLAUSE_DECL (nc
) = OMP_CLAUSE_DECL (clauses
);
1921 OMP_CLAUSE_REDUCTION_CODE (nc
)
1922 = OMP_CLAUSE_REDUCTION_CODE (clauses
);
1923 OMP_CLAUSE_CHAIN (nc
) = *not_loop_clauses
;
1924 *not_loop_clauses
= nc
;
1927 OMP_CLAUSE_CHAIN (clauses
) = loop_clauses
;
1928 loop_clauses
= clauses
;
1931 /* Parallel/kernels clauses. */
1933 OMP_CLAUSE_CHAIN (clauses
) = *not_loop_clauses
;
1934 *not_loop_clauses
= clauses
;
1939 return loop_clauses
;
1942 /* This function attempts to split or duplicate clauses for OpenMP
1943 combined/composite constructs. Right now there are 30 different
1944 constructs. CODE is the innermost construct in the combined construct,
1945 and MASK allows to determine which constructs are combined together,
1946 as every construct has at least one clause that no other construct
1947 has (except for OMP_SECTIONS, but that can be only combined with parallel,
1948 and OMP_MASTER, which doesn't have any clauses at all).
1949 OpenMP combined/composite constructs are:
1950 #pragma omp distribute parallel for
1951 #pragma omp distribute parallel for simd
1952 #pragma omp distribute simd
1953 #pragma omp for simd
1954 #pragma omp masked taskloop
1955 #pragma omp masked taskloop simd
1956 #pragma omp master taskloop
1957 #pragma omp master taskloop simd
1958 #pragma omp parallel for
1959 #pragma omp parallel for simd
1960 #pragma omp parallel loop
1961 #pragma omp parallel masked
1962 #pragma omp parallel masked taskloop
1963 #pragma omp parallel masked taskloop simd
1964 #pragma omp parallel master
1965 #pragma omp parallel master taskloop
1966 #pragma omp parallel master taskloop simd
1967 #pragma omp parallel sections
1968 #pragma omp target parallel
1969 #pragma omp target parallel for
1970 #pragma omp target parallel for simd
1971 #pragma omp target parallel loop
1972 #pragma omp target teams
1973 #pragma omp target teams distribute
1974 #pragma omp target teams distribute parallel for
1975 #pragma omp target teams distribute parallel for simd
1976 #pragma omp target teams distribute simd
1977 #pragma omp target teams loop
1978 #pragma omp target simd
1979 #pragma omp taskloop simd
1980 #pragma omp teams distribute
1981 #pragma omp teams distribute parallel for
1982 #pragma omp teams distribute parallel for simd
1983 #pragma omp teams distribute simd
1984 #pragma omp teams loop */
1987 c_omp_split_clauses (location_t loc
, enum tree_code code
,
1988 omp_clause_mask mask
, tree clauses
, tree
*cclauses
)
1991 enum c_omp_clause_split s
;
1993 bool has_dup_allocate
= false;
1995 for (i
= 0; i
< C_OMP_CLAUSE_SPLIT_COUNT
; i
++)
1997 /* Add implicit nowait clause on
1998 #pragma omp parallel {for,for simd,sections}. */
1999 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_THREADS
)) != 0)
2004 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_SCHEDULE
)) != 0)
2005 cclauses
[C_OMP_CLAUSE_SPLIT_FOR
]
2006 = build_omp_clause (loc
, OMP_CLAUSE_NOWAIT
);
2009 cclauses
[C_OMP_CLAUSE_SPLIT_SECTIONS
]
2010 = build_omp_clause (loc
, OMP_CLAUSE_NOWAIT
);
2016 for (; clauses
; clauses
= next
)
2018 next
= OMP_CLAUSE_CHAIN (clauses
);
2020 switch (OMP_CLAUSE_CODE (clauses
))
2022 /* First the clauses that are unique to some constructs. */
2023 case OMP_CLAUSE_DEVICE
:
2024 case OMP_CLAUSE_MAP
:
2025 case OMP_CLAUSE_IS_DEVICE_PTR
:
2026 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
2027 case OMP_CLAUSE_DEFAULTMAP
:
2028 case OMP_CLAUSE_DEPEND
:
2029 s
= C_OMP_CLAUSE_SPLIT_TARGET
;
2031 case OMP_CLAUSE_DOACROSS
:
2032 /* This can happen with invalid depend(source) or
2033 depend(sink:vec) on target combined with other constructs. */
2034 gcc_assert (OMP_CLAUSE_DOACROSS_DEPEND (clauses
));
2035 s
= C_OMP_CLAUSE_SPLIT_TARGET
;
2037 case OMP_CLAUSE_NUM_TEAMS
:
2038 s
= C_OMP_CLAUSE_SPLIT_TEAMS
;
2040 case OMP_CLAUSE_DIST_SCHEDULE
:
2041 s
= C_OMP_CLAUSE_SPLIT_DISTRIBUTE
;
2043 case OMP_CLAUSE_COPYIN
:
2044 case OMP_CLAUSE_NUM_THREADS
:
2045 case OMP_CLAUSE_PROC_BIND
:
2046 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2048 case OMP_CLAUSE_ORDERED
:
2049 s
= C_OMP_CLAUSE_SPLIT_FOR
;
2051 case OMP_CLAUSE_SCHEDULE
:
2052 s
= C_OMP_CLAUSE_SPLIT_FOR
;
2053 if (code
!= OMP_SIMD
)
2054 OMP_CLAUSE_SCHEDULE_SIMD (clauses
) = 0;
2056 case OMP_CLAUSE_SAFELEN
:
2057 case OMP_CLAUSE_SIMDLEN
:
2058 case OMP_CLAUSE_ALIGNED
:
2059 case OMP_CLAUSE_NONTEMPORAL
:
2060 s
= C_OMP_CLAUSE_SPLIT_SIMD
;
2062 case OMP_CLAUSE_GRAINSIZE
:
2063 case OMP_CLAUSE_NUM_TASKS
:
2064 case OMP_CLAUSE_FINAL
:
2065 case OMP_CLAUSE_UNTIED
:
2066 case OMP_CLAUSE_MERGEABLE
:
2067 case OMP_CLAUSE_NOGROUP
:
2068 case OMP_CLAUSE_PRIORITY
:
2069 s
= C_OMP_CLAUSE_SPLIT_TASKLOOP
;
2071 case OMP_CLAUSE_BIND
:
2072 s
= C_OMP_CLAUSE_SPLIT_LOOP
;
2074 case OMP_CLAUSE_FILTER
:
2075 s
= C_OMP_CLAUSE_SPLIT_MASKED
;
2077 /* Duplicate this to all of taskloop, distribute, for, simd and
2079 case OMP_CLAUSE_COLLAPSE
:
2080 if (code
== OMP_SIMD
)
2082 if ((mask
& ((OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_SCHEDULE
)
2083 | (OMP_CLAUSE_MASK_1
2084 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE
)
2085 | (OMP_CLAUSE_MASK_1
2086 << PRAGMA_OMP_CLAUSE_NOGROUP
))) != 0)
2088 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2089 OMP_CLAUSE_COLLAPSE
);
2090 OMP_CLAUSE_COLLAPSE_EXPR (c
)
2091 = OMP_CLAUSE_COLLAPSE_EXPR (clauses
);
2092 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_SIMD
];
2093 cclauses
[C_OMP_CLAUSE_SPLIT_SIMD
] = c
;
2097 /* This must be #pragma omp target simd */
2098 s
= C_OMP_CLAUSE_SPLIT_SIMD
;
2102 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_SCHEDULE
)) != 0)
2104 if ((mask
& (OMP_CLAUSE_MASK_1
2105 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE
)) != 0)
2107 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2108 OMP_CLAUSE_COLLAPSE
);
2109 OMP_CLAUSE_COLLAPSE_EXPR (c
)
2110 = OMP_CLAUSE_COLLAPSE_EXPR (clauses
);
2111 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_FOR
];
2112 cclauses
[C_OMP_CLAUSE_SPLIT_FOR
] = c
;
2113 s
= C_OMP_CLAUSE_SPLIT_DISTRIBUTE
;
2116 s
= C_OMP_CLAUSE_SPLIT_FOR
;
2118 else if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NOGROUP
))
2120 s
= C_OMP_CLAUSE_SPLIT_TASKLOOP
;
2121 else if (code
== OMP_LOOP
)
2122 s
= C_OMP_CLAUSE_SPLIT_LOOP
;
2124 s
= C_OMP_CLAUSE_SPLIT_DISTRIBUTE
;
2126 /* Private clause is supported on all constructs but master/masked,
2127 it is enough to put it on the innermost one other than
2128 master/masked. For #pragma omp {for,sections} put it on parallel
2129 though, as that's what we did for OpenMP 3.1. */
2130 case OMP_CLAUSE_PRIVATE
:
2133 case OMP_SIMD
: s
= C_OMP_CLAUSE_SPLIT_SIMD
; break;
2134 case OMP_FOR
: case OMP_SECTIONS
:
2135 case OMP_PARALLEL
: s
= C_OMP_CLAUSE_SPLIT_PARALLEL
; break;
2136 case OMP_DISTRIBUTE
: s
= C_OMP_CLAUSE_SPLIT_DISTRIBUTE
; break;
2137 case OMP_TEAMS
: s
= C_OMP_CLAUSE_SPLIT_TEAMS
; break;
2138 case OMP_MASTER
: s
= C_OMP_CLAUSE_SPLIT_PARALLEL
; break;
2139 case OMP_MASKED
: s
= C_OMP_CLAUSE_SPLIT_PARALLEL
; break;
2140 case OMP_TASKLOOP
: s
= C_OMP_CLAUSE_SPLIT_TASKLOOP
; break;
2141 case OMP_LOOP
: s
= C_OMP_CLAUSE_SPLIT_LOOP
; break;
2142 default: gcc_unreachable ();
2145 /* Firstprivate clause is supported on all constructs but
2146 simd, master, masked and loop. Put it on the outermost of those
2147 and duplicate on teams and parallel. */
2148 case OMP_CLAUSE_FIRSTPRIVATE
:
2149 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_MAP
))
2152 if (code
== OMP_SIMD
2153 && (mask
& ((OMP_CLAUSE_MASK_1
2154 << PRAGMA_OMP_CLAUSE_NUM_THREADS
)
2155 | (OMP_CLAUSE_MASK_1
2156 << PRAGMA_OMP_CLAUSE_NUM_TEAMS
))) == 0)
2158 /* This must be #pragma omp target simd. */
2159 s
= C_OMP_CLAUSE_SPLIT_TARGET
;
2160 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clauses
) = 1;
2161 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (clauses
) = 1;
2164 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2165 OMP_CLAUSE_FIRSTPRIVATE
);
2166 /* firstprivate should not be applied to target if it is
2167 also lastprivate or on the combined/composite construct,
2168 or if it is mentioned in map clause. OMP_CLAUSE_DECLs
2169 may need to go through FE handling though (instantiation,
2170 C++ non-static data members, array section lowering), so
2171 add the clause with OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT and
2172 let *finish_omp_clauses and the gimplifier handle it
2174 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
) = 1;
2175 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2176 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_TARGET
];
2177 cclauses
[C_OMP_CLAUSE_SPLIT_TARGET
] = c
;
2179 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_THREADS
))
2182 if ((mask
& ((OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_TEAMS
)
2183 | (OMP_CLAUSE_MASK_1
2184 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE
))) != 0)
2186 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2187 OMP_CLAUSE_FIRSTPRIVATE
);
2188 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2189 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_PARALLEL
];
2190 cclauses
[C_OMP_CLAUSE_SPLIT_PARALLEL
] = c
;
2191 if ((mask
& (OMP_CLAUSE_MASK_1
2192 << PRAGMA_OMP_CLAUSE_NUM_TEAMS
)) != 0)
2193 s
= C_OMP_CLAUSE_SPLIT_TEAMS
;
2195 s
= C_OMP_CLAUSE_SPLIT_DISTRIBUTE
;
2197 else if ((mask
& (OMP_CLAUSE_MASK_1
2198 << PRAGMA_OMP_CLAUSE_NOGROUP
)) != 0)
2200 #pragma omp parallel mas{ked,ter} taskloop{, simd}. */
2201 s
= C_OMP_CLAUSE_SPLIT_TASKLOOP
;
2204 #pragma omp parallel{, for{, simd}, sections,loop}
2206 #pragma omp target parallel. */
2207 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2209 else if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_TEAMS
))
2212 /* This must be one of
2213 #pragma omp {,target }teams {distribute,loop}
2214 #pragma omp target teams
2215 #pragma omp {,target }teams distribute simd. */
2216 gcc_assert (code
== OMP_DISTRIBUTE
2218 || code
== OMP_TEAMS
2219 || code
== OMP_SIMD
);
2220 s
= C_OMP_CLAUSE_SPLIT_TEAMS
;
2222 else if ((mask
& (OMP_CLAUSE_MASK_1
2223 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE
)) != 0)
2225 /* This must be #pragma omp distribute simd. */
2226 gcc_assert (code
== OMP_SIMD
);
2227 s
= C_OMP_CLAUSE_SPLIT_DISTRIBUTE
;
2229 else if ((mask
& (OMP_CLAUSE_MASK_1
2230 << PRAGMA_OMP_CLAUSE_NOGROUP
)) != 0)
2233 #pragma omp {,{,parallel }mas{ked,ter} }taskloop simd
2235 #pragma omp {,parallel }mas{ked,ter} taskloop. */
2236 gcc_assert (code
== OMP_SIMD
|| code
== OMP_TASKLOOP
);
2237 s
= C_OMP_CLAUSE_SPLIT_TASKLOOP
;
2241 /* This must be #pragma omp for simd. */
2242 gcc_assert (code
== OMP_SIMD
);
2243 s
= C_OMP_CLAUSE_SPLIT_FOR
;
2246 /* Lastprivate is allowed on distribute, for, sections, taskloop, loop
2247 and simd. In parallel {for{, simd},sections} we actually want to
2248 put it on parallel rather than for or sections. */
2249 case OMP_CLAUSE_LASTPRIVATE
:
2250 if (code
== OMP_DISTRIBUTE
)
2252 s
= C_OMP_CLAUSE_SPLIT_DISTRIBUTE
;
2255 if ((mask
& (OMP_CLAUSE_MASK_1
2256 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE
)) != 0)
2258 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2259 OMP_CLAUSE_LASTPRIVATE
);
2260 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2261 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_DISTRIBUTE
];
2262 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
2263 = OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (clauses
);
2264 cclauses
[C_OMP_CLAUSE_SPLIT_DISTRIBUTE
] = c
;
2266 if (code
== OMP_FOR
|| code
== OMP_SECTIONS
)
2268 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_THREADS
))
2270 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2272 s
= C_OMP_CLAUSE_SPLIT_FOR
;
2275 if (code
== OMP_TASKLOOP
)
2277 s
= C_OMP_CLAUSE_SPLIT_TASKLOOP
;
2280 if (code
== OMP_LOOP
)
2282 s
= C_OMP_CLAUSE_SPLIT_LOOP
;
2285 gcc_assert (code
== OMP_SIMD
);
2286 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_SCHEDULE
)) != 0)
2288 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2289 OMP_CLAUSE_LASTPRIVATE
);
2290 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2291 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
2292 = OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (clauses
);
2293 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_THREADS
))
2295 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2297 s
= C_OMP_CLAUSE_SPLIT_FOR
;
2298 OMP_CLAUSE_CHAIN (c
) = cclauses
[s
];
2301 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NOGROUP
)) != 0)
2303 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2304 OMP_CLAUSE_LASTPRIVATE
);
2305 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2306 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
2307 = OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (clauses
);
2308 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_TASKLOOP
];
2309 cclauses
[C_OMP_CLAUSE_SPLIT_TASKLOOP
] = c
;
2311 s
= C_OMP_CLAUSE_SPLIT_SIMD
;
2313 /* Shared and default clauses are allowed on parallel, teams and
2315 case OMP_CLAUSE_SHARED
:
2316 case OMP_CLAUSE_DEFAULT
:
2317 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NOGROUP
))
2320 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_THREADS
))
2323 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2324 OMP_CLAUSE_CODE (clauses
));
2325 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_SHARED
)
2326 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2328 OMP_CLAUSE_DEFAULT_KIND (c
)
2329 = OMP_CLAUSE_DEFAULT_KIND (clauses
);
2330 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_PARALLEL
];
2331 cclauses
[C_OMP_CLAUSE_SPLIT_PARALLEL
] = c
;
2333 s
= C_OMP_CLAUSE_SPLIT_TASKLOOP
;
2336 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_TEAMS
))
2339 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_THREADS
))
2342 s
= C_OMP_CLAUSE_SPLIT_TEAMS
;
2345 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2346 OMP_CLAUSE_CODE (clauses
));
2347 if (OMP_CLAUSE_CODE (clauses
) == OMP_CLAUSE_SHARED
)
2348 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2350 OMP_CLAUSE_DEFAULT_KIND (c
)
2351 = OMP_CLAUSE_DEFAULT_KIND (clauses
);
2352 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_TEAMS
];
2353 cclauses
[C_OMP_CLAUSE_SPLIT_TEAMS
] = c
;
2355 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2357 /* order clauses are allowed on distribute, for, simd and loop. */
2358 case OMP_CLAUSE_ORDER
:
2359 if ((mask
& (OMP_CLAUSE_MASK_1
2360 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE
)) != 0)
2362 if (code
== OMP_DISTRIBUTE
)
2364 s
= C_OMP_CLAUSE_SPLIT_DISTRIBUTE
;
2367 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2369 OMP_CLAUSE_ORDER_UNCONSTRAINED (c
)
2370 = OMP_CLAUSE_ORDER_UNCONSTRAINED (clauses
);
2371 OMP_CLAUSE_ORDER_REPRODUCIBLE (c
)
2372 = OMP_CLAUSE_ORDER_REPRODUCIBLE (clauses
);
2373 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_DISTRIBUTE
];
2374 cclauses
[C_OMP_CLAUSE_SPLIT_DISTRIBUTE
] = c
;
2376 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_SCHEDULE
)) != 0)
2378 if (code
== OMP_SIMD
)
2380 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2382 OMP_CLAUSE_ORDER_UNCONSTRAINED (c
)
2383 = OMP_CLAUSE_ORDER_UNCONSTRAINED (clauses
);
2384 OMP_CLAUSE_ORDER_REPRODUCIBLE (c
)
2385 = OMP_CLAUSE_ORDER_REPRODUCIBLE (clauses
);
2386 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_FOR
];
2387 cclauses
[C_OMP_CLAUSE_SPLIT_FOR
] = c
;
2388 s
= C_OMP_CLAUSE_SPLIT_SIMD
;
2391 s
= C_OMP_CLAUSE_SPLIT_FOR
;
2393 else if (code
== OMP_LOOP
)
2394 s
= C_OMP_CLAUSE_SPLIT_LOOP
;
2396 s
= C_OMP_CLAUSE_SPLIT_SIMD
;
2398 /* Reduction is allowed on simd, for, parallel, sections, taskloop,
2399 teams and loop. Duplicate it on all of them, but omit on for or
2400 sections if parallel is present (unless inscan, in that case
2401 omit on parallel). If taskloop or loop is combined with
2402 parallel, omit it on parallel. */
2403 case OMP_CLAUSE_REDUCTION
:
2404 if (OMP_CLAUSE_REDUCTION_TASK (clauses
))
2406 if (code
== OMP_SIMD
|| code
== OMP_LOOP
)
2408 error_at (OMP_CLAUSE_LOCATION (clauses
),
2409 "invalid %<task%> reduction modifier on construct "
2410 "combined with %<simd%> or %<loop%>");
2411 OMP_CLAUSE_REDUCTION_TASK (clauses
) = 0;
2413 else if (code
!= OMP_SECTIONS
2414 && (mask
& (OMP_CLAUSE_MASK_1
2415 << PRAGMA_OMP_CLAUSE_NUM_THREADS
)) == 0
2416 && (mask
& (OMP_CLAUSE_MASK_1
2417 << PRAGMA_OMP_CLAUSE_SCHEDULE
)) == 0)
2419 error_at (OMP_CLAUSE_LOCATION (clauses
),
2420 "invalid %<task%> reduction modifier on construct "
2421 "not combined with %<parallel%>, %<for%> or "
2423 OMP_CLAUSE_REDUCTION_TASK (clauses
) = 0;
2426 if (OMP_CLAUSE_REDUCTION_INSCAN (clauses
)
2427 && ((mask
& ((OMP_CLAUSE_MASK_1
2428 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE
)
2429 | (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_MAP
)))
2432 error_at (OMP_CLAUSE_LOCATION (clauses
),
2433 "%<inscan%> %<reduction%> clause on construct other "
2434 "than %<for%>, %<simd%>, %<for simd%>, "
2435 "%<parallel for%>, %<parallel for simd%>");
2436 OMP_CLAUSE_REDUCTION_INSCAN (clauses
) = 0;
2438 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_MAP
)) != 0)
2440 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2442 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2443 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2444 OMP_CLAUSE_MAP_IMPLICIT (c
) = 1;
2445 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_TARGET
];
2446 cclauses
[C_OMP_CLAUSE_SPLIT_TARGET
] = c
;
2448 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_SCHEDULE
)) != 0)
2450 if (code
== OMP_SIMD
)
2452 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2453 OMP_CLAUSE_REDUCTION
);
2454 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2455 OMP_CLAUSE_REDUCTION_CODE (c
)
2456 = OMP_CLAUSE_REDUCTION_CODE (clauses
);
2457 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
2458 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses
);
2459 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
2460 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses
);
2461 OMP_CLAUSE_REDUCTION_INSCAN (c
)
2462 = OMP_CLAUSE_REDUCTION_INSCAN (clauses
);
2463 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_SIMD
];
2464 cclauses
[C_OMP_CLAUSE_SPLIT_SIMD
] = c
;
2466 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_TEAMS
))
2469 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2470 OMP_CLAUSE_REDUCTION
);
2471 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2472 OMP_CLAUSE_REDUCTION_CODE (c
)
2473 = OMP_CLAUSE_REDUCTION_CODE (clauses
);
2474 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
2475 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses
);
2476 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
2477 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses
);
2478 OMP_CLAUSE_REDUCTION_INSCAN (c
)
2479 = OMP_CLAUSE_REDUCTION_INSCAN (clauses
);
2480 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_TEAMS
];
2481 cclauses
[C_OMP_CLAUSE_SPLIT_TEAMS
] = c
;
2482 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2484 else if ((mask
& (OMP_CLAUSE_MASK_1
2485 << PRAGMA_OMP_CLAUSE_NUM_THREADS
)) != 0
2486 && !OMP_CLAUSE_REDUCTION_INSCAN (clauses
))
2487 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2489 s
= C_OMP_CLAUSE_SPLIT_FOR
;
2491 else if (code
== OMP_SECTIONS
2492 || code
== OMP_PARALLEL
2493 || code
== OMP_MASTER
2494 || code
== OMP_MASKED
)
2495 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2496 else if (code
== OMP_TASKLOOP
)
2497 s
= C_OMP_CLAUSE_SPLIT_TASKLOOP
;
2498 else if (code
== OMP_LOOP
)
2499 s
= C_OMP_CLAUSE_SPLIT_LOOP
;
2500 else if (code
== OMP_SIMD
)
2502 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NOGROUP
))
2505 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2506 OMP_CLAUSE_REDUCTION
);
2507 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2508 OMP_CLAUSE_REDUCTION_CODE (c
)
2509 = OMP_CLAUSE_REDUCTION_CODE (clauses
);
2510 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
2511 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses
);
2512 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
2513 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses
);
2514 OMP_CLAUSE_REDUCTION_INSCAN (c
)
2515 = OMP_CLAUSE_REDUCTION_INSCAN (clauses
);
2516 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_TASKLOOP
];
2517 cclauses
[C_OMP_CLAUSE_SPLIT_TASKLOOP
] = c
;
2519 else if ((mask
& (OMP_CLAUSE_MASK_1
2520 << PRAGMA_OMP_CLAUSE_NUM_TEAMS
)) != 0)
2522 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2523 OMP_CLAUSE_REDUCTION
);
2524 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2525 OMP_CLAUSE_REDUCTION_CODE (c
)
2526 = OMP_CLAUSE_REDUCTION_CODE (clauses
);
2527 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
2528 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses
);
2529 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
2530 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses
);
2531 OMP_CLAUSE_REDUCTION_INSCAN (c
)
2532 = OMP_CLAUSE_REDUCTION_INSCAN (clauses
);
2533 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_TEAMS
];
2534 cclauses
[C_OMP_CLAUSE_SPLIT_TEAMS
] = c
;
2536 s
= C_OMP_CLAUSE_SPLIT_SIMD
;
2539 s
= C_OMP_CLAUSE_SPLIT_TEAMS
;
2541 case OMP_CLAUSE_IN_REDUCTION
:
2542 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_MAP
)) != 0)
2544 /* When on target, map(always, tofrom: item) is added as
2545 well. For non-combined target it is added in the FEs. */
2546 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2548 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2549 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_ALWAYS_TOFROM
);
2550 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_TARGET
];
2551 cclauses
[C_OMP_CLAUSE_SPLIT_TARGET
] = c
;
2552 s
= C_OMP_CLAUSE_SPLIT_TARGET
;
2555 /* in_reduction on taskloop simd becomes reduction on the simd
2556 and keeps being in_reduction on taskloop. */
2557 if (code
== OMP_SIMD
)
2559 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2560 OMP_CLAUSE_REDUCTION
);
2561 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clauses
);
2562 OMP_CLAUSE_REDUCTION_CODE (c
)
2563 = OMP_CLAUSE_REDUCTION_CODE (clauses
);
2564 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
)
2565 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses
);
2566 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
2567 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses
);
2568 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_SIMD
];
2569 cclauses
[C_OMP_CLAUSE_SPLIT_SIMD
] = c
;
2571 s
= C_OMP_CLAUSE_SPLIT_TASKLOOP
;
2574 if (OMP_CLAUSE_IF_MODIFIER (clauses
) != ERROR_MARK
)
2576 s
= C_OMP_CLAUSE_SPLIT_COUNT
;
2577 switch (OMP_CLAUSE_IF_MODIFIER (clauses
))
2580 if ((mask
& (OMP_CLAUSE_MASK_1
2581 << PRAGMA_OMP_CLAUSE_NUM_THREADS
)) != 0)
2582 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2585 if (code
== OMP_SIMD
)
2586 s
= C_OMP_CLAUSE_SPLIT_SIMD
;
2589 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NOGROUP
))
2591 s
= C_OMP_CLAUSE_SPLIT_TASKLOOP
;
2594 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_MAP
))
2596 s
= C_OMP_CLAUSE_SPLIT_TARGET
;
2601 if (s
!= C_OMP_CLAUSE_SPLIT_COUNT
)
2603 /* Error-recovery here, invalid if-modifier specified, add the
2604 clause to just one construct. */
2605 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_MAP
)) != 0)
2606 s
= C_OMP_CLAUSE_SPLIT_TARGET
;
2607 else if ((mask
& (OMP_CLAUSE_MASK_1
2608 << PRAGMA_OMP_CLAUSE_NUM_THREADS
)) != 0)
2609 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2610 else if ((mask
& (OMP_CLAUSE_MASK_1
2611 << PRAGMA_OMP_CLAUSE_NOGROUP
)) != 0)
2612 s
= C_OMP_CLAUSE_SPLIT_TASKLOOP
;
2613 else if (code
== OMP_SIMD
)
2614 s
= C_OMP_CLAUSE_SPLIT_SIMD
;
2619 /* Otherwise, duplicate if clause to all constructs. */
2620 if (code
== OMP_SIMD
)
2622 if ((mask
& ((OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_MAP
)
2623 | (OMP_CLAUSE_MASK_1
2624 << PRAGMA_OMP_CLAUSE_NUM_THREADS
)
2625 | (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NOGROUP
)))
2628 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2630 OMP_CLAUSE_IF_MODIFIER (c
)
2631 = OMP_CLAUSE_IF_MODIFIER (clauses
);
2632 OMP_CLAUSE_IF_EXPR (c
) = OMP_CLAUSE_IF_EXPR (clauses
);
2633 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_SIMD
];
2634 cclauses
[C_OMP_CLAUSE_SPLIT_SIMD
] = c
;
2638 s
= C_OMP_CLAUSE_SPLIT_SIMD
;
2642 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NOGROUP
))
2645 if ((mask
& (OMP_CLAUSE_MASK_1
2646 << PRAGMA_OMP_CLAUSE_NUM_THREADS
)) != 0)
2648 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2650 OMP_CLAUSE_IF_MODIFIER (c
)
2651 = OMP_CLAUSE_IF_MODIFIER (clauses
);
2652 OMP_CLAUSE_IF_EXPR (c
) = OMP_CLAUSE_IF_EXPR (clauses
);
2653 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_TASKLOOP
];
2654 cclauses
[C_OMP_CLAUSE_SPLIT_TASKLOOP
] = c
;
2655 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2658 s
= C_OMP_CLAUSE_SPLIT_TASKLOOP
;
2660 else if ((mask
& (OMP_CLAUSE_MASK_1
2661 << PRAGMA_OMP_CLAUSE_NUM_THREADS
)) != 0)
2663 if ((mask
& (OMP_CLAUSE_MASK_1
2664 << PRAGMA_OMP_CLAUSE_MAP
)) != 0)
2666 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2668 OMP_CLAUSE_IF_MODIFIER (c
)
2669 = OMP_CLAUSE_IF_MODIFIER (clauses
);
2670 OMP_CLAUSE_IF_EXPR (c
) = OMP_CLAUSE_IF_EXPR (clauses
);
2671 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_TARGET
];
2672 cclauses
[C_OMP_CLAUSE_SPLIT_TARGET
] = c
;
2673 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2676 s
= C_OMP_CLAUSE_SPLIT_PARALLEL
;
2679 s
= C_OMP_CLAUSE_SPLIT_TARGET
;
2681 case OMP_CLAUSE_LINEAR
:
2682 /* Linear clause is allowed on simd and for. Put it on the
2683 innermost construct. */
2684 if (code
== OMP_SIMD
)
2685 s
= C_OMP_CLAUSE_SPLIT_SIMD
;
2687 s
= C_OMP_CLAUSE_SPLIT_FOR
;
2689 case OMP_CLAUSE_NOWAIT
:
2690 /* Nowait clause is allowed on target, for and sections, but
2691 is not allowed on parallel for or parallel sections. Therefore,
2692 put it on target construct if present, because that can only
2693 be combined with parallel for{, simd} and not with for{, simd},
2694 otherwise to the worksharing construct. */
2695 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_MAP
))
2697 s
= C_OMP_CLAUSE_SPLIT_TARGET
;
2699 s
= C_OMP_CLAUSE_SPLIT_FOR
;
2701 /* thread_limit is allowed on target and teams. Distribute it
2703 case OMP_CLAUSE_THREAD_LIMIT
:
2704 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_MAP
))
2707 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_TEAMS
))
2710 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2711 OMP_CLAUSE_THREAD_LIMIT
);
2712 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
)
2713 = OMP_CLAUSE_THREAD_LIMIT_EXPR (clauses
);
2714 OMP_CLAUSE_CHAIN (c
) = cclauses
[C_OMP_CLAUSE_SPLIT_TARGET
];
2715 cclauses
[C_OMP_CLAUSE_SPLIT_TARGET
] = c
;
2719 s
= C_OMP_CLAUSE_SPLIT_TARGET
;
2723 s
= C_OMP_CLAUSE_SPLIT_TEAMS
;
2725 /* Allocate clause is allowed on target, teams, distribute, parallel,
2726 for, sections and taskloop. Distribute it to all. */
2727 case OMP_CLAUSE_ALLOCATE
:
2728 s
= C_OMP_CLAUSE_SPLIT_COUNT
;
2729 for (i
= 0; i
< C_OMP_CLAUSE_SPLIT_COUNT
; i
++)
2733 case C_OMP_CLAUSE_SPLIT_TARGET
:
2734 if ((mask
& (OMP_CLAUSE_MASK_1
2735 << PRAGMA_OMP_CLAUSE_MAP
)) == 0)
2738 case C_OMP_CLAUSE_SPLIT_TEAMS
:
2739 if ((mask
& (OMP_CLAUSE_MASK_1
2740 << PRAGMA_OMP_CLAUSE_NUM_TEAMS
)) == 0)
2743 case C_OMP_CLAUSE_SPLIT_DISTRIBUTE
:
2744 if ((mask
& (OMP_CLAUSE_MASK_1
2745 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE
)) == 0)
2748 case C_OMP_CLAUSE_SPLIT_PARALLEL
:
2749 if ((mask
& (OMP_CLAUSE_MASK_1
2750 << PRAGMA_OMP_CLAUSE_NUM_THREADS
)) == 0)
2753 case C_OMP_CLAUSE_SPLIT_FOR
:
2754 STATIC_ASSERT (C_OMP_CLAUSE_SPLIT_SECTIONS
2755 == C_OMP_CLAUSE_SPLIT_FOR
2756 && (C_OMP_CLAUSE_SPLIT_TASKLOOP
2757 == C_OMP_CLAUSE_SPLIT_FOR
)
2758 && (C_OMP_CLAUSE_SPLIT_LOOP
2759 == C_OMP_CLAUSE_SPLIT_FOR
));
2760 if (code
== OMP_SECTIONS
)
2762 if ((mask
& (OMP_CLAUSE_MASK_1
2763 << PRAGMA_OMP_CLAUSE_SCHEDULE
)) != 0)
2765 if ((mask
& (OMP_CLAUSE_MASK_1
2766 << PRAGMA_OMP_CLAUSE_NOGROUP
)) != 0)
2769 case C_OMP_CLAUSE_SPLIT_SIMD
:
2774 if (s
!= C_OMP_CLAUSE_SPLIT_COUNT
)
2776 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clauses
),
2777 OMP_CLAUSE_ALLOCATE
);
2779 = OMP_CLAUSE_DECL (clauses
);
2780 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
2781 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (clauses
);
2782 OMP_CLAUSE_ALLOCATE_ALIGN (c
)
2783 = OMP_CLAUSE_ALLOCATE_ALIGN (clauses
);
2784 OMP_CLAUSE_CHAIN (c
) = cclauses
[s
];
2786 has_dup_allocate
= true;
2788 s
= (enum c_omp_clause_split
) i
;
2790 gcc_assert (s
!= C_OMP_CLAUSE_SPLIT_COUNT
);
2795 OMP_CLAUSE_CHAIN (clauses
) = cclauses
[s
];
2796 cclauses
[s
] = clauses
;
2799 if (has_dup_allocate
)
2801 bool need_prune
= false;
2802 bitmap_obstack_initialize (NULL
);
2803 for (i
= 0; i
< C_OMP_CLAUSE_SPLIT_SIMD
- (code
== OMP_LOOP
); i
++)
2806 bitmap_head allocate_head
;
2807 bitmap_initialize (&allocate_head
, &bitmap_default_obstack
);
2808 for (c
= cclauses
[i
]; c
; c
= OMP_CLAUSE_CHAIN (c
))
2809 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
2810 && DECL_P (OMP_CLAUSE_DECL (c
)))
2811 bitmap_set_bit (&allocate_head
,
2812 DECL_UID (OMP_CLAUSE_DECL (c
)));
2813 for (c
= cclauses
[i
]; c
; c
= OMP_CLAUSE_CHAIN (c
))
2814 switch (OMP_CLAUSE_CODE (c
))
2816 case OMP_CLAUSE_REDUCTION
:
2817 case OMP_CLAUSE_IN_REDUCTION
:
2818 case OMP_CLAUSE_TASK_REDUCTION
:
2819 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
)
2821 tree t
= TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0);
2822 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
2823 t
= TREE_OPERAND (t
, 0);
2824 if (TREE_CODE (t
) == ADDR_EXPR
2825 || INDIRECT_REF_P (t
))
2826 t
= TREE_OPERAND (t
, 0);
2828 bitmap_clear_bit (&allocate_head
, DECL_UID (t
));
2831 else if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == TREE_LIST
)
2834 for (t
= OMP_CLAUSE_DECL (c
);
2835 TREE_CODE (t
) == TREE_LIST
; t
= TREE_CHAIN (t
))
2838 bitmap_clear_bit (&allocate_head
, DECL_UID (t
));
2842 case OMP_CLAUSE_PRIVATE
:
2843 case OMP_CLAUSE_FIRSTPRIVATE
:
2844 case OMP_CLAUSE_LASTPRIVATE
:
2845 case OMP_CLAUSE_LINEAR
:
2846 if (DECL_P (OMP_CLAUSE_DECL (c
)))
2847 bitmap_clear_bit (&allocate_head
,
2848 DECL_UID (OMP_CLAUSE_DECL (c
)));
2853 for (c
= cclauses
[i
]; c
; c
= OMP_CLAUSE_CHAIN (c
))
2854 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
2855 && DECL_P (OMP_CLAUSE_DECL (c
))
2856 && bitmap_bit_p (&allocate_head
,
2857 DECL_UID (OMP_CLAUSE_DECL (c
))))
2859 /* Mark allocate clauses which don't have corresponding
2860 explicit data sharing clause. */
2861 OMP_CLAUSE_ALLOCATE_COMBINED (c
) = 1;
2865 bitmap_obstack_release (NULL
);
2868 /* At least one allocate clause has been marked. Walk all the
2869 duplicated allocate clauses in sync. If it is marked in all
2870 constituent constructs, diagnose it as invalid and remove
2871 them. Otherwise, remove all marked inner clauses inside
2872 a construct that doesn't have them marked. Keep the outer
2873 marked ones, because some clause duplication is done only
2874 during gimplification. */
2875 tree
*p
[C_OMP_CLAUSE_SPLIT_COUNT
];
2876 for (i
= 0; i
< C_OMP_CLAUSE_SPLIT_COUNT
; i
++)
2877 if (cclauses
[i
] == NULL_TREE
2878 || i
== C_OMP_CLAUSE_SPLIT_SIMD
2879 || (i
== C_OMP_CLAUSE_SPLIT_LOOP
&& code
== OMP_LOOP
))
2882 p
[i
] = &cclauses
[i
];
2886 tree seen
= NULL_TREE
;
2887 for (i
= C_OMP_CLAUSE_SPLIT_COUNT
- 1; i
>= 0; i
--)
2891 && OMP_CLAUSE_CODE (*p
[i
]) != OMP_CLAUSE_ALLOCATE
)
2892 p
[i
] = &OMP_CLAUSE_CHAIN (*p
[i
]);
2893 if (*p
[i
] == NULL_TREE
)
2895 i
= C_OMP_CLAUSE_SPLIT_COUNT
;
2898 if (!OMP_CLAUSE_ALLOCATE_COMBINED (*p
[i
]) && j
== -1)
2902 if (i
== C_OMP_CLAUSE_SPLIT_COUNT
)
2905 error_at (OMP_CLAUSE_LOCATION (seen
),
2906 "%qD specified in %<allocate%> clause but not in "
2907 "an explicit privatization clause",
2908 OMP_CLAUSE_DECL (seen
));
2909 for (i
= 0; i
< C_OMP_CLAUSE_SPLIT_COUNT
; i
++)
2914 *p
[i
] = OMP_CLAUSE_CHAIN (*p
[i
]);
2917 p
[i
] = &OMP_CLAUSE_CHAIN (*p
[i
]);
2927 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_MAP
)) == 0)
2928 gcc_assert (cclauses
[C_OMP_CLAUSE_SPLIT_TARGET
] == NULL_TREE
);
2929 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_TEAMS
)) == 0)
2930 gcc_assert (cclauses
[C_OMP_CLAUSE_SPLIT_TEAMS
] == NULL_TREE
);
2931 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_DIST_SCHEDULE
)) == 0
2932 && (mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_FILTER
)) == 0)
2933 gcc_assert (cclauses
[C_OMP_CLAUSE_SPLIT_DISTRIBUTE
] == NULL_TREE
);
2934 if ((mask
& (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NUM_THREADS
)) == 0)
2935 gcc_assert (cclauses
[C_OMP_CLAUSE_SPLIT_PARALLEL
] == NULL_TREE
);
2936 if ((mask
& ((OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_SCHEDULE
)
2937 | (OMP_CLAUSE_MASK_1
<< PRAGMA_OMP_CLAUSE_NOGROUP
))) == 0
2938 && code
!= OMP_SECTIONS
2939 && code
!= OMP_LOOP
)
2940 gcc_assert (cclauses
[C_OMP_CLAUSE_SPLIT_FOR
] == NULL_TREE
);
2941 if (code
!= OMP_SIMD
)
2942 gcc_assert (cclauses
[C_OMP_CLAUSE_SPLIT_SIMD
] == NULL_TREE
);
2946 /* qsort callback to compare #pragma omp declare simd clauses. */
2949 c_omp_declare_simd_clause_cmp (const void *p
, const void *q
)
2951 tree a
= *(const tree
*) p
;
2952 tree b
= *(const tree
*) q
;
2953 if (OMP_CLAUSE_CODE (a
) != OMP_CLAUSE_CODE (b
))
2955 if (OMP_CLAUSE_CODE (a
) > OMP_CLAUSE_CODE (b
))
2959 if (OMP_CLAUSE_CODE (a
) != OMP_CLAUSE_SIMDLEN
2960 && OMP_CLAUSE_CODE (a
) != OMP_CLAUSE_INBRANCH
2961 && OMP_CLAUSE_CODE (a
) != OMP_CLAUSE_NOTINBRANCH
)
2963 int c
= tree_to_shwi (OMP_CLAUSE_DECL (a
));
2964 int d
= tree_to_shwi (OMP_CLAUSE_DECL (b
));
2973 /* Change PARM_DECLs in OMP_CLAUSE_DECL of #pragma omp declare simd
2974 CLAUSES on FNDECL into argument indexes and sort them. */
2977 c_omp_declare_simd_clauses_to_numbers (tree parms
, tree clauses
)
2980 vec
<tree
> clvec
= vNULL
;
2982 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
2984 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_SIMDLEN
2985 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_INBRANCH
2986 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_NOTINBRANCH
)
2988 tree decl
= OMP_CLAUSE_DECL (c
);
2991 for (arg
= parms
, idx
= 0; arg
;
2992 arg
= TREE_CHAIN (arg
), idx
++)
2995 if (arg
== NULL_TREE
)
2997 error_at (OMP_CLAUSE_LOCATION (c
),
2998 "%qD is not a function argument", decl
);
3001 OMP_CLAUSE_DECL (c
) = build_int_cst (integer_type_node
, idx
);
3002 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3003 && OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c
))
3005 decl
= OMP_CLAUSE_LINEAR_STEP (c
);
3006 for (arg
= parms
, idx
= 0; arg
;
3007 arg
= TREE_CHAIN (arg
), idx
++)
3010 if (arg
== NULL_TREE
)
3012 error_at (OMP_CLAUSE_LOCATION (c
),
3013 "%qD is not a function argument", decl
);
3016 OMP_CLAUSE_LINEAR_STEP (c
)
3017 = build_int_cst (integer_type_node
, idx
);
3020 clvec
.safe_push (c
);
3022 if (!clvec
.is_empty ())
3024 unsigned int len
= clvec
.length (), i
;
3025 clvec
.qsort (c_omp_declare_simd_clause_cmp
);
3027 for (i
= 0; i
< len
; i
++)
3028 OMP_CLAUSE_CHAIN (clvec
[i
]) = (i
< len
- 1) ? clvec
[i
+ 1] : NULL_TREE
;
3031 clauses
= NULL_TREE
;
3036 /* Change argument indexes in CLAUSES of FNDECL back to PARM_DECLs. */
3039 c_omp_declare_simd_clauses_to_decls (tree fndecl
, tree clauses
)
3043 for (c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3044 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_SIMDLEN
3045 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_INBRANCH
3046 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_NOTINBRANCH
)
3048 int idx
= tree_to_shwi (OMP_CLAUSE_DECL (c
)), i
;
3050 for (arg
= DECL_ARGUMENTS (fndecl
), i
= 0; arg
;
3051 arg
= TREE_CHAIN (arg
), i
++)
3055 OMP_CLAUSE_DECL (c
) = arg
;
3056 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
3057 && OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c
))
3059 idx
= tree_to_shwi (OMP_CLAUSE_LINEAR_STEP (c
));
3060 for (arg
= DECL_ARGUMENTS (fndecl
), i
= 0; arg
;
3061 arg
= TREE_CHAIN (arg
), i
++)
3065 OMP_CLAUSE_LINEAR_STEP (c
) = arg
;
3070 /* Return true for __func__ and similar function-local predefined
3071 variables (which are in OpenMP predetermined shared, allowed in
3072 shared/firstprivate clauses). */
3075 c_omp_predefined_variable (tree decl
)
3078 && DECL_ARTIFICIAL (decl
)
3079 && TREE_STATIC (decl
)
3080 && DECL_NAME (decl
))
3082 if (TREE_READONLY (decl
)
3083 && (DECL_NAME (decl
) == ridpointers
[RID_C99_FUNCTION_NAME
]
3084 || DECL_NAME (decl
) == ridpointers
[RID_FUNCTION_NAME
]
3085 || DECL_NAME (decl
) == ridpointers
[RID_PRETTY_FUNCTION_NAME
]))
3087 /* For UBSan handle the same also ubsan_create_data created
3088 variables. There is no magic flag for those, but user variables
3089 shouldn't be DECL_ARTIFICIAL or have TYPE_ARTIFICIAL type with
3091 if ((flag_sanitize
& (SANITIZE_UNDEFINED
3092 | SANITIZE_UNDEFINED_NONDEFAULT
)) != 0
3093 && DECL_IGNORED_P (decl
)
3094 && !TREE_READONLY (decl
)
3095 && TREE_CODE (DECL_NAME (decl
)) == IDENTIFIER_NODE
3096 && TREE_CODE (TREE_TYPE (decl
)) == RECORD_TYPE
3097 && TYPE_ARTIFICIAL (TREE_TYPE (decl
))
3098 && TYPE_NAME (TREE_TYPE (decl
))
3099 && TREE_CODE (TYPE_NAME (TREE_TYPE (decl
))) == TYPE_DECL
3100 && DECL_NAME (TYPE_NAME (TREE_TYPE (decl
)))
3101 && (TREE_CODE (DECL_NAME (TYPE_NAME (TREE_TYPE (decl
))))
3102 == IDENTIFIER_NODE
))
3104 tree id1
= DECL_NAME (decl
);
3105 tree id2
= DECL_NAME (TYPE_NAME (TREE_TYPE (decl
)));
3106 if (IDENTIFIER_LENGTH (id1
) >= sizeof ("ubsan_data") - 1
3107 && IDENTIFIER_LENGTH (id2
) >= sizeof ("__ubsan__data")
3108 && !memcmp (IDENTIFIER_POINTER (id2
), "__ubsan_",
3109 sizeof ("__ubsan_") - 1)
3110 && !memcmp (IDENTIFIER_POINTER (id2
) + IDENTIFIER_LENGTH (id2
)
3111 - sizeof ("_data") + 1, "_data",
3112 sizeof ("_data") - 1)
3113 && strstr (IDENTIFIER_POINTER (id1
), "ubsan_data"))
3120 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute of DECL
3121 is predetermined. */
3123 enum omp_clause_default_kind
3124 c_omp_predetermined_sharing (tree decl
)
3126 /* Predetermine artificial variables holding integral values, those
3127 are usually result of gimplify_one_sizepos or SAVE_EXPR
3130 && DECL_ARTIFICIAL (decl
)
3131 && INTEGRAL_TYPE_P (TREE_TYPE (decl
)))
3132 return OMP_CLAUSE_DEFAULT_SHARED
;
3134 if (c_omp_predefined_variable (decl
))
3135 return OMP_CLAUSE_DEFAULT_SHARED
;
3137 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
3140 /* OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED unless OpenMP mapping attribute
3141 of DECL is predetermined. */
3143 enum omp_clause_defaultmap_kind
3144 c_omp_predetermined_mapping (tree decl
)
3146 /* Predetermine artificial variables holding integral values, those
3147 are usually result of gimplify_one_sizepos or SAVE_EXPR
3150 && DECL_ARTIFICIAL (decl
)
3151 && INTEGRAL_TYPE_P (TREE_TYPE (decl
)))
3152 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
;
3154 if (c_omp_predefined_variable (decl
))
3155 return OMP_CLAUSE_DEFAULTMAP_TO
;
3157 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
;
3161 /* Used to merge map clause information in c_omp_adjust_map_clauses. */
3165 bool firstprivate_ptr_p
;
3167 bool omp_declare_target
;
3168 map_clause (void) : clause (NULL_TREE
), firstprivate_ptr_p (false),
3169 decl_mapped (false), omp_declare_target (false) { }
3172 /* Adjust map clauses after normal clause parsing, mainly to mark specific
3173 base-pointer map cases addressable that may be turned into attach/detach
3174 operations during gimplification. */
3176 c_omp_adjust_map_clauses (tree clauses
, bool is_target
)
3180 /* If this is not a target construct, just turn firstprivate pointers
3181 into attach/detach, the runtime will check and do the rest. */
3183 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3184 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
3185 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
3186 && DECL_P (OMP_CLAUSE_DECL (c
))
3187 && POINTER_TYPE_P (TREE_TYPE (OMP_CLAUSE_DECL (c
))))
3189 tree ptr
= OMP_CLAUSE_DECL (c
);
3190 c_common_mark_addressable_vec (ptr
);
3195 hash_map
<tree
, map_clause
> maps
;
3197 for (tree c
= clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
3198 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
3199 && DECL_P (OMP_CLAUSE_DECL (c
)))
3201 /* If this is for a target construct, the firstprivate pointer
3202 is marked addressable if either is true:
3203 (1) the base-pointer is mapped in this same construct, or
3204 (2) the base-pointer is a variable place on the device by
3205 "declare target" directives.
3207 Here we iterate through all map clauses collecting these cases,
3208 and merge them with a hash_map to process below. */
3210 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
3211 && POINTER_TYPE_P (TREE_TYPE (OMP_CLAUSE_DECL (c
))))
3213 tree ptr
= OMP_CLAUSE_DECL (c
);
3214 map_clause
&mc
= maps
.get_or_insert (ptr
);
3215 if (mc
.clause
== NULL_TREE
)
3217 mc
.firstprivate_ptr_p
= true;
3219 if (is_global_var (ptr
)
3220 && lookup_attribute ("omp declare target",
3221 DECL_ATTRIBUTES (ptr
)))
3222 mc
.omp_declare_target
= true;
3224 else if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALLOC
3225 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO
3226 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FROM
3227 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TOFROM
3228 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
3229 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_FROM
3230 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
3232 map_clause
&mc
= maps
.get_or_insert (OMP_CLAUSE_DECL (c
));
3233 mc
.decl_mapped
= true;
3237 for (hash_map
<tree
, map_clause
>::iterator i
= maps
.begin ();
3238 i
!= maps
.end (); ++i
)
3240 map_clause
&mc
= (*i
).second
;
3242 if (mc
.firstprivate_ptr_p
3243 && (mc
.decl_mapped
|| mc
.omp_declare_target
))
3244 c_common_mark_addressable_vec (OMP_CLAUSE_DECL (mc
.clause
));
3248 /* Maybe strip off an indirection from a "converted" reference, then find the
3249 origin of a pointer (i.e. without any offset). */
3252 c_omp_address_inspector::unconverted_ref_origin ()
3256 /* We may have a reference-typed component access at the outermost level
3257 that has had convert_from_reference called on it. Get the un-dereferenced
3258 reference itself. */
3259 t
= maybe_unconvert_ref (t
);
3261 /* Find base pointer for POINTER_PLUS_EXPR, etc. */
3267 /* Return TRUE if the address is a component access. */
3270 c_omp_address_inspector::component_access_p ()
3272 tree t
= maybe_unconvert_ref (orig
);
3276 return TREE_CODE (t
) == COMPONENT_REF
;
3279 /* Perform various checks on the address, as described by clause CLAUSE (we
3280 only use its code and location here). */
3283 c_omp_address_inspector::check_clause (tree clause
)
3285 tree t
= unconverted_ref_origin ();
3287 if (TREE_CODE (t
) != COMPONENT_REF
)
3290 if (TREE_CODE (TREE_OPERAND (t
, 1)) == FIELD_DECL
3291 && DECL_BIT_FIELD (TREE_OPERAND (t
, 1)))
3293 error_at (OMP_CLAUSE_LOCATION (clause
),
3294 "bit-field %qE in %qs clause",
3295 t
, omp_clause_code_name
[OMP_CLAUSE_CODE (clause
)]);
3298 else if (!processing_template_decl_p ()
3299 && !omp_mappable_type (TREE_TYPE (t
)))
3301 error_at (OMP_CLAUSE_LOCATION (clause
),
3302 "%qE does not have a mappable type in %qs clause",
3303 t
, omp_clause_code_name
[OMP_CLAUSE_CODE (clause
)]);
3304 emit_unmappable_type_notes (TREE_TYPE (t
));
3307 else if (TREE_TYPE (t
) && TYPE_ATOMIC (TREE_TYPE (t
)))
3309 error_at (OMP_CLAUSE_LOCATION (clause
),
3310 "%<_Atomic%> %qE in %qs clause", t
,
3311 omp_clause_code_name
[OMP_CLAUSE_CODE (clause
)]);
3318 /* Find the "root term" for the address. This is the innermost decl, etc.
3322 c_omp_address_inspector::get_root_term (bool checking
)
3324 if (root_term
&& !checking
)
3327 tree t
= unconverted_ref_origin ();
3329 while (TREE_CODE (t
) == COMPONENT_REF
)
3332 && TREE_TYPE (TREE_OPERAND (t
, 0))
3333 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t
, 0))) == UNION_TYPE
)
3335 error_at (loc
, "%qE is a member of a union", t
);
3336 return error_mark_node
;
3338 t
= TREE_OPERAND (t
, 0);
3339 while (TREE_CODE (t
) == MEM_REF
3340 || TREE_CODE (t
) == INDIRECT_REF
3341 || TREE_CODE (t
) == ARRAY_REF
)
3343 if (TREE_CODE (t
) == MEM_REF
3344 || TREE_CODE (t
) == INDIRECT_REF
)
3345 indirections
= true;
3346 t
= TREE_OPERAND (t
, 0);
3348 if (TREE_CODE (t
) == POINTER_PLUS_EXPR
)
3349 t
= TREE_OPERAND (t
, 0);
3358 /* Return TRUE if the address is supported in mapping clauses. At present,
3359 this means that the innermost expression is a DECL_P, but could be extended
3360 to other types of expression in the future. */
3363 c_omp_address_inspector::map_supported_p ()
3365 /* If we've already decided if the mapped address is supported, return
3367 if (map_supported
!= -1)
3368 return map_supported
;
3370 tree t
= unconverted_ref_origin ();
3374 while (TREE_CODE (t
) == INDIRECT_REF
3375 || TREE_CODE (t
) == MEM_REF
3376 || TREE_CODE (t
) == ARRAY_REF
3377 || TREE_CODE (t
) == COMPONENT_REF
3378 || TREE_CODE (t
) == COMPOUND_EXPR
3379 || TREE_CODE (t
) == SAVE_EXPR
3380 || TREE_CODE (t
) == POINTER_PLUS_EXPR
3381 || TREE_CODE (t
) == NON_LVALUE_EXPR
3382 || TREE_CODE (t
) == NOP_EXPR
)
3383 if (TREE_CODE (t
) == COMPOUND_EXPR
)
3384 t
= TREE_OPERAND (t
, 1);
3386 t
= TREE_OPERAND (t
, 0);
3390 map_supported
= DECL_P (t
);
3392 return map_supported
;
3395 /* Get the origin of an address T, stripping off offsets and some other
3399 c_omp_address_inspector::get_origin (tree t
)
3403 if (TREE_CODE (t
) == COMPOUND_EXPR
)
3405 t
= TREE_OPERAND (t
, 1);
3408 else if (TREE_CODE (t
) == POINTER_PLUS_EXPR
3409 || TREE_CODE (t
) == SAVE_EXPR
)
3410 t
= TREE_OPERAND (t
, 0);
3411 else if (TREE_CODE (t
) == INDIRECT_REF
3412 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t
, 0))) == REFERENCE_TYPE
)
3413 t
= TREE_OPERAND (t
, 0);
3421 /* For an address T that might be a reference that has had
3422 "convert_from_reference" called on it, return the actual reference without
3426 c_omp_address_inspector::maybe_unconvert_ref (tree t
)
3428 if (TREE_CODE (t
) == INDIRECT_REF
3429 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t
, 0))) == REFERENCE_TYPE
)
3430 return TREE_OPERAND (t
, 0);
3435 /* Return TRUE if CLAUSE might describe a zero-length array section. */
3438 c_omp_address_inspector::maybe_zero_length_array_section (tree clause
)
3440 switch (OMP_CLAUSE_MAP_KIND (clause
))
3442 case GOMP_MAP_ALLOC
:
3443 case GOMP_MAP_IF_PRESENT
:
3446 case GOMP_MAP_TOFROM
:
3447 case GOMP_MAP_ALWAYS_TO
:
3448 case GOMP_MAP_ALWAYS_FROM
:
3449 case GOMP_MAP_ALWAYS_TOFROM
:
3450 case GOMP_MAP_PRESENT_ALLOC
:
3451 case GOMP_MAP_PRESENT_TO
:
3452 case GOMP_MAP_PRESENT_FROM
:
3453 case GOMP_MAP_PRESENT_TOFROM
:
3454 case GOMP_MAP_ALWAYS_PRESENT_TO
:
3455 case GOMP_MAP_ALWAYS_PRESENT_FROM
:
3456 case GOMP_MAP_ALWAYS_PRESENT_TOFROM
:
3457 case GOMP_MAP_RELEASE
:
3458 case GOMP_MAP_DELETE
:
3459 case GOMP_MAP_FORCE_TO
:
3460 case GOMP_MAP_FORCE_FROM
:
3461 case GOMP_MAP_FORCE_TOFROM
:
3462 case GOMP_MAP_FORCE_PRESENT
:
3469 /* Expand a chained access. We only expect to see a quite limited range of
3470 expression types here, because e.g. you can't have an array of
/* Expand one link of a chained access for clause C: choose GOMP_MAP_DETACH
   for "data out / delete" directions (exit data, "from" clauses, and the
   FROM/DELETE/RELEASE map-kind family) and GOMP_MAP_ATTACH otherwise, then
   build a new map clause C2 for the accessed pointer and splice it into
   the clause chain after C.  Recurses while the next token is another
   ACCESS_METHOD.
   NOTE(review): lossy extraction -- the function braces, the 'kind'/'i'
   declarations, several statements inside the switch cases, and the final
   return are missing from this chunk; treat as read-only context.  */
3474 omp_expand_access_chain (tree c
, tree expr
, vec
<omp_addr_token
*> &addr_tokens
,
3475 unsigned *idx
, c_omp_region_type ort
)
3477 using namespace omp_addr_tokenizer
;
3478 location_t loc
= OMP_CLAUSE_LOCATION (c
);
3480 tree c2
= NULL_TREE
;
/* Direction test: exit-data regions and all "from"-like or
   delete/release map kinds detach rather than attach.  */
3483 if ((ort
& C_ORT_EXIT_DATA
) != 0
3484 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FROM
3485 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
3486 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FROM
3487 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DELETE
3488 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_RELEASE
3489 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_FROM
3490 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FORCE_FROM
3491 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_PRESENT_FROM
3492 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_PRESENT_FROM
)))
3493 kind
= GOMP_MAP_DETACH
;
3495 kind
= GOMP_MAP_ATTACH
;
3497 switch (addr_tokens
[i
]->u
.access_kind
)
3499 case ACCESS_POINTER
:
3500 case ACCESS_POINTER_OFFSET
:
/* Build the attach/detach clause: DECL is the pointer expression, SIZE
   is the byte offset of the accessed address from the pointer value.  */
3503 = fold_convert_loc (loc
, ptrdiff_type_node
, addr_tokens
[i
]->expr
);
3504 tree data_addr
= omp_accessed_addr (addr_tokens
, i
, expr
);
3505 c2
= build_omp_clause (loc
, OMP_CLAUSE_MAP
);
3506 OMP_CLAUSE_SET_MAP_KIND (c2
, kind
);
3507 OMP_CLAUSE_DECL (c2
) = addr_tokens
[i
]->expr
;
3508 OMP_CLAUSE_SIZE (c2
)
3509 = fold_build2_loc (loc
, MINUS_EXPR
, ptrdiff_type_node
,
3510 fold_convert_loc (loc
, ptrdiff_type_node
,
3516 case ACCESS_INDEXED_ARRAY
:
3520 return error_mark_node
;
/* Splice the new clause into the chain directly after C.  */
3525 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (c
);
3526 OMP_CLAUSE_CHAIN (c
) = c2
;
/* Recurse while further ACCESS_METHOD tokens remain.  */
3532 if (i
< addr_tokens
.length ()
3533 && addr_tokens
[i
]->type
== ACCESS_METHOD
)
3534 return omp_expand_access_chain (c
, expr
, addr_tokens
, idx
, ort
);
3539 /* Translate "array_base_decl access_method" to OMP mapping clauses. */
/* Expand an array-base token pair (tokens [i] and [i+1]) of clause C into
   a group of mapping clauses, dispatching on the access kind of token
   i+1: direct/ref access, indexed arrays, pointers and references to
   pointers each get their own clause shapes (FIRSTPRIVATE_POINTER/
   _REFERENCE, GOMP_MAP_POINTER, ATTACH_DETACH, ALLOC).  Consumes
   CONSUME_TOKENS tokens via *IDX and tail-calls omp_expand_access_chain
   when a chained access follows.
   NOTE(review): lossy extraction -- function braces, the 'i' declaration,
   many if/else and brace lines, and parts of several fold_build2_loc
   argument lists are missing from this chunk.  The comments below only
   annotate what is visibly present; consult the full file before any
   logic change.  */
3542 c_omp_address_inspector::expand_array_base (tree c
,
3543 vec
<omp_addr_token
*> &addr_tokens
,
3544 tree expr
, unsigned *idx
,
3545 c_omp_region_type ort
)
3547 using namespace omp_addr_tokenizer
;
3548 location_t loc
= OMP_CLAUSE_LOCATION (c
);
/* Gather facts about the base decl once, up front.  */
3550 tree decl
= addr_tokens
[i
+ 1]->expr
;
3551 bool decl_p
= DECL_P (decl
);
3552 bool declare_target_p
= (decl_p
3553 && is_global_var (decl
)
3554 && lookup_attribute ("omp declare target",
3555 DECL_ATTRIBUTES (decl
)));
3556 bool map_p
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
;
3557 bool implicit_p
= map_p
&& OMP_CLAUSE_MAP_IMPLICIT (c
);
3558 bool chain_p
= omp_access_chain_p (addr_tokens
, i
+ 1);
3559 tree c2
= NULL_TREE
, c3
= NULL_TREE
;
3560 unsigned consume_tokens
= 2;
3561 bool target_p
= (ort
& C_ORT_TARGET
) != 0;
3562 bool openmp_p
= (ort
& C_ORT_OMP
) != 0;
3564 gcc_assert (i
== 0);
3568 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
3569 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
))
/* Dispatch on how the base is accessed.  */
3576 switch (addr_tokens
[i
+ 1]->u
.access_kind
)
3579 if (decl_p
&& !target_p
)
3580 c_common_mark_addressable_vec (addr_tokens
[i
+ 1]->expr
);
3585 /* Copy the referenced object. Note that we do this even for !MAP_P
3587 tree obj
= convert_from_reference (addr_tokens
[i
+ 1]->expr
);
3588 if (TREE_CODE (TREE_TYPE (obj
)) == ARRAY_TYPE
)
3589 /* We have a ref to array: add a [0] element as the ME expects. */
3590 OMP_CLAUSE_DECL (c
) = build_array_ref (loc
, obj
, integer_zero_node
);
3592 OMP_CLAUSE_DECL (c
) = obj
;
3593 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (obj
));
3598 c_common_mark_addressable_vec (addr_tokens
[i
+ 1]->expr
);
3605 /* If we have a reference to a pointer, avoid using
3606 FIRSTPRIVATE_REFERENCE here in case the pointer is modified in the
3607 offload region (we can only do that if the pointer does not point
3608 to a mapped block). We could avoid doing this if we don't have a
3610 bool ref_to_ptr
= TREE_CODE (TREE_TYPE (obj
)) == POINTER_TYPE
;
3612 c2
= build_omp_clause (loc
, OMP_CLAUSE_MAP
);
3614 && !declare_target_p
3616 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
3619 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ATTACH_DETACH
);
3621 c_common_mark_addressable_vec (addr_tokens
[i
+ 1]->expr
);
3623 OMP_CLAUSE_DECL (c2
) = addr_tokens
[i
+ 1]->expr
;
3624 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
/* Alternative shape: allocate storage for the object itself.  */
3629 c2
= build_omp_clause (loc
, OMP_CLAUSE_MAP
);
3630 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ALLOC
);
3631 OMP_CLAUSE_DECL (c2
) = addr_tokens
[i
+ 1]->expr
;
3632 OMP_CLAUSE_SIZE (c2
)
3633 = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (c2
)));
3638 case ACCESS_INDEXED_REF_TO_ARRAY
:
3643 c_common_mark_addressable_vec (addr_tokens
[i
+ 1]->expr
);
/* Compute the "virtual origin" (address of the dereferenced array) and
   the accessed address, so SIZE can be their ptrdiff difference.  */
3651 = convert_from_reference (addr_tokens
[i
+ 1]->expr
);
3652 virtual_origin
= build_fold_addr_expr (virtual_origin
);
3653 virtual_origin
= fold_convert_loc (loc
, ptrdiff_type_node
,
3655 tree data_addr
= omp_accessed_addr (addr_tokens
, i
+ 1, expr
);
3656 c2
= build_omp_clause (loc
, OMP_CLAUSE_MAP
);
3657 if (decl_p
&& target_p
&& !declare_target_p
)
3659 /* It appears that omp-low.cc mishandles cases where we have a
3660 [reference to an] array of pointers such as:
3662 int *arr[N]; (or "int *(&arr)[N] = ...")
3663 #pragma omp target map(arr[a][b:c])
3666 in such cases chain_p will be true. For now, fall back to
3667 GOMP_MAP_POINTER. */
3668 enum gomp_map_kind k
= chain_p
? GOMP_MAP_POINTER
3669 : GOMP_MAP_FIRSTPRIVATE_REFERENCE
;
3670 OMP_CLAUSE_SET_MAP_KIND (c2
, k
);
3675 c_common_mark_addressable_vec (addr_tokens
[i
+ 1]->expr
);
3676 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ATTACH_DETACH
);
3678 OMP_CLAUSE_DECL (c2
) = addr_tokens
[i
+ 1]->expr
;
3679 OMP_CLAUSE_SIZE (c2
)
3680 = fold_build2_loc (loc
, MINUS_EXPR
, ptrdiff_type_node
,
3681 fold_convert_loc (loc
, ptrdiff_type_node
,
3687 case ACCESS_INDEXED_ARRAY
:
3692 c_common_mark_addressable_vec (addr_tokens
[i
+ 1]->expr
);
3696 /* The code handling "firstprivatize_array_bases" in gimplify.cc is
3697 relevant here. What do we need to create for arrays at this
3698 stage? (This condition doesn't feel quite right. FIXME?) */
3700 && (TREE_CODE (TREE_TYPE (addr_tokens
[i
+ 1]->expr
))
/* Same virtual-origin/data-addr scheme as the ref-to-array case, but
   the origin is the address of the array itself.  */
3705 = build_fold_addr_expr (addr_tokens
[i
+ 1]->expr
);
3706 virtual_origin
= fold_convert_loc (loc
, ptrdiff_type_node
,
3708 tree data_addr
= omp_accessed_addr (addr_tokens
, i
+ 1, expr
);
3709 c2
= build_omp_clause (loc
, OMP_CLAUSE_MAP
);
3710 if (decl_p
&& target_p
)
3712 /* See comment for ACCESS_INDEXED_REF_TO_ARRAY above. */
3713 enum gomp_map_kind k
= chain_p
? GOMP_MAP_POINTER
3714 : GOMP_MAP_FIRSTPRIVATE_POINTER
;
3715 OMP_CLAUSE_SET_MAP_KIND (c2
, k
);
3720 c_common_mark_addressable_vec (addr_tokens
[i
+ 1]->expr
);
3721 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ATTACH_DETACH
);
3723 OMP_CLAUSE_DECL (c2
) = addr_tokens
[i
+ 1]->expr
;
3724 OMP_CLAUSE_SIZE (c2
)
3725 = fold_build2_loc (loc
, MINUS_EXPR
, ptrdiff_type_node
,
3726 fold_convert_loc (loc
, ptrdiff_type_node
,
3732 case ACCESS_POINTER
:
3733 case ACCESS_POINTER_OFFSET
:
3738 c_common_mark_addressable_vec (addr_tokens
[i
+ 1]->expr
);
3742 unsigned last_access
= i
+ 1;
3743 tree virtual_origin
;
/* A pointer indexed through an array-of-pointers: fold token i+2 into
   this expansion and consume it too.  */
3746 && addr_tokens
[i
+ 2]->type
== ACCESS_METHOD
3747 && addr_tokens
[i
+ 2]->u
.access_kind
== ACCESS_INDEXED_ARRAY
)
3749 /* !!! This seems wrong for ACCESS_POINTER_OFFSET. */
3751 chain_p
= omp_access_chain_p (addr_tokens
, i
+ 2);
3752 last_access
= i
+ 2;
3754 = build_array_ref (loc
, addr_tokens
[last_access
]->expr
,
3756 virtual_origin
= build_fold_addr_expr (virtual_origin
);
3757 virtual_origin
= fold_convert_loc (loc
, ptrdiff_type_node
,
3761 virtual_origin
= fold_convert_loc (loc
, ptrdiff_type_node
,
3762 addr_tokens
[last_access
]->expr
);
3763 tree data_addr
= omp_accessed_addr (addr_tokens
, last_access
, expr
);
3764 c2
= build_omp_clause (loc
, OMP_CLAUSE_MAP
);
3765 /* For OpenACC, use FIRSTPRIVATE_POINTER for decls even on non-compute
3766 regions (e.g. "acc data" constructs). It'll be removed anyway in
3767 gimplify.cc, but doing it this way maintains diagnostic
3769 if (decl_p
&& (target_p
|| !openmp_p
) && !chain_p
&& !declare_target_p
)
3770 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
3774 c_common_mark_addressable_vec (addr_tokens
[i
+ 1]->expr
);
3775 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ATTACH_DETACH
);
3777 OMP_CLAUSE_DECL (c2
) = addr_tokens
[i
+ 1]->expr
;
3778 OMP_CLAUSE_SIZE (c2
)
3779 = fold_build2_loc (loc
, MINUS_EXPR
, ptrdiff_type_node
,
3780 fold_convert_loc (loc
, ptrdiff_type_node
,
3786 case ACCESS_REF_TO_POINTER
:
3787 case ACCESS_REF_TO_POINTER_OFFSET
:
3792 c_common_mark_addressable_vec (addr_tokens
[i
+ 1]->expr
);
3796 unsigned last_access
= i
+ 1;
3797 tree virtual_origin
;
3800 && addr_tokens
[i
+ 2]->type
== ACCESS_METHOD
3801 && addr_tokens
[i
+ 2]->u
.access_kind
== ACCESS_INDEXED_ARRAY
)
3803 /* !!! This seems wrong for ACCESS_POINTER_OFFSET. */
3805 chain_p
= omp_access_chain_p (addr_tokens
, i
+ 2);
3806 last_access
= i
+ 2;
3808 = build_array_ref (loc
, addr_tokens
[last_access
]->expr
,
3810 virtual_origin
= build_fold_addr_expr (virtual_origin
);
3811 virtual_origin
= fold_convert_loc (loc
, ptrdiff_type_node
,
/* Reference-to-pointer: the origin is the converted (dereferenced)
   reference value.  */
3817 = convert_from_reference (addr_tokens
[last_access
]->expr
);
3818 virtual_origin
= fold_convert_loc (loc
, ptrdiff_type_node
,
3822 tree data_addr
= omp_accessed_addr (addr_tokens
, last_access
, expr
);
3823 c2
= build_omp_clause (loc
, OMP_CLAUSE_MAP
);
3824 if (decl_p
&& target_p
&& !chain_p
&& !declare_target_p
)
3826 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
3827 OMP_CLAUSE_DECL (c2
) = addr_tokens
[i
+ 1]->expr
;
3832 c_common_mark_addressable_vec (addr_tokens
[i
+ 1]->expr
);
3833 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ATTACH_DETACH
);
3834 OMP_CLAUSE_DECL (c2
)
3835 = convert_from_reference (addr_tokens
[i
+ 1]->expr
);
3837 OMP_CLAUSE_SIZE (c2
)
3838 = fold_build2_loc (loc
, MINUS_EXPR
, ptrdiff_type_node
,
3839 fold_convert_loc (loc
, ptrdiff_type_node
,
/* Unhandled access kind: report consumed tokens, signal failure.  */
3846 *idx
= i
+ consume_tokens
;
3847 return error_mark_node
;
/* Splice the one or two generated clauses (C2, optionally C3) into the
   chain after C, propagating the implicit flag when set on C.  */
3852 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c
);
3853 OMP_CLAUSE_CHAIN (c2
) = c3
;
3854 OMP_CLAUSE_CHAIN (c
) = c2
;
3857 OMP_CLAUSE_MAP_IMPLICIT (c2
) = 1;
3858 OMP_CLAUSE_MAP_IMPLICIT (c3
) = 1;
3864 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (c
);
3865 OMP_CLAUSE_CHAIN (c
) = c2
;
3867 OMP_CLAUSE_MAP_IMPLICIT (c2
) = 1;
3871 i
+= consume_tokens
;
/* A chained access on a map clause continues expansion recursively.  */
3874 if (chain_p
&& map_p
)
3875 return omp_expand_access_chain (c
, expr
, addr_tokens
, idx
, ort
);
3880 /* Translate "component_selector access_method" to OMP mapping clauses. */
/* Expand a component-selector token pair (tokens [i] and [i+1]) of clause
   C, dispatching on the access kind of token i+1.  Builds attach/detach
   clause C2 (and, for ref-to-pointer, a second attach/detach clause C3
   for the reference) and splices them into the chain after C.
   NOTE(review): lossy extraction -- function braces, the 'i' declaration,
   default case, parts of fold_build2_loc argument lists and the final
   token-advance are missing from this chunk; treat as read-only context.  */
3883 c_omp_address_inspector::expand_component_selector (tree c
,
3884 vec
<omp_addr_token
*>
3886 tree expr
, unsigned *idx
,
3887 c_omp_region_type ort
)
3889 using namespace omp_addr_tokenizer
;
3890 location_t loc
= OMP_CLAUSE_LOCATION (c
);
3892 tree c2
= NULL_TREE
, c3
= NULL_TREE
;
3893 bool chain_p
= omp_access_chain_p (addr_tokens
, i
+ 1);
3894 bool map_p
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
;
3896 switch (addr_tokens
[i
+ 1]->u
.access_kind
)
3899 case ACCESS_INDEXED_ARRAY
:
3904 /* Copy the referenced object. Note that we also do this for !MAP_P
3906 tree obj
= convert_from_reference (addr_tokens
[i
+ 1]->expr
);
3907 OMP_CLAUSE_DECL (c
) = obj
;
3908 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (obj
));
/* Zero-size attach/detach node for the reference itself.  */
3913 c2
= build_omp_clause (loc
, OMP_CLAUSE_MAP
);
3914 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ATTACH_DETACH
);
3915 OMP_CLAUSE_DECL (c2
) = addr_tokens
[i
+ 1]->expr
;
3916 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
3920 case ACCESS_INDEXED_REF_TO_ARRAY
:
/* Virtual origin = address of the dereferenced array; SIZE is the
   ptrdiff offset of the accessed address from that origin.  */
3926 = convert_from_reference (addr_tokens
[i
+ 1]->expr
);
3927 virtual_origin
= build_fold_addr_expr (virtual_origin
);
3928 virtual_origin
= fold_convert_loc (loc
, ptrdiff_type_node
,
3930 tree data_addr
= omp_accessed_addr (addr_tokens
, i
+ 1, expr
);
3932 c2
= build_omp_clause (loc
, OMP_CLAUSE_MAP
);
3933 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ATTACH_DETACH
);
3934 OMP_CLAUSE_DECL (c2
) = addr_tokens
[i
+ 1]->expr
;
3935 OMP_CLAUSE_SIZE (c2
)
3936 = fold_build2_loc (loc
, MINUS_EXPR
, ptrdiff_type_node
,
3937 fold_convert_loc (loc
, ptrdiff_type_node
,
3943 case ACCESS_POINTER
:
3944 case ACCESS_POINTER_OFFSET
:
3950 = fold_convert_loc (loc
, ptrdiff_type_node
,
3951 addr_tokens
[i
+ 1]->expr
);
3952 tree data_addr
= omp_accessed_addr (addr_tokens
, i
+ 1, expr
);
3954 c2
= build_omp_clause (loc
, OMP_CLAUSE_MAP
);
3955 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ATTACH_DETACH
);
3956 OMP_CLAUSE_DECL (c2
) = addr_tokens
[i
+ 1]->expr
;
3957 OMP_CLAUSE_SIZE (c2
)
3958 = fold_build2_loc (loc
, MINUS_EXPR
, ptrdiff_type_node
,
3959 fold_convert_loc (loc
, ptrdiff_type_node
,
3965 case ACCESS_REF_TO_POINTER
:
3966 case ACCESS_REF_TO_POINTER_OFFSET
:
3971 tree ptr
= convert_from_reference (addr_tokens
[i
+ 1]->expr
);
3972 tree virtual_origin
= fold_convert_loc (loc
, ptrdiff_type_node
,
3974 tree data_addr
= omp_accessed_addr (addr_tokens
, i
+ 1, expr
);
3976 /* Attach the pointer... */
3977 c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
3978 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ATTACH_DETACH
);
3979 OMP_CLAUSE_DECL (c2
) = ptr
;
3980 OMP_CLAUSE_SIZE (c2
)
3981 = fold_build2_loc (loc
, MINUS_EXPR
, ptrdiff_type_node
,
3982 fold_convert_loc (loc
, ptrdiff_type_node
,
3986 /* ...and also the reference. */
3987 c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
3988 OMP_CLAUSE_SET_MAP_KIND (c3
, GOMP_MAP_ATTACH_DETACH
);
3989 OMP_CLAUSE_DECL (c3
) = addr_tokens
[i
+ 1]->expr
;
3990 OMP_CLAUSE_SIZE (c3
) = size_zero_node
;
3996 return error_mark_node
;
/* Splice C2 (and C3 when present) into the chain after C.  */
4001 OMP_CLAUSE_CHAIN (c3
) = OMP_CLAUSE_CHAIN (c
);
4002 OMP_CLAUSE_CHAIN (c2
) = c3
;
4003 OMP_CLAUSE_CHAIN (c
) = c2
;
4008 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (c
);
4009 OMP_CLAUSE_CHAIN (c
) = c2
;
4016 if (chain_p
&& map_p
)
4017 return omp_expand_access_chain (c
, expr
, addr_tokens
, idx
, ort
);
4022 /* Expand a map clause into a group of mapping clauses, creating nodes to
4023 attach/detach pointers and so forth as necessary. */
/* Driver: walk the ADDR_TOKENS sequence, recognising "base + access
   method" patterns (array base, structure base, component selector) and
   delegating to expand_array_base / expand_component_selector; simpler
   patterns just mark the decl addressable and skip their access-method
   tokens.  Returns error_mark_node on any unrecognised pattern.
   NOTE(review): lossy extraction -- braces, 'i++' token-skipping bodies,
   switch defaults and the final return are missing from this chunk.  */
4026 c_omp_address_inspector::expand_map_clause (tree c
, tree expr
,
4027 vec
<omp_addr_token
*> &addr_tokens
,
4028 c_omp_region_type ort
)
4030 using namespace omp_addr_tokenizer
;
4031 unsigned i
, length
= addr_tokens
.length ();
4033 for (i
= 0; i
< length
;)
4035 int remaining
= length
- i
;
/* array base declared as a DECL.  */
4038 && addr_tokens
[i
]->type
== ARRAY_BASE
4039 && addr_tokens
[i
]->u
.structure_base_kind
== BASE_DECL
4040 && addr_tokens
[i
+ 1]->type
== ACCESS_METHOD
)
4042 c
= expand_array_base (c
, addr_tokens
, expr
, &i
, ort
);
4043 if (c
== error_mark_node
)
4044 return error_mark_node
;
/* array base that is an arbitrary expression.  */
4046 else if (remaining
>= 2
4047 && addr_tokens
[i
]->type
== ARRAY_BASE
4048 && addr_tokens
[i
]->u
.structure_base_kind
== BASE_ARBITRARY_EXPR
4049 && addr_tokens
[i
+ 1]->type
== ACCESS_METHOD
)
4051 c
= expand_array_base (c
, addr_tokens
, expr
, &i
, ort
);
4052 if (c
== error_mark_node
)
4053 return error_mark_node
;
/* structure base declared as a DECL: mark addressable on direct
   access, then skip the access-method tokens.  */
4055 else if (remaining
>= 2
4056 && addr_tokens
[i
]->type
== STRUCTURE_BASE
4057 && addr_tokens
[i
]->u
.structure_base_kind
== BASE_DECL
4058 && addr_tokens
[i
+ 1]->type
== ACCESS_METHOD
)
4060 if (addr_tokens
[i
+ 1]->u
.access_kind
== ACCESS_DIRECT
)
4061 c_common_mark_addressable_vec (addr_tokens
[i
+ 1]->expr
);
4063 while (addr_tokens
[i
]->type
== ACCESS_METHOD
)
/* structure base that is an arbitrary expression: only a limited set
   of access kinds is accepted.  */
4066 else if (remaining
>= 2
4067 && addr_tokens
[i
]->type
== STRUCTURE_BASE
4068 && addr_tokens
[i
]->u
.structure_base_kind
== BASE_ARBITRARY_EXPR
4069 && addr_tokens
[i
+ 1]->type
== ACCESS_METHOD
)
4071 switch (addr_tokens
[i
+ 1]->u
.access_kind
)
4074 case ACCESS_POINTER
:
4076 while (addr_tokens
[i
]->type
== ACCESS_METHOD
)
4080 return error_mark_node
;
/* component selector followed directly by an access method.  */
4083 else if (remaining
>= 2
4084 && addr_tokens
[i
]->type
== COMPONENT_SELECTOR
4085 && addr_tokens
[i
+ 1]->type
== ACCESS_METHOD
)
4087 c
= expand_component_selector (c
, addr_tokens
, expr
, &i
, ort
);
4088 /* We used 'expr', so these must have been the last tokens. */
4089 gcc_assert (i
== length
);
4090 if (c
== error_mark_node
)
4091 return error_mark_node
;
/* component selector of a component-expression structure base.  */
4093 else if (remaining
>= 3
4094 && addr_tokens
[i
]->type
== COMPONENT_SELECTOR
4095 && addr_tokens
[i
+ 1]->type
== STRUCTURE_BASE
4096 && (addr_tokens
[i
+ 1]->u
.structure_base_kind
4097 == BASE_COMPONENT_EXPR
)
4098 && addr_tokens
[i
+ 2]->type
== ACCESS_METHOD
)
4101 while (addr_tokens
[i
]->type
== ACCESS_METHOD
)
4111 return error_mark_node
;
/* Table of OpenMP directives known to the C/C++ front ends: up to three
   keywords, the pragma id, the directive category, and a flag (per the
   initializers below, true for e.g. "declare reduction", "for",
   "parallel" -- presumably "can be part of a combined/composite
   directive"; confirm against the c_omp_directive declaration).
   Entries still commented out are not yet implemented.
   NOTE(review): lossy extraction -- the closing "};" of this array is
   missing from this chunk.  */
4114 const struct c_omp_directive c_omp_directives
[] = {
4115 /* Keep this alphabetically sorted by the first word. Non-null second/third
4116 if any should precede null ones. */
4117 { "allocate", nullptr, nullptr, PRAGMA_OMP_ALLOCATE
,
4118 C_OMP_DIR_DECLARATIVE
, false },
4119 { "assume", nullptr, nullptr, PRAGMA_OMP_ASSUME
,
4120 C_OMP_DIR_INFORMATIONAL
, false },
4121 { "assumes", nullptr, nullptr, PRAGMA_OMP_ASSUMES
,
4122 C_OMP_DIR_INFORMATIONAL
, false },
4123 { "atomic", nullptr, nullptr, PRAGMA_OMP_ATOMIC
,
4124 C_OMP_DIR_CONSTRUCT
, false },
4125 { "barrier", nullptr, nullptr, PRAGMA_OMP_BARRIER
,
4126 C_OMP_DIR_STANDALONE
, false },
4127 { "begin", "assumes", nullptr, PRAGMA_OMP_BEGIN
,
4128 C_OMP_DIR_INFORMATIONAL
, false },
4129 { "begin", "declare", "target", PRAGMA_OMP_BEGIN
,
4130 C_OMP_DIR_DECLARATIVE
, false },
4131 /* { "begin", "declare", "variant", PRAGMA_OMP_BEGIN,
4132 C_OMP_DIR_DECLARATIVE, false }, */
4133 /* { "begin", "metadirective", nullptr, PRAGMA_OMP_BEGIN,
4134 C_OMP_DIR_???, ??? }, */
4135 { "cancel", nullptr, nullptr, PRAGMA_OMP_CANCEL
,
4136 C_OMP_DIR_STANDALONE
, false },
4137 { "cancellation", "point", nullptr, PRAGMA_OMP_CANCELLATION_POINT
,
4138 C_OMP_DIR_STANDALONE
, false },
4139 { "critical", nullptr, nullptr, PRAGMA_OMP_CRITICAL
,
4140 C_OMP_DIR_CONSTRUCT
, false },
4141 /* { "declare", "mapper", nullptr, PRAGMA_OMP_DECLARE,
4142 C_OMP_DIR_DECLARATIVE, false }, */
4143 { "declare", "reduction", nullptr, PRAGMA_OMP_DECLARE
,
4144 C_OMP_DIR_DECLARATIVE
, true },
4145 { "declare", "simd", nullptr, PRAGMA_OMP_DECLARE
,
4146 C_OMP_DIR_DECLARATIVE
, true },
4147 { "declare", "target", nullptr, PRAGMA_OMP_DECLARE
,
4148 C_OMP_DIR_DECLARATIVE
, false },
4149 { "declare", "variant", nullptr, PRAGMA_OMP_DECLARE
,
4150 C_OMP_DIR_DECLARATIVE
, false },
4151 { "depobj", nullptr, nullptr, PRAGMA_OMP_DEPOBJ
,
4152 C_OMP_DIR_STANDALONE
, false },
4153 /* { "dispatch", nullptr, nullptr, PRAGMA_OMP_DISPATCH,
4154 C_OMP_DIR_CONSTRUCT, false }, */
4155 { "distribute", nullptr, nullptr, PRAGMA_OMP_DISTRIBUTE
,
4156 C_OMP_DIR_CONSTRUCT
, true },
4157 { "end", "assumes", nullptr, PRAGMA_OMP_END
,
4158 C_OMP_DIR_INFORMATIONAL
, false },
4159 { "end", "declare", "target", PRAGMA_OMP_END
,
4160 C_OMP_DIR_DECLARATIVE
, false },
4161 /* { "end", "declare", "variant", PRAGMA_OMP_END,
4162 C_OMP_DIR_DECLARATIVE, false }, */
4163 /* { "end", "metadirective", nullptr, PRAGMA_OMP_END,
4164 C_OMP_DIR_???, ??? }, */
4165 /* error with at(execution) is C_OMP_DIR_STANDALONE. */
4166 { "error", nullptr, nullptr, PRAGMA_OMP_ERROR
,
4167 C_OMP_DIR_UTILITY
, false },
4168 { "flush", nullptr, nullptr, PRAGMA_OMP_FLUSH
,
4169 C_OMP_DIR_STANDALONE
, false },
4170 { "for", nullptr, nullptr, PRAGMA_OMP_FOR
,
4171 C_OMP_DIR_CONSTRUCT
, true },
4172 /* { "groupprivate", nullptr, nullptr, PRAGMA_OMP_GROUPPRIVATE,
4173 C_OMP_DIR_DECLARATIVE, false }, */
4174 /* { "interop", nullptr, nullptr, PRAGMA_OMP_INTEROP,
4175 C_OMP_DIR_STANDALONE, false }, */
4176 { "loop", nullptr, nullptr, PRAGMA_OMP_LOOP
,
4177 C_OMP_DIR_CONSTRUCT
, true },
4178 { "masked", nullptr, nullptr, PRAGMA_OMP_MASKED
,
4179 C_OMP_DIR_CONSTRUCT
, true },
4180 { "master", nullptr, nullptr, PRAGMA_OMP_MASTER
,
4181 C_OMP_DIR_CONSTRUCT
, true },
4182 /* { "metadirective", nullptr, nullptr, PRAGMA_OMP_METADIRECTIVE,
4183 C_OMP_DIR_???, ??? }, */
4184 { "nothing", nullptr, nullptr, PRAGMA_OMP_NOTHING
,
4185 C_OMP_DIR_UTILITY
, false },
4186 /* ordered with depend clause is C_OMP_DIR_STANDALONE. */
4187 { "ordered", nullptr, nullptr, PRAGMA_OMP_ORDERED
,
4188 C_OMP_DIR_CONSTRUCT
, true },
4189 { "parallel", nullptr, nullptr, PRAGMA_OMP_PARALLEL
,
4190 C_OMP_DIR_CONSTRUCT
, true },
4191 { "requires", nullptr, nullptr, PRAGMA_OMP_REQUIRES
,
4192 C_OMP_DIR_INFORMATIONAL
, false },
4193 { "scan", nullptr, nullptr, PRAGMA_OMP_SCAN
,
4194 C_OMP_DIR_CONSTRUCT
, true },
4195 { "scope", nullptr, nullptr, PRAGMA_OMP_SCOPE
,
4196 C_OMP_DIR_CONSTRUCT
, false },
4197 { "section", nullptr, nullptr, PRAGMA_OMP_SECTION
,
4198 C_OMP_DIR_CONSTRUCT
, false },
4199 { "sections", nullptr, nullptr, PRAGMA_OMP_SECTIONS
,
4200 C_OMP_DIR_CONSTRUCT
, false },
4201 { "simd", nullptr, nullptr, PRAGMA_OMP_SIMD
,
4202 C_OMP_DIR_CONSTRUCT
, true },
4203 { "single", nullptr, nullptr, PRAGMA_OMP_SINGLE
,
4204 C_OMP_DIR_CONSTRUCT
, false },
4205 { "target", "data", nullptr, PRAGMA_OMP_TARGET
,
4206 C_OMP_DIR_CONSTRUCT
, false },
4207 { "target", "enter", "data", PRAGMA_OMP_TARGET
,
4208 C_OMP_DIR_STANDALONE
, false },
4209 { "target", "exit", "data", PRAGMA_OMP_TARGET
,
4210 C_OMP_DIR_STANDALONE
, false },
4211 { "target", "update", nullptr, PRAGMA_OMP_TARGET
,
4212 C_OMP_DIR_STANDALONE
, false },
4213 { "target", nullptr, nullptr, PRAGMA_OMP_TARGET
,
4214 C_OMP_DIR_CONSTRUCT
, true },
4215 { "task", nullptr, nullptr, PRAGMA_OMP_TASK
,
4216 C_OMP_DIR_CONSTRUCT
, false },
4217 { "taskgroup", nullptr, nullptr, PRAGMA_OMP_TASKGROUP
,
4218 C_OMP_DIR_CONSTRUCT
, false },
4219 { "taskloop", nullptr, nullptr, PRAGMA_OMP_TASKLOOP
,
4220 C_OMP_DIR_CONSTRUCT
, true },
4221 { "taskwait", nullptr, nullptr, PRAGMA_OMP_TASKWAIT
,
4222 C_OMP_DIR_STANDALONE
, false },
4223 { "taskyield", nullptr, nullptr, PRAGMA_OMP_TASKYIELD
,
4224 C_OMP_DIR_STANDALONE
, false },
4225 /* { "tile", nullptr, nullptr, PRAGMA_OMP_TILE,
4226 C_OMP_DIR_CONSTRUCT, false }, */
4227 { "teams", nullptr, nullptr, PRAGMA_OMP_TEAMS
,
4228 C_OMP_DIR_CONSTRUCT
, true },
4229 { "threadprivate", nullptr, nullptr, PRAGMA_OMP_THREADPRIVATE
,
4230 C_OMP_DIR_DECLARATIVE
, false }
4231 /* { "unroll", nullptr, nullptr, PRAGMA_OMP_UNROLL,
4232 C_OMP_DIR_CONSTRUCT, false }, */
4235 /* Find (non-combined/composite) OpenMP directive (if any) which starts
4236 with FIRST keyword and for multi-word directives has SECOND and
4237 THIRD keyword after it. */
/* Linear scan of the alphabetically sorted c_omp_directives table.  The
   first-letter comparisons exploit the sorting: entries before FIRST's
   initial letter are skipped, entries after it end the search early.
   NOTE(review): lossy extraction -- the THIRD parameter, the loop's
   continue/break statements, braces and the final "return NULL" are
   missing from this chunk; consult the full file before editing.  */
4239 const struct c_omp_directive
*
4240 c_omp_categorize_directive (const char *first
, const char *second
,
4243 const size_t n_omp_directives
= ARRAY_SIZE (c_omp_directives
);
4244 for (size_t i
= 0; i
< n_omp_directives
; i
++)
4246 if ((unsigned char) c_omp_directives
[i
].first
[0]
4247 < (unsigned char) first
[0])
4249 if ((unsigned char) c_omp_directives
[i
].first
[0]
4250 > (unsigned char) first
[0])
4252 if (strcmp (c_omp_directives
[i
].first
, first
))
/* One-word directive: match on FIRST alone.  */
4254 if (!c_omp_directives
[i
].second
)
4255 return &c_omp_directives
[i
];
4256 if (!second
|| strcmp (c_omp_directives
[i
].second
, second
))
/* Two-word directive: FIRST and SECOND matched, no THIRD needed.  */
4258 if (!c_omp_directives
[i
].third
)
4259 return &c_omp_directives
[i
];
4260 if (!third
|| strcmp (c_omp_directives
[i
].third
, third
))
4262 return &c_omp_directives
[i
];