1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2022 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
40 #include "fold-const.h"
45 #include "gimple-iterator.h"
46 #include "gimple-fold.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
56 #include "tree-hash-traits.h"
57 #include "omp-general.h"
59 #include "gimple-low.h"
60 #include "gomp-constants.h"
61 #include "splay-tree.h"
62 #include "gimple-walk.h"
63 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
65 #include "stringpool.h"
69 #include "omp-offload.h"
71 #include "tree-nested.h"
73 /* Hash set of poisoned variables in a bind expr. */
74 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
/* Data-sharing attributes tracked per variable while gimplifying an OpenMP
   region.  The low bits form an exclusive sharing class; the higher bits are
   modifier flags.  NOTE(review): enumerators not visible in the mangled text
   (GOVD_SEEN, GOVD_LOCAL, GOVD_MAP, GOVD_MAP_ALWAYS_FROM) restored from the
   values referenced by later code — confirm against upstream.  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x00080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  /* Mask of the mutually exclusive data-sharing classes.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
/* Kinds of OpenMP/OpenACC regions the gimplifier can be inside of.
   Combined/untied variants are encoded as the base value plus low bits.
   NOTE(review): base values not visible in the mangled text (ORT_SIMD,
   ORT_PARALLEL, ORT_TASK, ORT_TEAMS, ORT_TARGET, ORT_NONE) restored from
   the arithmetic used by the visible enumerators — confirm upstream.  */
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,      /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,       /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,    /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,     /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
177 /* Gimplify hashtable helper. */
179 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
181 static inline hashval_t
hash (const elt_t
*);
182 static inline bool equal (const elt_t
*, const elt_t
*);
187 struct gimplify_ctx
*prev_context
;
189 vec
<gbind
*> bind_expr_stack
;
191 gimple_seq conditional_cleanups
;
195 vec
<tree
> case_labels
;
196 hash_set
<tree
> *live_switch_vars
;
197 /* The formal temporary table. Should this be persistent? */
198 hash_table
<gimplify_hasher
> *temp_htab
;
201 unsigned into_ssa
: 1;
202 unsigned allow_rhs_cond_expr
: 1;
203 unsigned in_cleanup_point_expr
: 1;
204 unsigned keep_stack
: 1;
205 unsigned save_stack
: 1;
206 unsigned in_switch_expr
: 1;
/* Categories used to index gimplify_omp_ctx::defaultmap.  */
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
218 struct gimplify_omp_ctx
220 struct gimplify_omp_ctx
*outer_context
;
221 splay_tree variables
;
222 hash_set
<tree
> *privatized_types
;
224 /* Iteration variables in an OMP_FOR. */
225 vec
<tree
> loop_iter_var
;
227 enum omp_clause_default_kind default_kind
;
228 enum omp_region_type region_type
;
232 bool target_firstprivatize_array_bases
;
234 bool order_concurrent
;
/* Current (innermost) gimplification context, OMP context, and whether we
   are inside an OMP construct.  */
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;
244 /* Forward declaration. */
245 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
246 static hash_map
<tree
, tree
> *oacc_declare_returns
;
247 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
248 bool (*) (tree
), fallback_t
, bool);
249 static void prepare_gimple_addressable (tree
*, gimple_seq
*);
251 /* Shorter alias name for the above function for use in gimplify.cc
255 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
257 gimple_seq_add_stmt_without_update (seq_p
, gs
);
260 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
261 NULL, a new sequence is allocated. This function is
262 similar to gimple_seq_add_seq, but does not scan the operands.
263 During gimplification, we need to manipulate statement sequences
264 before the def/use vectors have been constructed. */
267 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
269 gimple_stmt_iterator si
;
274 si
= gsi_last (*dst_p
);
275 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
279 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
280 and popping gimplify contexts. */
282 static struct gimplify_ctx
*ctx_pool
= NULL
;
284 /* Return a gimplify context struct from the pool. */
286 static inline struct gimplify_ctx
*
289 struct gimplify_ctx
* c
= ctx_pool
;
292 ctx_pool
= c
->prev_context
;
294 c
= XNEW (struct gimplify_ctx
);
296 memset (c
, '\0', sizeof (*c
));
300 /* Put gimplify context C back into the pool. */
303 ctx_free (struct gimplify_ctx
*c
)
305 c
->prev_context
= ctx_pool
;
309 /* Free allocated ctx stack memory. */
312 free_gimplify_stack (void)
314 struct gimplify_ctx
*c
;
316 while ((c
= ctx_pool
))
318 ctx_pool
= c
->prev_context
;
324 /* Set up a context for the gimplifier. */
327 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
329 struct gimplify_ctx
*c
= ctx_alloc ();
331 c
->prev_context
= gimplify_ctxp
;
333 gimplify_ctxp
->into_ssa
= in_ssa
;
334 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
337 /* Tear down a context for the gimplifier. If BODY is non-null, then
338 put the temporaries into the outer BIND_EXPR. Otherwise, put them
341 BODY is not a sequence, but the first tuple in a sequence. */
344 pop_gimplify_context (gimple
*body
)
346 struct gimplify_ctx
*c
= gimplify_ctxp
;
349 && (!c
->bind_expr_stack
.exists ()
350 || c
->bind_expr_stack
.is_empty ()));
351 c
->bind_expr_stack
.release ();
352 gimplify_ctxp
= c
->prev_context
;
355 declare_vars (c
->temps
, body
, false);
357 record_vars (c
->temps
);
364 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
367 gimple_push_bind_expr (gbind
*bind_stmt
)
369 gimplify_ctxp
->bind_expr_stack
.reserve (8);
370 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
373 /* Pop the first element off the stack of bindings. */
376 gimple_pop_bind_expr (void)
378 gimplify_ctxp
->bind_expr_stack
.pop ();
381 /* Return the first element of the stack of bindings. */
384 gimple_current_bind_expr (void)
386 return gimplify_ctxp
->bind_expr_stack
.last ();
389 /* Return the stack of bindings created during gimplification. */
392 gimple_bind_expr_stack (void)
394 return gimplify_ctxp
->bind_expr_stack
;
397 /* Return true iff there is a COND_EXPR between us and the innermost
398 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
401 gimple_conditional_context (void)
403 return gimplify_ctxp
->conditions
> 0;
406 /* Note that we've entered a COND_EXPR. */
409 gimple_push_condition (void)
411 #ifdef ENABLE_GIMPLE_CHECKING
412 if (gimplify_ctxp
->conditions
== 0)
413 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
415 ++(gimplify_ctxp
->conditions
);
418 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
419 now, add any conditional cleanups we've seen to the prequeue. */
422 gimple_pop_condition (gimple_seq
*pre_p
)
424 int conds
= --(gimplify_ctxp
->conditions
);
426 gcc_assert (conds
>= 0);
429 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
430 gimplify_ctxp
->conditional_cleanups
= NULL
;
434 /* A stable comparison routine for use with splay trees and DECLs. */
437 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
442 return DECL_UID (a
) - DECL_UID (b
);
445 /* Create a new omp construct that deals with variable remapping. */
447 static struct gimplify_omp_ctx
*
448 new_omp_context (enum omp_region_type region_type
)
450 struct gimplify_omp_ctx
*c
;
452 c
= XCNEW (struct gimplify_omp_ctx
);
453 c
->outer_context
= gimplify_omp_ctxp
;
454 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
455 c
->privatized_types
= new hash_set
<tree
>;
456 c
->location
= input_location
;
457 c
->region_type
= region_type
;
458 if ((region_type
& ORT_TASK
) == 0)
459 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
461 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
462 c
->defaultmap
[GDMK_SCALAR
] = GOVD_MAP
;
463 c
->defaultmap
[GDMK_SCALAR_TARGET
] = GOVD_MAP
;
464 c
->defaultmap
[GDMK_AGGREGATE
] = GOVD_MAP
;
465 c
->defaultmap
[GDMK_ALLOCATABLE
] = GOVD_MAP
;
466 c
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
;
471 /* Destroy an omp construct that deals with variable remapping. */
474 delete_omp_context (struct gimplify_omp_ctx
*c
)
476 splay_tree_delete (c
->variables
);
477 delete c
->privatized_types
;
478 c
->loop_iter_var
.release ();
482 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
483 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
485 /* Both gimplify the statement T and append it to *SEQ_P. This function
486 behaves exactly as gimplify_stmt, but you don't have to pass T as a
490 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
492 gimplify_stmt (&t
, seq_p
);
495 /* Gimplify statement T into sequence *SEQ_P, and return the first
496 tuple in the sequence of generated tuples for this statement.
497 Return NULL if gimplifying T produced no tuples. */
500 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
502 gimple_stmt_iterator last
= gsi_last (*seq_p
);
504 gimplify_and_add (t
, seq_p
);
506 if (!gsi_end_p (last
))
509 return gsi_stmt (last
);
512 return gimple_seq_first_stmt (*seq_p
);
515 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
516 LHS, or for a call argument. */
519 is_gimple_mem_rhs (tree t
)
521 /* If we're dealing with a renamable type, either source or dest must be
522 a renamed variable. */
523 if (is_gimple_reg_type (TREE_TYPE (t
)))
524 return is_gimple_val (t
);
526 return is_gimple_val (t
) || is_gimple_lvalue (t
);
529 /* Return true if T is a CALL_EXPR or an expression that can be
530 assigned to a temporary. Note that this predicate should only be
531 used during gimplification. See the rationale for this in
532 gimplify_modify_expr. */
535 is_gimple_reg_rhs_or_call (tree t
)
537 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
538 || TREE_CODE (t
) == CALL_EXPR
);
541 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
542 this predicate should only be used during gimplification. See the
543 rationale for this in gimplify_modify_expr. */
546 is_gimple_mem_rhs_or_call (tree t
)
548 /* If we're dealing with a renamable type, either source or dest must be
549 a renamed variable. */
550 if (is_gimple_reg_type (TREE_TYPE (t
)))
551 return is_gimple_val (t
);
553 return (is_gimple_val (t
)
554 || is_gimple_lvalue (t
)
555 || TREE_CLOBBER_P (t
)
556 || TREE_CODE (t
) == CALL_EXPR
);
559 /* Create a temporary with a name derived from VAL. Subroutine of
560 lookup_tmp_var; nobody else should call this function. */
563 create_tmp_from_val (tree val
)
565 /* Drop all qualifiers and address-space information from the value type. */
566 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
567 tree var
= create_tmp_var (type
, get_name (val
));
571 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
572 an existing expression temporary. If NOT_GIMPLE_REG, mark it as such. */
575 lookup_tmp_var (tree val
, bool is_formal
, bool not_gimple_reg
)
579 /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P. */
580 gcc_assert (!is_formal
|| !not_gimple_reg
);
582 /* If not optimizing, never really reuse a temporary. local-alloc
583 won't allocate any variable that is used in more than one basic
584 block, which means it will go into memory, causing much extra
585 work in reload and final and poorer code generation, outweighing
586 the extra memory allocation here. */
587 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
589 ret
= create_tmp_from_val (val
);
590 DECL_NOT_GIMPLE_REG_P (ret
) = not_gimple_reg
;
598 if (!gimplify_ctxp
->temp_htab
)
599 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
600 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
603 elt_p
= XNEW (elt_t
);
605 elt_p
->temp
= ret
= create_tmp_from_val (val
);
618 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
621 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
622 bool is_formal
, bool allow_ssa
, bool not_gimple_reg
)
626 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
627 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
628 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
632 && gimplify_ctxp
->into_ssa
633 && is_gimple_reg_type (TREE_TYPE (val
)))
635 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
636 if (! gimple_in_ssa_p (cfun
))
638 const char *name
= get_name (val
);
640 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
644 t
= lookup_tmp_var (val
, is_formal
, not_gimple_reg
);
646 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
648 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
650 /* gimplify_modify_expr might want to reduce this further. */
651 gimplify_and_add (mod
, pre_p
);
657 /* Return a formal temporary variable initialized with VAL. PRE_P is as
658 in gimplify_expr. Only use this function if:
660 1) The value of the unfactored expression represented by VAL will not
661 change between the initialization and use of the temporary, and
662 2) The temporary will not be otherwise modified.
664 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
665 and #2 means it is inappropriate for && temps.
667 For other cases, use get_initialized_tmp_var instead. */
670 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
672 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true, false);
675 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
676 are as in gimplify_expr. */
679 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
,
680 gimple_seq
*post_p
/* = NULL */,
681 bool allow_ssa
/* = true */)
683 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
, false);
686 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
687 generate debug info for them; otherwise don't. */
690 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
697 gbind
*scope
= as_a
<gbind
*> (gs
);
699 temps
= nreverse (last
);
701 block
= gimple_bind_block (scope
);
702 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
703 if (!block
|| !debug_info
)
705 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
706 gimple_bind_set_vars (scope
, temps
);
710 /* We need to attach the nodes both to the BIND_EXPR and to its
711 associated BLOCK for debugging purposes. The key point here
712 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
713 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
714 if (BLOCK_VARS (block
))
715 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
718 gimple_bind_set_vars (scope
,
719 chainon (gimple_bind_vars (scope
), temps
));
720 BLOCK_VARS (block
) = temps
;
726 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
727 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
728 no such upper bound can be obtained. */
731 force_constant_size (tree var
)
733 /* The only attempt we make is by querying the maximum size of objects
734 of the variable's type. */
736 HOST_WIDE_INT max_size
;
738 gcc_assert (VAR_P (var
));
740 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
742 gcc_assert (max_size
>= 0);
745 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
747 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
750 /* Push the temporary variable TMP into the current binding. */
753 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
755 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
757 /* Later processing assumes that the object size is constant, which might
758 not be true at this point. Force the use of a constant upper bound in
760 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
761 force_constant_size (tmp
);
763 DECL_CONTEXT (tmp
) = fn
->decl
;
764 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
766 record_vars_into (tmp
, fn
->decl
);
769 /* Push the temporary variable TMP into the current binding. */
772 gimple_add_tmp_var (tree tmp
)
774 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
776 /* Later processing assumes that the object size is constant, which might
777 not be true at this point. Force the use of a constant upper bound in
779 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
780 force_constant_size (tmp
);
782 DECL_CONTEXT (tmp
) = current_function_decl
;
783 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
787 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
788 gimplify_ctxp
->temps
= tmp
;
790 /* Mark temporaries local within the nearest enclosing parallel. */
791 if (gimplify_omp_ctxp
)
793 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
794 int flag
= GOVD_LOCAL
| GOVD_SEEN
;
796 && (ctx
->region_type
== ORT_WORKSHARE
797 || ctx
->region_type
== ORT_TASKGROUP
798 || ctx
->region_type
== ORT_SIMD
799 || ctx
->region_type
== ORT_ACC
))
801 if (ctx
->region_type
== ORT_SIMD
802 && TREE_ADDRESSABLE (tmp
)
803 && !TREE_STATIC (tmp
))
805 if (TREE_CODE (DECL_SIZE_UNIT (tmp
)) != INTEGER_CST
)
806 ctx
->add_safelen1
= true;
807 else if (ctx
->in_for_exprs
)
810 flag
= GOVD_PRIVATE
| GOVD_SEEN
;
813 ctx
= ctx
->outer_context
;
816 omp_add_variable (ctx
, tmp
, flag
);
825 /* This case is for nested functions. We need to expose the locals
827 body_seq
= gimple_body (current_function_decl
);
828 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
834 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
835 nodes that are referenced more than once in GENERIC functions. This is
836 necessary because gimplification (translation into GIMPLE) is performed
837 by modifying tree nodes in-place, so gimplication of a shared node in a
838 first context could generate an invalid GIMPLE form in a second context.
840 This is achieved with a simple mark/copy/unmark algorithm that walks the
841 GENERIC representation top-down, marks nodes with TREE_VISITED the first
842 time it encounters them, duplicates them if they already have TREE_VISITED
843 set, and finally removes the TREE_VISITED marks it has set.
845 The algorithm works only at the function level, i.e. it generates a GENERIC
846 representation of a function with no nodes shared within the function when
847 passed a GENERIC function (except for nodes that are allowed to be shared).
849 At the global level, it is also necessary to unshare tree nodes that are
850 referenced in more than one function, for the same aforementioned reason.
851 This requires some cooperation from the front-end. There are 2 strategies:
853 1. Manual unsharing. The front-end needs to call unshare_expr on every
854 expression that might end up being shared across functions.
856 2. Deep unsharing. This is an extension of regular unsharing. Instead
857 of calling unshare_expr on expressions that might be shared across
858 functions, the front-end pre-marks them with TREE_VISITED. This will
859 ensure that they are unshared on the first reference within functions
860 when the regular unsharing algorithm runs. The counterpart is that
861 this algorithm must look deeper than for manual unsharing, which is
862 specified by LANG_HOOKS_DEEP_UNSHARING.
864 If there are only few specific cases of node sharing across functions, it is
865 probably easier for a front-end to unshare the expressions manually. On the
866 contrary, if the expressions generated at the global level are as widespread
867 as expressions generated within functions, deep unsharing is very likely the
870 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
871 These nodes model computations that must be done once. If we were to
872 unshare something like SAVE_EXPR(i++), the gimplification process would
873 create wrong code. However, if DATA is non-null, it must hold a pointer
874 set that is used to unshare the subtrees of these nodes. */
877 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
880 enum tree_code code
= TREE_CODE (t
);
882 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
883 copy their subtrees if we can make sure to do it only once. */
884 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
886 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
892 /* Stop at types, decls, constants like copy_tree_r. */
893 else if (TREE_CODE_CLASS (code
) == tcc_type
894 || TREE_CODE_CLASS (code
) == tcc_declaration
895 || TREE_CODE_CLASS (code
) == tcc_constant
)
898 /* Cope with the statement expression extension. */
899 else if (code
== STATEMENT_LIST
)
902 /* Leave the bulk of the work to copy_tree_r itself. */
904 copy_tree_r (tp
, walk_subtrees
, NULL
);
909 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
910 If *TP has been visited already, then *TP is deeply copied by calling
911 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
914 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
917 enum tree_code code
= TREE_CODE (t
);
919 /* Skip types, decls, and constants. But we do want to look at their
920 types and the bounds of types. Mark them as visited so we properly
921 unmark their subtrees on the unmark pass. If we've already seen them,
922 don't look down further. */
923 if (TREE_CODE_CLASS (code
) == tcc_type
924 || TREE_CODE_CLASS (code
) == tcc_declaration
925 || TREE_CODE_CLASS (code
) == tcc_constant
)
927 if (TREE_VISITED (t
))
930 TREE_VISITED (t
) = 1;
933 /* If this node has been visited already, unshare it and don't look
935 else if (TREE_VISITED (t
))
937 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
941 /* Otherwise, mark the node as visited and keep looking. */
943 TREE_VISITED (t
) = 1;
948 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
949 copy_if_shared_r callback unmodified. */
952 copy_if_shared (tree
*tp
, void *data
)
954 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
957 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
958 any nested functions. */
961 unshare_body (tree fndecl
)
963 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
964 /* If the language requires deep unsharing, we need a pointer set to make
965 sure we don't repeatedly unshare subtrees of unshareable nodes. */
966 hash_set
<tree
> *visited
967 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
969 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
970 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
971 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
976 for (cgn
= first_nested_function (cgn
); cgn
;
977 cgn
= next_nested_function (cgn
))
978 unshare_body (cgn
->decl
);
981 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
982 Subtrees are walked until the first unvisited node is encountered. */
985 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
989 /* If this node has been visited, unmark it and keep looking. */
990 if (TREE_VISITED (t
))
991 TREE_VISITED (t
) = 0;
993 /* Otherwise, don't look any deeper. */
1000 /* Unmark the visited trees rooted at *TP. */
1003 unmark_visited (tree
*tp
)
1005 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
1008 /* Likewise, but mark all trees as not visited. */
1011 unvisit_body (tree fndecl
)
1013 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
1015 unmark_visited (&DECL_SAVED_TREE (fndecl
));
1016 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
1017 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
1020 for (cgn
= first_nested_function (cgn
);
1021 cgn
; cgn
= next_nested_function (cgn
))
1022 unvisit_body (cgn
->decl
);
1025 /* Unconditionally make an unshared copy of EXPR. This is used when using
1026 stored expressions which span multiple functions, such as BINFO_VTABLE,
1027 as the normal unsharing process can't tell that they're shared. */
1030 unshare_expr (tree expr
)
1032 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1036 /* Worker for unshare_expr_without_location. */
1039 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
1042 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
1048 /* Similar to unshare_expr but also prune all expression locations
1052 unshare_expr_without_location (tree expr
)
1054 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1056 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
1060 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1061 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1062 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1063 EXPR is the location of the EXPR. */
1066 rexpr_location (tree expr
, location_t or_else
= UNKNOWN_LOCATION
)
1071 if (EXPR_HAS_LOCATION (expr
))
1072 return EXPR_LOCATION (expr
);
1074 if (TREE_CODE (expr
) != STATEMENT_LIST
)
1077 tree_stmt_iterator i
= tsi_start (expr
);
1080 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
1086 if (!found
|| !tsi_one_before_end_p (i
))
1089 return rexpr_location (tsi_stmt (i
), or_else
);
1092 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1093 rexpr_location for the potential recursion. */
1096 rexpr_has_location (tree expr
)
1098 return rexpr_location (expr
) != UNKNOWN_LOCATION
;
1102 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1103 contain statements and have a value. Assign its value to a temporary
1104 and give it void_type_node. Return the temporary, or NULL_TREE if
1105 WRAPPER was already void. */
1108 voidify_wrapper_expr (tree wrapper
, tree temp
)
1110 tree type
= TREE_TYPE (wrapper
);
1111 if (type
&& !VOID_TYPE_P (type
))
1115 /* Set p to point to the body of the wrapper. Loop until we find
1116 something that isn't a wrapper. */
1117 for (p
= &wrapper
; p
&& *p
; )
1119 switch (TREE_CODE (*p
))
1122 TREE_SIDE_EFFECTS (*p
) = 1;
1123 TREE_TYPE (*p
) = void_type_node
;
1124 /* For a BIND_EXPR, the body is operand 1. */
1125 p
= &BIND_EXPR_BODY (*p
);
1128 case CLEANUP_POINT_EXPR
:
1129 case TRY_FINALLY_EXPR
:
1130 case TRY_CATCH_EXPR
:
1131 TREE_SIDE_EFFECTS (*p
) = 1;
1132 TREE_TYPE (*p
) = void_type_node
;
1133 p
= &TREE_OPERAND (*p
, 0);
1136 case STATEMENT_LIST
:
1138 tree_stmt_iterator i
= tsi_last (*p
);
1139 TREE_SIDE_EFFECTS (*p
) = 1;
1140 TREE_TYPE (*p
) = void_type_node
;
1141 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1146 /* Advance to the last statement. Set all container types to
1148 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1150 TREE_SIDE_EFFECTS (*p
) = 1;
1151 TREE_TYPE (*p
) = void_type_node
;
1155 case TRANSACTION_EXPR
:
1156 TREE_SIDE_EFFECTS (*p
) = 1;
1157 TREE_TYPE (*p
) = void_type_node
;
1158 p
= &TRANSACTION_EXPR_BODY (*p
);
1162 /* Assume that any tree upon which voidify_wrapper_expr is
1163 directly called is a wrapper, and that its body is op0. */
1166 TREE_SIDE_EFFECTS (*p
) = 1;
1167 TREE_TYPE (*p
) = void_type_node
;
1168 p
= &TREE_OPERAND (*p
, 0);
1176 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1180 /* The wrapper is on the RHS of an assignment that we're pushing
1182 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1183 || TREE_CODE (temp
) == MODIFY_EXPR
);
1184 TREE_OPERAND (temp
, 1) = *p
;
1189 temp
= create_tmp_var (type
, "retval");
1190 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1199 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1200 a temporary through which they communicate. */
1203 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1207 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1208 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1209 gimple_call_set_lhs (*save
, tmp_var
);
1212 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1216 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1219 build_asan_poison_call_expr (tree decl
)
1221 /* Do not poison variables that have size equal to zero. */
1222 tree unit_size
= DECL_SIZE_UNIT (decl
);
1223 if (zerop (unit_size
))
1226 tree base
= build_fold_addr_expr (decl
);
1228 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1230 build_int_cst (integer_type_node
,
1235 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1236 on POISON flag, shadow memory of a DECL variable. The call will be
1237 put on location identified by IT iterator, where BEFORE flag drives
1238 position where the stmt will be put. */
1241 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1244 tree unit_size
= DECL_SIZE_UNIT (decl
);
1245 tree base
= build_fold_addr_expr (decl
);
1247 /* Do not poison variables that have size equal to zero. */
1248 if (zerop (unit_size
))
1251 /* It's necessary to have all stack variables aligned to ASAN granularity
1253 gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
1254 unsigned shadow_granularity
1255 = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE
: ASAN_SHADOW_GRANULARITY
;
1256 if (DECL_ALIGN_UNIT (decl
) <= shadow_granularity
)
1257 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* shadow_granularity
);
1259 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1262 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1263 build_int_cst (integer_type_node
, flags
),
1267 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1269 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1272 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1273 either poisons or unpoisons a DECL. Created statement is appended
1274 to SEQ_P gimple sequence. */
1277 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1279 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1280 bool before
= false;
1285 asan_poison_variable (decl
, poison
, &it
, before
);
1288 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1291 sort_by_decl_uid (const void *a
, const void *b
)
1293 const tree
*t1
= (const tree
*)a
;
1294 const tree
*t2
= (const tree
*)b
;
1296 int uid1
= DECL_UID (*t1
);
1297 int uid2
= DECL_UID (*t2
);
1301 else if (uid1
> uid2
)
1307 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1308 depending on POISON flag. Created statement is appended
1309 to SEQ_P gimple sequence. */
1312 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1314 unsigned c
= variables
->elements ();
1318 auto_vec
<tree
> sorted_variables (c
);
1320 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1321 it
!= variables
->end (); ++it
)
1322 sorted_variables
.safe_push (*it
);
1324 sorted_variables
.qsort (sort_by_decl_uid
);
1328 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1330 asan_poison_variable (var
, poison
, seq_p
);
1332 /* Add use_after_scope_memory attribute for the variable in order
1333 to prevent re-written into SSA. */
1334 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1335 DECL_ATTRIBUTES (var
)))
1336 DECL_ATTRIBUTES (var
)
1337 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1339 DECL_ATTRIBUTES (var
));
1343 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1345 static enum gimplify_status
1346 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1348 tree bind_expr
= *expr_p
;
1349 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1350 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1353 gimple_seq body
, cleanup
;
1355 location_t start_locus
= 0, end_locus
= 0;
1356 tree ret_clauses
= NULL
;
1358 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1360 /* Mark variables seen in this bind expr. */
1361 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1365 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1367 /* Mark variable as local. */
1368 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
))
1370 if (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1371 || splay_tree_lookup (ctx
->variables
,
1372 (splay_tree_key
) t
) == NULL
)
1374 int flag
= GOVD_LOCAL
;
1375 if (ctx
->region_type
== ORT_SIMD
1376 && TREE_ADDRESSABLE (t
)
1377 && !TREE_STATIC (t
))
1379 if (TREE_CODE (DECL_SIZE_UNIT (t
)) != INTEGER_CST
)
1380 ctx
->add_safelen1
= true;
1382 flag
= GOVD_PRIVATE
;
1384 omp_add_variable (ctx
, t
, flag
| GOVD_SEEN
);
1386 /* Static locals inside of target construct or offloaded
1387 routines need to be "omp declare target". */
1388 if (TREE_STATIC (t
))
1389 for (; ctx
; ctx
= ctx
->outer_context
)
1390 if ((ctx
->region_type
& ORT_TARGET
) != 0)
1392 if (!lookup_attribute ("omp declare target",
1393 DECL_ATTRIBUTES (t
)))
1395 tree id
= get_identifier ("omp declare target");
1397 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (t
));
1398 varpool_node
*node
= varpool_node::get (t
);
1401 node
->offloadable
= 1;
1402 if (ENABLE_OFFLOADING
&& !DECL_EXTERNAL (t
))
1404 g
->have_offload
= true;
1406 vec_safe_push (offload_vars
, t
);
1414 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1416 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1417 cfun
->has_local_explicit_reg_vars
= true;
1421 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1422 BIND_EXPR_BLOCK (bind_expr
));
1423 gimple_push_bind_expr (bind_stmt
);
1425 gimplify_ctxp
->keep_stack
= false;
1426 gimplify_ctxp
->save_stack
= false;
1428 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1430 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1431 gimple_bind_set_body (bind_stmt
, body
);
1433 /* Source location wise, the cleanup code (stack_restore and clobbers)
1434 belongs to the end of the block, so propagate what we have. The
1435 stack_save operation belongs to the beginning of block, which we can
1436 infer from the bind_expr directly if the block has no explicit
1438 if (BIND_EXPR_BLOCK (bind_expr
))
1440 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1441 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1443 if (start_locus
== 0)
1444 start_locus
= EXPR_LOCATION (bind_expr
);
1449 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1450 the stack space allocated to the VLAs. */
1451 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1453 gcall
*stack_restore
;
1455 /* Save stack on entry and restore it on exit. Add a try_finally
1456 block to achieve this. */
1457 build_stack_save_restore (&stack_save
, &stack_restore
);
1459 gimple_set_location (stack_save
, start_locus
);
1460 gimple_set_location (stack_restore
, end_locus
);
1462 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1465 /* Add clobbers for all variables that go out of scope. */
1466 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1469 && !is_global_var (t
)
1470 && DECL_CONTEXT (t
) == current_function_decl
)
1472 if (!DECL_HARD_REGISTER (t
)
1473 && !TREE_THIS_VOLATILE (t
)
1474 && !DECL_HAS_VALUE_EXPR_P (t
)
1475 /* Only care for variables that have to be in memory. Others
1476 will be rewritten into SSA names, hence moved to the
1478 && !is_gimple_reg (t
)
1479 && flag_stack_reuse
!= SR_NONE
)
1481 tree clobber
= build_clobber (TREE_TYPE (t
), CLOBBER_EOL
);
1482 gimple
*clobber_stmt
;
1483 clobber_stmt
= gimple_build_assign (t
, clobber
);
1484 gimple_set_location (clobber_stmt
, end_locus
);
1485 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1488 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1491 if (DECL_HAS_VALUE_EXPR_P (key
))
1493 key
= DECL_VALUE_EXPR (key
);
1494 if (TREE_CODE (key
) == INDIRECT_REF
)
1495 key
= TREE_OPERAND (key
, 0);
1497 tree
*c
= oacc_declare_returns
->get (key
);
1501 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1503 ret_clauses
= unshare_expr (*c
);
1505 oacc_declare_returns
->remove (key
);
1507 if (oacc_declare_returns
->is_empty ())
1509 delete oacc_declare_returns
;
1510 oacc_declare_returns
= NULL
;
1516 if (asan_poisoned_variables
!= NULL
1517 && asan_poisoned_variables
->contains (t
))
1519 asan_poisoned_variables
->remove (t
);
1520 asan_poison_variable (t
, true, &cleanup
);
1523 if (gimplify_ctxp
->live_switch_vars
!= NULL
1524 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1525 gimplify_ctxp
->live_switch_vars
->remove (t
);
1531 gimple_stmt_iterator si
= gsi_start (cleanup
);
1533 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1535 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1541 gimple_seq new_body
;
1544 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1545 GIMPLE_TRY_FINALLY
);
1548 gimplify_seq_add_stmt (&new_body
, stack_save
);
1549 gimplify_seq_add_stmt (&new_body
, gs
);
1550 gimple_bind_set_body (bind_stmt
, new_body
);
1553 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1554 if (!gimplify_ctxp
->keep_stack
)
1555 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1556 gimplify_ctxp
->save_stack
= old_save_stack
;
1558 gimple_pop_bind_expr ();
1560 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1568 *expr_p
= NULL_TREE
;
1572 /* Maybe add early return predict statement to PRE_P sequence. */
1575 maybe_add_early_return_predict_stmt (gimple_seq
*pre_p
)
1577 /* If we are not in a conditional context, add PREDICT statement. */
1578 if (gimple_conditional_context ())
1580 gimple
*predict
= gimple_build_predict (PRED_TREE_EARLY_RETURN
,
1582 gimplify_seq_add_stmt (pre_p
, predict
);
1586 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1587 GIMPLE value, it is assigned to a new temporary and the statement is
1588 re-written to return the temporary.
1590 PRE_P points to the sequence where side effects that must happen before
1591 STMT should be stored. */
1593 static enum gimplify_status
1594 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1597 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1598 tree result_decl
, result
;
1600 if (ret_expr
== error_mark_node
)
1604 || TREE_CODE (ret_expr
) == RESULT_DECL
)
1606 maybe_add_early_return_predict_stmt (pre_p
);
1607 greturn
*ret
= gimple_build_return (ret_expr
);
1608 copy_warning (ret
, stmt
);
1609 gimplify_seq_add_stmt (pre_p
, ret
);
1613 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1614 result_decl
= NULL_TREE
;
1615 else if (TREE_CODE (ret_expr
) == COMPOUND_EXPR
)
1617 /* Used in C++ for handling EH cleanup of the return value if a local
1618 cleanup throws. Assume the front-end knows what it's doing. */
1619 result_decl
= DECL_RESULT (current_function_decl
);
1620 /* But crash if we end up trying to modify ret_expr below. */
1621 ret_expr
= NULL_TREE
;
1625 result_decl
= TREE_OPERAND (ret_expr
, 0);
1627 /* See through a return by reference. */
1628 if (TREE_CODE (result_decl
) == INDIRECT_REF
)
1629 result_decl
= TREE_OPERAND (result_decl
, 0);
1631 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1632 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1633 && TREE_CODE (result_decl
) == RESULT_DECL
);
1636 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1637 Recall that aggregate_value_p is FALSE for any aggregate type that is
1638 returned in registers. If we're returning values in registers, then
1639 we don't want to extend the lifetime of the RESULT_DECL, particularly
1640 across another call. In addition, for those aggregates for which
1641 hard_function_value generates a PARALLEL, we'll die during normal
1642 expansion of structure assignments; there's special code in expand_return
1643 to handle this case that does not exist in expand_expr. */
1646 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1648 if (!poly_int_tree_p (DECL_SIZE (result_decl
)))
1650 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1651 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1652 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1653 should be effectively allocated by the caller, i.e. all calls to
1654 this function must be subject to the Return Slot Optimization. */
1655 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1656 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1658 result
= result_decl
;
1660 else if (gimplify_ctxp
->return_temp
)
1661 result
= gimplify_ctxp
->return_temp
;
1664 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1666 /* ??? With complex control flow (usually involving abnormal edges),
1667 we can wind up warning about an uninitialized value for this. Due
1668 to how this variable is constructed and initialized, this is never
1669 true. Give up and never warn. */
1670 suppress_warning (result
, OPT_Wuninitialized
);
1672 gimplify_ctxp
->return_temp
= result
;
1675 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1676 Then gimplify the whole thing. */
1677 if (result
!= result_decl
)
1678 TREE_OPERAND (ret_expr
, 0) = result
;
1680 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1682 maybe_add_early_return_predict_stmt (pre_p
);
1683 ret
= gimple_build_return (result
);
1684 copy_warning (ret
, stmt
);
1685 gimplify_seq_add_stmt (pre_p
, ret
);
1690 /* Gimplify a variable-length array DECL. */
1693 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1695 /* This is a variable-sized decl. Simplify its size and mark it
1696 for deferred expansion. */
1697 tree t
, addr
, ptr_type
;
1699 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1700 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1702 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1703 if (DECL_HAS_VALUE_EXPR_P (decl
))
1706 /* All occurrences of this decl in final gimplified code will be
1707 replaced by indirection. Setting DECL_VALUE_EXPR does two
1708 things: First, it lets the rest of the gimplifier know what
1709 replacement to use. Second, it lets the debug info know
1710 where to find the value. */
1711 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1712 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1713 DECL_IGNORED_P (addr
) = 0;
1714 t
= build_fold_indirect_ref (addr
);
1715 TREE_THIS_NOTRAP (t
) = 1;
1716 SET_DECL_VALUE_EXPR (decl
, t
);
1717 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1719 t
= build_alloca_call_expr (DECL_SIZE_UNIT (decl
), DECL_ALIGN (decl
),
1720 max_int_size_in_bytes (TREE_TYPE (decl
)));
1721 /* The call has been built for a variable-sized object. */
1722 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1723 t
= fold_convert (ptr_type
, t
);
1724 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1726 gimplify_and_add (t
, seq_p
);
1728 /* Record the dynamic allocation associated with DECL if requested. */
1729 if (flag_callgraph_info
& CALLGRAPH_INFO_DYNAMIC_ALLOC
)
1730 record_dynamic_alloc (decl
);
1733 /* A helper function to be called via walk_tree. Mark all labels under *TP
1734 as being forced. To be called for DECL_INITIAL of static variables. */
1737 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1741 if (TREE_CODE (*tp
) == LABEL_DECL
)
1743 FORCED_LABEL (*tp
) = 1;
1744 cfun
->has_forced_label_in_static
= 1;
1750 /* Generate an initialization to automatic variable DECL based on INIT_TYPE.
1751 Build a call to internal const function DEFERRED_INIT:
1752 1st argument: SIZE of the DECL;
1753 2nd argument: INIT_TYPE;
1754 3rd argument: NAME of the DECL;
1756 as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
1759 gimple_add_init_for_auto_var (tree decl
,
1760 enum auto_init_type init_type
,
1763 gcc_assert (auto_var_p (decl
));
1764 gcc_assert (init_type
> AUTO_INIT_UNINITIALIZED
);
1765 location_t loc
= EXPR_LOCATION (decl
);
1766 tree decl_size
= TYPE_SIZE_UNIT (TREE_TYPE (decl
));
1769 = build_int_cst (integer_type_node
, (int) init_type
);
1771 tree decl_name
= NULL_TREE
;
1772 if (DECL_NAME (decl
))
1774 decl_name
= build_string_literal (IDENTIFIER_LENGTH (DECL_NAME (decl
)) + 1,
1775 IDENTIFIER_POINTER (DECL_NAME (decl
)));
1779 char *decl_name_anonymous
= xasprintf ("D.%u", DECL_UID (decl
));
1780 decl_name
= build_string_literal (strlen (decl_name_anonymous
) + 1,
1781 decl_name_anonymous
);
1782 free (decl_name_anonymous
);
1785 tree call
= build_call_expr_internal_loc (loc
, IFN_DEFERRED_INIT
,
1786 TREE_TYPE (decl
), 3,
1787 decl_size
, init_type_node
,
1790 gimplify_assign (decl
, call
, seq_p
);
1793 /* Generate padding initialization for automatic vairable DECL.
1794 C guarantees that brace-init with fewer initializers than members
1795 aggregate will initialize the rest of the aggregate as-if it were
1796 static initialization. In turn static initialization guarantees
1797 that padding is initialized to zero. So, we always initialize paddings
1798 to zeroes regardless INIT_TYPE.
1799 To do the padding initialization, we insert a call to
1800 __builtin_clear_padding (&decl, 0, for_auto_init = true).
1801 Note, we add an additional dummy argument for __builtin_clear_padding,
1802 'for_auto_init' to distinguish whether this call is for automatic
1803 variable initialization or not.
1806 gimple_add_padding_init_for_auto_var (tree decl
, bool is_vla
,
1809 tree addr_of_decl
= NULL_TREE
;
1810 tree fn
= builtin_decl_explicit (BUILT_IN_CLEAR_PADDING
);
1814 /* The temporary address variable for this vla should be
1815 created in gimplify_vla_decl. */
1816 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl
));
1817 gcc_assert (TREE_CODE (DECL_VALUE_EXPR (decl
)) == INDIRECT_REF
);
1818 addr_of_decl
= TREE_OPERAND (DECL_VALUE_EXPR (decl
), 0);
1822 mark_addressable (decl
);
1823 addr_of_decl
= build_fold_addr_expr (decl
);
1826 gimple
*call
= gimple_build_call (fn
, 2, addr_of_decl
,
1827 build_one_cst (TREE_TYPE (addr_of_decl
)));
1828 gimplify_seq_add_stmt (seq_p
, call
);
1831 /* Return true if the DECL need to be automaticly initialized by the
1834 is_var_need_auto_init (tree decl
)
1836 if (auto_var_p (decl
)
1837 && (TREE_CODE (decl
) != VAR_DECL
1838 || !DECL_HARD_REGISTER (decl
))
1839 && (flag_auto_var_init
> AUTO_INIT_UNINITIALIZED
)
1840 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl
)))
1841 && !OPAQUE_TYPE_P (TREE_TYPE (decl
))
1842 && !is_empty_type (TREE_TYPE (decl
)))
1847 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1848 and initialization explicit. */
1850 static enum gimplify_status
1851 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
1853 tree stmt
= *stmt_p
;
1854 tree decl
= DECL_EXPR_DECL (stmt
);
1856 *stmt_p
= NULL_TREE
;
1858 if (TREE_TYPE (decl
) == error_mark_node
)
1861 if ((TREE_CODE (decl
) == TYPE_DECL
1863 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
1865 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
1866 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1867 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
1870 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1871 in case its size expressions contain problematic nodes like CALL_EXPR. */
1872 if (TREE_CODE (decl
) == TYPE_DECL
1873 && DECL_ORIGINAL_TYPE (decl
)
1874 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
1876 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
1877 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
1878 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
1881 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
1883 tree init
= DECL_INITIAL (decl
);
1884 bool is_vla
= false;
1885 /* Check whether a decl has FE created VALUE_EXPR here BEFORE
1886 gimplify_vla_decl creates VALUE_EXPR for a vla decl.
1887 If the decl has VALUE_EXPR that was created by FE (usually
1888 C++FE), it's a proxy varaible, and FE already initialized
1889 the VALUE_EXPR of it, we should not initialize it anymore. */
1890 bool decl_had_value_expr_p
= DECL_HAS_VALUE_EXPR_P (decl
);
1893 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl
), &size
)
1894 || (!TREE_STATIC (decl
)
1895 && flag_stack_check
== GENERIC_STACK_CHECK
1897 (unsigned HOST_WIDE_INT
) STACK_CHECK_MAX_VAR_SIZE
)))
1899 gimplify_vla_decl (decl
, seq_p
);
1903 if (asan_poisoned_variables
1905 && TREE_ADDRESSABLE (decl
)
1906 && !TREE_STATIC (decl
)
1907 && !DECL_HAS_VALUE_EXPR_P (decl
)
1908 && DECL_ALIGN (decl
) <= MAX_SUPPORTED_STACK_ALIGNMENT
1909 && dbg_cnt (asan_use_after_scope
)
1910 && !gimplify_omp_ctxp
1911 /* GNAT introduces temporaries to hold return values of calls in
1912 initializers of variables defined in other units, so the
1913 declaration of the variable is discarded completely. We do not
1914 want to issue poison calls for such dropped variables. */
1915 && (DECL_SEEN_IN_BIND_EXPR_P (decl
)
1916 || (DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)))
1918 asan_poisoned_variables
->add (decl
);
1919 asan_poison_variable (decl
, false, seq_p
);
1920 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
1921 gimplify_ctxp
->live_switch_vars
->add (decl
);
1924 /* Some front ends do not explicitly declare all anonymous
1925 artificial variables. We compensate here by declaring the
1926 variables, though it would be better if the front ends would
1927 explicitly declare them. */
1928 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
1929 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
1930 gimple_add_tmp_var (decl
);
1932 if (init
&& init
!= error_mark_node
)
1934 if (!TREE_STATIC (decl
))
1936 DECL_INITIAL (decl
) = NULL_TREE
;
1937 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
1938 gimplify_and_add (init
, seq_p
);
1940 /* Clear TREE_READONLY if we really have an initialization. */
1941 if (!DECL_INITIAL (decl
)
1942 && !omp_privatize_by_reference (decl
))
1943 TREE_READONLY (decl
) = 0;
1946 /* We must still examine initializers for static variables
1947 as they may contain a label address. */
1948 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
1950 /* When there is no explicit initializer, if the user requested,
1951 We should insert an artifical initializer for this automatic
1953 else if (is_var_need_auto_init (decl
)
1954 && !decl_had_value_expr_p
)
1956 gimple_add_init_for_auto_var (decl
,
1959 /* The expanding of a call to the above .DEFERRED_INIT will apply
1960 block initialization to the whole space covered by this variable.
1961 As a result, all the paddings will be initialized to zeroes
1962 for zero initialization and 0xFE byte-repeatable patterns for
1963 pattern initialization.
1964 In order to make the paddings as zeroes for pattern init, We
1965 should add a call to __builtin_clear_padding to clear the
1966 paddings to zero in compatiple with CLANG.
1967 We cannot insert this call if the variable is a gimple register
1968 since __builtin_clear_padding will take the address of the
1969 variable. As a result, if a long double/_Complex long double
1970 variable will spilled into stack later, its padding is 0XFE. */
1971 if (flag_auto_var_init
== AUTO_INIT_PATTERN
1972 && !is_gimple_reg (decl
)
1973 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl
)))
1974 gimple_add_padding_init_for_auto_var (decl
, is_vla
, seq_p
);
1981 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1982 and replacing the LOOP_EXPR with goto, but if the loop contains an
1983 EXIT_EXPR, we need to append a label for it to jump to. */
1985 static enum gimplify_status
1986 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1988 tree saved_label
= gimplify_ctxp
->exit_label
;
1989 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
1991 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
1993 gimplify_ctxp
->exit_label
= NULL_TREE
;
1995 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
1997 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
1999 if (gimplify_ctxp
->exit_label
)
2000 gimplify_seq_add_stmt (pre_p
,
2001 gimple_build_label (gimplify_ctxp
->exit_label
));
2003 gimplify_ctxp
->exit_label
= saved_label
;
2009 /* Gimplify a statement list onto a sequence. These may be created either
2010 by an enlightened front-end, or by shortcut_cond_expr. */
2012 static enum gimplify_status
2013 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
2015 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
2017 tree_stmt_iterator i
= tsi_start (*expr_p
);
2019 while (!tsi_end_p (i
))
2021 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
2035 /* Emit warning for the unreachable statment STMT if needed.
2036 Return the gimple itself when the warning is emitted, otherwise
2039 emit_warn_switch_unreachable (gimple
*stmt
)
2041 if (gimple_code (stmt
) == GIMPLE_GOTO
2042 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
2043 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
2044 /* Don't warn for compiler-generated gotos. These occur
2045 in Duff's devices, for example. */
2047 else if ((flag_auto_var_init
> AUTO_INIT_UNINITIALIZED
)
2048 && ((gimple_call_internal_p (stmt
, IFN_DEFERRED_INIT
))
2049 || (gimple_call_builtin_p (stmt
, BUILT_IN_CLEAR_PADDING
)
2050 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt
, 1)))
2051 || (is_gimple_assign (stmt
)
2052 && gimple_assign_single_p (stmt
)
2053 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
2054 && gimple_call_internal_p (
2055 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
)),
2056 IFN_DEFERRED_INIT
))))
2057 /* Don't warn for compiler-generated initializations for
2058 -ftrivial-auto-var-init.
2060 case 1: a call to .DEFERRED_INIT;
2061 case 2: a call to __builtin_clear_padding with the 2nd argument is
2062 present and non-zero;
2063 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2064 that has the LHS of .DEFERRED_INIT as the RHS as following:
2065 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2069 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
2070 "statement will never be executed");
2074 /* Callback for walk_gimple_seq. */
2077 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator
*gsi_p
,
2078 bool *handled_ops_p
,
2079 struct walk_stmt_info
*wi
)
2081 gimple
*stmt
= gsi_stmt (*gsi_p
);
2082 bool unreachable_issued
= wi
->info
!= NULL
;
2084 *handled_ops_p
= true;
2085 switch (gimple_code (stmt
))
2088 /* A compiler-generated cleanup or a user-written try block.
2089 If it's empty, don't dive into it--that would result in
2090 worse location info. */
2091 if (gimple_try_eval (stmt
) == NULL
)
2093 if (warn_switch_unreachable
&& !unreachable_issued
)
2094 wi
->info
= emit_warn_switch_unreachable (stmt
);
2096 /* Stop when auto var init warning is not on. */
2097 if (!warn_trivial_auto_var_init
)
2098 return integer_zero_node
;
2103 case GIMPLE_EH_FILTER
:
2104 case GIMPLE_TRANSACTION
:
2105 /* Walk the sub-statements. */
2106 *handled_ops_p
= false;
2110 /* Ignore these. We may generate them before declarations that
2111 are never executed. If there's something to warn about,
2112 there will be non-debug stmts too, and we'll catch those. */
2116 /* Stop till the first Label. */
2117 return integer_zero_node
;
2119 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2121 *handled_ops_p
= false;
2124 if (warn_trivial_auto_var_init
2125 && flag_auto_var_init
> AUTO_INIT_UNINITIALIZED
2126 && gimple_call_internal_p (stmt
, IFN_DEFERRED_INIT
))
2128 /* Get the variable name from the 3rd argument of call. */
2129 tree var_name
= gimple_call_arg (stmt
, 2);
2130 var_name
= TREE_OPERAND (TREE_OPERAND (var_name
, 0), 0);
2131 const char *var_name_str
= TREE_STRING_POINTER (var_name
);
2133 warning_at (gimple_location (stmt
), OPT_Wtrivial_auto_var_init
,
2134 "%qs cannot be initialized with"
2135 "%<-ftrivial-auto-var_init%>",
2142 /* check the first "real" statement (not a decl/lexical scope/...), issue
2143 warning if needed. */
2144 if (warn_switch_unreachable
&& !unreachable_issued
)
2145 wi
->info
= emit_warn_switch_unreachable (stmt
);
2146 /* Stop when auto var init warning is not on. */
2147 if (!warn_trivial_auto_var_init
)
2148 return integer_zero_node
;
2155 /* Possibly warn about unreachable statements between switch's controlling
2156 expression and the first case. Also warn about -ftrivial-auto-var-init
2157 cannot initialize the auto variable under such situation.
2158 SEQ is the body of a switch expression. */
2161 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq
)
2163 if ((!warn_switch_unreachable
&& !warn_trivial_auto_var_init
)
2164 /* This warning doesn't play well with Fortran when optimizations
2166 || lang_GNU_Fortran ()
2170 struct walk_stmt_info wi
;
2172 memset (&wi
, 0, sizeof (wi
));
2173 walk_gimple_seq (seq
, warn_switch_unreachable_and_auto_init_r
, NULL
, &wi
);
2177 /* A label entry that pairs label and a location. */
2184 /* Find LABEL in vector of label entries VEC. */
2186 static struct label_entry
*
2187 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
2190 struct label_entry
*l
;
2192 FOR_EACH_VEC_ELT (*vec
, i
, l
)
2193 if (l
->label
== label
)
2198 /* Return true if LABEL, a LABEL_DECL, represents a case label
2199 in a vector of labels CASES. */
2202 case_label_p (const vec
<tree
> *cases
, tree label
)
2207 FOR_EACH_VEC_ELT (*cases
, i
, l
)
2208 if (CASE_LABEL (l
) == label
)
2213 /* Find the last nondebug statement in a scope STMT. */
2216 last_stmt_in_scope (gimple
*stmt
)
2221 switch (gimple_code (stmt
))
2225 gbind
*bind
= as_a
<gbind
*> (stmt
);
2226 stmt
= gimple_seq_last_nondebug_stmt (gimple_bind_body (bind
));
2227 return last_stmt_in_scope (stmt
);
2232 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
2233 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt
));
2234 gimple
*last_eval
= last_stmt_in_scope (stmt
);
2235 if (gimple_stmt_may_fallthru (last_eval
)
2236 && (last_eval
== NULL
2237 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
2238 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
2240 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt
));
2241 return last_stmt_in_scope (stmt
);
2255 /* Collect labels that may fall through into LABELS and return the statement
2256 preceding another case label, or a user-defined label. Store a location
2257 useful to give warnings at *PREVLOC (usually the location of the returned
2258 statement or of its surrounding scope). */
2261 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
2262 auto_vec
<struct label_entry
> *labels
,
2263 location_t
*prevloc
)
2265 gimple
*prev
= NULL
;
2267 *prevloc
= UNKNOWN_LOCATION
;
2270 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
)
2272 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2273 which starts on a GIMPLE_SWITCH and ends with a break label.
2274 Handle that as a single statement that can fall through. */
2275 gbind
*bind
= as_a
<gbind
*> (gsi_stmt (*gsi_p
));
2276 gimple
*first
= gimple_seq_first_stmt (gimple_bind_body (bind
));
2277 gimple
*last
= gimple_seq_last_stmt (gimple_bind_body (bind
));
2279 && gimple_code (first
) == GIMPLE_SWITCH
2280 && gimple_code (last
) == GIMPLE_LABEL
)
2282 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2283 if (SWITCH_BREAK_LABEL_P (label
))
2291 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
2292 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
2294 /* Nested scope. Only look at the last statement of
2295 the innermost scope. */
2296 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
2297 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
2301 /* It might be a label without a location. Use the
2302 location of the scope then. */
2303 if (!gimple_has_location (prev
))
2304 *prevloc
= bind_loc
;
2310 /* Ifs are tricky. */
2311 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
2313 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
2314 tree false_lab
= gimple_cond_false_label (cond_stmt
);
2315 location_t if_loc
= gimple_location (cond_stmt
);
2318 if (i > 1) goto <D.2259>; else goto D;
2319 we can't do much with the else-branch. */
2320 if (!DECL_ARTIFICIAL (false_lab
))
2323 /* Go on until the false label, then one step back. */
2324 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
2326 gimple
*stmt
= gsi_stmt (*gsi_p
);
2327 if (gimple_code (stmt
) == GIMPLE_LABEL
2328 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
2332 /* Not found? Oops. */
2333 if (gsi_end_p (*gsi_p
))
2336 /* A dead label can't fall through. */
2337 if (!UNUSED_LABEL_P (false_lab
))
2339 struct label_entry l
= { false_lab
, if_loc
};
2340 labels
->safe_push (l
);
2343 /* Go to the last statement of the then branch. */
2346 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2352 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
2353 && !gimple_has_location (gsi_stmt (*gsi_p
)))
2355 /* Look at the statement before, it might be
2356 attribute fallthrough, in which case don't warn. */
2358 bool fallthru_before_dest
2359 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
2361 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
2362 if (!fallthru_before_dest
)
2364 struct label_entry l
= { goto_dest
, if_loc
};
2365 labels
->safe_push (l
);
2368 /* This case is about
2369 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2374 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2375 through to #3. So set PREV to #1. */
2376 else if (UNUSED_LABEL_P (false_lab
))
2377 prev
= gsi_stmt (*gsi_p
);
2379 /* And move back. */
2383 /* Remember the last statement. Skip labels that are of no interest
2385 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2387 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
2388 if (find_label_entry (labels
, label
))
2389 prev
= gsi_stmt (*gsi_p
);
2391 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
2393 else if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_PREDICT
)
2395 else if (!is_gimple_debug (gsi_stmt (*gsi_p
)))
2396 prev
= gsi_stmt (*gsi_p
);
2399 while (!gsi_end_p (*gsi_p
)
2400 /* Stop if we find a case or a user-defined label. */
2401 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
2402 || !gimple_has_location (gsi_stmt (*gsi_p
))));
2404 if (prev
&& gimple_has_location (prev
))
2405 *prevloc
= gimple_location (prev
);
/* NOTE(review): this block appears garbled by extraction -- original source
   line numbers are embedded as stray tokens and gaps in that numbering show
   lines are missing.  Code tokens are preserved byte-for-byte; only comments
   were added or fixed.  */
2409 /* Return true if the switch fallthrough warning should occur. LABEL is
2410 the label statement that we're falling through to. */
2413 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
2415 gimple_stmt_iterator gsi
= *gsi_p
;
2417 /* Don't warn if the label is marked with a "falls through" comment. */
2418 if (FALLTHROUGH_LABEL_P (label
))
2421 /* Don't warn for non-case labels followed by a statement:
2426 as these are likely intentional. */
2427 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
2430 while (!gsi_end_p (gsi
)
2431 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2432 && (l
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi
))))
2433 && !case_label_p (&gimplify_ctxp
->case_labels
, l
))
2434 gsi_next_nondebug (&gsi
);
2435 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2439 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2440 immediately breaks. */
2443 /* Skip all immediately following labels. */
2444 while (!gsi_end_p (gsi
)
2445 && (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2446 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_PREDICT
))
2447 gsi_next_nondebug (&gsi
);
2449 /* { ... something; default:; } */
2451 /* { ... something; default: break; } or
2452 { ... something; default: goto L; } */
2453 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2454 /* { ... something; default: return; } */
2455 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
/* NOTE(review): this block appears garbled by extraction -- original source
   line numbers are embedded as stray tokens and gaps in that numbering show
   lines are missing.  Code tokens are preserved byte-for-byte; only comments
   were added.
   Purpose (grounded in the visible code): walk_gimple_seq callback that looks
   for a run of fallthrough labels (via collect_fallthrough_labels) followed by
   a located user label, and emits -Wimplicit-fallthrough diagnostics.  */
2461 /* Callback for walk_gimple_seq. */
2464 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2465 struct walk_stmt_info
*)
2467 gimple
*stmt
= gsi_stmt (*gsi_p
);
2469 *handled_ops_p
= true;
2470 switch (gimple_code (stmt
))
2475 case GIMPLE_EH_FILTER
:
2476 case GIMPLE_TRANSACTION
:
2477 /* Walk the sub-statements. */
2478 *handled_ops_p
= false;
2481 /* Find a sequence of form:
2488 and possibly warn. */
2491 /* Found a label. Skip all immediately following labels. */
2492 while (!gsi_end_p (*gsi_p
)
2493 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2494 gsi_next_nondebug (gsi_p
);
2496 /* There might be no more statements. */
2497 if (gsi_end_p (*gsi_p
))
2498 return integer_zero_node
;
2500 /* Vector of labels that fall through. */
2501 auto_vec
<struct label_entry
> labels
;
2503 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
, &prevloc
);
2505 /* There might be no more statements. */
2506 if (gsi_end_p (*gsi_p
))
2507 return integer_zero_node
;
2509 gimple
*next
= gsi_stmt (*gsi_p
);
2511 /* If what follows is a label, then we may have a fallthrough. */
2512 if (gimple_code (next
) == GIMPLE_LABEL
2513 && gimple_has_location (next
)
2514 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2517 struct label_entry
*l
;
2518 bool warned_p
= false;
2519 auto_diagnostic_group d
;
2520 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
2522 else if (gimple_code (prev
) == GIMPLE_LABEL
2523 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2524 && (l
= find_label_entry (&labels
, label
)))
2525 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2526 "this statement may fall through");
2527 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2528 /* Try to be clever and don't warn when the statement
2529 can't actually fall through. */
2530 && gimple_stmt_may_fallthru (prev
)
2531 && prevloc
!= UNKNOWN_LOCATION
)
2532 warned_p
= warning_at (prevloc
,
2533 OPT_Wimplicit_fallthrough_
,
2534 "this statement may fall through");
2536 inform (gimple_location (next
), "here");
2538 /* Mark this label as processed so as to prevent multiple
2539 warnings in nested switches. */
2540 FALLTHROUGH_LABEL_P (label
) = true;
2542 /* So that next warn_implicit_fallthrough_r will start looking for
2543 a new sequence starting with this label. */
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only.  Drives the
   -Wimplicit-fallthrough walk over SEQ via warn_implicit_fallthrough_r.  */
2554 /* Warn when a switch case falls through. */
2557 maybe_warn_implicit_fallthrough (gimple_seq seq
)
2559 if (!warn_implicit_fallthrough
)
2562 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2565 || lang_GNU_OBJC ()))
2568 struct walk_stmt_info wi
;
2569 memset (&wi
, 0, sizeof (wi
));
2570 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
/* NOTE(review): garbled by extraction (embedded line numbers, gaps show
   missing lines).  Code tokens preserved byte-for-byte; comments only.
   Purpose (grounded in the visible code): walk_gimple_seq callback that
   removes IFN_FALLTHROUGH internal calls, verifying each precedes a case or
   default label; otherwise pedwarns.  A trailing fallthrough's location is
   reported back through wi->info and integer_zero_node.  */
2573 /* Callback for walk_gimple_seq. */
2576 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2577 struct walk_stmt_info
*wi
)
2579 gimple
*stmt
= gsi_stmt (*gsi_p
);
2581 *handled_ops_p
= true;
2582 switch (gimple_code (stmt
))
2587 case GIMPLE_EH_FILTER
:
2588 case GIMPLE_TRANSACTION
:
2589 /* Walk the sub-statements. */
2590 *handled_ops_p
= false;
2593 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2595 gsi_remove (gsi_p
, true);
2596 if (gsi_end_p (*gsi_p
))
2598 *static_cast<location_t
*>(wi
->info
) = gimple_location (stmt
);
2599 return integer_zero_node
;
2603 location_t loc
= gimple_location (stmt
);
2605 gimple_stmt_iterator gsi2
= *gsi_p
;
2606 stmt
= gsi_stmt (gsi2
);
2607 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2609 /* Go on until the artificial label. */
2610 tree goto_dest
= gimple_goto_dest (stmt
);
2611 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2613 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2614 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2619 /* Not found? Stop. */
2620 if (gsi_end_p (gsi2
))
2623 /* Look one past it. */
2627 /* We're looking for a case label or default label here. */
2628 while (!gsi_end_p (gsi2
))
2630 stmt
= gsi_stmt (gsi2
);
2631 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2633 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2634 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2640 else if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2642 else if (!is_gimple_debug (stmt
))
2643 /* Anything else is not expected. */
2648 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2649 "a case label or default label");
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only.  Walks SEQ with
   expand_FALLTHROUGH_r; a callback result of integer_zero_node means a
   [[fallthrough]] was found at the end of a switch, which is pedwarned.  */
2658 /* Expand all FALLTHROUGH () calls in SEQ. */
2661 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2663 struct walk_stmt_info wi
;
2665 memset (&wi
, 0, sizeof (wi
));
2666 wi
.info
= (void *) &loc
;
2667 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2668 if (wi
.callback_result
== integer_zero_node
)
2669 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2670 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2671 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2672 "a case label or default label");
/* NOTE(review): garbled by extraction (embedded line numbers, gaps show
   missing lines -- e.g. the header comment is cut short).  Code tokens
   preserved byte-for-byte; comments only.
   Purpose (grounded in the visible code): gimplify a SWITCH_EXPR -- gimplify
   the condition and body, collect case labels into gimplify_ctxp->case_labels,
   run the unreachable/fallthrough warnings, synthesize a default case when
   needed, and emit the GIMPLE_SWITCH (wrapped in a GIMPLE_BIND when the body
   ends in a SWITCH_BREAK_LABEL_P label, for -Wimplicit-fallthrough).  */
2676 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2679 static enum gimplify_status
2680 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2682 tree switch_expr
= *expr_p
;
2683 gimple_seq switch_body_seq
= NULL
;
2684 enum gimplify_status ret
;
2685 tree index_type
= TREE_TYPE (switch_expr
);
2686 if (index_type
== NULL_TREE
)
2687 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
2689 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2691 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2694 if (SWITCH_BODY (switch_expr
))
2697 vec
<tree
> saved_labels
;
2698 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2699 tree default_case
= NULL_TREE
;
2700 gswitch
*switch_stmt
;
2702 /* Save old labels, get new ones from body, then restore the old
2703 labels. Save all the things from the switch body to append after. */
2704 saved_labels
= gimplify_ctxp
->case_labels
;
2705 gimplify_ctxp
->case_labels
.create (8);
2707 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2708 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2709 tree_code body_type
= TREE_CODE (SWITCH_BODY (switch_expr
));
2710 if (body_type
== BIND_EXPR
|| body_type
== STATEMENT_LIST
)
2711 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2713 gimplify_ctxp
->live_switch_vars
= NULL
;
2715 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2716 gimplify_ctxp
->in_switch_expr
= true;
2718 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2720 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2721 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq
);
2722 maybe_warn_implicit_fallthrough (switch_body_seq
);
2723 /* Only do this for the outermost GIMPLE_SWITCH. */
2724 if (!gimplify_ctxp
->in_switch_expr
)
2725 expand_FALLTHROUGH (&switch_body_seq
);
2727 labels
= gimplify_ctxp
->case_labels
;
2728 gimplify_ctxp
->case_labels
= saved_labels
;
2730 if (gimplify_ctxp
->live_switch_vars
)
2732 gcc_assert (gimplify_ctxp
->live_switch_vars
->is_empty ());
2733 delete gimplify_ctxp
->live_switch_vars
;
2735 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2737 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2740 bool add_bind
= false;
2743 glabel
*new_default
;
2746 = build_case_label (NULL_TREE
, NULL_TREE
,
2747 create_artificial_label (UNKNOWN_LOCATION
));
2748 if (old_in_switch_expr
)
2750 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case
)) = 1;
2753 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2754 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2756 else if (old_in_switch_expr
)
2758 gimple
*last
= gimple_seq_last_stmt (switch_body_seq
);
2759 if (last
&& gimple_code (last
) == GIMPLE_LABEL
)
2761 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2762 if (SWITCH_BREAK_LABEL_P (label
))
2767 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2768 default_case
, labels
);
2769 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2770 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2771 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2772 so that we can easily find the start and end of the switch
2776 gimple_seq bind_body
= NULL
;
2777 gimplify_seq_add_stmt (&bind_body
, switch_stmt
);
2778 gimple_seq_add_seq (&bind_body
, switch_body_seq
);
2779 gbind
*bind
= gimple_build_bind (NULL_TREE
, bind_body
, NULL_TREE
);
2780 gimple_set_location (bind
, EXPR_LOCATION (switch_expr
));
2781 gimplify_seq_add_stmt (pre_p
, bind
);
2785 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
2786 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only.  Emits a GIMPLE_LABEL
   for the LABEL_EXPR, plus a GIMPLE_PREDICT when the label carries a "cold"
   or "hot" attribute.  */
2796 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2798 static enum gimplify_status
2799 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2801 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
2802 == current_function_decl
);
2804 tree label
= LABEL_EXPR_LABEL (*expr_p
);
2805 glabel
*label_stmt
= gimple_build_label (label
);
2806 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2807 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2809 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2810 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2812 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2813 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only.  Records the case label
   in the nearest gimplify context that has a case_labels vec and emits the
   GIMPLE_LABEL (plus hot/cold predict hints).  */
2819 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2821 static enum gimplify_status
2822 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2824 struct gimplify_ctx
*ctxp
;
2827 /* Invalid programs can play Duff's Device type games with, for example,
2828 #pragma omp parallel. At least in the C front end, we don't
2829 detect such invalid branches until after gimplification, in the
2830 diagnose_omp_blocks pass. */
2831 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
2832 if (ctxp
->case_labels
.exists ())
2835 tree label
= CASE_LABEL (*expr_p
);
2836 label_stmt
= gimple_build_label (label
);
2837 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2838 ctxp
->case_labels
.safe_push (*expr_p
);
2839 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2841 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2842 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2844 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2845 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only.  Builds a GOTO_EXPR to
   *LABEL_P, creating an artificial label first when *LABEL_P is NULL_TREE.  */
2851 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2855 build_and_jump (tree
*label_p
)
2857 if (label_p
== NULL
)
2858 /* If there's nowhere to jump, just fall through. */
2861 if (*label_p
== NULL_TREE
)
2863 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
2867 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only.  */
2870 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2871 This also involves building a label to jump to and communicating it to
2872 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2874 static enum gimplify_status
2875 gimplify_exit_expr (tree
*expr_p
)
2877 tree cond
= TREE_OPERAND (*expr_p
, 0);
2880 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
2881 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only.  */
2887 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2888 different from its canonical type, wrap the whole thing inside a
2889 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2892 The canonical type of a COMPONENT_REF is the type of the field being
2893 referenced--unless the field is a bit-field which can be read directly
2894 in a smaller mode, in which case the canonical type is the
2895 sign-appropriate type corresponding to that mode. */
2898 canonicalize_component_ref (tree
*expr_p
)
2900 tree expr
= *expr_p
;
2903 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
2905 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
2906 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
2908 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
2910 /* One could argue that all the stuff below is not necessary for
2911 the non-bitfield case and declare it a FE error if type
2912 adjustment would be needed. */
2913 if (TREE_TYPE (expr
) != type
)
2915 #ifdef ENABLE_TYPES_CHECKING
2916 tree old_type
= TREE_TYPE (expr
);
2920 /* We need to preserve qualifiers and propagate them from
2922 type_quals
= TYPE_QUALS (type
)
2923 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
2924 if (TYPE_QUALS (type
) != type_quals
)
2925 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
2927 /* Set the type of the COMPONENT_REF to the underlying type. */
2928 TREE_TYPE (expr
) = type
;
2930 #ifdef ENABLE_TYPES_CHECKING
2931 /* It is now a FE error, if the conversion from the canonical
2932 type to the original expression type is not useless. */
2933 gcc_assert (useless_type_conversion_p (old_type
, type
));
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only.  */
2938 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2939 to foo, embed that change in the ADDR_EXPR by converting
2944 where L is the lower bound. For simplicity, only do this for constant
2946 The constraint is that the type of &array[L] is trivially convertible
2950 canonicalize_addr_expr (tree
*expr_p
)
2952 tree expr
= *expr_p
;
2953 tree addr_expr
= TREE_OPERAND (expr
, 0);
2954 tree datype
, ddatype
, pddatype
;
2956 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2957 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
2958 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
2961 /* The addr_expr type should be a pointer to an array. */
2962 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
2963 if (TREE_CODE (datype
) != ARRAY_TYPE
)
2966 /* The pointer to element type shall be trivially convertible to
2967 the expression pointer type. */
2968 ddatype
= TREE_TYPE (datype
);
2969 pddatype
= build_pointer_type (ddatype
);
2970 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
2974 /* The lower bound and element sizes must be constant. */
2975 if (!TYPE_SIZE_UNIT (ddatype
)
2976 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
2977 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
2978 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
2981 /* All checks succeeded. Build a new node to merge the cast. */
2982 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
2983 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
2984 NULL_TREE
, NULL_TREE
);
2985 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
2987 /* We can have stripped a required restrict qualifier above. */
2988 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
2989 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only.  */
2992 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2993 underneath as appropriate. */
2995 static enum gimplify_status
2996 gimplify_conversion (tree
*expr_p
)
2998 location_t loc
= EXPR_LOCATION (*expr_p
);
2999 gcc_assert (CONVERT_EXPR_P (*expr_p
));
3001 /* Then strip away all but the outermost conversion. */
3002 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
3004 /* And remove the outermost conversion if it's useless. */
3005 if (tree_ssa_useless_type_conversion (*expr_p
))
3006 *expr_p
= TREE_OPERAND (*expr_p
, 0);
3008 /* If we still have a conversion at the toplevel,
3009 then canonicalize some constructs. */
3010 if (CONVERT_EXPR_P (*expr_p
))
3012 tree sub
= TREE_OPERAND (*expr_p
, 0);
3014 /* If a NOP conversion is changing the type of a COMPONENT_REF
3015 expression, then canonicalize its type now in order to expose more
3016 redundant conversions. */
3017 if (TREE_CODE (sub
) == COMPONENT_REF
)
3018 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
3020 /* If a NOP conversion is changing a pointer to array of foo
3021 to a pointer to foo, embed that change in the ADDR_EXPR. */
3022 else if (TREE_CODE (sub
) == ADDR_EXPR
)
3023 canonicalize_addr_expr (expr_p
);
3026 /* If we have a conversion to a non-register type force the
3027 use of a VIEW_CONVERT_EXPR instead. */
3028 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
3029 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
3030 TREE_OPERAND (*expr_p
, 0));
3032 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3033 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
3034 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only (typo fixed below).  */
3039 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3040 DECL_VALUE_EXPR, and it's worth re-examining things. */
3042 static enum gimplify_status
3043 gimplify_var_or_parm_decl (tree
*expr_p
)
3045 tree decl
= *expr_p
;
3047 /* ??? If this is a local variable, and it has not been seen in any
3048 outer BIND_EXPR, then it's probably the result of a duplicate
3049 declaration, for which we've already issued an error. It would
3050 be really nice if the front end wouldn't leak these at all.
3051 Currently the only known culprit is C++ destructors, as seen
3052 in g++.old-deja/g++.jason/binding.C.
3053 Another possible culprit are size expressions for variably modified
3054 types which are lost in the FE or not gimplified correctly. */
3056 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
3057 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
3058 && decl_function_context (decl
) == current_function_decl
)
3060 gcc_assert (seen_error ());
3064 /* When within an OMP context, notice uses of variables. */
3065 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
3068 /* If the decl is an alias for another expression, substitute it now. */
3069 if (DECL_HAS_VALUE_EXPR_P (decl
))
3071 *expr_p
= unshare_expr (DECL_VALUE_EXPR (decl
));
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only.  */
3078 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3081 recalculate_side_effects (tree t
)
3083 enum tree_code code
= TREE_CODE (t
);
3084 int len
= TREE_OPERAND_LENGTH (t
);
3087 switch (TREE_CODE_CLASS (code
))
3089 case tcc_expression
:
3095 case PREDECREMENT_EXPR
:
3096 case PREINCREMENT_EXPR
:
3097 case POSTDECREMENT_EXPR
:
3098 case POSTINCREMENT_EXPR
:
3099 /* All of these have side-effects, no matter what their
3108 case tcc_comparison
: /* a comparison expression */
3109 case tcc_unary
: /* a unary arithmetic expression */
3110 case tcc_binary
: /* a binary arithmetic expression */
3111 case tcc_reference
: /* a reference */
3112 case tcc_vl_exp
: /* a function call */
3113 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
3114 for (i
= 0; i
< len
; ++i
)
3116 tree op
= TREE_OPERAND (t
, i
);
3117 if (op
&& TREE_SIDE_EFFECTS (op
))
3118 TREE_SIDE_EFFECTS (t
) = 1;
3123 /* No side-effects. */
/* NOTE(review): garbled by extraction (embedded line numbers, gaps show
   missing lines).  Code tokens preserved byte-for-byte; comments only.  */
3131 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3135 : min_lval '[' val ']'
3137 | compound_lval '[' val ']'
3138 | compound_lval '.' ID
3140 This is not part of the original SIMPLE definition, which separates
3141 array and member references, but it seems reasonable to handle them
3142 together. Also, this way we don't run into problems with union
3143 aliasing; gcc requires that for accesses through a union to alias, the
3144 union reference must be explicit, which was not always the case when we
3145 were splitting up array and member refs.
3147 PRE_P points to the sequence where side effects that must happen before
3148 *EXPR_P should be stored.
3150 POST_P points to the sequence where side effects that must happen after
3151 *EXPR_P should be stored. */
3153 static enum gimplify_status
3154 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3155 fallback_t fallback
)
3158 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
3160 location_t loc
= EXPR_LOCATION (*expr_p
);
3161 tree expr
= *expr_p
;
3163 /* Create a stack of the subexpressions so later we can walk them in
3164 order from inner to outer. */
3165 auto_vec
<tree
, 10> expr_stack
;
3167 /* We can handle anything that get_inner_reference can deal with. */
3168 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
3171 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3172 if (TREE_CODE (*p
) == INDIRECT_REF
)
3173 *p
= fold_indirect_ref_loc (loc
, *p
);
3175 if (handled_component_p (*p
))
3177 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3178 additional COMPONENT_REFs. */
3179 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
3180 && gimplify_var_or_parm_decl (p
) == GS_OK
)
3185 expr_stack
.safe_push (*p
);
3188 gcc_assert (expr_stack
.length ());
3190 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3191 walked through and P points to the innermost expression.
3193 Java requires that we elaborated nodes in source order. That
3194 means we must gimplify the inner expression followed by each of
3195 the indices, in order. But we can't gimplify the inner
3196 expression until we deal with any variable bounds, sizes, or
3197 positions in order to deal with PLACEHOLDER_EXPRs.
3199 The base expression may contain a statement expression that
3200 has declarations used in size expressions, so has to be
3201 gimplified before gimplifying the size expressions.
3203 So we do this in three steps. First we deal with variable
3204 bounds, sizes, and positions, then we gimplify the base and
3205 ensure it is memory if needed, then we deal with the annotations
3206 for any variables in the components and any indices, from left
3209 bool need_non_reg
= false;
3210 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
3212 tree t
= expr_stack
[i
];
3214 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3216 /* Deal with the low bound and element type size and put them into
3217 the ARRAY_REF. If these values are set, they have already been
3219 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
3221 tree low
= unshare_expr (array_ref_low_bound (t
));
3222 if (!is_gimple_min_invariant (low
))
3224 TREE_OPERAND (t
, 2) = low
;
3228 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
3230 tree elmt_size
= array_ref_element_size (t
);
3231 if (!is_gimple_min_invariant (elmt_size
))
3233 elmt_size
= unshare_expr (elmt_size
);
3234 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
3235 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
3237 /* Divide the element size by the alignment of the element
3239 elmt_size
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3242 TREE_OPERAND (t
, 3) = elmt_size
;
3245 need_non_reg
= true;
3247 else if (TREE_CODE (t
) == COMPONENT_REF
)
3249 /* Set the field offset into T and gimplify it. */
3250 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
3252 tree offset
= component_ref_field_offset (t
);
3253 if (!is_gimple_min_invariant (offset
))
3255 offset
= unshare_expr (offset
);
3256 tree field
= TREE_OPERAND (t
, 1);
3258 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
3260 /* Divide the offset by its alignment. */
3261 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3264 TREE_OPERAND (t
, 2) = offset
;
3267 need_non_reg
= true;
3271 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3272 so as to match the min_lval predicate. Failure to do so may result
3273 in the creation of large aggregate temporaries. */
3274 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
3275 fallback
| fb_lvalue
);
3276 ret
= MIN (ret
, tret
);
3278 /* Step 2a: if we have component references we do not support on
3279 registers then make sure the base isn't a register. Of course
3280 we can only do so if an rvalue is OK. */
3281 if (need_non_reg
&& (fallback
& fb_rvalue
))
3282 prepare_gimple_addressable (p
, pre_p
);
3284 /* Step 3: gimplify size expressions and the indices and operands of
3285 ARRAY_REF. During this loop we also remove any useless conversions. */
3287 for (; expr_stack
.length () > 0; )
3289 tree t
= expr_stack
.pop ();
3291 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3293 /* Gimplify the low bound and element type size. */
3294 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3295 is_gimple_reg
, fb_rvalue
);
3296 ret
= MIN (ret
, tret
);
3298 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
3299 is_gimple_reg
, fb_rvalue
);
3300 ret
= MIN (ret
, tret
);
3302 /* Gimplify the dimension. */
3303 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
3304 is_gimple_val
, fb_rvalue
);
3305 ret
= MIN (ret
, tret
);
3307 else if (TREE_CODE (t
) == COMPONENT_REF
)
3309 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3310 is_gimple_reg
, fb_rvalue
);
3311 ret
= MIN (ret
, tret
);
3314 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
3316 /* The innermost expression P may have originally had
3317 TREE_SIDE_EFFECTS set which would have caused all the outer
3318 expressions in *EXPR_P leading to P to also have had
3319 TREE_SIDE_EFFECTS set. */
3320 recalculate_side_effects (t
);
3323 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3324 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
3326 canonicalize_component_ref (expr_p
);
3329 expr_stack
.release ();
3331 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
/* NOTE(review): garbled by extraction (embedded line numbers, gaps show
   missing lines).  Code tokens preserved byte-for-byte; comments only.  */
3336 /* Gimplify the self modifying expression pointed to by EXPR_P
3339 PRE_P points to the list where side effects that must happen before
3340 *EXPR_P should be stored.
3342 POST_P points to the list where side effects that must happen after
3343 *EXPR_P should be stored.
3345 WANT_VALUE is nonzero iff we want to use the value of this expression
3346 in another expression.
3348 ARITH_TYPE is the type the computation should be performed in. */
3350 enum gimplify_status
3351 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3352 bool want_value
, tree arith_type
)
3354 enum tree_code code
;
3355 tree lhs
, lvalue
, rhs
, t1
;
3356 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
3358 enum tree_code arith_code
;
3359 enum gimplify_status ret
;
3360 location_t loc
= EXPR_LOCATION (*expr_p
);
3362 code
= TREE_CODE (*expr_p
);
3364 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
3365 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
3367 /* Prefix or postfix? */
3368 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
3369 /* Faster to treat as prefix if result is not used. */
3370 postfix
= want_value
;
3374 /* For postfix, make sure the inner expression's post side effects
3375 are executed after side effects from this expression. */
3379 /* Add or subtract? */
3380 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
3381 arith_code
= PLUS_EXPR
;
3383 arith_code
= MINUS_EXPR
;
3385 /* Gimplify the LHS into a GIMPLE lvalue. */
3386 lvalue
= TREE_OPERAND (*expr_p
, 0);
3387 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
3388 if (ret
== GS_ERROR
)
3391 /* Extract the operands to the arithmetic operation. */
3393 rhs
= TREE_OPERAND (*expr_p
, 1);
3395 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3396 that as the result value and in the postqueue operation. */
3399 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
3400 if (ret
== GS_ERROR
)
3403 lhs
= get_initialized_tmp_var (lhs
, pre_p
);
3406 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3407 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
3409 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
3410 if (arith_code
== MINUS_EXPR
)
3411 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
3412 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
3415 t1
= fold_convert (TREE_TYPE (*expr_p
),
3416 fold_build2 (arith_code
, arith_type
,
3417 fold_convert (arith_type
, lhs
),
3418 fold_convert (arith_type
, rhs
)));
3422 gimplify_assign (lvalue
, t1
, pre_p
);
3423 gimplify_seq_add_seq (orig_post_p
, post
);
3429 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only.  */
3434 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3437 maybe_with_size_expr (tree
*expr_p
)
3439 tree expr
= *expr_p
;
3440 tree type
= TREE_TYPE (expr
);
3443 /* If we've already wrapped this or the type is error_mark_node, we can't do
3445 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
3446 || type
== error_mark_node
)
3449 /* If the size isn't known or is a constant, we have nothing to do. */
3450 size
= TYPE_SIZE_UNIT (type
);
3451 if (!size
|| poly_int_tree_p (size
))
3454 /* Otherwise, make a WITH_SIZE_EXPR. */
3455 size
= unshare_expr (size
);
3456 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
3457 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
/* NOTE(review): garbled by extraction (embedded line numbers, missing lines).
   Code tokens preserved byte-for-byte; comments only.  */
3460 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3461 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3462 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3463 gimplified to an SSA name. */
3465 enum gimplify_status
3466 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
3469 bool (*test
) (tree
);
3472 /* In general, we allow lvalues for function arguments to avoid
3473 extra overhead of copying large aggregates out of even larger
3474 aggregates into temporaries only to copy the temporaries to
3475 the argument list. Make optimizers happy by pulling out to
3476 temporaries those types that fit in registers. */
3477 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3478 test
= is_gimple_val
, fb
= fb_rvalue
;
3481 test
= is_gimple_lvalue
, fb
= fb_either
;
3482 /* Also strip a TARGET_EXPR that would force an extra copy. */
3483 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3485 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
3487 && !VOID_TYPE_P (TREE_TYPE (init
)))
3492 /* If this is a variable sized type, we must remember the size. */
3493 maybe_with_size_expr (arg_p
);
3495 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3496 /* Make sure arguments have the same location as the function call
3498 protected_set_expr_location (*arg_p
, call_location
);
3500 /* There is a sequence point before a function call. Side effects in
3501 the argument list must occur before the actual call. So, when
3502 gimplifying arguments, force gimplify_expr to use an internal
3503 post queue which is then appended to the end of PRE_P. */
3504 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
3507 /* Don't fold inside offloading or taskreg regions: it can break code by
3508 adding decl references that weren't in the source. We'll do it during
3509 omplower pass instead. */
3512 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3514 struct gimplify_omp_ctx
*ctx
;
3515 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3516 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3518 else if ((ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
3520 /* Delay folding of builtins until the IL is in consistent state
3521 so the diagnostic machinery can do a better job. */
3522 if (gimple_call_builtin_p (gsi_stmt (*gsi
)))
3524 return fold_stmt (gsi
);
3527 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3528 WANT_VALUE is true if the result of the call is desired. */
3530 static enum gimplify_status
3531 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3533 tree fndecl
, parms
, p
, fnptrtype
;
3534 enum gimplify_status ret
;
3537 bool builtin_va_start_p
= false;
3538 location_t loc
= EXPR_LOCATION (*expr_p
);
3540 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3542 /* For reliable diagnostics during inlining, it is necessary that
3543 every call_expr be annotated with file and line. */
3544 if (! EXPR_HAS_LOCATION (*expr_p
))
3545 SET_EXPR_LOCATION (*expr_p
, input_location
);
3547 /* Gimplify internal functions created in the FEs. */
3548 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3553 nargs
= call_expr_nargs (*expr_p
);
3554 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3555 auto_vec
<tree
> vargs (nargs
);
3557 if (ifn
== IFN_ASSUME
)
3559 if (simple_condition_p (CALL_EXPR_ARG (*expr_p
, 0)))
3561 /* If the [[assume (cond)]]; condition is simple
3562 enough and can be evaluated unconditionally
3563 without side-effects, expand it as
3564 if (!cond) __builtin_unreachable (); */
3565 tree fndecl
= builtin_decl_explicit (BUILT_IN_UNREACHABLE
);
3566 *expr_p
= build3 (COND_EXPR
, void_type_node
,
3567 CALL_EXPR_ARG (*expr_p
, 0), void_node
,
3568 build_call_expr_loc (EXPR_LOCATION (*expr_p
),
3572 /* If not optimizing, ignore the assumptions. */
3575 *expr_p
= NULL_TREE
;
3578 /* Temporarily, until gimple lowering, transform
3585 such that gimple lowering can outline the condition into
3586 a separate function easily. */
3587 tree guard
= create_tmp_var (boolean_type_node
);
3588 *expr_p
= build2 (MODIFY_EXPR
, void_type_node
, guard
,
3589 CALL_EXPR_ARG (*expr_p
, 0));
3590 *expr_p
= build3 (BIND_EXPR
, void_type_node
, NULL
, *expr_p
, NULL
);
3591 push_gimplify_context ();
3592 gimple_seq body
= NULL
;
3593 gimple
*g
= gimplify_and_return_first (*expr_p
, &body
);
3594 pop_gimplify_context (g
);
3595 g
= gimple_build_assume (guard
, body
);
3596 gimple_set_location (g
, loc
);
3597 gimplify_seq_add_stmt (pre_p
, g
);
3598 *expr_p
= NULL_TREE
;
3602 for (i
= 0; i
< nargs
; i
++)
3604 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3605 EXPR_LOCATION (*expr_p
));
3606 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3609 gcall
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3610 gimple_call_set_nothrow (call
, TREE_NOTHROW (*expr_p
));
3611 gimplify_seq_add_stmt (pre_p
, call
);
3615 /* This may be a call to a builtin function.
3617 Builtin function calls may be transformed into different
3618 (and more efficient) builtin function calls under certain
3619 circumstances. Unfortunately, gimplification can muck things
3620 up enough that the builtin expanders are not aware that certain
3621 transformations are still valid.
3623 So we attempt transformation/gimplification of the call before
3624 we gimplify the CALL_EXPR. At this time we do not manage to
3625 transform all calls in the same manner as the expanders do, but
3626 we do transform most of them. */
3627 fndecl
= get_callee_fndecl (*expr_p
);
3628 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3629 switch (DECL_FUNCTION_CODE (fndecl
))
3631 CASE_BUILT_IN_ALLOCA
:
3632 /* If the call has been built for a variable-sized object, then we
3633 want to restore the stack level when the enclosing BIND_EXPR is
3634 exited to reclaim the allocated space; otherwise, we precisely
3635 need to do the opposite and preserve the latest stack level. */
3636 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3637 gimplify_ctxp
->save_stack
= true;
3639 gimplify_ctxp
->keep_stack
= true;
3642 case BUILT_IN_VA_START
:
3644 builtin_va_start_p
= TRUE
;
3645 if (call_expr_nargs (*expr_p
) < 2)
3647 error ("too few arguments to function %<va_start%>");
3648 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3652 if (fold_builtin_next_arg (*expr_p
, true))
3654 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3660 case BUILT_IN_EH_RETURN
:
3661 cfun
->calls_eh_return
= true;
3664 case BUILT_IN_CLEAR_PADDING
:
3665 if (call_expr_nargs (*expr_p
) == 1)
3667 /* Remember the original type of the argument in an internal
3668 dummy second argument, as in GIMPLE pointer conversions are
3669 useless. Also mark this call as not for automatic
3670 initialization in the internal dummy third argument. */
3671 p
= CALL_EXPR_ARG (*expr_p
, 0);
3673 = build_call_expr_loc (EXPR_LOCATION (*expr_p
), fndecl
, 2, p
,
3674 build_zero_cst (TREE_TYPE (p
)));
3682 if (fndecl
&& fndecl_built_in_p (fndecl
))
3684 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3685 if (new_tree
&& new_tree
!= *expr_p
)
3687 /* There was a transformation of this call which computes the
3688 same value, but in a more efficient way. Return and try
3695 /* Remember the original function pointer type. */
3696 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
3701 && (cfun
->curr_properties
& PROP_gimple_any
) == 0)
3703 tree variant
= omp_resolve_declare_variant (fndecl
);
3704 if (variant
!= fndecl
)
3705 CALL_EXPR_FN (*expr_p
) = build1 (ADDR_EXPR
, fnptrtype
, variant
);
3708 /* There is a sequence point before the call, so any side effects in
3709 the calling expression must occur before the actual call. Force
3710 gimplify_expr to use an internal post queue. */
3711 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3712 is_gimple_call_addr
, fb_rvalue
);
3714 nargs
= call_expr_nargs (*expr_p
);
3716 /* Get argument types for verification. */
3717 fndecl
= get_callee_fndecl (*expr_p
);
3720 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3722 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3724 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3725 p
= DECL_ARGUMENTS (fndecl
);
3730 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3733 /* If the last argument is __builtin_va_arg_pack () and it is not
3734 passed as a named argument, decrease the number of CALL_EXPR
3735 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3738 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3740 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3741 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3744 && fndecl_built_in_p (last_arg_fndecl
, BUILT_IN_VA_ARG_PACK
))
3746 tree call
= *expr_p
;
3749 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
3750 CALL_EXPR_FN (call
),
3751 nargs
, CALL_EXPR_ARGP (call
));
3753 /* Copy all CALL_EXPR flags, location and block, except
3754 CALL_EXPR_VA_ARG_PACK flag. */
3755 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
3756 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
3757 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
3758 = CALL_EXPR_RETURN_SLOT_OPT (call
);
3759 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
3760 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
3762 /* Set CALL_EXPR_VA_ARG_PACK. */
3763 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
3767 /* If the call returns twice then after building the CFG the call
3768 argument computations will no longer dominate the call because
3769 we add an abnormal incoming edge to the call. So do not use SSA
3771 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
3773 /* Gimplify the function arguments. */
3776 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
3777 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
3778 PUSH_ARGS_REVERSED
? i
-- : i
++)
3780 enum gimplify_status t
;
3782 /* Avoid gimplifying the second argument to va_start, which needs to
3783 be the plain PARM_DECL. */
3784 if ((i
!= 1) || !builtin_va_start_p
)
3786 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3787 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3795 /* Gimplify the static chain. */
3796 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
3798 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
3799 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
3802 enum gimplify_status t
;
3803 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
3804 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3810 /* Verify the function result. */
3811 if (want_value
&& fndecl
3812 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
3814 error_at (loc
, "using result of function returning %<void%>");
3818 /* Try this again in case gimplification exposed something. */
3819 if (ret
!= GS_ERROR
)
3821 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3823 if (new_tree
&& new_tree
!= *expr_p
)
3825 /* There was a transformation of this call which computes the
3826 same value, but in a more efficient way. Return and try
3834 *expr_p
= error_mark_node
;
3838 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3839 decl. This allows us to eliminate redundant or useless
3840 calls to "const" functions. */
3841 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
3843 int flags
= call_expr_flags (*expr_p
);
3844 if (flags
& (ECF_CONST
| ECF_PURE
)
3845 /* An infinite loop is considered a side effect. */
3846 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
3847 TREE_SIDE_EFFECTS (*expr_p
) = 0;
3850 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3851 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3852 form and delegate the creation of a GIMPLE_CALL to
3853 gimplify_modify_expr. This is always possible because when
3854 WANT_VALUE is true, the caller wants the result of this call into
3855 a temporary, which means that we will emit an INIT_EXPR in
3856 internal_get_tmp_var which will then be handled by
3857 gimplify_modify_expr. */
3860 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3861 have to do is replicate it as a GIMPLE_CALL tuple. */
3862 gimple_stmt_iterator gsi
;
3863 call
= gimple_build_call_from_tree (*expr_p
, fnptrtype
);
3864 notice_special_calls (call
);
3865 gimplify_seq_add_stmt (pre_p
, call
);
3866 gsi
= gsi_last (*pre_p
);
3867 maybe_fold_stmt (&gsi
);
3868 *expr_p
= NULL_TREE
;
3871 /* Remember the original function type. */
3872 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
3873 CALL_EXPR_FN (*expr_p
));
3878 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3879 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3881 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3882 condition is true or false, respectively. If null, we should generate
3883 our own to skip over the evaluation of this specific expression.
3885 LOCUS is the source location of the COND_EXPR.
3887 This function is the tree equivalent of do_jump.
3889 shortcut_cond_r should only be called by shortcut_cond_expr. */
3892 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
3895 tree local_label
= NULL_TREE
;
3896 tree t
, expr
= NULL
;
3898 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3899 retain the shortcut semantics. Just insert the gotos here;
3900 shortcut_cond_expr will append the real blocks later. */
3901 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3903 location_t new_locus
;
3905 /* Turn if (a && b) into
3907 if (a); else goto no;
3908 if (b) goto yes; else goto no;
3911 if (false_label_p
== NULL
)
3912 false_label_p
= &local_label
;
3914 /* Keep the original source location on the first 'if'. */
3915 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
3916 append_to_statement_list (t
, &expr
);
3918 /* Set the source location of the && on the second 'if'. */
3919 new_locus
= rexpr_location (pred
, locus
);
3920 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3922 append_to_statement_list (t
, &expr
);
3924 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3926 location_t new_locus
;
3928 /* Turn if (a || b) into
3931 if (b) goto yes; else goto no;
3934 if (true_label_p
== NULL
)
3935 true_label_p
= &local_label
;
3937 /* Keep the original source location on the first 'if'. */
3938 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
3939 append_to_statement_list (t
, &expr
);
3941 /* Set the source location of the || on the second 'if'. */
3942 new_locus
= rexpr_location (pred
, locus
);
3943 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3945 append_to_statement_list (t
, &expr
);
3947 else if (TREE_CODE (pred
) == COND_EXPR
3948 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
3949 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
3951 location_t new_locus
;
3953 /* As long as we're messing with gotos, turn if (a ? b : c) into
3955 if (b) goto yes; else goto no;
3957 if (c) goto yes; else goto no;
3959 Don't do this if one of the arms has void type, which can happen
3960 in C++ when the arm is throw. */
3962 /* Keep the original source location on the first 'if'. Set the source
3963 location of the ? on the second 'if'. */
3964 new_locus
= rexpr_location (pred
, locus
);
3965 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
3966 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
3967 false_label_p
, locus
),
3968 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
3969 false_label_p
, new_locus
));
3973 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
3974 build_and_jump (true_label_p
),
3975 build_and_jump (false_label_p
));
3976 SET_EXPR_LOCATION (expr
, locus
);
3981 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
3982 append_to_statement_list (t
, &expr
);
3988 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3989 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3990 statement, if it is the last one. Otherwise, return NULL. */
3993 find_goto (tree expr
)
3998 if (TREE_CODE (expr
) == GOTO_EXPR
)
4001 if (TREE_CODE (expr
) != STATEMENT_LIST
)
4004 tree_stmt_iterator i
= tsi_start (expr
);
4006 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
4009 if (!tsi_one_before_end_p (i
))
4012 return find_goto (tsi_stmt (i
));
4015 /* Same as find_goto, except that it returns NULL if the destination
4016 is not a LABEL_DECL. */
4019 find_goto_label (tree expr
)
4021 tree dest
= find_goto (expr
);
4022 if (dest
&& TREE_CODE (GOTO_DESTINATION (dest
)) == LABEL_DECL
)
4027 /* Given a conditional expression EXPR with short-circuit boolean
4028 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
4029 predicate apart into the equivalent sequence of conditionals. */
4032 shortcut_cond_expr (tree expr
)
4034 tree pred
= TREE_OPERAND (expr
, 0);
4035 tree then_
= TREE_OPERAND (expr
, 1);
4036 tree else_
= TREE_OPERAND (expr
, 2);
4037 tree true_label
, false_label
, end_label
, t
;
4039 tree
*false_label_p
;
4040 bool emit_end
, emit_false
, jump_over_else
;
4041 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
4042 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
4044 /* First do simple transformations. */
4047 /* If there is no 'else', turn
4050 if (a) if (b) then c. */
4051 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
4053 /* Keep the original source location on the first 'if'. */
4054 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
4055 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
4056 /* Set the source location of the && on the second 'if'. */
4057 if (rexpr_has_location (pred
))
4058 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
4059 then_
= shortcut_cond_expr (expr
);
4060 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
4061 pred
= TREE_OPERAND (pred
, 0);
4062 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
4063 SET_EXPR_LOCATION (expr
, locus
);
4069 /* If there is no 'then', turn
4072 if (a); else if (b); else d. */
4073 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
4075 /* Keep the original source location on the first 'if'. */
4076 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
4077 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
4078 /* Set the source location of the || on the second 'if'. */
4079 if (rexpr_has_location (pred
))
4080 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
4081 else_
= shortcut_cond_expr (expr
);
4082 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
4083 pred
= TREE_OPERAND (pred
, 0);
4084 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
4085 SET_EXPR_LOCATION (expr
, locus
);
4089 /* If we're done, great. */
4090 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
4091 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
4094 /* Otherwise we need to mess with gotos. Change
4097 if (a); else goto no;
4100 and recursively gimplify the condition. */
4102 true_label
= false_label
= end_label
= NULL_TREE
;
4104 /* If our arms just jump somewhere, hijack those labels so we don't
4105 generate jumps to jumps. */
4107 if (tree then_goto
= find_goto_label (then_
))
4109 true_label
= GOTO_DESTINATION (then_goto
);
4114 if (tree else_goto
= find_goto_label (else_
))
4116 false_label
= GOTO_DESTINATION (else_goto
);
4121 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4123 true_label_p
= &true_label
;
4125 true_label_p
= NULL
;
4127 /* The 'else' branch also needs a label if it contains interesting code. */
4128 if (false_label
|| else_se
)
4129 false_label_p
= &false_label
;
4131 false_label_p
= NULL
;
4133 /* If there was nothing else in our arms, just forward the label(s). */
4134 if (!then_se
&& !else_se
)
4135 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
4136 EXPR_LOC_OR_LOC (expr
, input_location
));
4138 /* If our last subexpression already has a terminal label, reuse it. */
4140 t
= expr_last (else_
);
4142 t
= expr_last (then_
);
4145 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
4146 end_label
= LABEL_EXPR_LABEL (t
);
4148 /* If we don't care about jumping to the 'else' branch, jump to the end
4149 if the condition is false. */
4151 false_label_p
= &end_label
;
4153 /* We only want to emit these labels if we aren't hijacking them. */
4154 emit_end
= (end_label
== NULL_TREE
);
4155 emit_false
= (false_label
== NULL_TREE
);
4157 /* We only emit the jump over the else clause if we have to--if the
4158 then clause may fall through. Otherwise we can wind up with a
4159 useless jump and a useless label at the end of gimplified code,
4160 which will cause us to think that this conditional as a whole
4161 falls through even if it doesn't. If we then inline a function
4162 which ends with such a condition, that can cause us to issue an
4163 inappropriate warning about control reaching the end of a
4164 non-void function. */
4165 jump_over_else
= block_may_fallthru (then_
);
4167 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
4168 EXPR_LOC_OR_LOC (expr
, input_location
));
4171 append_to_statement_list (pred
, &expr
);
4173 append_to_statement_list (then_
, &expr
);
4178 tree last
= expr_last (expr
);
4179 t
= build_and_jump (&end_label
);
4180 if (rexpr_has_location (last
))
4181 SET_EXPR_LOCATION (t
, rexpr_location (last
));
4182 append_to_statement_list (t
, &expr
);
4186 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
4187 append_to_statement_list (t
, &expr
);
4189 append_to_statement_list (else_
, &expr
);
4191 if (emit_end
&& end_label
)
4193 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
4194 append_to_statement_list (t
, &expr
);
4200 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4203 gimple_boolify (tree expr
)
4205 tree type
= TREE_TYPE (expr
);
4206 location_t loc
= EXPR_LOCATION (expr
);
4208 if (TREE_CODE (expr
) == NE_EXPR
4209 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
4210 && integer_zerop (TREE_OPERAND (expr
, 1)))
4212 tree call
= TREE_OPERAND (expr
, 0);
4213 tree fn
= get_callee_fndecl (call
);
4215 /* For __builtin_expect ((long) (x), y) recurse into x as well
4216 if x is truth_value_p. */
4218 && fndecl_built_in_p (fn
, BUILT_IN_EXPECT
)
4219 && call_expr_nargs (call
) == 2)
4221 tree arg
= CALL_EXPR_ARG (call
, 0);
4224 if (TREE_CODE (arg
) == NOP_EXPR
4225 && TREE_TYPE (arg
) == TREE_TYPE (call
))
4226 arg
= TREE_OPERAND (arg
, 0);
4227 if (truth_value_p (TREE_CODE (arg
)))
4229 arg
= gimple_boolify (arg
);
4230 CALL_EXPR_ARG (call
, 0)
4231 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
4237 switch (TREE_CODE (expr
))
4239 case TRUTH_AND_EXPR
:
4241 case TRUTH_XOR_EXPR
:
4242 case TRUTH_ANDIF_EXPR
:
4243 case TRUTH_ORIF_EXPR
:
4244 /* Also boolify the arguments of truth exprs. */
4245 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
4248 case TRUTH_NOT_EXPR
:
4249 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4251 /* These expressions always produce boolean results. */
4252 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4253 TREE_TYPE (expr
) = boolean_type_node
;
4257 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
4259 case annot_expr_ivdep_kind
:
4260 case annot_expr_unroll_kind
:
4261 case annot_expr_no_vector_kind
:
4262 case annot_expr_vector_kind
:
4263 case annot_expr_parallel_kind
:
4264 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4265 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4266 TREE_TYPE (expr
) = boolean_type_node
;
4273 if (COMPARISON_CLASS_P (expr
))
4275 /* There expressions always prduce boolean results. */
4276 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4277 TREE_TYPE (expr
) = boolean_type_node
;
4280 /* Other expressions that get here must have boolean values, but
4281 might need to be converted to the appropriate mode. */
4282 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
4284 return fold_convert_loc (loc
, boolean_type_node
, expr
);
4288 /* Given a conditional expression *EXPR_P without side effects, gimplify
4289 its operands. New statements are inserted to PRE_P. */
4291 static enum gimplify_status
4292 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
4294 tree expr
= *expr_p
, cond
;
4295 enum gimplify_status ret
, tret
;
4296 enum tree_code code
;
4298 cond
= gimple_boolify (COND_EXPR_COND (expr
));
4300 /* We need to handle && and || specially, as their gimplification
4301 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4302 code
= TREE_CODE (cond
);
4303 if (code
== TRUTH_ANDIF_EXPR
)
4304 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
4305 else if (code
== TRUTH_ORIF_EXPR
)
4306 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
4307 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
4308 COND_EXPR_COND (*expr_p
) = cond
;
4310 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
4311 is_gimple_val
, fb_rvalue
);
4312 ret
= MIN (ret
, tret
);
4313 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
4314 is_gimple_val
, fb_rvalue
);
4316 return MIN (ret
, tret
);
4319 /* Return true if evaluating EXPR could trap.
4320 EXPR is GENERIC, while tree_could_trap_p can be called
4324 generic_expr_could_trap_p (tree expr
)
4328 if (!expr
|| is_gimple_val (expr
))
4331 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
4334 n
= TREE_OPERAND_LENGTH (expr
);
4335 for (i
= 0; i
< n
; i
++)
4336 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
4342 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4351 The second form is used when *EXPR_P is of type void.
4353 PRE_P points to the list where side effects that must happen before
4354 *EXPR_P should be stored. */
4356 static enum gimplify_status
4357 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
4359 tree expr
= *expr_p
;
4360 tree type
= TREE_TYPE (expr
);
4361 location_t loc
= EXPR_LOCATION (expr
);
4362 tree tmp
, arm1
, arm2
;
4363 enum gimplify_status ret
;
4364 tree label_true
, label_false
, label_cont
;
4365 bool have_then_clause_p
, have_else_clause_p
;
4367 enum tree_code pred_code
;
4368 gimple_seq seq
= NULL
;
4370 /* If this COND_EXPR has a value, copy the values into a temporary within
4372 if (!VOID_TYPE_P (type
))
4374 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
4377 /* If either an rvalue is ok or we do not require an lvalue, create the
4378 temporary. But we cannot do that if the type is addressable. */
4379 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
4380 && !TREE_ADDRESSABLE (type
))
4382 if (gimplify_ctxp
->allow_rhs_cond_expr
4383 /* If either branch has side effects or could trap, it can't be
4384 evaluated unconditionally. */
4385 && !TREE_SIDE_EFFECTS (then_
)
4386 && !generic_expr_could_trap_p (then_
)
4387 && !TREE_SIDE_EFFECTS (else_
)
4388 && !generic_expr_could_trap_p (else_
))
4389 return gimplify_pure_cond_expr (expr_p
, pre_p
);
4391 tmp
= create_tmp_var (type
, "iftmp");
4395 /* Otherwise, only create and copy references to the values. */
4398 type
= build_pointer_type (type
);
4400 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4401 then_
= build_fold_addr_expr_loc (loc
, then_
);
4403 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4404 else_
= build_fold_addr_expr_loc (loc
, else_
);
4407 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
4409 tmp
= create_tmp_var (type
, "iftmp");
4410 result
= build_simple_mem_ref_loc (loc
, tmp
);
4413 /* Build the new then clause, `tmp = then_;'. But don't build the
4414 assignment if the value is void; in C++ it can be if it's a throw. */
4415 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4416 TREE_OPERAND (expr
, 1) = build2 (INIT_EXPR
, type
, tmp
, then_
);
4418 /* Similarly, build the new else clause, `tmp = else_;'. */
4419 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4420 TREE_OPERAND (expr
, 2) = build2 (INIT_EXPR
, type
, tmp
, else_
);
4422 TREE_TYPE (expr
) = void_type_node
;
4423 recalculate_side_effects (expr
);
4425 /* Move the COND_EXPR to the prequeue. */
4426 gimplify_stmt (&expr
, pre_p
);
4432 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4433 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
4434 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
4435 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
4437 /* Make sure the condition has BOOLEAN_TYPE. */
4438 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4440 /* Break apart && and || conditions. */
4441 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
4442 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
4444 expr
= shortcut_cond_expr (expr
);
4446 if (expr
!= *expr_p
)
4450 /* We can't rely on gimplify_expr to re-gimplify the expanded
4451 form properly, as cleanups might cause the target labels to be
4452 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4453 set up a conditional context. */
4454 gimple_push_condition ();
4455 gimplify_stmt (expr_p
, &seq
);
4456 gimple_pop_condition (pre_p
);
4457 gimple_seq_add_seq (pre_p
, seq
);
4463 /* Now do the normal gimplification. */
4465 /* Gimplify condition. */
4466 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
,
4467 is_gimple_condexpr_for_cond
, fb_rvalue
);
4468 if (ret
== GS_ERROR
)
4470 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
4472 gimple_push_condition ();
4474 have_then_clause_p
= have_else_clause_p
= false;
4475 label_true
= find_goto_label (TREE_OPERAND (expr
, 1));
4477 && DECL_CONTEXT (GOTO_DESTINATION (label_true
)) == current_function_decl
4478 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4479 have different locations, otherwise we end up with incorrect
4480 location information on the branches. */
4482 || !EXPR_HAS_LOCATION (expr
)
4483 || !rexpr_has_location (label_true
)
4484 || EXPR_LOCATION (expr
) == rexpr_location (label_true
)))
4486 have_then_clause_p
= true;
4487 label_true
= GOTO_DESTINATION (label_true
);
4490 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
4491 label_false
= find_goto_label (TREE_OPERAND (expr
, 2));
4493 && DECL_CONTEXT (GOTO_DESTINATION (label_false
)) == current_function_decl
4494 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4495 have different locations, otherwise we end up with incorrect
4496 location information on the branches. */
4498 || !EXPR_HAS_LOCATION (expr
)
4499 || !rexpr_has_location (label_false
)
4500 || EXPR_LOCATION (expr
) == rexpr_location (label_false
)))
4502 have_else_clause_p
= true;
4503 label_false
= GOTO_DESTINATION (label_false
);
4506 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
4508 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
4510 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
4512 gimple_set_location (cond_stmt
, EXPR_LOCATION (expr
));
4513 copy_warning (cond_stmt
, COND_EXPR_COND (expr
));
4514 gimplify_seq_add_stmt (&seq
, cond_stmt
);
4515 gimple_stmt_iterator gsi
= gsi_last (seq
);
4516 maybe_fold_stmt (&gsi
);
4518 label_cont
= NULL_TREE
;
4519 if (!have_then_clause_p
)
4521 /* For if (...) {} else { code; } put label_true after
4523 if (TREE_OPERAND (expr
, 1) == NULL_TREE
4524 && !have_else_clause_p
4525 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
4527 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4528 handling that label_cont == label_true can be only reached
4529 through fallthrough from { code; }. */
4530 if (integer_zerop (COND_EXPR_COND (expr
)))
4531 UNUSED_LABEL_P (label_true
) = 1;
4532 label_cont
= label_true
;
4536 bool then_side_effects
4537 = (TREE_OPERAND (expr
, 1)
4538 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr
, 1)));
4539 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
4540 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
4541 /* For if (...) { code; } else {} or
4542 if (...) { code; } else goto label; or
4543 if (...) { code; return; } else { ... }
4544 label_cont isn't needed. */
4545 if (!have_else_clause_p
4546 && TREE_OPERAND (expr
, 2) != NULL_TREE
4547 && gimple_seq_may_fallthru (seq
))
4550 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
4552 /* For if (0) { non-side-effect-code } else { code }
4553 tell -Wimplicit-fallthrough handling that label_cont can
4554 be only reached through fallthrough from { code }. */
4555 if (integer_zerop (COND_EXPR_COND (expr
)))
4557 UNUSED_LABEL_P (label_true
) = 1;
4558 if (!then_side_effects
)
4559 UNUSED_LABEL_P (label_cont
) = 1;
4562 g
= gimple_build_goto (label_cont
);
4564 /* GIMPLE_COND's are very low level; they have embedded
4565 gotos. This particular embedded goto should not be marked
4566 with the location of the original COND_EXPR, as it would
4567 correspond to the COND_EXPR's condition, not the ELSE or the
4568 THEN arms. To avoid marking it with the wrong location, flag
4569 it as "no location". */
4570 gimple_set_do_not_emit_location (g
);
4572 gimplify_seq_add_stmt (&seq
, g
);
4576 if (!have_else_clause_p
)
4578 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4579 tell -Wimplicit-fallthrough handling that label_false can be only
4580 reached through fallthrough from { code }. */
4581 if (integer_nonzerop (COND_EXPR_COND (expr
))
4582 && (TREE_OPERAND (expr
, 2) == NULL_TREE
4583 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr
, 2))))
4584 UNUSED_LABEL_P (label_false
) = 1;
4585 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
4586 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
4589 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
4591 gimple_pop_condition (pre_p
);
4592 gimple_seq_add_seq (pre_p
, seq
);
4594 if (ret
== GS_ERROR
)
4596 else if (have_then_clause_p
|| have_else_clause_p
)
4600 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4601 expr
= TREE_OPERAND (expr
, 0);
4602 gimplify_stmt (&expr
, pre_p
);
4609 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4610 to be marked addressable.
4612 We cannot rely on such an expression being directly markable if a temporary
4613 has been created by the gimplification. In this case, we create another
4614 temporary and initialize it with a copy, which will become a store after we
4615 mark it addressable. This can happen if the front-end passed us something
4616 that it could not mark addressable yet, like a Fortran pass-by-reference
4617 parameter (int) floatvar. */
4620 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
4622 while (handled_component_p (*expr_p
))
4623 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4625 /* Do not allow an SSA name as the temporary. */
4626 if (is_gimple_reg (*expr_p
))
4627 *expr_p
= internal_get_tmp_var (*expr_p
, seq_p
, NULL
, false, false, true);
4630 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4631 a call to __builtin_memcpy. */
4633 static enum gimplify_status
4634 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4637 tree t
, to
, to_ptr
, from
, from_ptr
;
4639 location_t loc
= EXPR_LOCATION (*expr_p
);
4641 to
= TREE_OPERAND (*expr_p
, 0);
4642 from
= TREE_OPERAND (*expr_p
, 1);
4644 /* Mark the RHS addressable. Beware that it may not be possible to do so
4645 directly if a temporary has been created by the gimplification. */
4646 prepare_gimple_addressable (&from
, seq_p
);
4648 mark_addressable (from
);
4649 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4650 gimplify_arg (&from_ptr
, seq_p
, loc
);
4652 mark_addressable (to
);
4653 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4654 gimplify_arg (&to_ptr
, seq_p
, loc
);
4656 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4658 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
4659 gimple_call_set_alloca_for_var (gs
, true);
4663 /* tmp = memcpy() */
4664 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4665 gimple_call_set_lhs (gs
, t
);
4666 gimplify_seq_add_stmt (seq_p
, gs
);
4668 *expr_p
= build_simple_mem_ref (t
);
4672 gimplify_seq_add_stmt (seq_p
, gs
);
4677 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4678 a call to __builtin_memset. In this case we know that the RHS is
4679 a CONSTRUCTOR with an empty element list. */
4681 static enum gimplify_status
4682 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4685 tree t
, from
, to
, to_ptr
;
4687 location_t loc
= EXPR_LOCATION (*expr_p
);
4689 /* Assert our assumptions, to abort instead of producing wrong code
4690 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4691 not be immediately exposed. */
4692 from
= TREE_OPERAND (*expr_p
, 1);
4693 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4694 from
= TREE_OPERAND (from
, 0);
4696 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4697 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4700 to
= TREE_OPERAND (*expr_p
, 0);
4702 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4703 gimplify_arg (&to_ptr
, seq_p
, loc
);
4704 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4706 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4710 /* tmp = memset() */
4711 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4712 gimple_call_set_lhs (gs
, t
);
4713 gimplify_seq_add_stmt (seq_p
, gs
);
4715 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
4719 gimplify_seq_add_stmt (seq_p
, gs
);
4724 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4725 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4726 assignment. Return non-null if we detect a potential overlap. */
4728 struct gimplify_init_ctor_preeval_data
4730 /* The base decl of the lhs object. May be NULL, in which case we
4731 have to assume the lhs is indirect. */
4734 /* The alias set of the lhs object. */
4735 alias_set_type lhs_alias_set
;
4739 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4741 struct gimplify_init_ctor_preeval_data
*data
4742 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4745 /* If we find the base object, obviously we have overlap. */
4746 if (data
->lhs_base_decl
== t
)
4749 /* If the constructor component is indirect, determine if we have a
4750 potential overlap with the lhs. The only bits of information we
4751 have to go on at this point are addressability and alias sets. */
4752 if ((INDIRECT_REF_P (t
)
4753 || TREE_CODE (t
) == MEM_REF
)
4754 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4755 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
4758 /* If the constructor component is a call, determine if it can hide a
4759 potential overlap with the lhs through an INDIRECT_REF like above.
4760 ??? Ugh - this is completely broken. In fact this whole analysis
4761 doesn't look conservative. */
4762 if (TREE_CODE (t
) == CALL_EXPR
)
4764 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
4766 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
4767 if (POINTER_TYPE_P (TREE_VALUE (type
))
4768 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4769 && alias_sets_conflict_p (data
->lhs_alias_set
,
4771 (TREE_TYPE (TREE_VALUE (type
)))))
4775 if (IS_TYPE_OR_DECL_P (t
))
4780 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4781 force values that overlap with the lhs (as described by *DATA)
4782 into temporaries. */
4785 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4786 struct gimplify_init_ctor_preeval_data
*data
)
4788 enum gimplify_status one
;
4790 /* If the value is constant, then there's nothing to pre-evaluate. */
4791 if (TREE_CONSTANT (*expr_p
))
4793 /* Ensure it does not have side effects, it might contain a reference to
4794 the object we're initializing. */
4795 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
4799 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4800 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
4803 /* Recurse for nested constructors. */
4804 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
4806 unsigned HOST_WIDE_INT ix
;
4807 constructor_elt
*ce
;
4808 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
4810 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
4811 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
4816 /* If this is a variable sized type, we must remember the size. */
4817 maybe_with_size_expr (expr_p
);
4819 /* Gimplify the constructor element to something appropriate for the rhs
4820 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4821 the gimplifier will consider this a store to memory. Doing this
4822 gimplification now means that we won't have to deal with complicated
4823 language-specific trees, nor trees like SAVE_EXPR that can induce
4824 exponential search behavior. */
4825 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
4826 if (one
== GS_ERROR
)
4832 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4833 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4834 always be true for all scalars, since is_gimple_mem_rhs insists on a
4835 temporary variable for them. */
4836 if (DECL_P (*expr_p
))
4839 /* If this is of variable size, we have no choice but to assume it doesn't
4840 overlap since we can't make a temporary for it. */
4841 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
4844 /* Otherwise, we must search for overlap ... */
4845 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
4848 /* ... and if found, force the value into a temporary. */
4849 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
4852 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4853 a RANGE_EXPR in a CONSTRUCTOR for an array.
4857 object[var] = value;
4864 We increment var _after_ the loop exit check because we might otherwise
4865 fail if upper == TYPE_MAX_VALUE (type for upper).
4867 Note that we never have to deal with SAVE_EXPRs here, because this has
4868 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4870 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
4871 gimple_seq
*, bool);
4874 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
4875 tree value
, tree array_elt_type
,
4876 gimple_seq
*pre_p
, bool cleared
)
4878 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
4879 tree var
, var_type
, cref
, tmp
;
4881 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
4882 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
4883 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
4885 /* Create and initialize the index variable. */
4886 var_type
= TREE_TYPE (upper
);
4887 var
= create_tmp_var (var_type
);
4888 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
4890 /* Add the loop entry label. */
4891 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
4893 /* Build the reference. */
4894 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4895 var
, NULL_TREE
, NULL_TREE
);
4897 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4898 the store. Otherwise just assign value to the reference. */
4900 if (TREE_CODE (value
) == CONSTRUCTOR
)
4901 /* NB we might have to call ourself recursively through
4902 gimplify_init_ctor_eval if the value is a constructor. */
4903 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4907 if (gimplify_expr (&value
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
4909 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
4912 /* We exit the loop when the index var is equal to the upper bound. */
4913 gimplify_seq_add_stmt (pre_p
,
4914 gimple_build_cond (EQ_EXPR
, var
, upper
,
4915 loop_exit_label
, fall_thru_label
));
4917 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
4919 /* Otherwise, increment the index var... */
4920 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
4921 fold_convert (var_type
, integer_one_node
));
4922 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
4924 /* ...and jump back to the loop entry. */
4925 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
4927 /* Add the loop exit label. */
4928 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
4931 /* A subroutine of gimplify_init_constructor. Generate individual
4932 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4933 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4934 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4938 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
4939 gimple_seq
*pre_p
, bool cleared
)
4941 tree array_elt_type
= NULL
;
4942 unsigned HOST_WIDE_INT ix
;
4943 tree purpose
, value
;
4945 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
4946 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
4948 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
4952 /* NULL values are created above for gimplification errors. */
4956 if (cleared
&& initializer_zerop (value
))
4959 /* ??? Here's to hoping the front end fills in all of the indices,
4960 so we don't have to figure out what's missing ourselves. */
4961 gcc_assert (purpose
);
4963 /* Skip zero-sized fields, unless value has side-effects. This can
4964 happen with calls to functions returning a empty type, which
4965 we shouldn't discard. As a number of downstream passes don't
4966 expect sets of empty type fields, we rely on the gimplification of
4967 the MODIFY_EXPR we make below to drop the assignment statement. */
4968 if (!TREE_SIDE_EFFECTS (value
)
4969 && TREE_CODE (purpose
) == FIELD_DECL
4970 && is_empty_type (TREE_TYPE (purpose
)))
4973 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4975 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4977 tree lower
= TREE_OPERAND (purpose
, 0);
4978 tree upper
= TREE_OPERAND (purpose
, 1);
4980 /* If the lower bound is equal to upper, just treat it as if
4981 upper was the index. */
4982 if (simple_cst_equal (lower
, upper
))
4986 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
4987 array_elt_type
, pre_p
, cleared
);
4994 /* Do not use bitsizetype for ARRAY_REF indices. */
4995 if (TYPE_DOMAIN (TREE_TYPE (object
)))
4997 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
4999 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
5000 purpose
, NULL_TREE
, NULL_TREE
);
5004 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
5005 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
5006 unshare_expr (object
), purpose
, NULL_TREE
);
5009 if (TREE_CODE (value
) == CONSTRUCTOR
5010 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
5011 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
5015 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
5016 gimplify_and_add (init
, pre_p
);
5022 /* Return the appropriate RHS predicate for this LHS. */
5025 rhs_predicate_for (tree lhs
)
5027 if (is_gimple_reg (lhs
))
5028 return is_gimple_reg_rhs_or_call
;
5030 return is_gimple_mem_rhs_or_call
;
5033 /* Return the initial guess for an appropriate RHS predicate for this LHS,
5034 before the LHS has been gimplified. */
5036 static gimple_predicate
5037 initial_rhs_predicate_for (tree lhs
)
5039 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
5040 return is_gimple_reg_rhs_or_call
;
5042 return is_gimple_mem_rhs_or_call
;
5045 /* Gimplify a C99 compound literal expression. This just means adding
5046 the DECL_EXPR before the current statement and using its anonymous
5049 static enum gimplify_status
5050 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
5051 bool (*gimple_test_f
) (tree
),
5052 fallback_t fallback
)
5054 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
5055 tree decl
= DECL_EXPR_DECL (decl_s
);
5056 tree init
= DECL_INITIAL (decl
);
5057 /* Mark the decl as addressable if the compound literal
5058 expression is addressable now, otherwise it is marked too late
5059 after we gimplify the initialization expression. */
5060 if (TREE_ADDRESSABLE (*expr_p
))
5061 TREE_ADDRESSABLE (decl
) = 1;
5062 /* Otherwise, if we don't need an lvalue and have a literal directly
5063 substitute it. Check if it matches the gimple predicate, as
5064 otherwise we'd generate a new temporary, and we can as well just
5065 use the decl we already have. */
5066 else if (!TREE_ADDRESSABLE (decl
)
5067 && !TREE_THIS_VOLATILE (decl
)
5069 && (fallback
& fb_lvalue
) == 0
5070 && gimple_test_f (init
))
5076 /* If the decl is not addressable, then it is being used in some
5077 expression or on the right hand side of a statement, and it can
5078 be put into a readonly data section. */
5079 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
5080 TREE_READONLY (decl
) = 1;
5082 /* This decl isn't mentioned in the enclosing block, so add it to the
5083 list of temps. FIXME it seems a bit of a kludge to say that
5084 anonymous artificial vars aren't pushed, but everything else is. */
5085 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
5086 gimple_add_tmp_var (decl
);
5088 gimplify_and_add (decl_s
, pre_p
);
5093 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5094 return a new CONSTRUCTOR if something changed. */
5097 optimize_compound_literals_in_ctor (tree orig_ctor
)
5099 tree ctor
= orig_ctor
;
5100 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
5101 unsigned int idx
, num
= vec_safe_length (elts
);
5103 for (idx
= 0; idx
< num
; idx
++)
5105 tree value
= (*elts
)[idx
].value
;
5106 tree newval
= value
;
5107 if (TREE_CODE (value
) == CONSTRUCTOR
)
5108 newval
= optimize_compound_literals_in_ctor (value
);
5109 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
5111 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
5112 tree decl
= DECL_EXPR_DECL (decl_s
);
5113 tree init
= DECL_INITIAL (decl
);
5115 if (!TREE_ADDRESSABLE (value
)
5116 && !TREE_ADDRESSABLE (decl
)
5118 && TREE_CODE (init
) == CONSTRUCTOR
)
5119 newval
= optimize_compound_literals_in_ctor (init
);
5121 if (newval
== value
)
5124 if (ctor
== orig_ctor
)
5126 ctor
= copy_node (orig_ctor
);
5127 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
5128 elts
= CONSTRUCTOR_ELTS (ctor
);
5130 (*elts
)[idx
].value
= newval
;
5135 /* A subroutine of gimplify_modify_expr. Break out elements of a
5136 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5138 Note that we still need to clear any elements that don't have explicit
5139 initializers, so if not all elements are initialized we keep the
5140 original MODIFY_EXPR, we just remove all of the constructor elements.
5142 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5143 GS_ERROR if we would have to create a temporary when gimplifying
5144 this constructor. Otherwise, return GS_OK.
5146 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5148 static enum gimplify_status
5149 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5150 bool want_value
, bool notify_temp_creation
)
5152 tree object
, ctor
, type
;
5153 enum gimplify_status ret
;
5154 vec
<constructor_elt
, va_gc
> *elts
;
5155 bool cleared
= false;
5156 bool is_empty_ctor
= false;
5157 bool is_init_expr
= (TREE_CODE (*expr_p
) == INIT_EXPR
);
5159 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
5161 if (!notify_temp_creation
)
5163 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
5164 is_gimple_lvalue
, fb_lvalue
);
5165 if (ret
== GS_ERROR
)
5169 object
= TREE_OPERAND (*expr_p
, 0);
5170 ctor
= TREE_OPERAND (*expr_p
, 1)
5171 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
5172 type
= TREE_TYPE (ctor
);
5173 elts
= CONSTRUCTOR_ELTS (ctor
);
5176 switch (TREE_CODE (type
))
5180 case QUAL_UNION_TYPE
:
5183 /* Use readonly data for initializers of this or smaller size
5184 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5186 const HOST_WIDE_INT min_unique_size
= 64;
5187 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5188 is smaller than this, use readonly data. */
5189 const int unique_nonzero_ratio
= 8;
5190 /* True if a single access of the object must be ensured. This is the
5191 case if the target is volatile, the type is non-addressable and more
5192 than one field need to be assigned. */
5193 const bool ensure_single_access
5194 = TREE_THIS_VOLATILE (object
)
5195 && !TREE_ADDRESSABLE (type
)
5196 && vec_safe_length (elts
) > 1;
5197 struct gimplify_init_ctor_preeval_data preeval_data
;
5198 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
5199 HOST_WIDE_INT num_unique_nonzero_elements
;
5200 bool complete_p
, valid_const_initializer
;
5202 /* Aggregate types must lower constructors to initialization of
5203 individual elements. The exception is that a CONSTRUCTOR node
5204 with no elements indicates zero-initialization of the whole. */
5205 if (vec_safe_is_empty (elts
))
5207 if (notify_temp_creation
)
5210 /* The var will be initialized and so appear on lhs of
5211 assignment, it can't be TREE_READONLY anymore. */
5213 TREE_READONLY (object
) = 0;
5215 is_empty_ctor
= true;
5219 /* Fetch information about the constructor to direct later processing.
5220 We might want to make static versions of it in various cases, and
5221 can only do so if it known to be a valid constant initializer. */
5222 valid_const_initializer
5223 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
5224 &num_unique_nonzero_elements
,
5225 &num_ctor_elements
, &complete_p
);
5227 /* If a const aggregate variable is being initialized, then it
5228 should never be a lose to promote the variable to be static. */
5229 if (valid_const_initializer
5230 && num_nonzero_elements
> 1
5231 && TREE_READONLY (object
)
5233 && !DECL_REGISTER (object
)
5234 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
))
5235 /* For ctors that have many repeated nonzero elements
5236 represented through RANGE_EXPRs, prefer initializing
5237 those through runtime loops over copies of large amounts
5238 of data from readonly data section. */
5239 && (num_unique_nonzero_elements
5240 > num_nonzero_elements
/ unique_nonzero_ratio
5241 || ((unsigned HOST_WIDE_INT
) int_size_in_bytes (type
)
5242 <= (unsigned HOST_WIDE_INT
) min_unique_size
)))
5244 if (notify_temp_creation
)
5247 DECL_INITIAL (object
) = ctor
;
5248 TREE_STATIC (object
) = 1;
5249 if (!DECL_NAME (object
))
5250 DECL_NAME (object
) = create_tmp_var_name ("C");
5251 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
5253 /* ??? C++ doesn't automatically append a .<number> to the
5254 assembler name, and even when it does, it looks at FE private
5255 data structures to figure out what that number should be,
5256 which are not set for this variable. I suppose this is
5257 important for local statics for inline functions, which aren't
5258 "local" in the object file sense. So in order to get a unique
5259 TU-local symbol, we must invoke the lhd version now. */
5260 lhd_set_decl_assembler_name (object
);
5262 *expr_p
= NULL_TREE
;
5266 /* The var will be initialized and so appear on lhs of
5267 assignment, it can't be TREE_READONLY anymore. */
5268 if (VAR_P (object
) && !notify_temp_creation
)
5269 TREE_READONLY (object
) = 0;
5271 /* If there are "lots" of initialized elements, even discounting
5272 those that are not address constants (and thus *must* be
5273 computed at runtime), then partition the constructor into
5274 constant and non-constant parts. Block copy the constant
5275 parts in, then generate code for the non-constant parts. */
5276 /* TODO. There's code in cp/typeck.cc to do this. */
5278 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
5279 /* store_constructor will ignore the clearing of variable-sized
5280 objects. Initializers for such objects must explicitly set
5281 every field that needs to be set. */
5283 else if (!complete_p
)
5284 /* If the constructor isn't complete, clear the whole object
5285 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5287 ??? This ought not to be needed. For any element not present
5288 in the initializer, we should simply set them to zero. Except
5289 we'd need to *find* the elements that are not present, and that
5290 requires trickery to avoid quadratic compile-time behavior in
5291 large cases or excessive memory use in small cases. */
5292 cleared
= !CONSTRUCTOR_NO_CLEARING (ctor
);
5293 else if (num_ctor_elements
- num_nonzero_elements
5294 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
5295 && num_nonzero_elements
< num_ctor_elements
/ 4)
5296 /* If there are "lots" of zeros, it's more efficient to clear
5297 the memory and then set the nonzero elements. */
5299 else if (ensure_single_access
&& num_nonzero_elements
== 0)
5300 /* If a single access to the target must be ensured and all elements
5301 are zero, then it's optimal to clear whatever their number. */
5306 /* If there are "lots" of initialized elements, and all of them
5307 are valid address constants, then the entire initializer can
5308 be dropped to memory, and then memcpy'd out. Don't do this
5309 for sparse arrays, though, as it's more efficient to follow
5310 the standard CONSTRUCTOR behavior of memset followed by
5311 individual element initialization. Also don't do this for small
5312 all-zero initializers (which aren't big enough to merit
5313 clearing), and don't try to make bitwise copies of
5314 TREE_ADDRESSABLE types. */
5315 if (valid_const_initializer
5317 && !(cleared
|| num_nonzero_elements
== 0)
5318 && !TREE_ADDRESSABLE (type
))
5320 HOST_WIDE_INT size
= int_size_in_bytes (type
);
5323 /* ??? We can still get unbounded array types, at least
5324 from the C++ front end. This seems wrong, but attempt
5325 to work around it for now. */
5328 size
= int_size_in_bytes (TREE_TYPE (object
));
5330 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
5333 /* Find the maximum alignment we can assume for the object. */
5334 /* ??? Make use of DECL_OFFSET_ALIGN. */
5335 if (DECL_P (object
))
5336 align
= DECL_ALIGN (object
);
5338 align
= TYPE_ALIGN (type
);
5340 /* Do a block move either if the size is so small as to make
5341 each individual move a sub-unit move on average, or if it
5342 is so large as to make individual moves inefficient. */
5344 && num_nonzero_elements
> 1
5345 /* For ctors that have many repeated nonzero elements
5346 represented through RANGE_EXPRs, prefer initializing
5347 those through runtime loops over copies of large amounts
5348 of data from readonly data section. */
5349 && (num_unique_nonzero_elements
5350 > num_nonzero_elements
/ unique_nonzero_ratio
5351 || size
<= min_unique_size
)
5352 && (size
< num_nonzero_elements
5353 || !can_move_by_pieces (size
, align
)))
5355 if (notify_temp_creation
)
5358 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
5359 ctor
= tree_output_constant_def (ctor
);
5360 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
5361 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
5362 TREE_OPERAND (*expr_p
, 1) = ctor
;
5364 /* This is no longer an assignment of a CONSTRUCTOR, but
5365 we still may have processing to do on the LHS. So
5366 pretend we didn't do anything here to let that happen. */
5367 return GS_UNHANDLED
;
5371 /* If a single access to the target must be ensured and there are
5372 nonzero elements or the zero elements are not assigned en masse,
5373 initialize the target from a temporary. */
5374 if (ensure_single_access
&& (num_nonzero_elements
> 0 || !cleared
))
5376 if (notify_temp_creation
)
5379 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
5380 TREE_OPERAND (*expr_p
, 0) = temp
;
5381 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
5383 build2 (MODIFY_EXPR
, void_type_node
,
5388 if (notify_temp_creation
)
5391 /* If there are nonzero elements and if needed, pre-evaluate to capture
5392 elements overlapping with the lhs into temporaries. We must do this
5393 before clearing to fetch the values before they are zeroed-out. */
5394 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
5396 preeval_data
.lhs_base_decl
= get_base_address (object
);
5397 if (!DECL_P (preeval_data
.lhs_base_decl
))
5398 preeval_data
.lhs_base_decl
= NULL
;
5399 preeval_data
.lhs_alias_set
= get_alias_set (object
);
5401 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
5402 pre_p
, post_p
, &preeval_data
);
5405 bool ctor_has_side_effects_p
5406 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
5410 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5411 Note that we still have to gimplify, in order to handle the
5412 case of variable sized types. Avoid shared tree structures. */
5413 CONSTRUCTOR_ELTS (ctor
) = NULL
;
5414 TREE_SIDE_EFFECTS (ctor
) = 0;
5415 object
= unshare_expr (object
);
5416 gimplify_stmt (expr_p
, pre_p
);
5419 /* If we have not block cleared the object, or if there are nonzero
5420 elements in the constructor, or if the constructor has side effects,
5421 add assignments to the individual scalar fields of the object. */
5423 || num_nonzero_elements
> 0
5424 || ctor_has_side_effects_p
)
5425 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
5427 *expr_p
= NULL_TREE
;
5435 if (notify_temp_creation
)
5438 /* Extract the real and imaginary parts out of the ctor. */
5439 gcc_assert (elts
->length () == 2);
5440 r
= (*elts
)[0].value
;
5441 i
= (*elts
)[1].value
;
5442 if (r
== NULL
|| i
== NULL
)
5444 tree zero
= build_zero_cst (TREE_TYPE (type
));
5451 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5452 represent creation of a complex value. */
5453 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
5455 ctor
= build_complex (type
, r
, i
);
5456 TREE_OPERAND (*expr_p
, 1) = ctor
;
5460 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
5461 TREE_OPERAND (*expr_p
, 1) = ctor
;
5462 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
5465 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
5473 unsigned HOST_WIDE_INT ix
;
5474 constructor_elt
*ce
;
5476 if (notify_temp_creation
)
5479 /* Vector types use CONSTRUCTOR all the way through gimple
5480 compilation as a general initializer. */
5481 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
5483 enum gimplify_status tret
;
5484 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
5486 if (tret
== GS_ERROR
)
5488 else if (TREE_STATIC (ctor
)
5489 && !initializer_constant_valid_p (ce
->value
,
5490 TREE_TYPE (ce
->value
)))
5491 TREE_STATIC (ctor
) = 0;
5493 recompute_constructor_flags (ctor
);
5495 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5496 if (TREE_CONSTANT (ctor
))
5498 bool constant_p
= true;
5501 /* Even when ctor is constant, it might contain non-*_CST
5502 elements, such as addresses or trapping values like
5503 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5504 in VECTOR_CST nodes. */
5505 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
5506 if (!CONSTANT_CLASS_P (value
))
5514 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
5519 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
5520 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
5525 /* So how did we get a CONSTRUCTOR for a scalar type? */
5529 if (ret
== GS_ERROR
)
5531 /* If we have gimplified both sides of the initializer but have
5532 not emitted an assignment, do so now. */
5534 /* If the type is an empty type, we don't need to emit the
5536 && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
5538 tree lhs
= TREE_OPERAND (*expr_p
, 0);
5539 tree rhs
= TREE_OPERAND (*expr_p
, 1);
5540 if (want_value
&& object
== lhs
)
5541 lhs
= unshare_expr (lhs
);
5542 gassign
*init
= gimple_build_assign (lhs
, rhs
);
5543 gimplify_seq_add_stmt (pre_p
, init
);
5556 /* If the user requests to initialize automatic variables, we
5557 should initialize paddings inside the variable. Add a call to
5558 __builtin_clear_pading (&object, 0, for_auto_init = true) to
5559 initialize paddings of object always to zero regardless of
5560 INIT_TYPE. Note, we will not insert this call if the aggregate
5561 variable has be completely cleared already or it's initialized
5562 with an empty constructor. We cannot insert this call if the
5563 variable is a gimple register since __builtin_clear_padding will take
5564 the address of the variable. As a result, if a long double/_Complex long
5565 double variable will be spilled into stack later, its padding cannot
5566 be cleared with __builtin_clear_padding. We should clear its padding
5567 when it is spilled into memory. */
5569 && !is_gimple_reg (object
)
5570 && clear_padding_type_may_have_padding_p (type
)
5571 && ((AGGREGATE_TYPE_P (type
) && !cleared
&& !is_empty_ctor
)
5572 || !AGGREGATE_TYPE_P (type
))
5573 && is_var_need_auto_init (object
))
5574 gimple_add_padding_init_for_auto_var (object
, false, pre_p
);
/* NOTE(review): lossy extract — original lines are hard-wrapped and some are
   elided (embedded numbering jumps 5583 -> 5587); the `static tree` return
   type line and braces are missing from this copy.  Do not edit from here;
   restore from upstream gimplify.cc first.  */
5579 /* Given a pointer value OP0, return a simplified version of an
5580 indirection through OP0, or NULL_TREE if no simplification is
5581 possible. This may only be applied to a rhs of an expression.
5582 Note that the resulting type may be different from the type pointed
5583 to in the sense that it is still compatible from the langhooks
5587 gimple_fold_indirect_ref_rhs (tree t
)
/* Thin wrapper: simply delegates to gimple_fold_indirect_ref.  */
5589 return gimple_fold_indirect_ref (t
);
/* NOTE(review): lossy extract — statements are hard-wrapped across physical
   lines and many original lines are missing (numbering gaps, e.g.
   5601 -> 5607, 5636 -> 5638).  The text below is preserved byte-for-byte;
   only reviewer comments were added.  Restore from upstream before editing.  */
5592 /* Subroutine of gimplify_modify_expr to do simplifications of
5593 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5594 something changes. */
5596 static enum gimplify_status
5597 gimplify_modify_expr_rhs (tree
*expr_p
, tree
*from_p
, tree
*to_p
,
5598 gimple_seq
*pre_p
, gimple_seq
*post_p
,
/* Status accumulator; GS_UNHANDLED means "nothing simplified yet".  */
5601 enum gimplify_status ret
= GS_UNHANDLED
;
/* Dispatch on the tree code of the RHS (*from_p).  */
5607 switch (TREE_CODE (*from_p
))
5610 /* If we're assigning from a read-only variable initialized with
5611 a constructor and not volatile, do the direct assignment from
5612 the constructor, but only if the target is not volatile either
5613 since this latter assignment might end up being done on a per
5614 field basis. However, if the target is volatile and the type
5615 is aggregate and non-addressable, gimplify_init_constructor
5616 knows that it needs to ensure a single access to the target
5617 and it will return GS_OK only in this case. */
5618 if (TREE_READONLY (*from_p
)
5619 && DECL_INITIAL (*from_p
)
5620 && TREE_CODE (DECL_INITIAL (*from_p
)) == CONSTRUCTOR
5621 && !TREE_THIS_VOLATILE (*from_p
)
5622 && (!TREE_THIS_VOLATILE (*to_p
)
5623 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p
))
5624 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p
)))))
/* Remember the original RHS so the change can be reverted on error.  */
5626 tree old_from
= *from_p
;
5627 enum gimplify_status subret
;
5629 /* Move the constructor into the RHS. */
5630 *from_p
= unshare_expr (DECL_INITIAL (*from_p
));
5632 /* Let's see if gimplify_init_constructor will need to put
5634 subret
= gimplify_init_constructor (expr_p
, NULL
, NULL
,
5636 if (subret
== GS_ERROR
)
5638 /* If so, revert the change. */
5649 if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p
)))
5650 /* If we have code like
5654 where the type of "x" is a (possibly cv-qualified variant
5655 of "A"), treat the entire expression as identical to "x".
5656 This kind of code arises in C++ when an object is bound
5657 to a const reference, and if "x" is a TARGET_EXPR we want
5658 to take advantage of the optimization below. But not if
5659 the type is TREE_ADDRESSABLE; then C++17 says that the
5660 TARGET_EXPR needs to be a temporary. */
5662 = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p
, 0)))
/* Preserve volatility of the original indirection on the folded ref.  */
5664 bool volatile_p
= TREE_THIS_VOLATILE (*from_p
);
5665 if (TREE_THIS_VOLATILE (t
) != volatile_p
)
5668 t
= build_simple_mem_ref_loc (EXPR_LOCATION (*from_p
),
5669 build_fold_addr_expr (t
));
5670 if (REFERENCE_CLASS_P (t
))
5671 TREE_THIS_VOLATILE (t
) = volatile_p
;
5681 /* If we are initializing something from a TARGET_EXPR, strip the
5682 TARGET_EXPR and initialize it directly, if possible. This can't
5683 be done if the initializer is void, since that implies that the
5684 temporary is set in some non-trivial way.
5686 ??? What about code that pulls out the temp and uses it
5687 elsewhere? I think that such code never uses the TARGET_EXPR as
5688 an initializer. If I'm wrong, we'll die because the temp won't
5689 have any RTL. In that case, I guess we'll need to replace
5690 references somehow. */
5691 tree init
= TARGET_EXPR_INITIAL (*from_p
);
5694 && (TREE_CODE (*expr_p
) != MODIFY_EXPR
5695 || !TARGET_EXPR_NO_ELIDE (*from_p
))
5696 && !VOID_TYPE_P (TREE_TYPE (init
)))
5706 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5708 gimplify_compound_expr (from_p
, pre_p
, true);
5714 /* If we already made some changes, let the front end have a
5715 crack at this before we break it down. */
5716 if (ret
!= GS_UNHANDLED
)
5719 /* If we're initializing from a CONSTRUCTOR, break this into
5720 individual MODIFY_EXPRs. */
5721 ret
= gimplify_init_constructor (expr_p
, pre_p
, post_p
, want_value
,
5726 /* If we're assigning to a non-register type, push the assignment
5727 down into the branches. This is mandatory for ADDRESSABLE types,
5728 since we cannot generate temporaries for such, but it saves a
5729 copy in other cases as well. */
5730 if (!is_gimple_reg_type (TREE_TYPE (*from_p
)))
5732 /* This code should mirror the code in gimplify_cond_expr. */
5733 enum tree_code code
= TREE_CODE (*expr_p
);
5734 tree cond
= *from_p
;
5735 tree result
= *to_p
;
5737 ret
= gimplify_expr (&result
, pre_p
, post_p
,
5738 is_gimple_lvalue
, fb_lvalue
);
5739 if (ret
!= GS_ERROR
)
5742 /* If we are going to write RESULT more than once, clear
5743 TREE_READONLY flag, otherwise we might incorrectly promote
5744 the variable to static const and initialize it at compile
5745 time in one of the branches. */
5747 && TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
5748 && TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5749 TREE_READONLY (result
) = 0;
/* Rewrite each non-void COND_EXPR arm into an assignment to RESULT.  */
5750 if (TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
)
5751 TREE_OPERAND (cond
, 1)
5752 = build2 (code
, void_type_node
, result
,
5753 TREE_OPERAND (cond
, 1));
5754 if (TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5755 TREE_OPERAND (cond
, 2)
5756 = build2 (code
, void_type_node
, unshare_expr (result
),
5757 TREE_OPERAND (cond
, 2));
5759 TREE_TYPE (cond
) = void_type_node
;
5760 recalculate_side_effects (cond
);
5764 gimplify_and_add (cond
, pre_p
);
5765 *expr_p
= unshare_expr (result
);
5774 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5775 return slot so that we don't generate a temporary. */
5776 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p
)
5777 && aggregate_value_p (*from_p
, *from_p
))
5781 if (!(rhs_predicate_for (*to_p
))(*from_p
))
5782 /* If we need a temporary, *to_p isn't accurate. */
5784 /* It's OK to use the return slot directly unless it's an NRV. */
5785 else if (TREE_CODE (*to_p
) == RESULT_DECL
5786 && DECL_NAME (*to_p
) == NULL_TREE
5787 && needs_to_live_in_memory (*to_p
))
5789 else if (is_gimple_reg_type (TREE_TYPE (*to_p
))
5790 || (DECL_P (*to_p
) && DECL_REGISTER (*to_p
)))
5791 /* Don't force regs into memory. */
5793 else if (TREE_CODE (*expr_p
) == INIT_EXPR
)
5794 /* It's OK to use the target directly if it's being
5797 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p
)))
5799 /* Always use the target and thus RSO for variable-sized types.
5800 GIMPLE cannot deal with a variable-sized assignment
5801 embedded in a call statement. */
5803 else if (TREE_CODE (*to_p
) != SSA_NAME
5804 && (!is_gimple_variable (*to_p
)
5805 || needs_to_live_in_memory (*to_p
)))
5806 /* Don't use the original target if it's already addressable;
5807 if its address escapes, and the called function uses the
5808 NRV optimization, a conforming program could see *to_p
5809 change before the called function returns; see c++/19317.
5810 When optimizing, the return_slot pass marks more functions
5811 as safe after we have escape info. */
5818 CALL_EXPR_RETURN_SLOT_OPT (*from_p
) = 1;
5819 mark_addressable (*to_p
);
5824 case WITH_SIZE_EXPR
:
5825 /* Likewise for calls that return an aggregate of non-constant size,
5826 since we would not be able to generate a temporary at all. */
5827 if (TREE_CODE (TREE_OPERAND (*from_p
, 0)) == CALL_EXPR
)
5829 *from_p
= TREE_OPERAND (*from_p
, 0);
5830 /* We don't change ret in this case because the
5831 WITH_SIZE_EXPR might have been added in
5832 gimplify_modify_expr, so returning GS_OK would lead to an
5838 /* If we're initializing from a container, push the initialization
5840 case CLEANUP_POINT_EXPR
:
5842 case STATEMENT_LIST
:
5844 tree wrap
= *from_p
;
5847 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_min_lval
,
5849 if (ret
!= GS_ERROR
)
5852 t
= voidify_wrapper_expr (wrap
, *expr_p
);
5853 gcc_assert (t
== *expr_p
);
5857 gimplify_and_add (wrap
, pre_p
);
5858 *expr_p
= unshare_expr (*to_p
);
5866 /* Pull out compound literal expressions from a NOP_EXPR.
5867 Those are created in the C FE to drop qualifiers during
5868 lvalue conversion. */
5869 if ((TREE_CODE (TREE_OPERAND (*from_p
, 0)) == COMPOUND_LITERAL_EXPR
)
5870 && tree_ssa_useless_type_conversion (*from_p
))
5872 *from_p
= TREE_OPERAND (*from_p
, 0);
5878 case COMPOUND_LITERAL_EXPR
:
5880 tree complit
= TREE_OPERAND (*expr_p
, 1);
5881 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (complit
);
5882 tree decl
= DECL_EXPR_DECL (decl_s
);
5883 tree init
= DECL_INITIAL (decl
);
5885 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5886 into struct T x = { 0, 1, 2 } if the address of the
5887 compound literal has never been taken. */
5888 if (!TREE_ADDRESSABLE (complit
)
5889 && !TREE_ADDRESSABLE (decl
)
5892 *expr_p
= copy_node (*expr_p
);
5893 TREE_OPERAND (*expr_p
, 1) = init
;
/* NOTE(review): lossy extract — the `static bool` line, switch head, braces
   and most `case` labels/return statements are missing (numbering gaps,
   e.g. 5913 -> 5918, 5936 -> 5941).  Text preserved verbatim below.  */
5908 /* Return true if T looks like a valid GIMPLE statement. */
5911 is_gimple_stmt (tree t
)
5913 const enum tree_code code
= TREE_CODE (t
);
5918 /* The only valid NOP_EXPR is the empty statement. */
5919 return IS_EMPTY_STMT (t
);
5923 /* These are only valid if they're void. */
5924 return TREE_TYPE (t
) == NULL
|| VOID_TYPE_P (TREE_TYPE (t
));
/* Remaining visible cases: structured-control, OpenACC and OpenMP
   statement codes; several sibling cases are elided in this copy.  */
5930 case CASE_LABEL_EXPR
:
5931 case TRY_CATCH_EXPR
:
5932 case TRY_FINALLY_EXPR
:
5933 case EH_FILTER_EXPR
:
5936 case STATEMENT_LIST
:
5941 case OACC_HOST_DATA
:
5944 case OACC_ENTER_DATA
:
5945 case OACC_EXIT_DATA
:
5950 case OMP_DISTRIBUTE
:
5965 case OMP_TARGET_DATA
:
5966 case OMP_TARGET_UPDATE
:
5967 case OMP_TARGET_ENTER_DATA
:
5968 case OMP_TARGET_EXIT_DATA
:
5971 /* These are always void. */
5977 /* These are valid regardless of their type. */
/* NOTE(review): lossy extract — the second parameter line (presumably
   `bool want_value`; `want_value` is used below), braces and the final
   return are missing (numbering gaps 5998 -> 6001, 6023 -> end).  Text
   preserved verbatim; only comments added.  */
5986 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5987 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5989 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5990 other, unmodified part of the complex object just before the total store.
5991 As a consequence, if the object is still uninitialized, an undefined value
5992 will be loaded into a register, which may result in a spurious exception
5993 if the register is floating-point and the value happens to be a signaling
5994 NaN for example. Then the fully-fledged complex operations lowering pass
5995 followed by a DCE pass are necessary in order to fix things up. */
5997 static enum gimplify_status
5998 gimplify_modify_expr_complex_part (tree
*expr_p
, gimple_seq
*pre_p
,
6001 enum tree_code code
, ocode
;
6002 tree lhs
, rhs
, new_rhs
, other
, realpart
, imagpart
;
/* Peel the REAL/IMAGPART_EXPR off the lhs to get the complex object.  */
6004 lhs
= TREE_OPERAND (*expr_p
, 0);
6005 rhs
= TREE_OPERAND (*expr_p
, 1);
6006 code
= TREE_CODE (lhs
);
6007 lhs
= TREE_OPERAND (lhs
, 0);
/* Load the part NOT being stored into a formal temporary.  */
6009 ocode
= code
== REALPART_EXPR
? IMAGPART_EXPR
: REALPART_EXPR
;
6010 other
= build1 (ocode
, TREE_TYPE (rhs
), lhs
);
6011 suppress_warning (other
);
6012 other
= get_formal_tmp_var (other
, pre_p
);
6014 realpart
= code
== REALPART_EXPR
? rhs
: other
;
6015 imagpart
= code
== REALPART_EXPR
? other
: rhs
;
/* Build a constant COMPLEX_CST when both parts are constant, else a
   COMPLEX_EXPR, and emit the total store.  */
6017 if (TREE_CONSTANT (realpart
) && TREE_CONSTANT (imagpart
))
6018 new_rhs
= build_complex (TREE_TYPE (lhs
), realpart
, imagpart
);
6020 new_rhs
= build2 (COMPLEX_EXPR
, TREE_TYPE (lhs
), realpart
, imagpart
);
6022 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (lhs
, new_rhs
));
6023 *expr_p
= (want_value
) ? rhs
: NULL_TREE
;
/* NOTE(review): lossy extract — statements hard-wrapped, many lines elided
   (numbering gaps throughout, e.g. 6044 -> 6047, 6193 -> 6197, 6302 -> 6308);
   braces, several conditions and the trailing return are missing from this
   copy.  Text preserved verbatim; only comments added.  Do not edit here.  */
6028 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6034 PRE_P points to the list where side effects that must happen before
6035 *EXPR_P should be stored.
6037 POST_P points to the list where side effects that must happen after
6038 *EXPR_P should be stored.
6040 WANT_VALUE is nonzero iff we want to use the value of this expression
6041 in another expression. */
6043 static enum gimplify_status
6044 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
/* Operand 1 is the RHS, operand 0 the LHS of the MODIFY/INIT_EXPR.  */
6047 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
6048 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
6049 enum gimplify_status ret
= GS_UNHANDLED
;
6051 location_t loc
= EXPR_LOCATION (*expr_p
);
6052 gimple_stmt_iterator gsi
;
6054 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
6055 || TREE_CODE (*expr_p
) == INIT_EXPR
);
6057 /* Trying to simplify a clobber using normal logic doesn't work,
6058 so handle it here. */
6059 if (TREE_CLOBBER_P (*from_p
))
6061 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
6062 if (ret
== GS_ERROR
)
6064 gcc_assert (!want_value
);
6065 if (!VAR_P (*to_p
) && TREE_CODE (*to_p
) != MEM_REF
)
6067 tree addr
= get_initialized_tmp_var (build_fold_addr_expr (*to_p
),
6069 *to_p
= build_simple_mem_ref_loc (EXPR_LOCATION (*to_p
), addr
);
6071 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
6076 /* Convert initialization from an empty variable-size CONSTRUCTOR to
6078 if (TREE_TYPE (*from_p
) != error_mark_node
6079 && TYPE_SIZE_UNIT (TREE_TYPE (*from_p
))
6080 && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p
)))
6081 && TREE_CODE (*from_p
) == CONSTRUCTOR
6082 && CONSTRUCTOR_NELTS (*from_p
) == 0)
6084 maybe_with_size_expr (from_p
);
6085 gcc_assert (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
);
6086 return gimplify_modify_expr_to_memset (expr_p
,
6087 TREE_OPERAND (*from_p
, 1),
6091 /* Insert pointer conversions required by the middle-end that are not
6092 required by the frontend. This fixes middle-end type checking for
6093 for example gcc.dg/redecl-6.c. */
6094 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
6096 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
6097 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
6098 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
6101 /* See if any simplifications can be done based on what the RHS is. */
6102 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
6104 if (ret
!= GS_UNHANDLED
)
6107 /* For empty types only gimplify the left hand side and right hand
6108 side as statements and throw away the assignment. Do this after
6109 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6111 if (is_empty_type (TREE_TYPE (*from_p
))
6113 /* Don't do this for calls that return addressable types, expand_call
6114 relies on those having a lhs. */
6115 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p
))
6116 && TREE_CODE (*from_p
) == CALL_EXPR
))
6118 gimplify_stmt (from_p
, pre_p
);
6119 gimplify_stmt (to_p
, pre_p
);
6120 *expr_p
= NULL_TREE
;
6124 /* If the value being copied is of variable width, compute the length
6125 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6126 before gimplifying any of the operands so that we can resolve any
6127 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6128 the size of the expression to be copied, not of the destination, so
6129 that is what we must do here. */
6130 maybe_with_size_expr (from_p
);
6132 /* As a special case, we have to temporarily allow for assignments
6133 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6134 a toplevel statement, when gimplifying the GENERIC expression
6135 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6136 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6138 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6139 prevent gimplify_expr from trying to create a new temporary for
6140 foo's LHS, we tell it that it should only gimplify until it
6141 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6142 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6143 and all we need to do here is set 'a' to be its LHS. */
6145 /* Gimplify the RHS first for C++17 and bug 71104. */
6146 gimple_predicate initial_pred
= initial_rhs_predicate_for (*to_p
);
6147 ret
= gimplify_expr (from_p
, pre_p
, post_p
, initial_pred
, fb_rvalue
);
6148 if (ret
== GS_ERROR
)
6151 /* Then gimplify the LHS. */
6152 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6153 twice we have to make sure to gimplify into non-SSA as otherwise
6154 the abnormal edge added later will make those defs not dominate
6156 ??? Technically this applies only to the registers used in the
6157 resulting non-register *TO_P. */
6158 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
6160 && TREE_CODE (*from_p
) == CALL_EXPR
6161 && call_expr_flags (*from_p
) & ECF_RETURNS_TWICE
)
6162 gimplify_ctxp
->into_ssa
= false;
6163 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
6164 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
6165 if (ret
== GS_ERROR
)
6168 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6169 guess for the predicate was wrong. */
6170 gimple_predicate final_pred
= rhs_predicate_for (*to_p
);
6171 if (final_pred
!= initial_pred
)
6173 ret
= gimplify_expr (from_p
, pre_p
, post_p
, final_pred
, fb_rvalue
);
6174 if (ret
== GS_ERROR
)
6178 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
6179 size as argument to the call. */
6180 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
6182 tree call
= TREE_OPERAND (*from_p
, 0);
6183 tree vlasize
= TREE_OPERAND (*from_p
, 1);
6185 if (TREE_CODE (call
) == CALL_EXPR
6186 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
6188 int nargs
= call_expr_nargs (call
);
6189 tree type
= TREE_TYPE (call
);
6190 tree ap
= CALL_EXPR_ARG (call
, 0);
6191 tree tag
= CALL_EXPR_ARG (call
, 1);
6192 tree aptag
= CALL_EXPR_ARG (call
, 2);
6193 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
6197 TREE_OPERAND (*from_p
, 0) = newcall
;
6201 /* Now see if the above changed *from_p to something we handle specially. */
6202 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
6204 if (ret
!= GS_UNHANDLED
)
6207 /* If we've got a variable sized assignment between two lvalues (i.e. does
6208 not involve a call), then we can make things a bit more straightforward
6209 by converting the assignment to memcpy or memset. */
6210 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
6212 tree from
= TREE_OPERAND (*from_p
, 0);
6213 tree size
= TREE_OPERAND (*from_p
, 1);
6215 if (TREE_CODE (from
) == CONSTRUCTOR
)
6216 return gimplify_modify_expr_to_memset (expr_p
, size
, want_value
, pre_p
);
6218 if (is_gimple_addressable (from
))
6221 return gimplify_modify_expr_to_memcpy (expr_p
, size
, want_value
,
6226 /* Transform partial stores to non-addressable complex variables into
6227 total stores. This allows us to use real instead of virtual operands
6228 for these variables, which improves optimization. */
6229 if ((TREE_CODE (*to_p
) == REALPART_EXPR
6230 || TREE_CODE (*to_p
) == IMAGPART_EXPR
)
6231 && is_gimple_reg (TREE_OPERAND (*to_p
, 0)))
6232 return gimplify_modify_expr_complex_part (expr_p
, pre_p
, want_value
);
6234 /* Try to alleviate the effects of the gimplification creating artificial
6235 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6236 make sure not to create DECL_DEBUG_EXPR links across functions. */
6237 if (!gimplify_ctxp
->into_ssa
6239 && DECL_IGNORED_P (*from_p
)
6241 && !DECL_IGNORED_P (*to_p
)
6242 && decl_function_context (*to_p
) == current_function_decl
6243 && decl_function_context (*from_p
) == current_function_decl
)
6245 if (!DECL_NAME (*from_p
) && DECL_NAME (*to_p
))
6247 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p
)));
6248 DECL_HAS_DEBUG_EXPR_P (*from_p
) = 1;
6249 SET_DECL_DEBUG_EXPR (*from_p
, *to_p
);
6252 if (want_value
&& TREE_THIS_VOLATILE (*to_p
))
6253 *from_p
= get_initialized_tmp_var (*from_p
, pre_p
, post_p
);
6255 if (TREE_CODE (*from_p
) == CALL_EXPR
)
6257 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6258 instead of a GIMPLE_ASSIGN. */
6260 if (CALL_EXPR_FN (*from_p
) == NULL_TREE
)
6262 /* Gimplify internal functions created in the FEs. */
6263 int nargs
= call_expr_nargs (*from_p
), i
;
6264 enum internal_fn ifn
= CALL_EXPR_IFN (*from_p
);
6265 auto_vec
<tree
> vargs (nargs
);
6267 for (i
= 0; i
< nargs
; i
++)
6269 gimplify_arg (&CALL_EXPR_ARG (*from_p
, i
), pre_p
,
6270 EXPR_LOCATION (*from_p
));
6271 vargs
.quick_push (CALL_EXPR_ARG (*from_p
, i
));
6273 call_stmt
= gimple_build_call_internal_vec (ifn
, vargs
);
6274 gimple_call_set_nothrow (call_stmt
, TREE_NOTHROW (*from_p
));
6275 gimple_set_location (call_stmt
, EXPR_LOCATION (*expr_p
));
/* Named-function case: strip the address-of on the callee, then detect
   a 3-argument __builtin_expect and lower it to IFN_BUILTIN_EXPECT.  */
6279 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*from_p
));
6280 CALL_EXPR_FN (*from_p
) = TREE_OPERAND (CALL_EXPR_FN (*from_p
), 0);
6281 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p
));
6282 tree fndecl
= get_callee_fndecl (*from_p
);
6284 && fndecl_built_in_p (fndecl
, BUILT_IN_EXPECT
)
6285 && call_expr_nargs (*from_p
) == 3)
6286 call_stmt
= gimple_build_call_internal (IFN_BUILTIN_EXPECT
, 3,
6287 CALL_EXPR_ARG (*from_p
, 0),
6288 CALL_EXPR_ARG (*from_p
, 1),
6289 CALL_EXPR_ARG (*from_p
, 2));
6292 call_stmt
= gimple_build_call_from_tree (*from_p
, fnptrtype
);
6295 notice_special_calls (call_stmt
);
6296 if (!gimple_call_noreturn_p (call_stmt
) || !should_remove_lhs_p (*to_p
))
6297 gimple_call_set_lhs (call_stmt
, *to_p
);
6298 else if (TREE_CODE (*to_p
) == SSA_NAME
)
6299 /* The above is somewhat premature, avoid ICEing later for a
6300 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6301 ??? This doesn't make it a default-def. */
6302 SSA_NAME_DEF_STMT (*to_p
) = gimple_build_nop ();
6308 assign
= gimple_build_assign (*to_p
, *from_p
);
6309 gimple_set_location (assign
, EXPR_LOCATION (*expr_p
));
6310 if (COMPARISON_CLASS_P (*from_p
))
6311 copy_warning (assign
, *from_p
);
6314 if (gimplify_ctxp
->into_ssa
&& is_gimple_reg (*to_p
))
6316 /* We should have got an SSA name from the start. */
6317 gcc_assert (TREE_CODE (*to_p
) == SSA_NAME
6318 || ! gimple_in_ssa_p (cfun
));
6321 gimplify_seq_add_stmt (pre_p
, assign
);
6322 gsi
= gsi_last (*pre_p
);
6323 maybe_fold_stmt (&gsi
);
6327 *expr_p
= TREE_THIS_VOLATILE (*to_p
) ? *from_p
: unshare_expr (*to_p
);
/* NOTE(review): lossy extract — braces, the null-check on the memcmp decl and
   the final return are missing (numbering gaps 6353 -> 6356, 6357 -> end).
   Text preserved verbatim; only comments added.  */
6336 /* Gimplify a comparison between two variable-sized objects. Do this
6337 with a call to BUILT_IN_MEMCMP. */
6339 static enum gimplify_status
6340 gimplify_variable_sized_compare (tree
*expr_p
)
6342 location_t loc
= EXPR_LOCATION (*expr_p
);
6343 tree op0
= TREE_OPERAND (*expr_p
, 0);
6344 tree op1
= TREE_OPERAND (*expr_p
, 1);
6345 tree t
, arg
, dest
, src
, expr
;
/* ARG = runtime byte size of op0's type, with any PLACEHOLDER_EXPRs in the
   size resolved against op0 itself.  */
6347 arg
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
6348 arg
= unshare_expr (arg
);
6349 arg
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg
, op0
);
6350 src
= build_fold_addr_expr_loc (loc
, op1
);
6351 dest
= build_fold_addr_expr_loc (loc
, op0
);
6352 t
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
6353 t
= build_call_expr_loc (loc
, t
, 3, dest
, src
, arg
);
/* Rebuild the original comparison as  memcmp (dest, src, arg) <op> 0.  */
6356 = build2 (TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), t
, integer_zero_node
);
6357 SET_EXPR_LOCATION (expr
, loc
);
/* NOTE(review): lossy extract — braces and the final return/assignment head
   are missing (numbering gaps 6377 -> 6380, 6380 -> end).  Text preserved
   verbatim; only comments added.  */
6363 /* Gimplify a comparison between two aggregate objects of integral scalar
6364 mode as a comparison between the bitwise equivalent scalar values. */
6366 static enum gimplify_status
6367 gimplify_scalar_mode_aggregate_compare (tree
*expr_p
)
6369 location_t loc
= EXPR_LOCATION (*expr_p
);
6370 tree op0
= TREE_OPERAND (*expr_p
, 0);
6371 tree op1
= TREE_OPERAND (*expr_p
, 1);
/* Ask the frontend for an unsigned integer type with the same machine
   mode as the aggregate, then VIEW_CONVERT both operands to it.  */
6373 tree type
= TREE_TYPE (op0
);
6374 tree scalar_type
= lang_hooks
.types
.type_for_mode (TYPE_MODE (type
), 1);
6376 op0
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op0
);
6377 op1
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op1
);
/* Rebuild the comparison on the converted scalar values.  */
6380 = fold_build2_loc (loc
, TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), op0
, op1
);
/* NOTE(review): lossy extract — the do-loop head, braces and the tail that
   handles WANT_VALUE vs. statement context are missing (numbering gaps
   6395 -> 6401, 6410 -> 6417).  Text preserved verbatim.  */
6385 /* Gimplify an expression sequence. This function gimplifies each
6386 expression and rewrites the original expression with the last
6387 expression of the sequence in GIMPLE form.
6389 PRE_P points to the list where the side effects for all the
6390 expressions in the sequence will be emitted.
6392 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6394 static enum gimplify_status
6395 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
/* Walk the left-nested COMPOUND_EXPR chain, gimplifying each LHS as a
   statement for side effects only.  */
6401 tree
*sub_p
= &TREE_OPERAND (t
, 0);
6403 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
6404 gimplify_compound_expr (sub_p
, pre_p
, false);
6406 gimplify_stmt (sub_p
, pre_p
);
6408 t
= TREE_OPERAND (t
, 1);
6410 while (TREE_CODE (t
) == COMPOUND_EXPR
);
6417 gimplify_stmt (expr_p
, pre_p
);
/* NOTE(review): lossy extract — the `tree val` declaration, braces and the
   final return are missing (numbering gaps 6432 -> 6435, 6461 -> end).
   Text preserved verbatim; only comments added.  */
6422 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6423 gimplify. After gimplification, EXPR_P will point to a new temporary
6424 that holds the original value of the SAVE_EXPR node.
6426 PRE_P points to the list where side effects that must happen before
6427 *EXPR_P should be stored. */
6429 static enum gimplify_status
6430 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6432 enum gimplify_status ret
= GS_ALL_DONE
;
6435 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
6436 val
= TREE_OPERAND (*expr_p
, 0);
/* Bail out early on erroneous operands.  */
6438 if (TREE_TYPE (val
) == error_mark_node
)
6441 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6442 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
6444 /* The operand may be a void-valued expression. It is
6445 being executed only for its side-effects. */
6446 if (TREE_TYPE (val
) == void_type_node
)
6448 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
6449 is_gimple_stmt
, fb_none
)
;
6453 /* The temporary may not be an SSA name as later abnormal and EH
6454 control flow may invalidate use/def domination. When in SSA
6455 form then assume there are no such issues and SAVE_EXPRs only
6456 appear via GENERIC foldings. */
6457 val
= get_initialized_tmp_var (val
, pre_p
, post_p
,
6458 gimple_in_ssa_p (cfun
));
/* Cache the evaluated value and mark the SAVE_EXPR resolved so later
   references reuse the temporary.  */
6460 TREE_OPERAND (*expr_p
, 0) = val
;
6461 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
/* NOTE(review): lossy extract — case labels (INDIRECT_REF, MEM_REF, default),
   braces and the returns are partly missing (numbering gaps 6490 -> 6494,
   6530 -> 6535, 6587 -> end).  Text preserved verbatim; only comments added.  */
6469 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6476 PRE_P points to the list where side effects that must happen before
6477 *EXPR_P should be stored.
6479 POST_P points to the list where side effects that must happen after
6480 *EXPR_P should be stored. */
6482 static enum gimplify_status
6483 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6485 tree expr
= *expr_p
;
6486 tree op0
= TREE_OPERAND (expr
, 0);
6487 enum gimplify_status ret
;
6488 location_t loc
= EXPR_LOCATION (*expr_p
);
/* Dispatch on the operand being addressed.  */
6490 switch (TREE_CODE (op0
))
6494 /* Check if we are dealing with an expression of the form '&*ptr'.
6495 While the front end folds away '&*ptr' into 'ptr', these
6496 expressions may be generated internally by the compiler (e.g.,
6497 builtins like __builtin_va_end). */
6498 /* Caution: the silent array decomposition semantics we allow for
6499 ADDR_EXPR means we can't always discard the pair. */
6500 /* Gimplification of the ADDR_EXPR operand may drop
6501 cv-qualification conversions, so make sure we add them if
6504 tree op00
= TREE_OPERAND (op0
, 0);
6505 tree t_expr
= TREE_TYPE (expr
);
6506 tree t_op00
= TREE_TYPE (op00
);
6508 if (!useless_type_conversion_p (t_expr
, t_op00
))
6509 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
6515 case VIEW_CONVERT_EXPR
:
6516 /* Take the address of our operand and then convert it to the type of
6519 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6520 all clear. The impact of this transformation is even less clear. */
6522 /* If the operand is a useless conversion, look through it. Doing so
6523 guarantees that the ADDR_EXPR and its operand will remain of the
6525 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
6526 op0
= TREE_OPERAND (op0
, 0);
6528 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
6529 build_fold_addr_expr_loc (loc
,
6530 TREE_OPERAND (op0
, 0)));
/* A zero-offset MEM_REF is equivalent to an INDIRECT_REF here.  */
6535 if (integer_zerop (TREE_OPERAND (op0
, 1)))
6536 goto do_indirect_ref
;
6541 /* If we see a call to a declared builtin or see its address
6542 being taken (we can unify those cases here) then we can mark
6543 the builtin for implicit generation by GCC. */
6544 if (TREE_CODE (op0
) == FUNCTION_DECL
6545 && fndecl_built_in_p (op0
, BUILT_IN_NORMAL
)
6546 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
6547 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
6549 /* We use fb_either here because the C frontend sometimes takes
6550 the address of a call that returns a struct; see
6551 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6552 the implied temporary explicit. */
6554 /* Make the operand addressable. */
6555 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
6556 is_gimple_addressable
, fb_either
);
6557 if (ret
== GS_ERROR
)
6560 /* Then mark it. Beware that it may not be possible to do so directly
6561 if a temporary has been created by the gimplification. */
6562 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
6564 op0
= TREE_OPERAND (expr
, 0);
6566 /* For various reasons, the gimplification of the expression
6567 may have made a new INDIRECT_REF. */
6568 if (TREE_CODE (op0
) == INDIRECT_REF
6569 || (TREE_CODE (op0
) == MEM_REF
6570 && integer_zerop (TREE_OPERAND (op0
, 1))))
6571 goto do_indirect_ref
;
6573 mark_addressable (TREE_OPERAND (expr
, 0));
6575 /* The FEs may end up building ADDR_EXPRs early on a decl with
6576 an incomplete type. Re-build ADDR_EXPRs in canonical form
6578 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
6579 *expr_p
= build_fold_addr_expr (op0
);
6581 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6582 recompute_tree_invariant_for_addr_expr (*expr_p
);
6584 /* If we re-built the ADDR_EXPR add a conversion to the original type
6586 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
6587 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
6595 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6596 value; output operands should be a gimple lvalue. */
6598 static enum gimplify_status
6599 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6603 const char **oconstraints
;
6606 const char *constraint
;
6607 bool allows_mem
, allows_reg
, is_inout
;
6608 enum gimplify_status ret
, tret
;
6610 vec
<tree
, va_gc
> *inputs
;
6611 vec
<tree
, va_gc
> *outputs
;
6612 vec
<tree
, va_gc
> *clobbers
;
6613 vec
<tree
, va_gc
> *labels
;
6617 noutputs
= list_length (ASM_OUTPUTS (expr
));
6618 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
6626 link_next
= NULL_TREE
;
6627 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
6630 size_t constraint_len
;
6632 link_next
= TREE_CHAIN (link
);
6636 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6637 constraint_len
= strlen (constraint
);
6638 if (constraint_len
== 0)
6641 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
6642 &allows_mem
, &allows_reg
, &is_inout
);
6649 /* If we can't make copies, we can only accept memory.
6650 Similarly for VLAs. */
6651 tree outtype
= TREE_TYPE (TREE_VALUE (link
));
6652 if (outtype
!= error_mark_node
6653 && (TREE_ADDRESSABLE (outtype
)
6654 || !COMPLETE_TYPE_P (outtype
)
6655 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype
))))
6661 error ("impossible constraint in %<asm%>");
6662 error ("non-memory output %d must stay in memory", i
);
6667 if (!allows_reg
&& allows_mem
)
6668 mark_addressable (TREE_VALUE (link
));
6670 tree orig
= TREE_VALUE (link
);
6671 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6672 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6673 fb_lvalue
| fb_mayfail
);
6674 if (tret
== GS_ERROR
)
6676 if (orig
!= error_mark_node
)
6677 error ("invalid lvalue in %<asm%> output %d", i
);
6681 /* If the constraint does not allow memory make sure we gimplify
6682 it to a register if it is not already but its base is. This
6683 happens for complex and vector components. */
6686 tree op
= TREE_VALUE (link
);
6687 if (! is_gimple_val (op
)
6688 && is_gimple_reg_type (TREE_TYPE (op
))
6689 && is_gimple_reg (get_base_address (op
)))
6691 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6695 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6696 tem
, unshare_expr (op
));
6697 gimplify_and_add (ass
, pre_p
);
6699 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6700 gimplify_and_add (ass
, post_p
);
6702 TREE_VALUE (link
) = tem
;
6707 vec_safe_push (outputs
, link
);
6708 TREE_CHAIN (link
) = NULL_TREE
;
6712 /* An input/output operand. To give the optimizers more
6713 flexibility, split it into separate input and output
6716 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6719 /* Turn the in/out constraint into an output constraint. */
6720 char *p
= xstrdup (constraint
);
6722 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
6724 /* And add a matching input constraint. */
6727 sprintf (buf
, "%u", i
);
6729 /* If there are multiple alternatives in the constraint,
6730 handle each of them individually. Those that allow register
6731 will be replaced with operand number, the others will stay
6733 if (strchr (p
, ',') != NULL
)
6735 size_t len
= 0, buflen
= strlen (buf
);
6736 char *beg
, *end
, *str
, *dst
;
6740 end
= strchr (beg
, ',');
6742 end
= strchr (beg
, '\0');
6743 if ((size_t) (end
- beg
) < buflen
)
6746 len
+= end
- beg
+ 1;
6753 str
= (char *) alloca (len
);
6754 for (beg
= p
+ 1, dst
= str
;;)
6757 bool mem_p
, reg_p
, inout_p
;
6759 end
= strchr (beg
, ',');
6764 parse_output_constraint (&tem
, i
, 0, 0,
6765 &mem_p
, ®_p
, &inout_p
);
6770 memcpy (dst
, buf
, buflen
);
6779 memcpy (dst
, beg
, len
);
6788 input
= build_string (dst
- str
, str
);
6791 input
= build_string (strlen (buf
), buf
);
6794 input
= build_string (constraint_len
- 1, constraint
+ 1);
6798 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
6799 unshare_expr (TREE_VALUE (link
)));
6800 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
6804 link_next
= NULL_TREE
;
6805 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
6807 link_next
= TREE_CHAIN (link
);
6808 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6809 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
6810 oconstraints
, &allows_mem
, &allows_reg
);
6812 /* If we can't make copies, we can only accept memory. */
6813 tree intype
= TREE_TYPE (TREE_VALUE (link
));
6814 if (intype
!= error_mark_node
6815 && (TREE_ADDRESSABLE (intype
)
6816 || !COMPLETE_TYPE_P (intype
)
6817 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype
))))
6823 error ("impossible constraint in %<asm%>");
6824 error ("non-memory input %d must stay in memory", i
);
6829 /* If the operand is a memory input, it should be an lvalue. */
6830 if (!allows_reg
&& allows_mem
)
6832 tree inputv
= TREE_VALUE (link
);
6833 STRIP_NOPS (inputv
);
6834 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
6835 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
6836 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
6837 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
6838 || TREE_CODE (inputv
) == MODIFY_EXPR
)
6839 TREE_VALUE (link
) = error_mark_node
;
6840 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6841 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
6842 if (tret
!= GS_ERROR
)
6844 /* Unlike output operands, memory inputs are not guaranteed
6845 to be lvalues by the FE, and while the expressions are
6846 marked addressable there, if it is e.g. a statement
6847 expression, temporaries in it might not end up being
6848 addressable. They might be already used in the IL and thus
6849 it is too late to make them addressable now though. */
6850 tree x
= TREE_VALUE (link
);
6851 while (handled_component_p (x
))
6852 x
= TREE_OPERAND (x
, 0);
6853 if (TREE_CODE (x
) == MEM_REF
6854 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
6855 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
6857 || TREE_CODE (x
) == PARM_DECL
6858 || TREE_CODE (x
) == RESULT_DECL
)
6859 && !TREE_ADDRESSABLE (x
)
6860 && is_gimple_reg (x
))
6862 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
6864 "memory input %d is not directly addressable",
6866 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
6869 mark_addressable (TREE_VALUE (link
));
6870 if (tret
== GS_ERROR
)
6872 if (inputv
!= error_mark_node
)
6873 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
6874 "memory input %d is not directly addressable", i
);
6880 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6881 is_gimple_asm_val
, fb_rvalue
);
6882 if (tret
== GS_ERROR
)
6886 TREE_CHAIN (link
) = NULL_TREE
;
6887 vec_safe_push (inputs
, link
);
6890 link_next
= NULL_TREE
;
6891 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
6893 link_next
= TREE_CHAIN (link
);
6894 TREE_CHAIN (link
) = NULL_TREE
;
6895 vec_safe_push (clobbers
, link
);
6898 link_next
= NULL_TREE
;
6899 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
6901 link_next
= TREE_CHAIN (link
);
6902 TREE_CHAIN (link
) = NULL_TREE
;
6903 vec_safe_push (labels
, link
);
6906 /* Do not add ASMs with errors to the gimple IL stream. */
6907 if (ret
!= GS_ERROR
)
6909 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
6910 inputs
, outputs
, clobbers
, labels
);
6912 gimple_asm_set_volatile (stmt
, ASM_VOLATILE_P (expr
) || noutputs
== 0);
6913 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
6914 gimple_asm_set_inline (stmt
, ASM_INLINE_P (expr
));
6916 gimplify_seq_add_stmt (pre_p
, stmt
);
6922 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6923 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6924 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6925 return to this function.
6927 FIXME should we complexify the prequeue handling instead? Or use flags
6928 for all the cleanups and let the optimizer tighten them up? The current
6929 code seems pretty fragile; it will break on a cleanup within any
6930 non-conditional nesting. But any such nesting would be broken, anyway;
6931 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6932 and continues out of it. We can do that at the RTL level, though, so
6933 having an optimizer to tighten up try/finally regions would be a Good
6936 static enum gimplify_status
6937 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
6939 gimple_stmt_iterator iter
;
6940 gimple_seq body_sequence
= NULL
;
6942 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
6944 /* We only care about the number of conditions between the innermost
6945 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6946 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6947 int old_conds
= gimplify_ctxp
->conditions
;
6948 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
6949 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
6950 gimplify_ctxp
->conditions
= 0;
6951 gimplify_ctxp
->conditional_cleanups
= NULL
;
6952 gimplify_ctxp
->in_cleanup_point_expr
= true;
6954 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
6956 gimplify_ctxp
->conditions
= old_conds
;
6957 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
6958 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
6960 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
6962 gimple
*wce
= gsi_stmt (iter
);
6964 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
6966 if (gsi_one_before_end_p (iter
))
6968 /* Note that gsi_insert_seq_before and gsi_remove do not
6969 scan operands, unlike some other sequence mutators. */
6970 if (!gimple_wce_cleanup_eh_only (wce
))
6971 gsi_insert_seq_before_without_update (&iter
,
6972 gimple_wce_cleanup (wce
),
6974 gsi_remove (&iter
, true);
6981 enum gimple_try_flags kind
;
6983 if (gimple_wce_cleanup_eh_only (wce
))
6984 kind
= GIMPLE_TRY_CATCH
;
6986 kind
= GIMPLE_TRY_FINALLY
;
6987 seq
= gsi_split_seq_after (iter
);
6989 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
6990 /* Do not use gsi_replace here, as it may scan operands.
6991 We want to do a simple structural modification only. */
6992 gsi_set_stmt (&iter
, gtry
);
6993 iter
= gsi_start (gtry
->eval
);
7000 gimplify_seq_add_seq (pre_p
, body_sequence
);
7013 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
7014 is the cleanup action required. EH_ONLY is true if the cleanup should
7015 only be executed if an exception is thrown, not on normal exit.
7016 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
7017 only valid for clobbers. */
7020 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
,
7021 bool force_uncond
= false)
7024 gimple_seq cleanup_stmts
= NULL
;
7026 /* Errors can result in improperly nested cleanups. Which results in
7027 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
7031 if (gimple_conditional_context ())
7033 /* If we're in a conditional context, this is more complex. We only
7034 want to run the cleanup if we actually ran the initialization that
7035 necessitates it, but we want to run it after the end of the
7036 conditional context. So we wrap the try/finally around the
7037 condition and use a flag to determine whether or not to actually
7038 run the destructor. Thus
7042 becomes (approximately)
7046 if (test) { A::A(temp); flag = 1; val = f(temp); }
7049 if (flag) A::~A(temp);
7055 gimplify_stmt (&cleanup
, &cleanup_stmts
);
7056 wce
= gimple_build_wce (cleanup_stmts
);
7057 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
7061 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
7062 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
7063 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
7065 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
7066 gimplify_stmt (&cleanup
, &cleanup_stmts
);
7067 wce
= gimple_build_wce (cleanup_stmts
);
7068 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
7070 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
7071 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
7072 gimplify_seq_add_stmt (pre_p
, ftrue
);
7074 /* Because of this manipulation, and the EH edges that jump
7075 threading cannot redirect, the temporary (VAR) will appear
7076 to be used uninitialized. Don't warn. */
7077 suppress_warning (var
, OPT_Wuninitialized
);
7082 gimplify_stmt (&cleanup
, &cleanup_stmts
);
7083 wce
= gimple_build_wce (cleanup_stmts
);
7084 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
7085 gimplify_seq_add_stmt (pre_p
, wce
);
7089 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
7091 static enum gimplify_status
7092 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
7094 tree targ
= *expr_p
;
7095 tree temp
= TARGET_EXPR_SLOT (targ
);
7096 tree init
= TARGET_EXPR_INITIAL (targ
);
7097 enum gimplify_status ret
;
7099 bool unpoison_empty_seq
= false;
7100 gimple_stmt_iterator unpoison_it
;
7104 gimple_seq init_pre_p
= NULL
;
7106 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
7107 to the temps list. Handle also variable length TARGET_EXPRs. */
7108 if (!poly_int_tree_p (DECL_SIZE (temp
)))
7110 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
7111 gimplify_type_sizes (TREE_TYPE (temp
), &init_pre_p
);
7112 /* FIXME: this is correct only when the size of the type does
7113 not depend on expressions evaluated in init. */
7114 gimplify_vla_decl (temp
, &init_pre_p
);
7118 /* Save location where we need to place unpoisoning. It's possible
7119 that a variable will be converted to needs_to_live_in_memory. */
7120 unpoison_it
= gsi_last (*pre_p
);
7121 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
7123 gimple_add_tmp_var (temp
);
7126 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7127 expression is supposed to initialize the slot. */
7128 if (VOID_TYPE_P (TREE_TYPE (init
)))
7129 ret
= gimplify_expr (&init
, &init_pre_p
, post_p
, is_gimple_stmt
,
7133 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
7135 ret
= gimplify_expr (&init
, &init_pre_p
, post_p
, is_gimple_stmt
,
7138 ggc_free (init_expr
);
7140 if (ret
== GS_ERROR
)
7142 /* PR c++/28266 Make sure this is expanded only once. */
7143 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
7148 gimplify_and_add (init
, &init_pre_p
);
7150 /* Add a clobber for the temporary going out of scope, like
7151 gimplify_bind_expr. */
7152 if (gimplify_ctxp
->in_cleanup_point_expr
7153 && needs_to_live_in_memory (temp
))
7155 if (flag_stack_reuse
== SR_ALL
)
7157 tree clobber
= build_clobber (TREE_TYPE (temp
), CLOBBER_EOL
);
7158 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
7159 gimple_push_cleanup (temp
, clobber
, false, pre_p
, true);
7161 if (asan_poisoned_variables
7162 && DECL_ALIGN (temp
) <= MAX_SUPPORTED_STACK_ALIGNMENT
7163 && !TREE_STATIC (temp
)
7164 && dbg_cnt (asan_use_after_scope
)
7165 && !gimplify_omp_ctxp
)
7167 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
7170 if (unpoison_empty_seq
)
7171 unpoison_it
= gsi_start (*pre_p
);
7173 asan_poison_variable (temp
, false, &unpoison_it
,
7174 unpoison_empty_seq
);
7175 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
7180 gimple_seq_add_seq (pre_p
, init_pre_p
);
7182 /* If needed, push the cleanup for the temp. */
7183 if (TARGET_EXPR_CLEANUP (targ
))
7184 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
7185 CLEANUP_EH_ONLY (targ
), pre_p
);
7187 /* Only expand this once. */
7188 TREE_OPERAND (targ
, 3) = init
;
7189 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
7192 /* We should have expanded this before. */
7193 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
7199 /* Gimplification of expression trees. */
7201 /* Gimplify an expression which appears at statement context. The
7202 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7203 NULL, a new sequence is allocated.
7205 Return true if we actually added a statement to the queue. */
7208 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
7210 gimple_seq_node last
;
7212 last
= gimple_seq_last (*seq_p
);
7213 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
7214 return last
!= gimple_seq_last (*seq_p
);
7217 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
7218 to CTX. If entries already exist, force them to be some flavor of private.
7219 If there is no enclosing parallel, do nothing. */
7222 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
7226 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
7231 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7234 if (n
->value
& GOVD_SHARED
)
7235 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
7236 else if (n
->value
& GOVD_MAP
)
7237 n
->value
|= GOVD_MAP_TO_ONLY
;
7241 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
7243 if (ctx
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
7244 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
7246 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
7248 else if (ctx
->region_type
!= ORT_WORKSHARE
7249 && ctx
->region_type
!= ORT_TASKGROUP
7250 && ctx
->region_type
!= ORT_SIMD
7251 && ctx
->region_type
!= ORT_ACC
7252 && !(ctx
->region_type
& ORT_TARGET_DATA
))
7253 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
7255 ctx
= ctx
->outer_context
;
7260 /* Similarly for each of the type sizes of TYPE. */
7263 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
7265 if (type
== NULL
|| type
== error_mark_node
)
7267 type
= TYPE_MAIN_VARIANT (type
);
7269 if (ctx
->privatized_types
->add (type
))
7272 switch (TREE_CODE (type
))
7278 case FIXED_POINT_TYPE
:
7279 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
7280 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
7284 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
7285 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
7290 case QUAL_UNION_TYPE
:
7293 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
7294 if (TREE_CODE (field
) == FIELD_DECL
)
7296 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
7297 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
7303 case REFERENCE_TYPE
:
7304 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
7311 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
7312 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
7313 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
7316 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7319 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
7322 unsigned int nflags
;
7325 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
7328 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7329 there are constructors involved somewhere. Exception is a shared clause,
7330 there is nothing privatized in that case. */
7331 if ((flags
& GOVD_SHARED
) == 0
7332 && (TREE_ADDRESSABLE (TREE_TYPE (decl
))
7333 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
))))
7336 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7337 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7339 /* We shouldn't be re-adding the decl with the same data
7341 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
7342 nflags
= n
->value
| flags
;
7343 /* The only combination of data sharing classes we should see is
7344 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7345 reduction variables to be used in data sharing clauses. */
7346 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
7347 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
7348 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
7349 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
7354 /* When adding a variable-sized variable, we have to handle all sorts
7355 of additional bits of data: the pointer replacement variable, and
7356 the parameters of the type. */
7357 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7359 /* Add the pointer replacement variable as PRIVATE if the variable
7360 replacement is private, else FIRSTPRIVATE since we'll need the
7361 address of the original variable either for SHARED, or for the
7362 copy into or out of the context. */
7363 if (!(flags
& GOVD_LOCAL
) && ctx
->region_type
!= ORT_TASKGROUP
)
7365 if (flags
& GOVD_MAP
)
7366 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
7367 else if (flags
& GOVD_PRIVATE
)
7368 nflags
= GOVD_PRIVATE
;
7369 else if (((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7370 && (flags
& GOVD_FIRSTPRIVATE
))
7371 || (ctx
->region_type
== ORT_TARGET_DATA
7372 && (flags
& GOVD_DATA_SHARE_CLASS
) == 0))
7373 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
7375 nflags
= GOVD_FIRSTPRIVATE
;
7376 nflags
|= flags
& GOVD_SEEN
;
7377 t
= DECL_VALUE_EXPR (decl
);
7378 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7379 t
= TREE_OPERAND (t
, 0);
7380 gcc_assert (DECL_P (t
));
7381 omp_add_variable (ctx
, t
, nflags
);
7384 /* Add all of the variable and type parameters (which should have
7385 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7386 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
7387 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
7388 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7390 /* The variable-sized variable itself is never SHARED, only some form
7391 of PRIVATE. The sharing would take place via the pointer variable
7392 which we remapped above. */
7393 if (flags
& GOVD_SHARED
)
7394 flags
= GOVD_SHARED
| GOVD_DEBUG_PRIVATE
7395 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
7397 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7398 alloca statement we generate for the variable, so make sure it
7399 is available. This isn't automatically needed for the SHARED
7400 case, since we won't be allocating local storage then.
7401 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7402 in this case omp_notice_variable will be called later
7403 on when it is gimplified. */
7404 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
7405 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
7406 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
7408 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
7409 && omp_privatize_by_reference (decl
))
7411 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7413 /* Similar to the direct variable sized case above, we'll need the
7414 size of references being privatized. */
7415 if ((flags
& GOVD_SHARED
) == 0)
7417 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7418 if (t
&& DECL_P (t
))
7419 omp_notice_variable (ctx
, t
, true);
7426 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
7428 /* For reductions clauses in OpenACC loop directives, by default create a
7429 copy clause on the enclosing parallel construct for carrying back the
7431 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
7433 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
7436 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
7439 /* Ignore local variables and explicitly declared clauses. */
7440 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
7442 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
7444 /* According to the OpenACC spec, such a reduction variable
7445 should already have a copy map on a kernels construct,
7446 verify that here. */
7447 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
7448 && (n
->value
& GOVD_MAP
));
7450 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7452 /* Remove firstprivate and make it a copy map. */
7453 n
->value
&= ~GOVD_FIRSTPRIVATE
;
7454 n
->value
|= GOVD_MAP
;
7457 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7459 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
7460 GOVD_MAP
| GOVD_SEEN
);
7463 outer_ctx
= outer_ctx
->outer_context
;
7468 /* Notice a threadprivate variable DECL used in OMP context CTX.
7469 This just prints out diagnostics about threadprivate variable uses
7470 in untied tasks. If DECL2 is non-NULL, prevent this warning
7471 on that variable. */
7474 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
7478 struct gimplify_omp_ctx
*octx
;
7480 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
7481 if ((octx
->region_type
& ORT_TARGET
) != 0
7482 || octx
->order_concurrent
)
7484 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
7487 if (octx
->order_concurrent
)
7489 error ("threadprivate variable %qE used in a region with"
7490 " %<order(concurrent)%> clause", DECL_NAME (decl
));
7491 inform (octx
->location
, "enclosing region");
7495 error ("threadprivate variable %qE used in target region",
7497 inform (octx
->location
, "enclosing target region");
7499 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
7502 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
7505 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
7507 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7510 error ("threadprivate variable %qE used in untied task",
7512 inform (ctx
->location
, "enclosing task");
7513 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
7516 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
7520 /* Return true if global var DECL is device resident. */
7523 device_resident_p (tree decl
)
7525 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
7530 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
7532 tree c
= TREE_VALUE (t
);
7533 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
7540 /* Return true if DECL has an ACC DECLARE attribute. */
7543 is_oacc_declared (tree decl
)
7545 tree t
= TREE_CODE (decl
) == MEM_REF
? TREE_OPERAND (decl
, 0) : decl
;
7546 tree declared
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t
));
7547 return declared
!= NULL_TREE
;
7550 /* Determine outer default flags for DECL mentioned in an OMP region
7551 but not declared in an enclosing clause.
7553 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7554 remapped firstprivate instead of shared. To some extent this is
7555 addressed in omp_firstprivatize_type_sizes, but not
7559 omp_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
,
7560 bool in_code
, unsigned flags
)
7562 enum omp_clause_default_kind default_kind
= ctx
->default_kind
;
7563 enum omp_clause_default_kind kind
;
7565 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
7566 if (ctx
->region_type
& ORT_TASK
)
7568 tree detach_clause
= omp_find_clause (ctx
->clauses
, OMP_CLAUSE_DETACH
);
7570 /* The event-handle specified by a detach clause should always be firstprivate,
7571 regardless of the current default. */
7572 if (detach_clause
&& OMP_CLAUSE_DECL (detach_clause
) == decl
)
7573 kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
7575 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
7576 default_kind
= kind
;
7577 else if (VAR_P (decl
) && TREE_STATIC (decl
) && DECL_IN_CONSTANT_POOL (decl
))
7578 default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
7579 /* For C/C++ default({,first}private), variables with static storage duration
7580 declared in a namespace or global scope and referenced in construct
7581 must be explicitly specified, i.e. acts as default(none). */
7582 else if ((default_kind
== OMP_CLAUSE_DEFAULT_PRIVATE
7583 || default_kind
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
7585 && is_global_var (decl
)
7586 && (DECL_FILE_SCOPE_P (decl
)
7587 || (DECL_CONTEXT (decl
)
7588 && TREE_CODE (DECL_CONTEXT (decl
)) == NAMESPACE_DECL
))
7589 && !lang_GNU_Fortran ())
7590 default_kind
= OMP_CLAUSE_DEFAULT_NONE
;
7592 switch (default_kind
)
7594 case OMP_CLAUSE_DEFAULT_NONE
:
7598 if (ctx
->region_type
& ORT_PARALLEL
)
7600 else if ((ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
)
7602 else if (ctx
->region_type
& ORT_TASK
)
7604 else if (ctx
->region_type
& ORT_TEAMS
)
7609 error ("%qE not specified in enclosing %qs",
7610 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rtype
);
7611 inform (ctx
->location
, "enclosing %qs", rtype
);
7614 case OMP_CLAUSE_DEFAULT_SHARED
:
7615 flags
|= GOVD_SHARED
;
7617 case OMP_CLAUSE_DEFAULT_PRIVATE
:
7618 flags
|= GOVD_PRIVATE
;
7620 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
7621 flags
|= GOVD_FIRSTPRIVATE
;
7623 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
7624 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7625 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
7626 if (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
)
7628 omp_notice_variable (octx
, decl
, in_code
);
7629 for (; octx
; octx
= octx
->outer_context
)
7633 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
7634 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0
7635 && (n2
== NULL
|| (n2
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7637 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
7639 flags
|= GOVD_FIRSTPRIVATE
;
7642 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
7644 flags
|= GOVD_SHARED
;
7650 if (TREE_CODE (decl
) == PARM_DECL
7651 || (!is_global_var (decl
)
7652 && DECL_CONTEXT (decl
) == current_function_decl
))
7653 flags
|= GOVD_FIRSTPRIVATE
;
7655 flags
|= GOVD_SHARED
;
7667 /* Determine outer default flags for DECL mentioned in an OACC region
7668 but not declared in an enclosing clause. */
7671 oacc_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned flags
)
7674 bool on_device
= false;
7675 bool is_private
= false;
7676 bool declared
= is_oacc_declared (decl
);
7677 tree type
= TREE_TYPE (decl
);
7679 if (omp_privatize_by_reference (decl
))
7680 type
= TREE_TYPE (type
);
7682 /* For Fortran COMMON blocks, only used variables in those blocks are
7683 transfered and remapped. The block itself will have a private clause to
7684 avoid transfering the data twice.
7685 The hook evaluates to false by default. For a variable in Fortran's COMMON
7686 or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
7687 the variables in such a COMMON/EQUIVALENCE block shall be privatized not
7688 the whole block. For C++ and Fortran, it can also be true under certain
7689 other conditions, if DECL_HAS_VALUE_EXPR. */
7690 if (RECORD_OR_UNION_TYPE_P (type
))
7691 is_private
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7693 if ((ctx
->region_type
& (ORT_ACC_PARALLEL
| ORT_ACC_KERNELS
)) != 0
7694 && is_global_var (decl
)
7695 && device_resident_p (decl
)
7699 flags
|= GOVD_MAP_TO_ONLY
;
7702 switch (ctx
->region_type
)
7704 case ORT_ACC_KERNELS
:
7708 flags
|= GOVD_FIRSTPRIVATE
;
7709 else if (AGGREGATE_TYPE_P (type
))
7711 /* Aggregates default to 'present_or_copy', or 'present'. */
7712 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7715 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7718 /* Scalars default to 'copy'. */
7719 flags
|= GOVD_MAP
| GOVD_MAP_FORCE
;
7723 case ORT_ACC_PARALLEL
:
7724 case ORT_ACC_SERIAL
:
7725 rkind
= ctx
->region_type
== ORT_ACC_PARALLEL
? "parallel" : "serial";
7728 flags
|= GOVD_FIRSTPRIVATE
;
7729 else if (on_device
|| declared
)
7731 else if (AGGREGATE_TYPE_P (type
))
7733 /* Aggregates default to 'present_or_copy', or 'present'. */
7734 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7737 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7740 /* Scalars default to 'firstprivate'. */
7741 flags
|= GOVD_FIRSTPRIVATE
;
7749 if (DECL_ARTIFICIAL (decl
))
7750 ; /* We can get compiler-generated decls, and should not complain
7752 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_NONE
)
7754 error ("%qE not specified in enclosing OpenACC %qs construct",
7755 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rkind
);
7756 inform (ctx
->location
, "enclosing OpenACC %qs construct", rkind
);
7758 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_PRESENT
)
7759 ; /* Handled above. */
7761 gcc_checking_assert (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
);
7766 /* Record the fact that DECL was used within the OMP context CTX.
7767 IN_CODE is true when real code uses DECL, and false when we should
7768 merely emit default(none) errors. Return true if DECL is going to
7769 be remapped and thus DECL shouldn't be gimplified into its
7770 DECL_VALUE_EXPR (if any). */
7773 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
7776 unsigned flags
= in_code
? GOVD_SEEN
: 0;
7777 bool ret
= false, shared
;
7779 if (error_operand_p (decl
))
7782 if (ctx
->region_type
== ORT_NONE
)
7783 return lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7785 if (is_global_var (decl
))
7787 /* Threadprivate variables are predetermined. */
7788 if (DECL_THREAD_LOCAL_P (decl
))
7789 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
7791 if (DECL_HAS_VALUE_EXPR_P (decl
))
7793 if (ctx
->region_type
& ORT_ACC
)
7794 /* For OpenACC, defer expansion of value to avoid transfering
7795 privatized common block data instead of im-/explicitly transfered
7796 variables which are in common blocks. */
7800 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
7802 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
7803 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
7807 if (gimplify_omp_ctxp
->outer_context
== NULL
7809 && oacc_get_fn_attrib (current_function_decl
))
7811 location_t loc
= DECL_SOURCE_LOCATION (decl
);
7813 if (lookup_attribute ("omp declare target link",
7814 DECL_ATTRIBUTES (decl
)))
7817 "%qE with %<link%> clause used in %<routine%> function",
7821 else if (!lookup_attribute ("omp declare target",
7822 DECL_ATTRIBUTES (decl
)))
7825 "%qE requires a %<declare%> directive for use "
7826 "in a %<routine%> function", DECL_NAME (decl
));
7832 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7833 if ((ctx
->region_type
& ORT_TARGET
) != 0)
7835 if (ctx
->region_type
& ORT_ACC
)
7836 /* For OpenACC, as remarked above, defer expansion. */
7841 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7844 unsigned nflags
= flags
;
7845 if ((ctx
->region_type
& ORT_ACC
) == 0)
7847 bool is_declare_target
= false;
7848 if (is_global_var (decl
)
7849 && varpool_node::get_create (decl
)->offloadable
)
7851 struct gimplify_omp_ctx
*octx
;
7852 for (octx
= ctx
->outer_context
;
7853 octx
; octx
= octx
->outer_context
)
7855 n
= splay_tree_lookup (octx
->variables
,
7856 (splay_tree_key
)decl
);
7858 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
7859 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7862 is_declare_target
= octx
== NULL
;
7864 if (!is_declare_target
)
7867 enum omp_clause_defaultmap_kind kind
;
7868 if (lang_hooks
.decls
.omp_allocatable_p (decl
))
7869 gdmk
= GDMK_ALLOCATABLE
;
7870 else if (lang_hooks
.decls
.omp_scalar_target_p (decl
))
7871 gdmk
= GDMK_SCALAR_TARGET
;
7872 else if (lang_hooks
.decls
.omp_scalar_p (decl
, false))
7874 else if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
7875 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
7876 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
7878 gdmk
= GDMK_POINTER
;
7880 gdmk
= GDMK_AGGREGATE
;
7881 kind
= lang_hooks
.decls
.omp_predetermined_mapping (decl
);
7882 if (kind
!= OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
)
7884 if (kind
== OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
)
7885 nflags
|= GOVD_FIRSTPRIVATE
;
7886 else if (kind
== OMP_CLAUSE_DEFAULTMAP_TO
)
7887 nflags
|= GOVD_MAP
| GOVD_MAP_TO_ONLY
;
7891 else if (ctx
->defaultmap
[gdmk
] == 0)
7893 tree d
= lang_hooks
.decls
.omp_report_decl (decl
);
7894 error ("%qE not specified in enclosing %<target%>",
7896 inform (ctx
->location
, "enclosing %<target%>");
7898 else if (ctx
->defaultmap
[gdmk
]
7899 & (GOVD_MAP_0LEN_ARRAY
| GOVD_FIRSTPRIVATE
))
7900 nflags
|= ctx
->defaultmap
[gdmk
];
7903 gcc_assert (ctx
->defaultmap
[gdmk
] & GOVD_MAP
);
7904 nflags
|= ctx
->defaultmap
[gdmk
] & ~GOVD_MAP
;
7909 struct gimplify_omp_ctx
*octx
= ctx
->outer_context
;
7910 if ((ctx
->region_type
& ORT_ACC
) && octx
)
7912 /* Look in outer OpenACC contexts, to see if there's a
7913 data attribute for this variable. */
7914 omp_notice_variable (octx
, decl
, in_code
);
7916 for (; octx
; octx
= octx
->outer_context
)
7918 if (!(octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)))
7921 = splay_tree_lookup (octx
->variables
,
7922 (splay_tree_key
) decl
);
7925 if (octx
->region_type
== ORT_ACC_HOST_DATA
)
7926 error ("variable %qE declared in enclosing "
7927 "%<host_data%> region", DECL_NAME (decl
));
7929 if (octx
->region_type
== ORT_ACC_DATA
7930 && (n2
->value
& GOVD_MAP_0LEN_ARRAY
))
7931 nflags
|= GOVD_MAP_0LEN_ARRAY
;
7937 if ((nflags
& ~(GOVD_MAP_TO_ONLY
| GOVD_MAP_FROM_ONLY
7938 | GOVD_MAP_ALLOC_ONLY
)) == flags
)
7940 tree type
= TREE_TYPE (decl
);
7942 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
7943 && omp_privatize_by_reference (decl
))
7944 type
= TREE_TYPE (type
);
7945 if (!omp_mappable_type (type
))
7947 error ("%qD referenced in target region does not have "
7948 "a mappable type", decl
);
7949 nflags
|= GOVD_MAP
| GOVD_EXPLICIT
;
7953 if ((ctx
->region_type
& ORT_ACC
) != 0)
7954 nflags
= oacc_default_clause (ctx
, decl
, flags
);
7960 omp_add_variable (ctx
, decl
, nflags
);
7964 /* If nothing changed, there's nothing left to do. */
7965 if ((n
->value
& flags
) == flags
)
7975 if (ctx
->region_type
== ORT_WORKSHARE
7976 || ctx
->region_type
== ORT_TASKGROUP
7977 || ctx
->region_type
== ORT_SIMD
7978 || ctx
->region_type
== ORT_ACC
7979 || (ctx
->region_type
& ORT_TARGET_DATA
) != 0)
7982 flags
= omp_default_clause (ctx
, decl
, in_code
, flags
);
7984 if ((flags
& GOVD_PRIVATE
)
7985 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
7986 flags
|= GOVD_PRIVATE_OUTER_REF
;
7988 omp_add_variable (ctx
, decl
, flags
);
7990 shared
= (flags
& GOVD_SHARED
) != 0;
7991 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7995 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
7996 lb, b or incr expressions, those shouldn't be turned into simd arrays. */
7997 if (ctx
->region_type
== ORT_SIMD
7998 && ctx
->in_for_exprs
7999 && ((n
->value
& (GOVD_PRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
))
8001 flags
&= ~GOVD_SEEN
;
8003 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
8004 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
8005 && DECL_SIZE (decl
))
8007 if (TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
8010 tree t
= DECL_VALUE_EXPR (decl
);
8011 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
8012 t
= TREE_OPERAND (t
, 0);
8013 gcc_assert (DECL_P (t
));
8014 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
8015 n2
->value
|= GOVD_SEEN
;
8017 else if (omp_privatize_by_reference (decl
)
8018 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
8019 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
8023 tree t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
8024 gcc_assert (DECL_P (t
));
8025 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
8027 omp_notice_variable (ctx
, t
, true);
8031 if (ctx
->region_type
& ORT_ACC
)
8032 /* For OpenACC, as remarked above, defer expansion. */
8035 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
8036 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
8038 /* If nothing changed, there's nothing left to do. */
8039 if ((n
->value
& flags
) == flags
)
8045 /* If the variable is private in the current context, then we don't
8046 need to propagate anything to an outer context. */
8047 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
8049 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
8050 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
8052 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
8053 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
8054 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
8056 if (ctx
->outer_context
8057 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
8062 /* Verify that DECL is private within CTX. If there's specific information
8063 to the contrary in the innermost scope, generate an error. */
8066 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
8070 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
8073 if (n
->value
& GOVD_SHARED
)
8075 if (ctx
== gimplify_omp_ctxp
)
8078 error ("iteration variable %qE is predetermined linear",
8081 error ("iteration variable %qE should be private",
8083 n
->value
= GOVD_PRIVATE
;
8089 else if ((n
->value
& GOVD_EXPLICIT
) != 0
8090 && (ctx
== gimplify_omp_ctxp
8091 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
8092 && gimplify_omp_ctxp
->outer_context
== ctx
)))
8094 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
8095 error ("iteration variable %qE should not be firstprivate",
8097 else if ((n
->value
& GOVD_REDUCTION
) != 0)
8098 error ("iteration variable %qE should not be reduction",
8100 else if (simd
!= 1 && (n
->value
& GOVD_LINEAR
) != 0)
8101 error ("iteration variable %qE should not be linear",
8104 return (ctx
== gimplify_omp_ctxp
8105 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
8106 && gimplify_omp_ctxp
->outer_context
== ctx
));
8109 if (ctx
->region_type
!= ORT_WORKSHARE
8110 && ctx
->region_type
!= ORT_TASKGROUP
8111 && ctx
->region_type
!= ORT_SIMD
8112 && ctx
->region_type
!= ORT_ACC
)
8114 else if (ctx
->outer_context
)
8115 return omp_is_private (ctx
->outer_context
, decl
, simd
);
8119 /* Return true if DECL is private within a parallel region
8120 that binds to the current construct's context or in parallel
8121 region's REDUCTION clause. */
8124 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
8130 ctx
= ctx
->outer_context
;
8133 if (is_global_var (decl
))
8136 /* References might be private, but might be shared too,
8137 when checking for copyprivate, assume they might be
8138 private, otherwise assume they might be shared. */
8142 if (omp_privatize_by_reference (decl
))
8145 /* Treat C++ privatized non-static data members outside
8146 of the privatization the same. */
8147 if (omp_member_access_dummy_var (decl
))
8153 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8155 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
8156 && (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
8158 if ((ctx
->region_type
& ORT_TARGET_DATA
) != 0
8160 || (n
->value
& GOVD_MAP
) == 0)
8167 if ((n
->value
& GOVD_LOCAL
) != 0
8168 && omp_member_access_dummy_var (decl
))
8170 return (n
->value
& GOVD_SHARED
) == 0;
8173 if (ctx
->region_type
== ORT_WORKSHARE
8174 || ctx
->region_type
== ORT_TASKGROUP
8175 || ctx
->region_type
== ORT_SIMD
8176 || ctx
->region_type
== ORT_ACC
)
8185 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8188 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
8192 /* If this node has been visited, unmark it and keep looking. */
8193 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
8196 if (IS_TYPE_OR_DECL_P (t
))
8202 /* Gimplify the affinity clause but effectively ignore it.
8205 if ((step > 1) ? var <= end : var > end)
8206 locatator_var_expr; */
8209 gimplify_omp_affinity (tree
*list_p
, gimple_seq
*pre_p
)
8211 tree last_iter
= NULL_TREE
;
8212 tree last_bind
= NULL_TREE
;
8213 tree label
= NULL_TREE
;
8214 tree
*last_body
= NULL
;
8215 for (tree c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8216 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_AFFINITY
)
8218 tree t
= OMP_CLAUSE_DECL (c
);
8219 if (TREE_CODE (t
) == TREE_LIST
8221 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8223 if (TREE_VALUE (t
) == null_pointer_node
)
8225 if (TREE_PURPOSE (t
) != last_iter
)
8229 append_to_statement_list (label
, last_body
);
8230 gimplify_and_add (last_bind
, pre_p
);
8231 last_bind
= NULL_TREE
;
8233 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8235 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
8236 is_gimple_val
, fb_rvalue
) == GS_ERROR
8237 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
8238 is_gimple_val
, fb_rvalue
) == GS_ERROR
8239 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
8240 is_gimple_val
, fb_rvalue
) == GS_ERROR
8241 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
8242 is_gimple_val
, fb_rvalue
)
8246 last_iter
= TREE_PURPOSE (t
);
8247 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
8248 last_bind
= build3 (BIND_EXPR
, void_type_node
, BLOCK_VARS (block
),
8250 last_body
= &BIND_EXPR_BODY (last_bind
);
8251 tree cond
= NULL_TREE
;
8252 location_t loc
= OMP_CLAUSE_LOCATION (c
);
8253 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8255 tree var
= TREE_VEC_ELT (it
, 0);
8256 tree begin
= TREE_VEC_ELT (it
, 1);
8257 tree end
= TREE_VEC_ELT (it
, 2);
8258 tree step
= TREE_VEC_ELT (it
, 3);
8259 loc
= DECL_SOURCE_LOCATION (var
);
8260 tree tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8262 append_to_statement_list_force (tem
, last_body
);
8264 tree cond1
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8265 step
, build_zero_cst (TREE_TYPE (step
)));
8266 tree cond2
= fold_build2_loc (loc
, LE_EXPR
, boolean_type_node
,
8268 tree cond3
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8270 cond1
= fold_build3_loc (loc
, COND_EXPR
, boolean_type_node
,
8271 cond1
, cond2
, cond3
);
8273 cond
= fold_build2_loc (loc
, TRUTH_AND_EXPR
,
8274 boolean_type_node
, cond
, cond1
);
8278 tree cont_label
= create_artificial_label (loc
);
8279 label
= build1 (LABEL_EXPR
, void_type_node
, cont_label
);
8280 tree tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, cond
,
8282 build_and_jump (&cont_label
));
8283 append_to_statement_list_force (tem
, last_body
);
8285 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
8287 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
), 0),
8289 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
8291 if (error_operand_p (TREE_VALUE (t
)))
8293 append_to_statement_list_force (TREE_VALUE (t
), last_body
);
8294 TREE_VALUE (t
) = null_pointer_node
;
8300 append_to_statement_list (label
, last_body
);
8301 gimplify_and_add (last_bind
, pre_p
);
8302 last_bind
= NULL_TREE
;
8304 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8306 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8307 NULL
, is_gimple_val
, fb_rvalue
);
8308 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8310 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8312 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8313 is_gimple_lvalue
, fb_lvalue
) == GS_ERROR
)
8315 gimplify_and_add (OMP_CLAUSE_DECL (c
), pre_p
);
8320 append_to_statement_list (label
, last_body
);
8321 gimplify_and_add (last_bind
, pre_p
);
8326 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8327 lower all the depend clauses by populating corresponding depend
8328 array. Returns 0 if there are no such depend clauses, or
8329 2 if all depend clauses should be removed, 1 otherwise. */
8332 gimplify_omp_depend (tree
*list_p
, gimple_seq
*pre_p
)
8336 size_t n
[5] = { 0, 0, 0, 0, 0 };
8338 tree counts
[5] = { NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
};
8339 tree last_iter
= NULL_TREE
, last_count
= NULL_TREE
;
8341 location_t first_loc
= UNKNOWN_LOCATION
;
8343 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8344 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
8346 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8348 case OMP_CLAUSE_DEPEND_IN
:
8351 case OMP_CLAUSE_DEPEND_OUT
:
8352 case OMP_CLAUSE_DEPEND_INOUT
:
8355 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8358 case OMP_CLAUSE_DEPEND_DEPOBJ
:
8361 case OMP_CLAUSE_DEPEND_INOUTSET
:
8367 tree t
= OMP_CLAUSE_DECL (c
);
8368 if (first_loc
== UNKNOWN_LOCATION
)
8369 first_loc
= OMP_CLAUSE_LOCATION (c
);
8370 if (TREE_CODE (t
) == TREE_LIST
8372 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8374 if (TREE_PURPOSE (t
) != last_iter
)
8376 tree tcnt
= size_one_node
;
8377 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8379 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
8380 is_gimple_val
, fb_rvalue
) == GS_ERROR
8381 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
8382 is_gimple_val
, fb_rvalue
) == GS_ERROR
8383 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
8384 is_gimple_val
, fb_rvalue
) == GS_ERROR
8385 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
8386 is_gimple_val
, fb_rvalue
)
8389 tree var
= TREE_VEC_ELT (it
, 0);
8390 tree begin
= TREE_VEC_ELT (it
, 1);
8391 tree end
= TREE_VEC_ELT (it
, 2);
8392 tree step
= TREE_VEC_ELT (it
, 3);
8393 tree orig_step
= TREE_VEC_ELT (it
, 4);
8394 tree type
= TREE_TYPE (var
);
8395 tree stype
= TREE_TYPE (step
);
8396 location_t loc
= DECL_SOURCE_LOCATION (var
);
8398 /* Compute count for this iterator as
8400 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8401 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8402 and compute product of those for the entire depend
8404 if (POINTER_TYPE_P (type
))
8405 endmbegin
= fold_build2_loc (loc
, POINTER_DIFF_EXPR
,
8408 endmbegin
= fold_build2_loc (loc
, MINUS_EXPR
, type
,
8410 tree stepm1
= fold_build2_loc (loc
, MINUS_EXPR
, stype
,
8412 build_int_cst (stype
, 1));
8413 tree stepp1
= fold_build2_loc (loc
, PLUS_EXPR
, stype
, step
,
8414 build_int_cst (stype
, 1));
8415 tree pos
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
8416 unshare_expr (endmbegin
),
8418 pos
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
8420 tree neg
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
8422 if (TYPE_UNSIGNED (stype
))
8424 neg
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, neg
);
8425 step
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, step
);
8427 neg
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
8430 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8433 pos
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, pos
,
8434 build_int_cst (stype
, 0));
8435 cond
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
,
8437 neg
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, neg
,
8438 build_int_cst (stype
, 0));
8439 tree osteptype
= TREE_TYPE (orig_step
);
8440 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8442 build_int_cst (osteptype
, 0));
8443 tree cnt
= fold_build3_loc (loc
, COND_EXPR
, stype
,
8445 cnt
= fold_convert_loc (loc
, sizetype
, cnt
);
8446 if (gimplify_expr (&cnt
, pre_p
, NULL
, is_gimple_val
,
8447 fb_rvalue
) == GS_ERROR
)
8449 tcnt
= size_binop_loc (loc
, MULT_EXPR
, tcnt
, cnt
);
8451 if (gimplify_expr (&tcnt
, pre_p
, NULL
, is_gimple_val
,
8452 fb_rvalue
) == GS_ERROR
)
8454 last_iter
= TREE_PURPOSE (t
);
8457 if (counts
[i
] == NULL_TREE
)
8458 counts
[i
] = last_count
;
8460 counts
[i
] = size_binop_loc (OMP_CLAUSE_LOCATION (c
),
8461 PLUS_EXPR
, counts
[i
], last_count
);
8466 for (i
= 0; i
< 5; i
++)
8472 tree total
= size_zero_node
;
8473 for (i
= 0; i
< 5; i
++)
8475 unused
[i
] = counts
[i
] == NULL_TREE
&& n
[i
] == 0;
8476 if (counts
[i
] == NULL_TREE
)
8477 counts
[i
] = size_zero_node
;
8479 counts
[i
] = size_binop (PLUS_EXPR
, counts
[i
], size_int (n
[i
]));
8480 if (gimplify_expr (&counts
[i
], pre_p
, NULL
, is_gimple_val
,
8481 fb_rvalue
) == GS_ERROR
)
8483 total
= size_binop (PLUS_EXPR
, total
, counts
[i
]);
8486 if (gimplify_expr (&total
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
8489 bool is_old
= unused
[1] && unused
[3] && unused
[4];
8490 tree totalpx
= size_binop (PLUS_EXPR
, unshare_expr (total
),
8491 size_int (is_old
? 1 : 4));
8493 totalpx
= size_binop (PLUS_EXPR
, totalpx
,
8494 size_binop (MULT_EXPR
, counts
[4], size_int (2)));
8495 tree type
= build_array_type (ptr_type_node
, build_index_type (totalpx
));
8496 tree array
= create_tmp_var_raw (type
);
8497 TREE_ADDRESSABLE (array
) = 1;
8498 if (!poly_int_tree_p (totalpx
))
8500 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array
)))
8501 gimplify_type_sizes (TREE_TYPE (array
), pre_p
);
8502 if (gimplify_omp_ctxp
)
8504 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8506 && (ctx
->region_type
== ORT_WORKSHARE
8507 || ctx
->region_type
== ORT_TASKGROUP
8508 || ctx
->region_type
== ORT_SIMD
8509 || ctx
->region_type
== ORT_ACC
))
8510 ctx
= ctx
->outer_context
;
8512 omp_add_variable (ctx
, array
, GOVD_LOCAL
| GOVD_SEEN
);
8514 gimplify_vla_decl (array
, pre_p
);
8517 gimple_add_tmp_var (array
);
8518 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
8523 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8524 build_int_cst (ptr_type_node
, 0));
8525 gimplify_and_add (tem
, pre_p
);
8526 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
8529 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8530 fold_convert (ptr_type_node
, total
));
8531 gimplify_and_add (tem
, pre_p
);
8532 for (i
= 1; i
< (is_old
? 2 : 4); i
++)
8534 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (i
+ !is_old
),
8535 NULL_TREE
, NULL_TREE
);
8536 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, counts
[i
- 1]);
8537 gimplify_and_add (tem
, pre_p
);
8544 for (i
= 0; i
< 5; i
++)
8546 if (i
&& (i
>= j
|| unused
[i
- 1]))
8548 cnts
[i
] = cnts
[i
- 1];
8551 cnts
[i
] = create_tmp_var (sizetype
);
8553 g
= gimple_build_assign (cnts
[i
], size_int (is_old
? 2 : 5));
8558 t
= size_binop (PLUS_EXPR
, counts
[0], size_int (2));
8560 t
= size_binop (PLUS_EXPR
, cnts
[i
- 1], counts
[i
- 1]);
8561 if (gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
8564 g
= gimple_build_assign (cnts
[i
], t
);
8566 gimple_seq_add_stmt (pre_p
, g
);
8569 cnts
[5] = NULL_TREE
;
8572 tree t
= size_binop (PLUS_EXPR
, total
, size_int (5));
8573 cnts
[5] = create_tmp_var (sizetype
);
8574 g
= gimple_build_assign (cnts
[i
], t
);
8575 gimple_seq_add_stmt (pre_p
, g
);
8578 last_iter
= NULL_TREE
;
8579 tree last_bind
= NULL_TREE
;
8580 tree
*last_body
= NULL
;
8581 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8582 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
8584 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8586 case OMP_CLAUSE_DEPEND_IN
:
8589 case OMP_CLAUSE_DEPEND_OUT
:
8590 case OMP_CLAUSE_DEPEND_INOUT
:
8593 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8596 case OMP_CLAUSE_DEPEND_DEPOBJ
:
8599 case OMP_CLAUSE_DEPEND_INOUTSET
:
8605 tree t
= OMP_CLAUSE_DECL (c
);
8606 if (TREE_CODE (t
) == TREE_LIST
8608 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8610 if (TREE_PURPOSE (t
) != last_iter
)
8613 gimplify_and_add (last_bind
, pre_p
);
8614 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
8615 last_bind
= build3 (BIND_EXPR
, void_type_node
,
8616 BLOCK_VARS (block
), NULL
, block
);
8617 TREE_SIDE_EFFECTS (last_bind
) = 1;
8618 SET_EXPR_LOCATION (last_bind
, OMP_CLAUSE_LOCATION (c
));
8619 tree
*p
= &BIND_EXPR_BODY (last_bind
);
8620 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8622 tree var
= TREE_VEC_ELT (it
, 0);
8623 tree begin
= TREE_VEC_ELT (it
, 1);
8624 tree end
= TREE_VEC_ELT (it
, 2);
8625 tree step
= TREE_VEC_ELT (it
, 3);
8626 tree orig_step
= TREE_VEC_ELT (it
, 4);
8627 tree type
= TREE_TYPE (var
);
8628 location_t loc
= DECL_SOURCE_LOCATION (var
);
8636 if (orig_step > 0) {
8637 if (var < end) goto beg_label;
8639 if (var > end) goto beg_label;
8641 for each iterator, with inner iterators added to
8643 tree beg_label
= create_artificial_label (loc
);
8644 tree cond_label
= NULL_TREE
;
8645 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8647 append_to_statement_list_force (tem
, p
);
8648 tem
= build_and_jump (&cond_label
);
8649 append_to_statement_list_force (tem
, p
);
8650 tem
= build1 (LABEL_EXPR
, void_type_node
, beg_label
);
8651 append_to_statement_list (tem
, p
);
8652 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
8653 NULL_TREE
, NULL_TREE
);
8654 TREE_SIDE_EFFECTS (bind
) = 1;
8655 SET_EXPR_LOCATION (bind
, loc
);
8656 append_to_statement_list_force (bind
, p
);
8657 if (POINTER_TYPE_P (type
))
8658 tem
= build2_loc (loc
, POINTER_PLUS_EXPR
, type
,
8659 var
, fold_convert_loc (loc
, sizetype
,
8662 tem
= build2_loc (loc
, PLUS_EXPR
, type
, var
, step
);
8663 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8665 append_to_statement_list_force (tem
, p
);
8666 tem
= build1 (LABEL_EXPR
, void_type_node
, cond_label
);
8667 append_to_statement_list (tem
, p
);
8668 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8672 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8673 cond
, build_and_jump (&beg_label
),
8675 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8678 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8679 cond
, build_and_jump (&beg_label
),
8681 tree osteptype
= TREE_TYPE (orig_step
);
8682 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8684 build_int_cst (osteptype
, 0));
8685 tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8687 append_to_statement_list_force (tem
, p
);
8688 p
= &BIND_EXPR_BODY (bind
);
8692 last_iter
= TREE_PURPOSE (t
);
8693 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
8695 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
),
8697 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
8699 if (error_operand_p (TREE_VALUE (t
)))
8701 if (TREE_VALUE (t
) != null_pointer_node
)
8702 TREE_VALUE (t
) = build_fold_addr_expr (TREE_VALUE (t
));
8705 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8706 NULL_TREE
, NULL_TREE
);
8707 tree r2
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[5],
8708 NULL_TREE
, NULL_TREE
);
8709 r2
= build_fold_addr_expr_with_type (r2
, ptr_type_node
);
8710 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8711 void_type_node
, r
, r2
);
8712 append_to_statement_list_force (tem
, last_body
);
8713 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8714 void_type_node
, cnts
[i
],
8715 size_binop (PLUS_EXPR
, cnts
[i
],
8717 append_to_statement_list_force (tem
, last_body
);
8720 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8721 NULL_TREE
, NULL_TREE
);
8722 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8723 void_type_node
, r
, TREE_VALUE (t
));
8724 append_to_statement_list_force (tem
, last_body
);
8727 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
8728 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)),
8729 NULL_TREE
, NULL_TREE
);
8730 tem
= build_int_cst (ptr_type_node
, GOMP_DEPEND_INOUTSET
);
8731 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8732 void_type_node
, r
, tem
);
8733 append_to_statement_list_force (tem
, last_body
);
8735 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8736 void_type_node
, cnts
[i
],
8737 size_binop (PLUS_EXPR
, cnts
[i
],
8738 size_int (1 + (i
== 5))));
8739 append_to_statement_list_force (tem
, last_body
);
8740 TREE_VALUE (t
) = null_pointer_node
;
8746 gimplify_and_add (last_bind
, pre_p
);
8747 last_bind
= NULL_TREE
;
8749 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8751 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8752 NULL
, is_gimple_val
, fb_rvalue
);
8753 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8755 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8757 if (OMP_CLAUSE_DECL (c
) != null_pointer_node
)
8758 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8759 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8760 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8764 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8765 NULL_TREE
, NULL_TREE
);
8766 tree r2
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[5],
8767 NULL_TREE
, NULL_TREE
);
8768 r2
= build_fold_addr_expr_with_type (r2
, ptr_type_node
);
8769 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, r2
);
8770 gimplify_and_add (tem
, pre_p
);
8771 g
= gimple_build_assign (cnts
[i
], size_binop (PLUS_EXPR
,
8774 gimple_seq_add_stmt (pre_p
, g
);
8777 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8778 NULL_TREE
, NULL_TREE
);
8779 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, OMP_CLAUSE_DECL (c
));
8780 gimplify_and_add (tem
, pre_p
);
8783 r
= build4 (ARRAY_REF
, ptr_type_node
, array
,
8784 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)),
8785 NULL_TREE
, NULL_TREE
);
8786 tem
= build_int_cst (ptr_type_node
, GOMP_DEPEND_INOUTSET
);
8787 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, tem
);
8788 append_to_statement_list_force (tem
, last_body
);
8789 gimplify_and_add (tem
, pre_p
);
8791 g
= gimple_build_assign (cnts
[i
],
8792 size_binop (PLUS_EXPR
, cnts
[i
],
8793 size_int (1 + (i
== 5))));
8794 gimple_seq_add_stmt (pre_p
, g
);
8798 gimplify_and_add (last_bind
, pre_p
);
8799 tree cond
= boolean_false_node
;
8803 cond
= build2_loc (first_loc
, NE_EXPR
, boolean_type_node
, cnts
[0],
8804 size_binop_loc (first_loc
, PLUS_EXPR
, counts
[0],
8807 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8808 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8810 size_binop_loc (first_loc
, PLUS_EXPR
,
8816 tree prev
= size_int (5);
8817 for (i
= 0; i
< 5; i
++)
8821 prev
= size_binop_loc (first_loc
, PLUS_EXPR
, counts
[i
], prev
);
8822 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8823 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8824 cnts
[i
], unshare_expr (prev
)));
8827 tem
= build3_loc (first_loc
, COND_EXPR
, void_type_node
, cond
,
8828 build_call_expr_loc (first_loc
,
8829 builtin_decl_explicit (BUILT_IN_TRAP
),
8831 gimplify_and_add (tem
, pre_p
);
8832 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
8833 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
8834 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
8835 OMP_CLAUSE_CHAIN (c
) = *list_p
;
8840 /* For a set of mappings describing an array section pointed to by a struct
8841 (or derived type, etc.) component, create an "alloc" or "release" node to
8842 insert into a list following a GOMP_MAP_STRUCT node. For some types of
8843 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
8844 be created that is inserted into the list of mapping nodes attached to the
8845 directive being processed -- not part of the sorted list of nodes after
8848 CODE is the code of the directive being processed. GRP_START and GRP_END
8849 are the first and last of two or three nodes representing this array section
8850 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
8851 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
8852 filled with the additional node described above, if needed.
8854 This function does not add the new nodes to any lists itself. It is the
8855 responsibility of the caller to do that. */
8858 build_omp_struct_comp_nodes (enum tree_code code
, tree grp_start
, tree grp_end
,
8861 enum gomp_map_kind mkind
8862 = (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
)
8863 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
8865 gcc_assert (grp_start
!= grp_end
);
8867 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
), OMP_CLAUSE_MAP
);
8868 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8869 OMP_CLAUSE_DECL (c2
) = unshare_expr (OMP_CLAUSE_DECL (grp_end
));
8870 OMP_CLAUSE_CHAIN (c2
) = NULL_TREE
;
8871 tree grp_mid
= NULL_TREE
;
8872 if (OMP_CLAUSE_CHAIN (grp_start
) != grp_end
)
8873 grp_mid
= OMP_CLAUSE_CHAIN (grp_start
);
8876 && OMP_CLAUSE_CODE (grp_mid
) == OMP_CLAUSE_MAP
8877 && OMP_CLAUSE_MAP_KIND (grp_mid
) == GOMP_MAP_TO_PSET
)
8878 OMP_CLAUSE_SIZE (c2
) = OMP_CLAUSE_SIZE (grp_mid
);
8880 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (ptr_type_node
);
8883 && OMP_CLAUSE_CODE (grp_mid
) == OMP_CLAUSE_MAP
8884 && (OMP_CLAUSE_MAP_KIND (grp_mid
) == GOMP_MAP_ALWAYS_POINTER
8885 || OMP_CLAUSE_MAP_KIND (grp_mid
) == GOMP_MAP_ATTACH_DETACH
))
8888 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
), OMP_CLAUSE_MAP
);
8889 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
8890 OMP_CLAUSE_DECL (c3
) = unshare_expr (OMP_CLAUSE_DECL (grp_mid
));
8891 OMP_CLAUSE_SIZE (c3
) = TYPE_SIZE_UNIT (ptr_type_node
);
8892 OMP_CLAUSE_CHAIN (c3
) = NULL_TREE
;
8897 *extra_node
= NULL_TREE
;
8902 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8903 and set *BITPOSP and *POFFSETP to the bit offset of the access.
8904 If BASE_REF is non-NULL and the containing object is a reference, set
8905 *BASE_REF to that reference before dereferencing the object.
8906 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8907 has array type, else return NULL. */
8910 extract_base_bit_offset (tree base
, poly_int64
*bitposp
,
8911 poly_offset_int
*poffsetp
)
8914 poly_int64 bitsize
, bitpos
;
8916 int unsignedp
, reversep
, volatilep
= 0;
8917 poly_offset_int poffset
;
8921 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
, &mode
,
8922 &unsignedp
, &reversep
, &volatilep
);
8926 if (offset
&& poly_int_tree_p (offset
))
8928 poffset
= wi::to_poly_offset (offset
);
8934 if (maybe_ne (bitpos
, 0))
8935 poffset
+= bits_to_bytes_round_down (bitpos
);
8938 *poffsetp
= poffset
;
8943 /* Used for topological sorting of mapping groups. UNVISITED means we haven't
8944 started processing the group yet. The TEMPORARY mark is used when we first
8945 encounter a group on a depth-first traversal, and the PERMANENT mark is used
8946 when we have processed all the group's children (i.e. all the base pointers
8947 referred to by the group's mapping nodes, recursively). */
8949 enum omp_tsort_mark
{
8955 /* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
8958 struct omp_mapping_group
{
8961 omp_tsort_mark mark
;
8962 /* If we've removed the group but need to reindex, mark the group as
8965 struct omp_mapping_group
*sibling
;
8966 struct omp_mapping_group
*next
;
8970 debug_mapping_group (omp_mapping_group
*grp
)
8972 tree tmp
= OMP_CLAUSE_CHAIN (grp
->grp_end
);
8973 OMP_CLAUSE_CHAIN (grp
->grp_end
) = NULL
;
8974 debug_generic_expr (*grp
->grp_start
);
8975 OMP_CLAUSE_CHAIN (grp
->grp_end
) = tmp
;
8978 /* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
8982 omp_get_base_pointer (tree expr
)
8984 while (TREE_CODE (expr
) == ARRAY_REF
8985 || TREE_CODE (expr
) == COMPONENT_REF
)
8986 expr
= TREE_OPERAND (expr
, 0);
8988 if (TREE_CODE (expr
) == INDIRECT_REF
8989 || (TREE_CODE (expr
) == MEM_REF
8990 && integer_zerop (TREE_OPERAND (expr
, 1))))
8992 expr
= TREE_OPERAND (expr
, 0);
8993 while (TREE_CODE (expr
) == COMPOUND_EXPR
)
8994 expr
= TREE_OPERAND (expr
, 1);
8995 if (TREE_CODE (expr
) == POINTER_PLUS_EXPR
)
8996 expr
= TREE_OPERAND (expr
, 0);
8997 if (TREE_CODE (expr
) == SAVE_EXPR
)
8998 expr
= TREE_OPERAND (expr
, 0);
9006 /* Remove COMPONENT_REFS and indirections from EXPR. */
9009 omp_strip_components_and_deref (tree expr
)
9011 while (TREE_CODE (expr
) == COMPONENT_REF
9012 || TREE_CODE (expr
) == INDIRECT_REF
9013 || (TREE_CODE (expr
) == MEM_REF
9014 && integer_zerop (TREE_OPERAND (expr
, 1)))
9015 || TREE_CODE (expr
) == POINTER_PLUS_EXPR
9016 || TREE_CODE (expr
) == COMPOUND_EXPR
)
9017 if (TREE_CODE (expr
) == COMPOUND_EXPR
)
9018 expr
= TREE_OPERAND (expr
, 1);
9020 expr
= TREE_OPERAND (expr
, 0);
9028 omp_strip_indirections (tree expr
)
9030 while (TREE_CODE (expr
) == INDIRECT_REF
9031 || (TREE_CODE (expr
) == MEM_REF
9032 && integer_zerop (TREE_OPERAND (expr
, 1))))
9033 expr
= TREE_OPERAND (expr
, 0);
9038 /* An attach or detach operation depends directly on the address being
9039 attached/detached. Return that address, or none if there are no
9040 attachments/detachments. */
9043 omp_get_attachment (omp_mapping_group
*grp
)
9045 tree node
= *grp
->grp_start
;
9047 switch (OMP_CLAUSE_MAP_KIND (node
))
9051 case GOMP_MAP_TOFROM
:
9052 case GOMP_MAP_ALWAYS_FROM
:
9053 case GOMP_MAP_ALWAYS_TO
:
9054 case GOMP_MAP_ALWAYS_TOFROM
:
9055 case GOMP_MAP_FORCE_FROM
:
9056 case GOMP_MAP_FORCE_TO
:
9057 case GOMP_MAP_FORCE_TOFROM
:
9058 case GOMP_MAP_FORCE_PRESENT
:
9059 case GOMP_MAP_ALLOC
:
9060 case GOMP_MAP_RELEASE
:
9061 case GOMP_MAP_DELETE
:
9062 case GOMP_MAP_FORCE_ALLOC
:
9063 if (node
== grp
->grp_end
)
9066 node
= OMP_CLAUSE_CHAIN (node
);
9067 if (node
&& OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_TO_PSET
)
9069 gcc_assert (node
!= grp
->grp_end
);
9070 node
= OMP_CLAUSE_CHAIN (node
);
9073 switch (OMP_CLAUSE_MAP_KIND (node
))
9075 case GOMP_MAP_POINTER
:
9076 case GOMP_MAP_ALWAYS_POINTER
:
9077 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
9078 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
9079 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
9082 case GOMP_MAP_ATTACH_DETACH
:
9083 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
9084 return OMP_CLAUSE_DECL (node
);
9087 internal_error ("unexpected mapping node");
9089 return error_mark_node
;
9091 case GOMP_MAP_TO_PSET
:
9092 gcc_assert (node
!= grp
->grp_end
);
9093 node
= OMP_CLAUSE_CHAIN (node
);
9094 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_ATTACH
9095 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DETACH
)
9096 return OMP_CLAUSE_DECL (node
);
9098 internal_error ("unexpected mapping node");
9099 return error_mark_node
;
9101 case GOMP_MAP_ATTACH
:
9102 case GOMP_MAP_DETACH
:
9103 node
= OMP_CLAUSE_CHAIN (node
);
9104 if (!node
|| *grp
->grp_start
== grp
->grp_end
)
9105 return OMP_CLAUSE_DECL (*grp
->grp_start
);
9106 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9107 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9108 return OMP_CLAUSE_DECL (*grp
->grp_start
);
9110 internal_error ("unexpected mapping node");
9111 return error_mark_node
;
9113 case GOMP_MAP_STRUCT
:
9114 case GOMP_MAP_FORCE_DEVICEPTR
:
9115 case GOMP_MAP_DEVICE_RESIDENT
:
9117 case GOMP_MAP_IF_PRESENT
:
9118 case GOMP_MAP_FIRSTPRIVATE
:
9119 case GOMP_MAP_FIRSTPRIVATE_INT
:
9120 case GOMP_MAP_USE_DEVICE_PTR
:
9121 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
9125 internal_error ("unexpected mapping node");
9128 return error_mark_node
;
9131 /* Given a pointer START_P to the start of a group of related (e.g. pointer)
9132 mappings, return the chain pointer to the end of that group in the list. */
9135 omp_group_last (tree
*start_p
)
9137 tree c
= *start_p
, nc
, *grp_last_p
= start_p
;
9139 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
);
9141 nc
= OMP_CLAUSE_CHAIN (c
);
9143 if (!nc
|| OMP_CLAUSE_CODE (nc
) != OMP_CLAUSE_MAP
)
9146 switch (OMP_CLAUSE_MAP_KIND (c
))
9150 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
9151 && (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9152 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9153 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_ATTACH_DETACH
9154 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_POINTER
9155 || (OMP_CLAUSE_MAP_KIND (nc
)
9156 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
)
9157 || (OMP_CLAUSE_MAP_KIND (nc
)
9158 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
)
9159 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_ALWAYS_POINTER
9160 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_TO_PSET
))
9162 grp_last_p
= &OMP_CLAUSE_CHAIN (c
);
9164 tree nc2
= OMP_CLAUSE_CHAIN (nc
);
9166 && OMP_CLAUSE_CODE (nc2
) == OMP_CLAUSE_MAP
9167 && (OMP_CLAUSE_MAP_KIND (nc
)
9168 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
)
9169 && OMP_CLAUSE_MAP_KIND (nc2
) == GOMP_MAP_ATTACH
)
9171 grp_last_p
= &OMP_CLAUSE_CHAIN (nc
);
9173 nc2
= OMP_CLAUSE_CHAIN (nc2
);
9179 case GOMP_MAP_ATTACH
:
9180 case GOMP_MAP_DETACH
:
9181 /* This is a weird artifact of how directives are parsed: bare attach or
9182 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9183 FIRSTPRIVATE_REFERENCE node. FIXME. */
9185 && OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
9186 && (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9187 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
))
9188 grp_last_p
= &OMP_CLAUSE_CHAIN (c
);
9191 case GOMP_MAP_TO_PSET
:
9192 if (OMP_CLAUSE_CODE (nc
) == OMP_CLAUSE_MAP
9193 && (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_ATTACH
9194 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_DETACH
))
9195 grp_last_p
= &OMP_CLAUSE_CHAIN (c
);
9198 case GOMP_MAP_STRUCT
:
9200 unsigned HOST_WIDE_INT num_mappings
9201 = tree_to_uhwi (OMP_CLAUSE_SIZE (c
));
9202 if (OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9203 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9204 || OMP_CLAUSE_MAP_KIND (nc
) == GOMP_MAP_ATTACH_DETACH
)
9205 grp_last_p
= &OMP_CLAUSE_CHAIN (*grp_last_p
);
9206 for (unsigned i
= 0; i
< num_mappings
; i
++)
9207 grp_last_p
= &OMP_CLAUSE_CHAIN (*grp_last_p
);
9215 /* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9216 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9217 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9218 if we have more than one such group, else return NULL. */
9221 omp_gather_mapping_groups_1 (tree
*list_p
, vec
<omp_mapping_group
> *groups
,
9222 tree gather_sentinel
)
9224 for (tree
*cp
= list_p
;
9225 *cp
&& *cp
!= gather_sentinel
;
9226 cp
= &OMP_CLAUSE_CHAIN (*cp
))
9228 if (OMP_CLAUSE_CODE (*cp
) != OMP_CLAUSE_MAP
)
9231 tree
*grp_last_p
= omp_group_last (cp
);
9232 omp_mapping_group grp
;
9235 grp
.grp_end
= *grp_last_p
;
9236 grp
.mark
= UNVISITED
;
9238 grp
.deleted
= false;
9240 groups
->safe_push (grp
);
9246 static vec
<omp_mapping_group
> *
9247 omp_gather_mapping_groups (tree
*list_p
)
9249 vec
<omp_mapping_group
> *groups
= new vec
<omp_mapping_group
> ();
9251 omp_gather_mapping_groups_1 (list_p
, groups
, NULL_TREE
);
9253 if (groups
->length () > 0)
9262 /* A pointer mapping group GRP may define a block of memory starting at some
9263 base address, and maybe also define a firstprivate pointer or firstprivate
9264 reference that points to that block. The return value is a node containing
9265 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9266 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9267 return the number of consecutive chained nodes in CHAINED. */
9270 omp_group_base (omp_mapping_group
*grp
, unsigned int *chained
,
9273 tree node
= *grp
->grp_start
;
9275 *firstprivate
= NULL_TREE
;
9278 switch (OMP_CLAUSE_MAP_KIND (node
))
9282 case GOMP_MAP_TOFROM
:
9283 case GOMP_MAP_ALWAYS_FROM
:
9284 case GOMP_MAP_ALWAYS_TO
:
9285 case GOMP_MAP_ALWAYS_TOFROM
:
9286 case GOMP_MAP_FORCE_FROM
:
9287 case GOMP_MAP_FORCE_TO
:
9288 case GOMP_MAP_FORCE_TOFROM
:
9289 case GOMP_MAP_FORCE_PRESENT
:
9290 case GOMP_MAP_ALLOC
:
9291 case GOMP_MAP_RELEASE
:
9292 case GOMP_MAP_DELETE
:
9293 case GOMP_MAP_FORCE_ALLOC
:
9294 case GOMP_MAP_IF_PRESENT
:
9295 if (node
== grp
->grp_end
)
9298 node
= OMP_CLAUSE_CHAIN (node
);
9299 if (node
&& OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_TO_PSET
)
9301 if (node
== grp
->grp_end
)
9302 return *grp
->grp_start
;
9303 node
= OMP_CLAUSE_CHAIN (node
);
9306 switch (OMP_CLAUSE_MAP_KIND (node
))
9308 case GOMP_MAP_POINTER
:
9309 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
9310 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
9311 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
9312 *firstprivate
= OMP_CLAUSE_DECL (node
);
9313 return *grp
->grp_start
;
9315 case GOMP_MAP_ALWAYS_POINTER
:
9316 case GOMP_MAP_ATTACH_DETACH
:
9317 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
9318 return *grp
->grp_start
;
9321 internal_error ("unexpected mapping node");
9324 internal_error ("unexpected mapping node");
9325 return error_mark_node
;
9327 case GOMP_MAP_TO_PSET
:
9328 gcc_assert (node
!= grp
->grp_end
);
9329 node
= OMP_CLAUSE_CHAIN (node
);
9330 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_ATTACH
9331 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_DETACH
)
9334 internal_error ("unexpected mapping node");
9335 return error_mark_node
;
9337 case GOMP_MAP_ATTACH
:
9338 case GOMP_MAP_DETACH
:
9339 node
= OMP_CLAUSE_CHAIN (node
);
9340 if (!node
|| *grp
->grp_start
== grp
->grp_end
)
9342 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9343 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9345 /* We're mapping the base pointer itself in a bare attach or detach
9346 node. This is a side effect of how parsing works, and the mapping
9347 will be removed anyway (at least for enter/exit data directives).
9348 We should ignore the mapping here. FIXME. */
9352 internal_error ("unexpected mapping node");
9353 return error_mark_node
;
9355 case GOMP_MAP_STRUCT
:
9357 unsigned HOST_WIDE_INT num_mappings
9358 = tree_to_uhwi (OMP_CLAUSE_SIZE (node
));
9359 node
= OMP_CLAUSE_CHAIN (node
);
9360 if (OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9361 || OMP_CLAUSE_MAP_KIND (node
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9363 *firstprivate
= OMP_CLAUSE_DECL (node
);
9364 node
= OMP_CLAUSE_CHAIN (node
);
9366 *chained
= num_mappings
;
9370 case GOMP_MAP_FORCE_DEVICEPTR
:
9371 case GOMP_MAP_DEVICE_RESIDENT
:
9373 case GOMP_MAP_FIRSTPRIVATE
:
9374 case GOMP_MAP_FIRSTPRIVATE_INT
:
9375 case GOMP_MAP_USE_DEVICE_PTR
:
9376 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
:
9379 case GOMP_MAP_FIRSTPRIVATE_POINTER
:
9380 case GOMP_MAP_FIRSTPRIVATE_REFERENCE
:
9381 case GOMP_MAP_POINTER
:
9382 case GOMP_MAP_ALWAYS_POINTER
:
9383 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
:
9384 /* These shouldn't appear by themselves. */
9386 internal_error ("unexpected pointer mapping node");
9387 return error_mark_node
;
9393 return error_mark_node
;
9396 /* Given a vector of omp_mapping_groups, build a hash table so we can look up
9397 nodes by tree_operand_hash. */
9400 omp_index_mapping_groups_1 (hash_map
<tree_operand_hash
,
9401 omp_mapping_group
*> *grpmap
,
9402 vec
<omp_mapping_group
> *groups
,
9403 tree reindex_sentinel
)
9405 omp_mapping_group
*grp
;
9407 bool reindexing
= reindex_sentinel
!= NULL_TREE
, above_hwm
= false;
9409 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
9411 if (reindexing
&& *grp
->grp_start
== reindex_sentinel
)
9414 if (reindexing
&& !above_hwm
)
9418 unsigned int chained
;
9419 tree node
= omp_group_base (grp
, &chained
, &fpp
);
9421 if (node
== error_mark_node
|| (!node
&& !fpp
))
9424 for (unsigned j
= 0;
9425 node
&& j
< chained
;
9426 node
= OMP_CLAUSE_CHAIN (node
), j
++)
9428 tree decl
= OMP_CLAUSE_DECL (node
);
9430 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9431 meaning node-hash lookups don't work. This is a workaround for
9432 that, but ideally we should just create the INDIRECT_REF at
9433 source instead. FIXME. */
9434 if (TREE_CODE (decl
) == MEM_REF
9435 && integer_zerop (TREE_OPERAND (decl
, 1)))
9436 decl
= build_fold_indirect_ref (TREE_OPERAND (decl
, 0));
9438 omp_mapping_group
**prev
= grpmap
->get (decl
);
9440 if (prev
&& *prev
== grp
)
9444 /* Mapping the same thing twice is normally diagnosed as an error,
9445 but can happen under some circumstances, e.g. in pr99928-16.c,
9448 #pragma omp target simd reduction(+:a[:3]) \
9449 map(always, tofrom: a[:6])
9452 will result in two "a[0]" mappings (of different sizes). */
9454 grp
->sibling
= (*prev
)->sibling
;
9455 (*prev
)->sibling
= grp
;
9458 grpmap
->put (decl
, grp
);
9464 omp_mapping_group
**prev
= grpmap
->get (fpp
);
9465 if (prev
&& *prev
!= grp
)
9467 grp
->sibling
= (*prev
)->sibling
;
9468 (*prev
)->sibling
= grp
;
9471 grpmap
->put (fpp
, grp
);
9475 static hash_map
<tree_operand_hash
, omp_mapping_group
*> *
9476 omp_index_mapping_groups (vec
<omp_mapping_group
> *groups
)
9478 hash_map
<tree_operand_hash
, omp_mapping_group
*> *grpmap
9479 = new hash_map
<tree_operand_hash
, omp_mapping_group
*>;
9481 omp_index_mapping_groups_1 (grpmap
, groups
, NULL_TREE
);
9486 /* Rebuild group map from partially-processed clause list (during
9487 omp_build_struct_sibling_lists). We have already processed nodes up until
9488 a high-water mark (HWM). This is a bit tricky because the list is being
9489 reordered as it is scanned, but we know:
9491 1. The list after HWM has not been touched yet, so we can reindex it safely.
9493 2. The list before and including HWM has been altered, but remains
9494 well-formed throughout the sibling-list building operation.
9496 so, we can do the reindex operation in two parts, on the processed and
9497 then the unprocessed halves of the list. */
9499 static hash_map
<tree_operand_hash
, omp_mapping_group
*> *
9500 omp_reindex_mapping_groups (tree
*list_p
,
9501 vec
<omp_mapping_group
> *groups
,
9502 vec
<omp_mapping_group
> *processed_groups
,
9505 hash_map
<tree_operand_hash
, omp_mapping_group
*> *grpmap
9506 = new hash_map
<tree_operand_hash
, omp_mapping_group
*>;
9508 processed_groups
->truncate (0);
9510 omp_gather_mapping_groups_1 (list_p
, processed_groups
, sentinel
);
9511 omp_index_mapping_groups_1 (grpmap
, processed_groups
, NULL_TREE
);
9513 omp_index_mapping_groups_1 (grpmap
, groups
, sentinel
);
9518 /* Find the immediately-containing struct for a component ref (etc.)
9522 omp_containing_struct (tree expr
)
9528 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9530 if (TREE_CODE (expr
) != ARRAY_REF
&& TREE_CODE (expr
) != COMPONENT_REF
)
9533 while (TREE_CODE (expr
) == ARRAY_REF
)
9534 expr
= TREE_OPERAND (expr
, 0);
9536 if (TREE_CODE (expr
) == COMPONENT_REF
)
9537 expr
= TREE_OPERAND (expr
, 0);
9542 /* Return TRUE if DECL describes a component that is part of a whole structure
9543 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9544 that maps that structure, if present. */
9547 omp_mapped_by_containing_struct (hash_map
<tree_operand_hash
,
9548 omp_mapping_group
*> *grpmap
,
9550 omp_mapping_group
**mapped_by_group
)
9552 tree wsdecl
= NULL_TREE
;
9554 *mapped_by_group
= NULL
;
9558 wsdecl
= omp_containing_struct (decl
);
9561 omp_mapping_group
**wholestruct
= grpmap
->get (wsdecl
);
9563 && TREE_CODE (wsdecl
) == MEM_REF
9564 && integer_zerop (TREE_OPERAND (wsdecl
, 1)))
9566 tree deref
= TREE_OPERAND (wsdecl
, 0);
9567 deref
= build_fold_indirect_ref (deref
);
9568 wholestruct
= grpmap
->get (deref
);
9572 *mapped_by_group
= *wholestruct
;
9581 /* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9585 omp_tsort_mapping_groups_1 (omp_mapping_group
***outlist
,
9586 vec
<omp_mapping_group
> *groups
,
9587 hash_map
<tree_operand_hash
, omp_mapping_group
*>
9589 omp_mapping_group
*grp
)
9591 if (grp
->mark
== PERMANENT
)
9593 if (grp
->mark
== TEMPORARY
)
9595 fprintf (stderr
, "when processing group:\n");
9596 debug_mapping_group (grp
);
9597 internal_error ("base pointer cycle detected");
9600 grp
->mark
= TEMPORARY
;
9602 tree attaches_to
= omp_get_attachment (grp
);
9606 omp_mapping_group
**basep
= grpmap
->get (attaches_to
);
9608 if (basep
&& *basep
!= grp
)
9610 for (omp_mapping_group
*w
= *basep
; w
; w
= w
->sibling
)
9611 if (!omp_tsort_mapping_groups_1 (outlist
, groups
, grpmap
, w
))
9616 tree decl
= OMP_CLAUSE_DECL (*grp
->grp_start
);
9620 tree base
= omp_get_base_pointer (decl
);
9625 omp_mapping_group
**innerp
= grpmap
->get (base
);
9626 omp_mapping_group
*wholestruct
;
9628 /* We should treat whole-structure mappings as if all (pointer, in this
9629 case) members are mapped as individual list items. Check if we have
9630 such a whole-structure mapping, if we don't have an explicit reference
9631 to the pointer member itself. */
9633 && TREE_CODE (base
) == COMPONENT_REF
9634 && omp_mapped_by_containing_struct (grpmap
, base
, &wholestruct
))
9635 innerp
= &wholestruct
;
9637 if (innerp
&& *innerp
!= grp
)
9639 for (omp_mapping_group
*w
= *innerp
; w
; w
= w
->sibling
)
9640 if (!omp_tsort_mapping_groups_1 (outlist
, groups
, grpmap
, w
))
9648 grp
->mark
= PERMANENT
;
9650 /* Emit grp to output list. */
9653 *outlist
= &grp
->next
;
9658 /* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
9659 before mappings that use those pointers. This is an implementation of the
9660 depth-first search algorithm, described e.g. at:
9662 https://en.wikipedia.org/wiki/Topological_sorting
9665 static omp_mapping_group
*
9666 omp_tsort_mapping_groups (vec
<omp_mapping_group
> *groups
,
9667 hash_map
<tree_operand_hash
, omp_mapping_group
*>
9670 omp_mapping_group
*grp
, *outlist
= NULL
, **cursor
;
9675 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
9677 if (grp
->mark
!= PERMANENT
)
9678 if (!omp_tsort_mapping_groups_1 (&cursor
, groups
, grpmap
, grp
))
9685 /* Split INLIST into two parts, moving groups corresponding to
9686 ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
9687 The former list is then appended to the latter. Each sub-list retains the
9688 order of the original list.
9689 Note that ATTACH nodes are later moved to the end of the list in
9690 gimplify_adjust_omp_clauses, for target regions. */
9692 static omp_mapping_group
*
9693 omp_segregate_mapping_groups (omp_mapping_group
*inlist
)
9695 omp_mapping_group
*ard_groups
= NULL
, *tf_groups
= NULL
;
9696 omp_mapping_group
**ard_tail
= &ard_groups
, **tf_tail
= &tf_groups
;
9698 for (omp_mapping_group
*w
= inlist
; w
;)
9700 tree c
= *w
->grp_start
;
9701 omp_mapping_group
*next
= w
->next
;
9703 gcc_assert (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
);
9705 switch (OMP_CLAUSE_MAP_KIND (c
))
9707 case GOMP_MAP_ALLOC
:
9708 case GOMP_MAP_RELEASE
:
9709 case GOMP_MAP_DELETE
:
9712 ard_tail
= &w
->next
;
9724 /* Now splice the lists together... */
9725 *tf_tail
= ard_groups
;
9730 /* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
9731 those groups based on the output list of omp_tsort_mapping_groups --
9732 singly-linked, threaded through each element's NEXT pointer starting at
9733 HEAD. Each list element appears exactly once in that linked list.
9735 Each element of GROUPS may correspond to one or several mapping nodes.
9736 Node groups are kept together, and in the reordered list, the positions of
9737 the original groups are reused for the positions of the reordered list.
9738 Hence if we have e.g.
9740 {to ptr ptr} firstprivate {tofrom ptr} ...
9742 first group non-"map" second group
9744 and say the second group contains a base pointer for the first so must be
9745 moved before it, the resulting list will contain:
9747 {tofrom ptr} firstprivate {to ptr ptr} ...
9748 ^ prev. second group ^ prev. first group
9752 omp_reorder_mapping_groups (vec
<omp_mapping_group
> *groups
,
9753 omp_mapping_group
*head
,
9756 omp_mapping_group
*grp
;
9758 unsigned numgroups
= groups
->length ();
9759 auto_vec
<tree
> old_heads (numgroups
);
9760 auto_vec
<tree
*> old_headps (numgroups
);
9761 auto_vec
<tree
> new_heads (numgroups
);
9762 auto_vec
<tree
> old_succs (numgroups
);
9763 bool map_at_start
= (list_p
== (*groups
)[0].grp_start
);
9765 tree
*new_grp_tail
= NULL
;
9767 /* Stash the start & end nodes of each mapping group before we start
9768 modifying the list. */
9769 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
9771 old_headps
.quick_push (grp
->grp_start
);
9772 old_heads
.quick_push (*grp
->grp_start
);
9773 old_succs
.quick_push (OMP_CLAUSE_CHAIN (grp
->grp_end
));
9776 /* And similarly, the heads of the groups in the order we want to rearrange
9778 for (omp_mapping_group
*w
= head
; w
; w
= w
->next
)
9779 new_heads
.quick_push (*w
->grp_start
);
9781 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
9785 if (new_grp_tail
&& old_succs
[i
- 1] == old_heads
[i
])
9787 /* a {b c d} {e f g} h i j (original)
9789 a {k l m} {e f g} h i j (inserted new group on last iter)
9791 a {k l m} {n o p} h i j (this time, chain last group to new one)
9794 *new_grp_tail
= new_heads
[i
];
9796 else if (new_grp_tail
)
9798 /* a {b c d} e {f g h} i j k (original)
9800 a {l m n} e {f g h} i j k (gap after last iter's group)
9802 a {l m n} e {o p q} h i j (chain last group to old successor)
9805 *new_grp_tail
= old_succs
[i
- 1];
9806 *old_headps
[i
] = new_heads
[i
];
9810 /* The first inserted group -- point to new group, and leave end
9816 *grp
->grp_start
= new_heads
[i
];
9819 new_grp_tail
= &OMP_CLAUSE_CHAIN (head
->grp_end
);
9825 *new_grp_tail
= old_succs
[numgroups
- 1];
9829 return map_at_start
? (*groups
)[0].grp_start
: list_p
;
9832 /* DECL is supposed to have lastprivate semantics in the outer contexts
9833 of combined/composite constructs, starting with OCTX.
9834 Add needed lastprivate, shared or map clause if no data sharing or
9835 mapping clause are present. IMPLICIT_P is true if it is an implicit
9836 clause (IV on simd), in which case the lastprivate will not be
9837 copied to some constructs. */
9840 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx
*octx
,
9841 tree decl
, bool implicit_p
)
9843 struct gimplify_omp_ctx
*orig_octx
= octx
;
9844 for (; octx
; octx
= octx
->outer_context
)
9846 if ((octx
->region_type
== ORT_COMBINED_PARALLEL
9847 || (octx
->region_type
& ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
)
9848 && splay_tree_lookup (octx
->variables
,
9849 (splay_tree_key
) decl
) == NULL
)
9851 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
9854 if ((octx
->region_type
& ORT_TASK
) != 0
9855 && octx
->combined_loop
9856 && splay_tree_lookup (octx
->variables
,
9857 (splay_tree_key
) decl
) == NULL
)
9859 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
9863 && octx
->region_type
== ORT_WORKSHARE
9864 && octx
->combined_loop
9865 && splay_tree_lookup (octx
->variables
,
9866 (splay_tree_key
) decl
) == NULL
9867 && octx
->outer_context
9868 && octx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
9869 && splay_tree_lookup (octx
->outer_context
->variables
,
9870 (splay_tree_key
) decl
) == NULL
)
9872 octx
= octx
->outer_context
;
9873 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
9876 if ((octx
->region_type
== ORT_WORKSHARE
|| octx
->region_type
== ORT_ACC
)
9877 && octx
->combined_loop
9878 && splay_tree_lookup (octx
->variables
,
9879 (splay_tree_key
) decl
) == NULL
9880 && !omp_check_private (octx
, decl
, false))
9882 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
9885 if (octx
->region_type
== ORT_COMBINED_TARGET
)
9887 splay_tree_node n
= splay_tree_lookup (octx
->variables
,
9888 (splay_tree_key
) decl
);
9891 omp_add_variable (octx
, decl
, GOVD_MAP
| GOVD_SEEN
);
9892 octx
= octx
->outer_context
;
9894 else if (!implicit_p
9895 && (n
->value
& GOVD_FIRSTPRIVATE_IMPLICIT
))
9897 n
->value
&= ~(GOVD_FIRSTPRIVATE
9898 | GOVD_FIRSTPRIVATE_IMPLICIT
9900 omp_add_variable (octx
, decl
, GOVD_MAP
| GOVD_SEEN
);
9901 octx
= octx
->outer_context
;
9906 if (octx
&& (implicit_p
|| octx
!= orig_octx
))
9907 omp_notice_variable (octx
, decl
, true);
9910 /* If we have mappings INNER and OUTER, where INNER is a component access and
9911 OUTER is a mapping of the whole containing struct, check that the mappings
9912 are compatible. We'll be deleting the inner mapping, so we need to make
9913 sure the outer mapping does (at least) the same transfers to/from the device
9914 as the inner mapping. */
9917 omp_check_mapping_compatibility (location_t loc
,
9918 omp_mapping_group
*outer
,
9919 omp_mapping_group
*inner
)
9921 tree first_outer
= *outer
->grp_start
, first_inner
= *inner
->grp_start
;
9923 gcc_assert (OMP_CLAUSE_CODE (first_outer
) == OMP_CLAUSE_MAP
);
9924 gcc_assert (OMP_CLAUSE_CODE (first_inner
) == OMP_CLAUSE_MAP
);
9926 enum gomp_map_kind outer_kind
= OMP_CLAUSE_MAP_KIND (first_outer
);
9927 enum gomp_map_kind inner_kind
= OMP_CLAUSE_MAP_KIND (first_inner
);
9929 if (outer_kind
== inner_kind
)
9934 case GOMP_MAP_ALWAYS_TO
:
9935 if (inner_kind
== GOMP_MAP_FORCE_PRESENT
9936 || inner_kind
== GOMP_MAP_ALLOC
9937 || inner_kind
== GOMP_MAP_TO
)
9941 case GOMP_MAP_ALWAYS_FROM
:
9942 if (inner_kind
== GOMP_MAP_FORCE_PRESENT
9943 || inner_kind
== GOMP_MAP_ALLOC
9944 || inner_kind
== GOMP_MAP_FROM
)
9950 if (inner_kind
== GOMP_MAP_FORCE_PRESENT
9951 || inner_kind
== GOMP_MAP_ALLOC
)
9955 case GOMP_MAP_ALWAYS_TOFROM
:
9956 case GOMP_MAP_TOFROM
:
9957 if (inner_kind
== GOMP_MAP_FORCE_PRESENT
9958 || inner_kind
== GOMP_MAP_ALLOC
9959 || inner_kind
== GOMP_MAP_TO
9960 || inner_kind
== GOMP_MAP_FROM
9961 || inner_kind
== GOMP_MAP_TOFROM
)
9969 error_at (loc
, "data movement for component %qE is not compatible with "
9970 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner
),
9971 OMP_CLAUSE_DECL (first_outer
));
9976 /* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
9977 clause dependencies we handle for now are struct element mappings and
9978 whole-struct mappings on the same directive, and duplicate clause
9982 oacc_resolve_clause_dependencies (vec
<omp_mapping_group
> *groups
,
9983 hash_map
<tree_operand_hash
,
9984 omp_mapping_group
*> *grpmap
)
9987 omp_mapping_group
*grp
;
9988 hash_set
<tree_operand_hash
> *seen_components
= NULL
;
9989 hash_set
<tree_operand_hash
> *shown_error
= NULL
;
9991 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
9993 tree grp_end
= grp
->grp_end
;
9994 tree decl
= OMP_CLAUSE_DECL (grp_end
);
9996 gcc_assert (OMP_CLAUSE_CODE (grp_end
) == OMP_CLAUSE_MAP
);
9998 if (DECL_P (grp_end
))
10001 tree c
= OMP_CLAUSE_DECL (*grp
->grp_start
);
10002 while (TREE_CODE (c
) == ARRAY_REF
)
10003 c
= TREE_OPERAND (c
, 0);
10004 if (TREE_CODE (c
) != COMPONENT_REF
)
10006 if (!seen_components
)
10007 seen_components
= new hash_set
<tree_operand_hash
> ();
10009 shown_error
= new hash_set
<tree_operand_hash
> ();
10010 if (seen_components
->contains (c
)
10011 && !shown_error
->contains (c
))
10013 error_at (OMP_CLAUSE_LOCATION (grp_end
),
10014 "%qE appears more than once in map clauses",
10015 OMP_CLAUSE_DECL (grp_end
));
10016 shown_error
->add (c
);
10019 seen_components
->add (c
);
10021 omp_mapping_group
*struct_group
;
10022 if (omp_mapped_by_containing_struct (grpmap
, decl
, &struct_group
)
10023 && *grp
->grp_start
== grp_end
)
10025 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end
),
10026 struct_group
, grp
);
10027 /* Remove the whole of this mapping -- redundant. */
10028 grp
->deleted
= true;
10032 if (seen_components
)
10033 delete seen_components
;
10035 delete shown_error
;
10038 /* Link node NEWNODE so it is pointed to by chain INSERT_AT. NEWNODE's chain
10039 is linked to the previous node pointed to by INSERT_AT. */
10042 omp_siblist_insert_node_after (tree newnode
, tree
*insert_at
)
10044 OMP_CLAUSE_CHAIN (newnode
) = *insert_at
;
10045 *insert_at
= newnode
;
10046 return &OMP_CLAUSE_CHAIN (newnode
);
10049 /* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
10050 pointed to by chain MOVE_AFTER instead. */
10053 omp_siblist_move_node_after (tree node
, tree
*old_pos
, tree
*move_after
)
10055 gcc_assert (node
== *old_pos
);
10056 *old_pos
= OMP_CLAUSE_CHAIN (node
);
10057 OMP_CLAUSE_CHAIN (node
) = *move_after
;
10058 *move_after
= node
;
10061 /* Move nodes from FIRST_PTR (pointed to by previous node's chain) to
10062 LAST_NODE to after MOVE_AFTER chain. Similar to below function, but no
10063 new nodes are prepended to the list before splicing into the new position.
10064 Return the position we should continue scanning the list at, or NULL to
10065 stay where we were. */
10068 omp_siblist_move_nodes_after (tree
*first_ptr
, tree last_node
,
10071 if (first_ptr
== move_after
)
10074 tree tmp
= *first_ptr
;
10075 *first_ptr
= OMP_CLAUSE_CHAIN (last_node
);
10076 OMP_CLAUSE_CHAIN (last_node
) = *move_after
;
10082 /* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
10083 [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
10084 pointer MOVE_AFTER.
10086 The latter list was previously part of the OMP clause list, and the former
10087 (prepended) part is comprised of new nodes.
10089 We start with a list of nodes starting with a struct mapping node. We
10090 rearrange the list so that new nodes starting from FIRST_NEW and whose last
10091 node's chain is LAST_NEW_TAIL comes directly after MOVE_AFTER, followed by
10092 the group of mapping nodes we are currently processing (from the chain
10093 FIRST_PTR to LAST_NODE). The return value is the pointer to the next chain
10094 we should continue processing from, or NULL to stay where we were.
10096 The transformation (in the case where MOVE_AFTER and FIRST_PTR are
10097 different) is worked through below. Here we are processing LAST_NODE, and
10098 FIRST_PTR points at the preceding mapping clause:
10100 #. mapping node chain
10101 ---------------------------------------------------
10102 A. struct_node [->B]
10104 C. comp_2 [->D (move_after)]
10106 E. attach_3 [->F (first_ptr)]
10107 F. map_to_4 [->G (continue_at)]
10108 G. attach_4 (last_node) [->H]
10111 *last_new_tail = *first_ptr;
10113 I. new_node (first_new) [->F (last_new_tail)]
10115 *first_ptr = OMP_CLAUSE_CHAIN (last_node)
10117 #. mapping node chain
10118 ----------------------------------------------------
10119 A. struct_node [->B]
10121 C. comp_2 [->D (move_after)]
10123 E. attach_3 [->H (first_ptr)]
10124 F. map_to_4 [->G (continue_at)]
10125 G. attach_4 (last_node) [->H]
10128 I. new_node (first_new) [->F (last_new_tail)]
10130 OMP_CLAUSE_CHAIN (last_node) = *move_after;
10132 #. mapping node chain
10133 ---------------------------------------------------
10134 A. struct_node [->B]
10136 C. comp_2 [->D (move_after)]
10138 E. attach_3 [->H (continue_at)]
10140 G. attach_4 (last_node) [->D]
10143 I. new_node (first_new) [->F (last_new_tail)]
10145 *move_after = first_new;
10147 #. mapping node chain
10148 ---------------------------------------------------
10149 A. struct_node [->B]
10151 C. comp_2 [->I (move_after)]
10153 E. attach_3 [->H (continue_at)]
10155 G. attach_4 (last_node) [->D]
10157 I. new_node (first_new) [->F (last_new_tail)]
10161 #. mapping node chain
10162 ---------------------------------------------------
10163 A. struct_node [->B]
10165 C. comp_2 [->I (move_after)]
10166 I. new_node (first_new) [->F (last_new_tail)]
10168 G. attach_4 (last_node) [->D]
10170 E. attach_3 [->H (continue_at)]
10175 omp_siblist_move_concat_nodes_after (tree first_new
, tree
*last_new_tail
,
10176 tree
*first_ptr
, tree last_node
,
10179 tree
*continue_at
= NULL
;
10180 *last_new_tail
= *first_ptr
;
10181 if (first_ptr
== move_after
)
10182 *move_after
= first_new
;
10185 *first_ptr
= OMP_CLAUSE_CHAIN (last_node
);
10186 continue_at
= first_ptr
;
10187 OMP_CLAUSE_CHAIN (last_node
) = *move_after
;
10188 *move_after
= first_new
;
10190 return continue_at
;
10193 /* Mapping struct members causes an additional set of nodes to be created,
10194 starting with GOMP_MAP_STRUCT followed by a number of mappings equal to the
10195 number of members being mapped, in order of ascending position (address or
10198 We scan through the list of mapping clauses, calling this function for each
10199 struct member mapping we find, and build up the list of mappings after the
10200 initial GOMP_MAP_STRUCT node. For pointer members, these will be
10201 newly-created ALLOC nodes. For non-pointer members, the existing mapping is
10202 moved into place in the sorted list.
10211 #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
10214 GOMP_MAP_STRUCT (4)
10215 [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
10216 GOMP_MAP_ALLOC (struct.a)
10217 GOMP_MAP_ALLOC (struct.b)
10218 GOMP_MAP_TO (struct.c)
10219 GOMP_MAP_ALLOC (struct.d)
10222 In the case where we are mapping references to pointers, or in Fortran if
10223 we are mapping an array with a descriptor, additional nodes may be created
10224 after the struct node list also.
10226 The return code is either a pointer to the next node to process (if the
10227 list has been rearranged), else NULL to continue with the next node in the
10231 omp_accumulate_sibling_list (enum omp_region_type region_type
,
10232 enum tree_code code
,
10233 hash_map
<tree_operand_hash
, tree
>
10234 *&struct_map_to_clause
, tree
*grp_start_p
,
10235 tree grp_end
, tree
*inner
)
10237 poly_offset_int coffset
;
10238 poly_int64 cbitpos
;
10239 tree ocd
= OMP_CLAUSE_DECL (grp_end
);
10240 bool openmp
= !(region_type
& ORT_ACC
);
10241 tree
*continue_at
= NULL
;
10243 while (TREE_CODE (ocd
) == ARRAY_REF
)
10244 ocd
= TREE_OPERAND (ocd
, 0);
10246 if (TREE_CODE (ocd
) == INDIRECT_REF
)
10247 ocd
= TREE_OPERAND (ocd
, 0);
10249 tree base
= extract_base_bit_offset (ocd
, &cbitpos
, &coffset
);
10251 bool ptr
= (OMP_CLAUSE_MAP_KIND (grp_end
) == GOMP_MAP_ALWAYS_POINTER
);
10252 bool attach_detach
= ((OMP_CLAUSE_MAP_KIND (grp_end
)
10253 == GOMP_MAP_ATTACH_DETACH
)
10254 || (OMP_CLAUSE_MAP_KIND (grp_end
)
10255 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
));
10256 bool attach
= (OMP_CLAUSE_MAP_KIND (grp_end
) == GOMP_MAP_ATTACH
10257 || OMP_CLAUSE_MAP_KIND (grp_end
) == GOMP_MAP_DETACH
);
10259 /* FIXME: If we're not mapping the base pointer in some other clause on this
10260 directive, I think we want to create ALLOC/RELEASE here -- i.e. not
10262 if (openmp
&& attach_detach
)
10265 if (!struct_map_to_clause
|| struct_map_to_clause
->get (base
) == NULL
)
10267 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
), OMP_CLAUSE_MAP
);
10268 gomp_map_kind k
= attach
? GOMP_MAP_FORCE_PRESENT
: GOMP_MAP_STRUCT
;
10270 OMP_CLAUSE_SET_MAP_KIND (l
, k
);
10272 OMP_CLAUSE_DECL (l
) = unshare_expr (base
);
10274 OMP_CLAUSE_SIZE (l
)
10275 = (!attach
? size_int (1)
10276 : (DECL_P (OMP_CLAUSE_DECL (l
))
10277 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l
))
10278 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l
)))));
10279 if (struct_map_to_clause
== NULL
)
10280 struct_map_to_clause
= new hash_map
<tree_operand_hash
, tree
>;
10281 struct_map_to_clause
->put (base
, l
);
10283 if (ptr
|| attach_detach
)
10287 = build_omp_struct_comp_nodes (code
, *grp_start_p
, grp_end
,
10289 OMP_CLAUSE_CHAIN (l
) = alloc_node
;
10291 tree
*insert_node_pos
= grp_start_p
;
10295 OMP_CLAUSE_CHAIN (extra_node
) = *insert_node_pos
;
10296 OMP_CLAUSE_CHAIN (alloc_node
) = extra_node
;
10299 OMP_CLAUSE_CHAIN (alloc_node
) = *insert_node_pos
;
10301 *insert_node_pos
= l
;
10305 gcc_assert (*grp_start_p
== grp_end
);
10306 grp_start_p
= omp_siblist_insert_node_after (l
, grp_start_p
);
10309 tree noind
= omp_strip_indirections (base
);
10312 && (region_type
& ORT_TARGET
)
10313 && TREE_CODE (noind
) == COMPONENT_REF
)
10315 /* The base for this component access is a struct component access
10316 itself. Insert a node to be processed on the next iteration of
10317 our caller's loop, which will subsequently be turned into a new,
10318 inner GOMP_MAP_STRUCT mapping.
10320 We need to do this else the non-DECL_P base won't be
10321 rewritten correctly in the offloaded region. */
10322 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
),
10324 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_FORCE_PRESENT
);
10325 OMP_CLAUSE_DECL (c2
) = unshare_expr (noind
);
10326 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (TREE_TYPE (noind
));
10331 tree sdecl
= omp_strip_components_and_deref (base
);
10333 if (POINTER_TYPE_P (TREE_TYPE (sdecl
)) && (region_type
& ORT_TARGET
))
10335 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end
),
10338 = (TREE_CODE (base
) == INDIRECT_REF
10339 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
10341 || ((TREE_CODE (TREE_OPERAND (base
, 0))
10343 && (TREE_CODE (TREE_TYPE (TREE_OPERAND
10344 (TREE_OPERAND (base
, 0), 0)))
10345 == REFERENCE_TYPE
))));
10346 enum gomp_map_kind mkind
= base_ref
? GOMP_MAP_FIRSTPRIVATE_REFERENCE
10347 : GOMP_MAP_FIRSTPRIVATE_POINTER
;
10348 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
10349 OMP_CLAUSE_DECL (c2
) = sdecl
;
10350 tree baddr
= build_fold_addr_expr (base
);
10351 baddr
= fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end
),
10352 ptrdiff_type_node
, baddr
);
10353 /* This isn't going to be good enough when we add support for more
10354 complicated lvalue expressions. FIXME. */
10355 if (TREE_CODE (TREE_TYPE (sdecl
)) == REFERENCE_TYPE
10356 && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl
))) == POINTER_TYPE
)
10357 sdecl
= build_simple_mem_ref (sdecl
);
10358 tree decladdr
= fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end
),
10359 ptrdiff_type_node
, sdecl
);
10360 OMP_CLAUSE_SIZE (c2
)
10361 = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end
), MINUS_EXPR
,
10362 ptrdiff_type_node
, baddr
, decladdr
);
10363 /* Insert after struct node. */
10364 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
10365 OMP_CLAUSE_CHAIN (l
) = c2
;
10370 else if (struct_map_to_clause
)
10372 tree
*osc
= struct_map_to_clause
->get (base
);
10373 tree
*sc
= NULL
, *scp
= NULL
;
10374 sc
= &OMP_CLAUSE_CHAIN (*osc
);
10375 /* The struct mapping might be immediately followed by a
10376 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
10377 indirect access or a reference, or both. (This added node is removed
10378 in omp-low.c after it has been processed there.) */
10380 && (OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_FIRSTPRIVATE_POINTER
10381 || OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10382 sc
= &OMP_CLAUSE_CHAIN (*sc
);
10383 for (; *sc
!= grp_end
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
10384 if ((ptr
|| attach_detach
) && sc
== grp_start_p
)
10386 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
)) != COMPONENT_REF
10387 && TREE_CODE (OMP_CLAUSE_DECL (*sc
)) != INDIRECT_REF
10388 && TREE_CODE (OMP_CLAUSE_DECL (*sc
)) != ARRAY_REF
)
10392 tree sc_decl
= OMP_CLAUSE_DECL (*sc
);
10393 poly_offset_int offset
;
10396 if (TREE_CODE (sc_decl
) == ARRAY_REF
)
10398 while (TREE_CODE (sc_decl
) == ARRAY_REF
)
10399 sc_decl
= TREE_OPERAND (sc_decl
, 0);
10400 if (TREE_CODE (sc_decl
) != COMPONENT_REF
10401 || TREE_CODE (TREE_TYPE (sc_decl
)) != ARRAY_TYPE
)
10404 else if (TREE_CODE (sc_decl
) == INDIRECT_REF
10405 && TREE_CODE (TREE_OPERAND (sc_decl
, 0)) == COMPONENT_REF
10406 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl
, 0)))
10407 == REFERENCE_TYPE
))
10408 sc_decl
= TREE_OPERAND (sc_decl
, 0);
10410 tree base2
= extract_base_bit_offset (sc_decl
, &bitpos
, &offset
);
10411 if (!base2
|| !operand_equal_p (base2
, base
, 0))
10415 if (maybe_lt (coffset
, offset
)
10416 || (known_eq (coffset
, offset
)
10417 && maybe_lt (cbitpos
, bitpos
)))
10419 if (ptr
|| attach_detach
)
10427 OMP_CLAUSE_SIZE (*osc
)
10428 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
), size_one_node
);
10429 if (ptr
|| attach_detach
)
10431 tree cl
= NULL_TREE
, extra_node
;
10432 tree alloc_node
= build_omp_struct_comp_nodes (code
, *grp_start_p
,
10433 grp_end
, &extra_node
);
10434 tree
*tail_chain
= NULL
;
10438 grp_end : the last (or only) node in this group.
10439 grp_start_p : pointer to the first node in a pointer mapping group
10440 up to and including GRP_END.
10441 sc : pointer to the chain for the end of the struct component
10443 scp : pointer to the chain for the sorted position at which we
10444 should insert in the middle of the struct component list
10445 (else NULL to insert at end).
10446 alloc_node : the "alloc" node for the structure (pointer-type)
10447 component. We insert at SCP (if present), else SC
10448 (the end of the struct component list).
10449 extra_node : a newly-synthesized node for an additional indirect
10450 pointer mapping or a Fortran pointer set, if needed.
10451 cl : first node to prepend before grp_start_p.
10452 tail_chain : pointer to chain of last prepended node.
10454 The general idea is we move the nodes for this struct mapping
10455 together: the alloc node goes into the sorted list directly after
10456 the struct mapping, and any extra nodes (together with the nodes
10457 mapping arrays pointed to by struct components) get moved after
10458 that list. When SCP is NULL, we insert the nodes at SC, i.e. at
10459 the end of the struct component mapping list. It's important that
10460 the alloc_node comes first in that case because it's part of the
10461 sorted component mapping list (but subsequent nodes are not!). */
10464 omp_siblist_insert_node_after (alloc_node
, scp
);
10466 /* Make [cl,tail_chain] a list of the alloc node (if we haven't
10467 already inserted it) and the extra_node (if it is present). The
10468 list can be empty if we added alloc_node above and there is no
10470 if (scp
&& extra_node
)
10473 tail_chain
= &OMP_CLAUSE_CHAIN (extra_node
);
10475 else if (extra_node
)
10477 OMP_CLAUSE_CHAIN (alloc_node
) = extra_node
;
10479 tail_chain
= &OMP_CLAUSE_CHAIN (extra_node
);
10484 tail_chain
= &OMP_CLAUSE_CHAIN (alloc_node
);
10488 = cl
? omp_siblist_move_concat_nodes_after (cl
, tail_chain
,
10489 grp_start_p
, grp_end
,
10491 : omp_siblist_move_nodes_after (grp_start_p
, grp_end
, sc
);
10493 else if (*sc
!= grp_end
)
10495 gcc_assert (*grp_start_p
== grp_end
);
10497 /* We are moving the current node back to a previous struct node:
10498 the node that used to point to the current node will now point to
10500 continue_at
= grp_start_p
;
10501 /* In the non-pointer case, the mapping clause itself is moved into
10502 the correct position in the struct component list, which in this
10503 case is just SC. */
10504 omp_siblist_move_node_after (*grp_start_p
, grp_start_p
, sc
);
10507 return continue_at
;
10510 /* Scan through GROUPS, and create sorted structure sibling lists without
10514 omp_build_struct_sibling_lists (enum tree_code code
,
10515 enum omp_region_type region_type
,
10516 vec
<omp_mapping_group
> *groups
,
10517 hash_map
<tree_operand_hash
, omp_mapping_group
*>
10522 omp_mapping_group
*grp
;
10523 hash_map
<tree_operand_hash
, tree
> *struct_map_to_clause
= NULL
;
10524 bool success
= true;
10525 tree
*new_next
= NULL
;
10526 tree
*tail
= &OMP_CLAUSE_CHAIN ((*groups
)[groups
->length () - 1].grp_end
);
10527 auto_vec
<omp_mapping_group
> pre_hwm_groups
;
10529 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10531 tree c
= grp
->grp_end
;
10532 tree decl
= OMP_CLAUSE_DECL (c
);
10533 tree grp_end
= grp
->grp_end
;
10534 tree sentinel
= OMP_CLAUSE_CHAIN (grp_end
);
10537 grp
->grp_start
= new_next
;
10541 tree
*grp_start_p
= grp
->grp_start
;
10546 /* Skip groups we marked for deletion in
10547 oacc_resolve_clause_dependencies. */
10551 if (OMP_CLAUSE_CHAIN (*grp_start_p
)
10552 && OMP_CLAUSE_CHAIN (*grp_start_p
) != grp_end
)
10554 /* Don't process an array descriptor that isn't inside a derived type
10555 as a struct (the GOMP_MAP_POINTER following will have the form
10556 "var.data", but such mappings are handled specially). */
10557 tree grpmid
= OMP_CLAUSE_CHAIN (*grp_start_p
);
10558 if (OMP_CLAUSE_CODE (grpmid
) == OMP_CLAUSE_MAP
10559 && OMP_CLAUSE_MAP_KIND (grpmid
) == GOMP_MAP_TO_PSET
10560 && DECL_P (OMP_CLAUSE_DECL (grpmid
)))
10565 if (TREE_CODE (d
) == ARRAY_REF
)
10567 while (TREE_CODE (d
) == ARRAY_REF
)
10568 d
= TREE_OPERAND (d
, 0);
10569 if (TREE_CODE (d
) == COMPONENT_REF
10570 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
10574 && TREE_CODE (decl
) == INDIRECT_REF
10575 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
10576 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
10578 && (OMP_CLAUSE_MAP_KIND (c
)
10579 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
))
10580 decl
= TREE_OPERAND (decl
, 0);
10584 if (TREE_CODE (decl
) != COMPONENT_REF
)
10587 /* If we're mapping the whole struct in another node, skip adding this
10588 node to a sibling list. */
10589 omp_mapping_group
*wholestruct
;
10590 if (omp_mapped_by_containing_struct (*grpmap
, OMP_CLAUSE_DECL (c
),
10593 if (!(region_type
& ORT_ACC
)
10594 && *grp_start_p
== grp_end
)
10595 /* Remove the whole of this mapping -- redundant. */
10596 grp
->deleted
= true;
10601 if (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
10602 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
10603 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
10604 && code
!= OACC_UPDATE
10605 && code
!= OMP_TARGET_UPDATE
)
10607 if (error_operand_p (decl
))
10613 tree stype
= TREE_TYPE (decl
);
10614 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
10615 stype
= TREE_TYPE (stype
);
10616 if (TYPE_SIZE_UNIT (stype
) == NULL
10617 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
10619 error_at (OMP_CLAUSE_LOCATION (c
),
10620 "mapping field %qE of variable length "
10621 "structure", OMP_CLAUSE_DECL (c
));
10626 tree inner
= NULL_TREE
;
10629 = omp_accumulate_sibling_list (region_type
, code
,
10630 struct_map_to_clause
, grp_start_p
,
10635 if (new_next
&& *new_next
== NULL_TREE
)
10640 OMP_CLAUSE_CHAIN (inner
) = NULL_TREE
;
10641 omp_mapping_group newgrp
;
10642 newgrp
.grp_start
= new_next
? new_next
: tail
;
10643 newgrp
.grp_end
= inner
;
10644 newgrp
.mark
= UNVISITED
;
10645 newgrp
.sibling
= NULL
;
10646 newgrp
.deleted
= false;
10647 newgrp
.next
= NULL
;
10648 groups
->safe_push (newgrp
);
10650 /* !!! Growing GROUPS might invalidate the pointers in the group
10651 map. Rebuild it here. This is a bit inefficient, but
10652 shouldn't happen very often. */
10655 = omp_reindex_mapping_groups (list_p
, groups
, &pre_hwm_groups
,
10658 tail
= &OMP_CLAUSE_CHAIN (inner
);
10663 /* Delete groups marked for deletion above. At this point the order of the
10664 groups may no longer correspond to the order of the underlying list,
10665 which complicates this a little. First clear out OMP_CLAUSE_DECL for
10666 deleted nodes... */
10668 FOR_EACH_VEC_ELT (*groups
, i
, grp
)
10670 for (tree d
= *grp
->grp_start
;
10671 d
!= OMP_CLAUSE_CHAIN (grp
->grp_end
);
10672 d
= OMP_CLAUSE_CHAIN (d
))
10673 OMP_CLAUSE_DECL (d
) = NULL_TREE
;
10675 /* ...then sweep through the list removing the now-empty nodes. */
10680 if (OMP_CLAUSE_CODE (*tail
) == OMP_CLAUSE_MAP
10681 && OMP_CLAUSE_DECL (*tail
) == NULL_TREE
)
10682 *tail
= OMP_CLAUSE_CHAIN (*tail
);
10684 tail
= &OMP_CLAUSE_CHAIN (*tail
);
10688 if (struct_map_to_clause
)
10689 delete struct_map_to_clause
;
10694 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
10695 and previous omp contexts. */
10698 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
10699 enum omp_region_type region_type
,
10700 enum tree_code code
)
10702 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
10704 tree
*prev_list_p
= NULL
, *orig_list_p
= list_p
;
10705 int handled_depend_iterators
= -1;
10708 ctx
= new_omp_context (region_type
);
10710 outer_ctx
= ctx
->outer_context
;
10711 if (code
== OMP_TARGET
)
10713 if (!lang_GNU_Fortran ())
10714 ctx
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
10715 ctx
->defaultmap
[GDMK_SCALAR
] = GOVD_FIRSTPRIVATE
;
10716 ctx
->defaultmap
[GDMK_SCALAR_TARGET
] = (lang_GNU_Fortran ()
10717 ? GOVD_MAP
: GOVD_FIRSTPRIVATE
);
10719 if (!lang_GNU_Fortran ())
10723 case OMP_TARGET_DATA
:
10724 case OMP_TARGET_ENTER_DATA
:
10725 case OMP_TARGET_EXIT_DATA
:
10727 case OACC_HOST_DATA
:
10728 case OACC_PARALLEL
:
10730 ctx
->target_firstprivatize_array_bases
= true;
10735 if (code
== OMP_TARGET
10736 || code
== OMP_TARGET_DATA
10737 || code
== OMP_TARGET_ENTER_DATA
10738 || code
== OMP_TARGET_EXIT_DATA
)
10740 vec
<omp_mapping_group
> *groups
;
10741 groups
= omp_gather_mapping_groups (list_p
);
10744 hash_map
<tree_operand_hash
, omp_mapping_group
*> *grpmap
;
10745 grpmap
= omp_index_mapping_groups (groups
);
10747 omp_build_struct_sibling_lists (code
, region_type
, groups
, &grpmap
,
10750 omp_mapping_group
*outlist
= NULL
;
10752 /* Topological sorting may fail if we have duplicate nodes, which
10753 we should have detected and shown an error for already. Skip
10754 sorting in that case. */
10761 /* Rebuild now we have struct sibling lists. */
10762 groups
= omp_gather_mapping_groups (list_p
);
10763 grpmap
= omp_index_mapping_groups (groups
);
10765 outlist
= omp_tsort_mapping_groups (groups
, grpmap
);
10766 outlist
= omp_segregate_mapping_groups (outlist
);
10767 list_p
= omp_reorder_mapping_groups (groups
, outlist
, list_p
);
10774 else if (region_type
& ORT_ACC
)
10776 vec
<omp_mapping_group
> *groups
;
10777 groups
= omp_gather_mapping_groups (list_p
);
10780 hash_map
<tree_operand_hash
, omp_mapping_group
*> *grpmap
;
10781 grpmap
= omp_index_mapping_groups (groups
);
10783 oacc_resolve_clause_dependencies (groups
, grpmap
);
10784 omp_build_struct_sibling_lists (code
, region_type
, groups
, &grpmap
,
10792 while ((c
= *list_p
) != NULL
)
10794 bool remove
= false;
10795 bool notice_outer
= true;
10796 const char *check_non_private
= NULL
;
10797 unsigned int flags
;
10800 switch (OMP_CLAUSE_CODE (c
))
10802 case OMP_CLAUSE_PRIVATE
:
10803 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
10804 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
10806 flags
|= GOVD_PRIVATE_OUTER_REF
;
10807 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
10810 notice_outer
= false;
10812 case OMP_CLAUSE_SHARED
:
10813 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
10815 case OMP_CLAUSE_FIRSTPRIVATE
:
10816 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
10817 check_non_private
= "firstprivate";
10818 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
10820 gcc_assert (code
== OMP_TARGET
);
10821 flags
|= GOVD_FIRSTPRIVATE_IMPLICIT
;
10824 case OMP_CLAUSE_LASTPRIVATE
:
10825 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
10828 case OMP_DISTRIBUTE
:
10829 error_at (OMP_CLAUSE_LOCATION (c
),
10830 "conditional %<lastprivate%> clause on "
10831 "%qs construct", "distribute");
10832 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
10835 error_at (OMP_CLAUSE_LOCATION (c
),
10836 "conditional %<lastprivate%> clause on "
10837 "%qs construct", "taskloop");
10838 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
10843 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
10844 if (code
!= OMP_LOOP
)
10845 check_non_private
= "lastprivate";
10846 decl
= OMP_CLAUSE_DECL (c
);
10847 if (error_operand_p (decl
))
10849 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
10850 && !lang_hooks
.decls
.omp_scalar_p (decl
, true))
10852 error_at (OMP_CLAUSE_LOCATION (c
),
10853 "non-scalar variable %qD in conditional "
10854 "%<lastprivate%> clause", decl
);
10855 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
10857 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
10858 flags
|= GOVD_LASTPRIVATE_CONDITIONAL
;
10859 omp_lastprivate_for_combined_outer_constructs (outer_ctx
, decl
,
10862 case OMP_CLAUSE_REDUCTION
:
10863 if (OMP_CLAUSE_REDUCTION_TASK (c
))
10865 if (region_type
== ORT_WORKSHARE
|| code
== OMP_SCOPE
)
10868 nowait
= omp_find_clause (*list_p
,
10869 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
10871 && (outer_ctx
== NULL
10872 || outer_ctx
->region_type
!= ORT_COMBINED_PARALLEL
))
10874 error_at (OMP_CLAUSE_LOCATION (c
),
10875 "%<task%> reduction modifier on a construct "
10876 "with a %<nowait%> clause");
10877 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
10880 else if ((region_type
& ORT_PARALLEL
) != ORT_PARALLEL
)
10882 error_at (OMP_CLAUSE_LOCATION (c
),
10883 "invalid %<task%> reduction modifier on construct "
10884 "other than %<parallel%>, %qs, %<sections%> or "
10885 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
10886 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
10889 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
10893 error_at (OMP_CLAUSE_LOCATION (c
),
10894 "%<inscan%> %<reduction%> clause on "
10895 "%qs construct", "sections");
10896 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
10899 error_at (OMP_CLAUSE_LOCATION (c
),
10900 "%<inscan%> %<reduction%> clause on "
10901 "%qs construct", "parallel");
10902 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
10905 error_at (OMP_CLAUSE_LOCATION (c
),
10906 "%<inscan%> %<reduction%> clause on "
10907 "%qs construct", "teams");
10908 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
10911 error_at (OMP_CLAUSE_LOCATION (c
),
10912 "%<inscan%> %<reduction%> clause on "
10913 "%qs construct", "taskloop");
10914 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
10917 error_at (OMP_CLAUSE_LOCATION (c
),
10918 "%<inscan%> %<reduction%> clause on "
10919 "%qs construct", "scope");
10920 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
10926 case OMP_CLAUSE_IN_REDUCTION
:
10927 case OMP_CLAUSE_TASK_REDUCTION
:
10928 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
10929 /* OpenACC permits reductions on private variables. */
10930 if (!(region_type
& ORT_ACC
)
10931 /* taskgroup is actually not a worksharing region. */
10932 && code
!= OMP_TASKGROUP
)
10933 check_non_private
= omp_clause_code_name
[OMP_CLAUSE_CODE (c
)];
10934 decl
= OMP_CLAUSE_DECL (c
);
10935 if (TREE_CODE (decl
) == MEM_REF
)
10937 tree type
= TREE_TYPE (decl
);
10938 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
10939 gimplify_ctxp
->into_ssa
= false;
10940 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
10941 NULL
, is_gimple_val
, fb_rvalue
, false)
10944 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
10948 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
10949 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
10952 omp_firstprivatize_variable (ctx
, v
);
10953 omp_notice_variable (ctx
, v
, true);
10955 decl
= TREE_OPERAND (decl
, 0);
10956 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
10958 gimplify_ctxp
->into_ssa
= false;
10959 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
10960 NULL
, is_gimple_val
, fb_rvalue
, false)
10963 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
10967 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
10968 v
= TREE_OPERAND (decl
, 1);
10971 omp_firstprivatize_variable (ctx
, v
);
10972 omp_notice_variable (ctx
, v
, true);
10974 decl
= TREE_OPERAND (decl
, 0);
10976 if (TREE_CODE (decl
) == ADDR_EXPR
10977 || TREE_CODE (decl
) == INDIRECT_REF
)
10978 decl
= TREE_OPERAND (decl
, 0);
10981 case OMP_CLAUSE_LINEAR
:
10982 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
10983 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10990 if (code
== OMP_SIMD
10991 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10993 struct gimplify_omp_ctx
*octx
= outer_ctx
;
10995 && octx
->region_type
== ORT_WORKSHARE
10996 && octx
->combined_loop
10997 && !octx
->distribute
)
10999 if (octx
->outer_context
11000 && (octx
->outer_context
->region_type
11001 == ORT_COMBINED_PARALLEL
))
11002 octx
= octx
->outer_context
->outer_context
;
11004 octx
= octx
->outer_context
;
11007 && octx
->region_type
== ORT_WORKSHARE
11008 && octx
->combined_loop
11009 && octx
->distribute
)
11011 error_at (OMP_CLAUSE_LOCATION (c
),
11012 "%<linear%> clause for variable other than "
11013 "loop iterator specified on construct "
11014 "combined with %<distribute%>");
11019 /* For combined #pragma omp parallel for simd, need to put
11020 lastprivate and perhaps firstprivate too on the
11021 parallel. Similarly for #pragma omp for simd. */
11022 struct gimplify_omp_ctx
*octx
= outer_ctx
;
11023 bool taskloop_seen
= false;
11027 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
11028 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11030 decl
= OMP_CLAUSE_DECL (c
);
11031 if (error_operand_p (decl
))
11037 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11038 flags
|= GOVD_FIRSTPRIVATE
;
11039 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11040 flags
|= GOVD_LASTPRIVATE
;
11042 && octx
->region_type
== ORT_WORKSHARE
11043 && octx
->combined_loop
)
11045 if (octx
->outer_context
11046 && (octx
->outer_context
->region_type
11047 == ORT_COMBINED_PARALLEL
))
11048 octx
= octx
->outer_context
;
11049 else if (omp_check_private (octx
, decl
, false))
11053 && (octx
->region_type
& ORT_TASK
) != 0
11054 && octx
->combined_loop
)
11055 taskloop_seen
= true;
11057 && octx
->region_type
== ORT_COMBINED_PARALLEL
11058 && ((ctx
->region_type
== ORT_WORKSHARE
11059 && octx
== outer_ctx
)
11061 flags
= GOVD_SEEN
| GOVD_SHARED
;
11063 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
11064 == ORT_COMBINED_TEAMS
))
11065 flags
= GOVD_SEEN
| GOVD_SHARED
;
11067 && octx
->region_type
== ORT_COMBINED_TARGET
)
11069 if (flags
& GOVD_LASTPRIVATE
)
11070 flags
= GOVD_SEEN
| GOVD_MAP
;
11075 = splay_tree_lookup (octx
->variables
,
11076 (splay_tree_key
) decl
);
11077 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
11082 omp_add_variable (octx
, decl
, flags
);
11083 if (octx
->outer_context
== NULL
)
11085 octx
= octx
->outer_context
;
11090 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
11091 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
11092 omp_notice_variable (octx
, decl
, true);
11094 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
11095 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
11096 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11098 notice_outer
= false;
11099 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11103 case OMP_CLAUSE_MAP
:
11104 decl
= OMP_CLAUSE_DECL (c
);
11105 if (error_operand_p (decl
))
11112 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
11115 case OMP_TARGET_DATA
:
11116 case OMP_TARGET_ENTER_DATA
:
11117 case OMP_TARGET_EXIT_DATA
:
11118 case OACC_ENTER_DATA
:
11119 case OACC_EXIT_DATA
:
11120 case OACC_HOST_DATA
:
11121 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11122 || (OMP_CLAUSE_MAP_KIND (c
)
11123 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
11124 /* For target {,enter ,exit }data only the array slice is
11125 mapped, but not the pointer to it. */
11131 /* For Fortran, not only the pointer to the data is mapped but also
11132 the address of the pointer, the array descriptor etc.; for
11133 'exit data' - and in particular for 'delete:' - having an 'alloc:'
11134 does not make sense. Likewise, for 'update' only transferring the
11135 data itself is needed as the rest has been handled in previous
11136 directives. However, for 'exit data', the array descriptor needs
11137 to be delete; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
11139 NOTE: Generally, it is not safe to perform "enter data" operations
11140 on arrays where the data *or the descriptor* may go out of scope
11141 before a corresponding "exit data" operation -- and such a
11142 descriptor may be synthesized temporarily, e.g. to pass an
11143 explicit-shape array to a function expecting an assumed-shape
11144 argument. Performing "enter data" inside the called function
11145 would thus be problematic. */
11146 if (code
== OMP_TARGET_EXIT_DATA
11147 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
)
11148 OMP_CLAUSE_SET_MAP_KIND (c
, OMP_CLAUSE_MAP_KIND (*prev_list_p
)
11150 ? GOMP_MAP_DELETE
: GOMP_MAP_RELEASE
);
11151 else if ((code
== OMP_TARGET_EXIT_DATA
|| code
== OMP_TARGET_UPDATE
)
11152 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
11153 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
))
11158 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
11160 struct gimplify_omp_ctx
*octx
;
11161 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
11163 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
11166 = splay_tree_lookup (octx
->variables
,
11167 (splay_tree_key
) decl
);
11169 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
11170 "declared in enclosing %<host_data%> region",
11174 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
11175 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
11176 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
11177 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
11178 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11183 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
11184 || (OMP_CLAUSE_MAP_KIND (c
)
11185 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
11186 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
11187 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
11189 OMP_CLAUSE_SIZE (c
)
11190 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
11192 if ((region_type
& ORT_TARGET
) != 0)
11193 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
11194 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
11197 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
11199 tree base
= omp_strip_components_and_deref (decl
);
11204 = splay_tree_lookup (ctx
->variables
,
11205 (splay_tree_key
) decl
);
11208 && (n
->value
& (GOVD_MAP
| GOVD_FIRSTPRIVATE
)) != 0)
11213 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
11219 if (TREE_CODE (decl
) == TARGET_EXPR
)
11221 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
11222 is_gimple_lvalue
, fb_lvalue
)
11226 else if (!DECL_P (decl
))
11228 tree d
= decl
, *pd
;
11229 if (TREE_CODE (d
) == ARRAY_REF
)
11231 while (TREE_CODE (d
) == ARRAY_REF
)
11232 d
= TREE_OPERAND (d
, 0);
11233 if (TREE_CODE (d
) == COMPONENT_REF
11234 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
11237 pd
= &OMP_CLAUSE_DECL (c
);
11239 && TREE_CODE (decl
) == INDIRECT_REF
11240 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
11241 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
11243 && (OMP_CLAUSE_MAP_KIND (c
)
11244 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION
))
11246 pd
= &TREE_OPERAND (decl
, 0);
11247 decl
= TREE_OPERAND (decl
, 0);
11249 /* An "attach/detach" operation on an update directive should
11250 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
11251 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
11252 depends on the previous mapping. */
11253 if (code
== OACC_UPDATE
11254 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
11255 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_ALWAYS_POINTER
);
11257 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
11259 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
11264 gomp_map_kind k
= ((code
== OACC_EXIT_DATA
11265 || code
== OMP_TARGET_EXIT_DATA
)
11266 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
11267 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
11273 while (TREE_CODE (cref
) == ARRAY_REF
)
11274 cref
= TREE_OPERAND (cref
, 0);
11276 if (TREE_CODE (cref
) == INDIRECT_REF
)
11277 cref
= TREE_OPERAND (cref
, 0);
11279 if (TREE_CODE (cref
) == COMPONENT_REF
)
11282 while (base
&& !DECL_P (base
))
11284 tree innerbase
= omp_get_base_pointer (base
);
11291 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
11292 && POINTER_TYPE_P (TREE_TYPE (base
)))
11295 = splay_tree_lookup (ctx
->variables
,
11296 (splay_tree_key
) base
);
11297 n
->value
|= GOVD_SEEN
;
11301 if (code
== OMP_TARGET
&& OMP_CLAUSE_MAP_IN_REDUCTION (c
))
11303 /* Don't gimplify *pd fully at this point, as the base
11304 will need to be adjusted during omp lowering. */
11305 auto_vec
<tree
, 10> expr_stack
;
11307 while (handled_component_p (*p
)
11308 || TREE_CODE (*p
) == INDIRECT_REF
11309 || TREE_CODE (*p
) == ADDR_EXPR
11310 || TREE_CODE (*p
) == MEM_REF
11311 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
11313 expr_stack
.safe_push (*p
);
11314 p
= &TREE_OPERAND (*p
, 0);
11316 for (int i
= expr_stack
.length () - 1; i
>= 0; i
--)
11318 tree t
= expr_stack
[i
];
11319 if (TREE_CODE (t
) == ARRAY_REF
11320 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
11322 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
11324 tree low
= unshare_expr (array_ref_low_bound (t
));
11325 if (!is_gimple_min_invariant (low
))
11327 TREE_OPERAND (t
, 2) = low
;
11328 if (gimplify_expr (&TREE_OPERAND (t
, 2),
11331 fb_rvalue
) == GS_ERROR
)
11335 else if (gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
11336 NULL
, is_gimple_reg
,
11337 fb_rvalue
) == GS_ERROR
)
11339 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
11341 tree elmt_size
= array_ref_element_size (t
);
11342 if (!is_gimple_min_invariant (elmt_size
))
11344 elmt_size
= unshare_expr (elmt_size
);
11346 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
,
11349 = size_int (TYPE_ALIGN_UNIT (elmt_type
));
11351 = size_binop (EXACT_DIV_EXPR
, elmt_size
,
11353 TREE_OPERAND (t
, 3) = elmt_size
;
11354 if (gimplify_expr (&TREE_OPERAND (t
, 3),
11357 fb_rvalue
) == GS_ERROR
)
11361 else if (gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
11362 NULL
, is_gimple_reg
,
11363 fb_rvalue
) == GS_ERROR
)
11366 else if (TREE_CODE (t
) == COMPONENT_REF
)
11368 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
11370 tree offset
= component_ref_field_offset (t
);
11371 if (!is_gimple_min_invariant (offset
))
11373 offset
= unshare_expr (offset
);
11374 tree field
= TREE_OPERAND (t
, 1);
11376 = size_int (DECL_OFFSET_ALIGN (field
)
11378 offset
= size_binop (EXACT_DIV_EXPR
, offset
,
11380 TREE_OPERAND (t
, 2) = offset
;
11381 if (gimplify_expr (&TREE_OPERAND (t
, 2),
11384 fb_rvalue
) == GS_ERROR
)
11388 else if (gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
11389 NULL
, is_gimple_reg
,
11390 fb_rvalue
) == GS_ERROR
)
11394 for (; expr_stack
.length () > 0; )
11396 tree t
= expr_stack
.pop ();
11398 if (TREE_CODE (t
) == ARRAY_REF
11399 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
11401 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1))
11402 && gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
,
11403 NULL
, is_gimple_val
,
11404 fb_rvalue
) == GS_ERROR
)
11409 else if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
,
11410 fb_lvalue
) == GS_ERROR
)
11417 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_POINTER
11418 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH_DETACH
11419 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
11420 && OMP_CLAUSE_CHAIN (c
)
11421 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
)) == OMP_CLAUSE_MAP
11422 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
11423 == GOMP_MAP_ALWAYS_POINTER
)
11424 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
11425 == GOMP_MAP_ATTACH_DETACH
)
11426 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
11427 == GOMP_MAP_TO_PSET
)))
11428 prev_list_p
= list_p
;
11432 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
11433 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
11434 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
11435 flags
|= GOVD_MAP_ALWAYS_TO
;
11437 if ((code
== OMP_TARGET
11438 || code
== OMP_TARGET_DATA
11439 || code
== OMP_TARGET_ENTER_DATA
11440 || code
== OMP_TARGET_EXIT_DATA
)
11441 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
11443 for (struct gimplify_omp_ctx
*octx
= outer_ctx
; octx
;
11444 octx
= octx
->outer_context
)
11447 = splay_tree_lookup (octx
->variables
,
11448 (splay_tree_key
) OMP_CLAUSE_DECL (c
));
11449 /* If this is contained in an outer OpenMP region as a
11450 firstprivate value, remove the attach/detach. */
11451 if (n
&& (n
->value
& GOVD_FIRSTPRIVATE
))
11453 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
11458 enum gomp_map_kind map_kind
= (code
== OMP_TARGET_EXIT_DATA
11460 : GOMP_MAP_ATTACH
);
11461 OMP_CLAUSE_SET_MAP_KIND (c
, map_kind
);
11466 case OMP_CLAUSE_AFFINITY
:
11467 gimplify_omp_affinity (list_p
, pre_p
);
11470 case OMP_CLAUSE_DOACROSS
:
11471 if (OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SINK
)
11473 tree deps
= OMP_CLAUSE_DECL (c
);
11474 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
11476 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
11477 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
11478 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
11479 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
11480 deps
= TREE_CHAIN (deps
);
11484 gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c
)
11485 == OMP_CLAUSE_DOACROSS_SOURCE
);
11487 case OMP_CLAUSE_DEPEND
:
11488 if (handled_depend_iterators
== -1)
11489 handled_depend_iterators
= gimplify_omp_depend (list_p
, pre_p
);
11490 if (handled_depend_iterators
)
11492 if (handled_depend_iterators
== 2)
11496 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
11498 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
11499 NULL
, is_gimple_val
, fb_rvalue
);
11500 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
11502 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
11507 if (OMP_CLAUSE_DECL (c
) != null_pointer_node
)
11509 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
11510 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
11511 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11517 if (code
== OMP_TASK
)
11518 ctx
->has_depend
= true;
11521 case OMP_CLAUSE_TO
:
11522 case OMP_CLAUSE_FROM
:
11523 case OMP_CLAUSE__CACHE_
:
11524 decl
= OMP_CLAUSE_DECL (c
);
11525 if (error_operand_p (decl
))
11530 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
11531 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
11532 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
11533 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
11534 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11539 if (!DECL_P (decl
))
11541 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
11542 NULL
, is_gimple_lvalue
, fb_lvalue
)
11552 case OMP_CLAUSE_USE_DEVICE_PTR
:
11553 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11554 flags
= GOVD_EXPLICIT
;
11557 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
11558 decl
= OMP_CLAUSE_DECL (c
);
11559 while (TREE_CODE (decl
) == INDIRECT_REF
11560 || TREE_CODE (decl
) == ARRAY_REF
)
11561 decl
= TREE_OPERAND (decl
, 0);
11562 flags
= GOVD_EXPLICIT
;
11565 case OMP_CLAUSE_IS_DEVICE_PTR
:
11566 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
11570 decl
= OMP_CLAUSE_DECL (c
);
11572 if (error_operand_p (decl
))
11577 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
11579 tree t
= omp_member_access_dummy_var (decl
);
11582 tree v
= DECL_VALUE_EXPR (decl
);
11583 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
11585 omp_notice_variable (outer_ctx
, t
, true);
11588 if (code
== OACC_DATA
11589 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
11590 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
11591 flags
|= GOVD_MAP_0LEN_ARRAY
;
11592 omp_add_variable (ctx
, decl
, flags
);
11593 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11594 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
11595 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
11596 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
11598 struct gimplify_omp_ctx
*pctx
11599 = code
== OMP_TARGET
? outer_ctx
: ctx
;
11601 omp_add_variable (pctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
11602 GOVD_LOCAL
| GOVD_SEEN
);
11604 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
11605 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
11607 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
11608 NULL
) == NULL_TREE
)
11609 omp_add_variable (pctx
,
11610 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
11611 GOVD_LOCAL
| GOVD_SEEN
);
11612 gimplify_omp_ctxp
= pctx
;
11613 push_gimplify_context ();
11615 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
11616 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
11618 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
11619 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
11620 pop_gimplify_context
11621 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
11622 push_gimplify_context ();
11623 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
11624 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
11625 pop_gimplify_context
11626 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
11627 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
11628 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
11630 gimplify_omp_ctxp
= outer_ctx
;
11632 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
11633 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
11635 gimplify_omp_ctxp
= ctx
;
11636 push_gimplify_context ();
11637 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
11639 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
11641 TREE_SIDE_EFFECTS (bind
) = 1;
11642 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
11643 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
11645 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
11646 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
11647 pop_gimplify_context
11648 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
11649 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
11651 gimplify_omp_ctxp
= outer_ctx
;
11653 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11654 && OMP_CLAUSE_LINEAR_STMT (c
))
11656 gimplify_omp_ctxp
= ctx
;
11657 push_gimplify_context ();
11658 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
11660 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
11662 TREE_SIDE_EFFECTS (bind
) = 1;
11663 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
11664 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
11666 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
11667 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
11668 pop_gimplify_context
11669 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
11670 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
11672 gimplify_omp_ctxp
= outer_ctx
;
11678 case OMP_CLAUSE_COPYIN
:
11679 case OMP_CLAUSE_COPYPRIVATE
:
11680 decl
= OMP_CLAUSE_DECL (c
);
11681 if (error_operand_p (decl
))
11686 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
11688 && !omp_check_private (ctx
, decl
, true))
11691 if (is_global_var (decl
))
11693 if (DECL_THREAD_LOCAL_P (decl
))
11695 else if (DECL_HAS_VALUE_EXPR_P (decl
))
11697 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
11701 && DECL_THREAD_LOCAL_P (value
))
11706 error_at (OMP_CLAUSE_LOCATION (c
),
11707 "copyprivate variable %qE is not threadprivate"
11708 " or private in outer context", DECL_NAME (decl
));
11711 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11712 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
11713 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
11715 && ((region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
11716 || (region_type
== ORT_WORKSHARE
11717 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11718 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
11719 || code
== OMP_LOOP
)))
11720 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
11721 || (code
== OMP_LOOP
11722 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11723 && ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
11724 == ORT_COMBINED_TEAMS
))))
11727 = splay_tree_lookup (outer_ctx
->variables
,
11728 (splay_tree_key
)decl
);
11729 if (on
== NULL
|| (on
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11731 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
11732 && TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
11733 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
11734 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
11735 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
11736 == POINTER_TYPE
))))
11737 omp_firstprivatize_variable (outer_ctx
, decl
);
11740 omp_add_variable (outer_ctx
, decl
,
11741 GOVD_SEEN
| GOVD_SHARED
);
11742 if (outer_ctx
->outer_context
)
11743 omp_notice_variable (outer_ctx
->outer_context
, decl
,
11749 omp_notice_variable (outer_ctx
, decl
, true);
11750 if (check_non_private
11751 && (region_type
== ORT_WORKSHARE
|| code
== OMP_SCOPE
)
11752 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
11753 || decl
== OMP_CLAUSE_DECL (c
)
11754 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
11755 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
11757 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
11758 == POINTER_PLUS_EXPR
11759 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
11760 (OMP_CLAUSE_DECL (c
), 0), 0))
11762 && omp_check_private (ctx
, decl
, false))
11764 error ("%s variable %qE is private in outer context",
11765 check_non_private
, DECL_NAME (decl
));
11770 case OMP_CLAUSE_DETACH
:
11771 flags
= GOVD_FIRSTPRIVATE
| GOVD_SEEN
;
11774 case OMP_CLAUSE_IF
:
11775 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
11776 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
11779 for (int i
= 0; i
< 2; i
++)
11780 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
11782 case VOID_CST
: p
[i
] = "cancel"; break;
11783 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
11784 case OMP_SIMD
: p
[i
] = "simd"; break;
11785 case OMP_TASK
: p
[i
] = "task"; break;
11786 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
11787 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
11788 case OMP_TARGET
: p
[i
] = "target"; break;
11789 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
11790 case OMP_TARGET_ENTER_DATA
:
11791 p
[i
] = "target enter data"; break;
11792 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
11793 default: gcc_unreachable ();
11795 error_at (OMP_CLAUSE_LOCATION (c
),
11796 "expected %qs %<if%> clause modifier rather than %qs",
11800 /* Fall through. */
11802 case OMP_CLAUSE_FINAL
:
11803 OMP_CLAUSE_OPERAND (c
, 0)
11804 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
11805 /* Fall through. */
11807 case OMP_CLAUSE_NUM_TEAMS
:
11808 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
11809 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
11810 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)))
11812 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)))
11817 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
11818 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
),
11819 pre_p
, NULL
, true);
11821 /* Fall through. */
11823 case OMP_CLAUSE_SCHEDULE
:
11824 case OMP_CLAUSE_NUM_THREADS
:
11825 case OMP_CLAUSE_THREAD_LIMIT
:
11826 case OMP_CLAUSE_DIST_SCHEDULE
:
11827 case OMP_CLAUSE_DEVICE
:
11828 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEVICE
11829 && OMP_CLAUSE_DEVICE_ANCESTOR (c
))
11831 if (code
!= OMP_TARGET
)
11833 error_at (OMP_CLAUSE_LOCATION (c
),
11834 "%<device%> clause with %<ancestor%> is only "
11835 "allowed on %<target%> construct");
11840 tree clauses
= *orig_list_p
;
11841 for (; clauses
; clauses
= OMP_CLAUSE_CHAIN (clauses
))
11842 if (OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_DEVICE
11843 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_FIRSTPRIVATE
11844 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_PRIVATE
11845 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_DEFAULTMAP
11846 && OMP_CLAUSE_CODE (clauses
) != OMP_CLAUSE_MAP
11849 error_at (OMP_CLAUSE_LOCATION (c
),
11850 "with %<ancestor%>, only the %<device%>, "
11851 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
11852 "and %<map%> clauses may appear on the "
11858 /* Fall through. */
11860 case OMP_CLAUSE_PRIORITY
:
11861 case OMP_CLAUSE_GRAINSIZE
:
11862 case OMP_CLAUSE_NUM_TASKS
:
11863 case OMP_CLAUSE_FILTER
:
11864 case OMP_CLAUSE_HINT
:
11865 case OMP_CLAUSE_ASYNC
:
11866 case OMP_CLAUSE_WAIT
:
11867 case OMP_CLAUSE_NUM_GANGS
:
11868 case OMP_CLAUSE_NUM_WORKERS
:
11869 case OMP_CLAUSE_VECTOR_LENGTH
:
11870 case OMP_CLAUSE_WORKER
:
11871 case OMP_CLAUSE_VECTOR
:
11872 if (OMP_CLAUSE_OPERAND (c
, 0)
11873 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c
, 0)))
11875 if (error_operand_p (OMP_CLAUSE_OPERAND (c
, 0)))
11880 /* All these clauses care about value, not a particular decl,
11881 so try to force it into a SSA_NAME or fresh temporary. */
11882 OMP_CLAUSE_OPERAND (c
, 0)
11883 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c
, 0),
11884 pre_p
, NULL
, true);
11888 case OMP_CLAUSE_GANG
:
11889 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
11890 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11892 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
11893 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
11897 case OMP_CLAUSE_NOWAIT
:
11901 case OMP_CLAUSE_ORDERED
:
11902 case OMP_CLAUSE_UNTIED
:
11903 case OMP_CLAUSE_COLLAPSE
:
11904 case OMP_CLAUSE_TILE
:
11905 case OMP_CLAUSE_AUTO
:
11906 case OMP_CLAUSE_SEQ
:
11907 case OMP_CLAUSE_INDEPENDENT
:
11908 case OMP_CLAUSE_MERGEABLE
:
11909 case OMP_CLAUSE_PROC_BIND
:
11910 case OMP_CLAUSE_SAFELEN
:
11911 case OMP_CLAUSE_SIMDLEN
:
11912 case OMP_CLAUSE_NOGROUP
:
11913 case OMP_CLAUSE_THREADS
:
11914 case OMP_CLAUSE_SIMD
:
11915 case OMP_CLAUSE_BIND
:
11916 case OMP_CLAUSE_IF_PRESENT
:
11917 case OMP_CLAUSE_FINALIZE
:
11920 case OMP_CLAUSE_ORDER
:
11921 ctx
->order_concurrent
= true;
11924 case OMP_CLAUSE_DEFAULTMAP
:
11925 enum gimplify_defaultmap_kind gdmkmin
, gdmkmax
;
11926 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c
))
11928 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
:
11929 gdmkmin
= GDMK_SCALAR
;
11930 gdmkmax
= GDMK_POINTER
;
11932 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
:
11933 gdmkmin
= GDMK_SCALAR
;
11934 gdmkmax
= GDMK_SCALAR_TARGET
;
11936 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
:
11937 gdmkmin
= gdmkmax
= GDMK_AGGREGATE
;
11939 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
:
11940 gdmkmin
= gdmkmax
= GDMK_ALLOCATABLE
;
11942 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
:
11943 gdmkmin
= gdmkmax
= GDMK_POINTER
;
11946 gcc_unreachable ();
11948 for (int gdmk
= gdmkmin
; gdmk
<= gdmkmax
; gdmk
++)
11949 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c
))
11951 case OMP_CLAUSE_DEFAULTMAP_ALLOC
:
11952 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_ALLOC_ONLY
;
11954 case OMP_CLAUSE_DEFAULTMAP_TO
:
11955 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_TO_ONLY
;
11957 case OMP_CLAUSE_DEFAULTMAP_FROM
:
11958 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FROM_ONLY
;
11960 case OMP_CLAUSE_DEFAULTMAP_TOFROM
:
11961 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
11963 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
:
11964 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
11966 case OMP_CLAUSE_DEFAULTMAP_NONE
:
11967 ctx
->defaultmap
[gdmk
] = 0;
11969 case OMP_CLAUSE_DEFAULTMAP_DEFAULT
:
11973 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
11975 case GDMK_SCALAR_TARGET
:
11976 ctx
->defaultmap
[gdmk
] = (lang_GNU_Fortran ()
11977 ? GOVD_MAP
: GOVD_FIRSTPRIVATE
);
11979 case GDMK_AGGREGATE
:
11980 case GDMK_ALLOCATABLE
:
11981 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
11984 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
11985 if (!lang_GNU_Fortran ())
11986 ctx
->defaultmap
[gdmk
] |= GOVD_MAP_0LEN_ARRAY
;
11989 gcc_unreachable ();
11993 gcc_unreachable ();
11997 case OMP_CLAUSE_ALIGNED
:
11998 decl
= OMP_CLAUSE_DECL (c
);
11999 if (error_operand_p (decl
))
12004 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
12005 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
12010 if (!is_global_var (decl
)
12011 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
12012 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
12015 case OMP_CLAUSE_NONTEMPORAL
:
12016 decl
= OMP_CLAUSE_DECL (c
);
12017 if (error_operand_p (decl
))
12022 omp_add_variable (ctx
, decl
, GOVD_NONTEMPORAL
);
12025 case OMP_CLAUSE_ALLOCATE
:
12026 decl
= OMP_CLAUSE_DECL (c
);
12027 if (error_operand_p (decl
))
12032 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
), pre_p
, NULL
,
12033 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
12038 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
12039 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
12042 else if (code
== OMP_TASKLOOP
12043 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
12044 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
12045 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
12046 pre_p
, NULL
, false);
12049 case OMP_CLAUSE_DEFAULT
:
12050 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
12053 case OMP_CLAUSE_INCLUSIVE
:
12054 case OMP_CLAUSE_EXCLUSIVE
:
12055 decl
= OMP_CLAUSE_DECL (c
);
12057 splay_tree_node n
= splay_tree_lookup (outer_ctx
->variables
,
12058 (splay_tree_key
) decl
);
12059 if (n
== NULL
|| (n
->value
& GOVD_REDUCTION
) == 0)
12061 error_at (OMP_CLAUSE_LOCATION (c
),
12062 "%qD specified in %qs clause but not in %<inscan%> "
12063 "%<reduction%> clause on the containing construct",
12064 decl
, omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
12069 n
->value
|= GOVD_REDUCTION_INSCAN
;
12070 if (outer_ctx
->region_type
== ORT_SIMD
12071 && outer_ctx
->outer_context
12072 && outer_ctx
->outer_context
->region_type
== ORT_WORKSHARE
)
12074 n
= splay_tree_lookup (outer_ctx
->outer_context
->variables
,
12075 (splay_tree_key
) decl
);
12076 if (n
&& (n
->value
& GOVD_REDUCTION
) != 0)
12077 n
->value
|= GOVD_REDUCTION_INSCAN
;
12083 case OMP_CLAUSE_NOHOST
:
12085 gcc_unreachable ();
12088 if (code
== OACC_DATA
12089 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
12090 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
12091 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12094 *list_p
= OMP_CLAUSE_CHAIN (c
);
12096 list_p
= &OMP_CLAUSE_CHAIN (c
);
12099 ctx
->clauses
= *orig_list_p
;
12100 gimplify_omp_ctxp
= ctx
;
12103 /* Return true if DECL is a candidate for shared to firstprivate
12104 optimization. We only consider non-addressable scalars, not
12105 too big, and not references. */
12108 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
12110 if (TREE_ADDRESSABLE (decl
))
12112 tree type
= TREE_TYPE (decl
);
12113 if (!is_gimple_reg_type (type
)
12114 || TREE_CODE (type
) == REFERENCE_TYPE
12115 || TREE_ADDRESSABLE (type
))
12117 /* Don't optimize too large decls, as each thread/task will have
12119 HOST_WIDE_INT len
= int_size_in_bytes (type
);
12120 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
12122 if (omp_privatize_by_reference (decl
))
12127 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
12128 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
12129 GOVD_WRITTEN in outer contexts. */
12132 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
12134 for (; ctx
; ctx
= ctx
->outer_context
)
12136 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
12137 (splay_tree_key
) decl
);
12140 else if (n
->value
& GOVD_SHARED
)
12142 n
->value
|= GOVD_WRITTEN
;
12145 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
12150 /* Helper callback for walk_gimple_seq to discover possible stores
12151 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12152 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
12156 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
12158 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
12160 *walk_subtrees
= 0;
12167 if (handled_component_p (op
))
12168 op
= TREE_OPERAND (op
, 0);
12169 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
12170 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
12171 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
12176 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
12179 omp_mark_stores (gimplify_omp_ctxp
, op
);
12183 /* Helper callback for walk_gimple_seq to discover possible stores
12184 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
12185 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
12189 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
12190 bool *handled_ops_p
,
12191 struct walk_stmt_info
*wi
)
12193 gimple
*stmt
= gsi_stmt (*gsi_p
);
12194 switch (gimple_code (stmt
))
12196 /* Don't recurse on OpenMP constructs for which
12197 gimplify_adjust_omp_clauses already handled the bodies,
12198 except handle gimple_omp_for_pre_body. */
12199 case GIMPLE_OMP_FOR
:
12200 *handled_ops_p
= true;
12201 if (gimple_omp_for_pre_body (stmt
))
12202 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
12203 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
12205 case GIMPLE_OMP_PARALLEL
:
12206 case GIMPLE_OMP_TASK
:
12207 case GIMPLE_OMP_SECTIONS
:
12208 case GIMPLE_OMP_SINGLE
:
12209 case GIMPLE_OMP_SCOPE
:
12210 case GIMPLE_OMP_TARGET
:
12211 case GIMPLE_OMP_TEAMS
:
12212 case GIMPLE_OMP_CRITICAL
:
12213 *handled_ops_p
= true;
12221 struct gimplify_adjust_omp_clauses_data
12227 /* For all variables that were not actually used within the context,
12228 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
12231 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
12233 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
12235 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
12236 tree decl
= (tree
) n
->key
;
12237 unsigned flags
= n
->value
;
12238 enum omp_clause_code code
;
12240 bool private_debug
;
12242 if (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
12243 && (flags
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0)
12244 flags
= GOVD_SHARED
| GOVD_SEEN
| GOVD_WRITTEN
;
12245 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
12247 if ((flags
& GOVD_SEEN
) == 0)
12249 if (flags
& GOVD_DEBUG_PRIVATE
)
12251 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_SHARED
);
12252 private_debug
= true;
12254 else if (flags
& GOVD_MAP
)
12255 private_debug
= false;
12258 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
12259 !!(flags
& GOVD_SHARED
));
12261 code
= OMP_CLAUSE_PRIVATE
;
12262 else if (flags
& GOVD_MAP
)
12264 code
= OMP_CLAUSE_MAP
;
12265 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
12266 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
12268 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
12272 && DECL_IN_CONSTANT_POOL (decl
)
12273 && !lookup_attribute ("omp declare target",
12274 DECL_ATTRIBUTES (decl
)))
12276 tree id
= get_identifier ("omp declare target");
12277 DECL_ATTRIBUTES (decl
)
12278 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
12279 varpool_node
*node
= varpool_node::get (decl
);
12282 node
->offloadable
= 1;
12283 if (ENABLE_OFFLOADING
)
12284 g
->have_offload
= true;
12288 else if (flags
& GOVD_SHARED
)
12290 if (is_global_var (decl
))
12292 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
12293 while (ctx
!= NULL
)
12296 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12297 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
12298 | GOVD_PRIVATE
| GOVD_REDUCTION
12299 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
12301 ctx
= ctx
->outer_context
;
12306 code
= OMP_CLAUSE_SHARED
;
12307 /* Don't optimize shared into firstprivate for read-only vars
12308 on tasks with depend clause, we shouldn't try to copy them
12309 until the dependencies are satisfied. */
12310 if (gimplify_omp_ctxp
->has_depend
)
12311 flags
|= GOVD_WRITTEN
;
12313 else if (flags
& GOVD_PRIVATE
)
12314 code
= OMP_CLAUSE_PRIVATE
;
12315 else if (flags
& GOVD_FIRSTPRIVATE
)
12317 code
= OMP_CLAUSE_FIRSTPRIVATE
;
12318 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
12319 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
12320 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
12322 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
12323 "%<target%> construct", decl
);
12327 else if (flags
& GOVD_LASTPRIVATE
)
12328 code
= OMP_CLAUSE_LASTPRIVATE
;
12329 else if (flags
& (GOVD_ALIGNED
| GOVD_NONTEMPORAL
))
12331 else if (flags
& GOVD_CONDTEMP
)
12333 code
= OMP_CLAUSE__CONDTEMP_
;
12334 gimple_add_tmp_var (decl
);
12337 gcc_unreachable ();
12339 if (((flags
& GOVD_LASTPRIVATE
)
12340 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
12341 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
12342 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
12344 tree chain
= *list_p
;
12345 clause
= build_omp_clause (input_location
, code
);
12346 OMP_CLAUSE_DECL (clause
) = decl
;
12347 OMP_CLAUSE_CHAIN (clause
) = chain
;
12349 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
12350 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
12351 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
12352 else if (code
== OMP_CLAUSE_SHARED
12353 && (flags
& GOVD_WRITTEN
) == 0
12354 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
12355 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
12356 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
12357 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
12358 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
12360 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
12361 OMP_CLAUSE_DECL (nc
) = decl
;
12362 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
12363 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
12364 OMP_CLAUSE_DECL (clause
)
12365 = build_simple_mem_ref_loc (input_location
, decl
);
12366 OMP_CLAUSE_DECL (clause
)
12367 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
12368 build_int_cst (build_pointer_type (char_type_node
), 0));
12369 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
12370 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
12371 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
12372 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
12373 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
12374 OMP_CLAUSE_CHAIN (nc
) = chain
;
12375 OMP_CLAUSE_CHAIN (clause
) = nc
;
12376 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12377 gimplify_omp_ctxp
= ctx
->outer_context
;
12378 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
12379 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
12380 gimplify_omp_ctxp
= ctx
;
12382 else if (code
== OMP_CLAUSE_MAP
)
12385 /* Not all combinations of these GOVD_MAP flags are actually valid. */
12386 switch (flags
& (GOVD_MAP_TO_ONLY
12388 | GOVD_MAP_FORCE_PRESENT
12389 | GOVD_MAP_ALLOC_ONLY
12390 | GOVD_MAP_FROM_ONLY
))
12393 kind
= GOMP_MAP_TOFROM
;
12395 case GOVD_MAP_FORCE
:
12396 kind
= GOMP_MAP_TOFROM
| GOMP_MAP_FLAG_FORCE
;
12398 case GOVD_MAP_TO_ONLY
:
12399 kind
= GOMP_MAP_TO
;
12401 case GOVD_MAP_FROM_ONLY
:
12402 kind
= GOMP_MAP_FROM
;
12404 case GOVD_MAP_ALLOC_ONLY
:
12405 kind
= GOMP_MAP_ALLOC
;
12407 case GOVD_MAP_TO_ONLY
| GOVD_MAP_FORCE
:
12408 kind
= GOMP_MAP_TO
| GOMP_MAP_FLAG_FORCE
;
12410 case GOVD_MAP_FORCE_PRESENT
:
12411 kind
= GOMP_MAP_FORCE_PRESENT
;
12414 gcc_unreachable ();
12416 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
12417 /* Setting of the implicit flag for the runtime is currently disabled for
12419 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0)
12420 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause
) = 1;
12421 if (DECL_SIZE (decl
)
12422 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
12424 tree decl2
= DECL_VALUE_EXPR (decl
);
12425 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
12426 decl2
= TREE_OPERAND (decl2
, 0);
12427 gcc_assert (DECL_P (decl2
));
12428 tree mem
= build_simple_mem_ref (decl2
);
12429 OMP_CLAUSE_DECL (clause
) = mem
;
12430 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
12431 if (gimplify_omp_ctxp
->outer_context
)
12433 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
12434 omp_notice_variable (ctx
, decl2
, true);
12435 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
12437 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
12439 OMP_CLAUSE_DECL (nc
) = decl
;
12440 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
12441 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
12442 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
12444 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
12445 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
12446 OMP_CLAUSE_CHAIN (clause
) = nc
;
12448 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
12449 && omp_privatize_by_reference (decl
))
12451 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
12452 OMP_CLAUSE_SIZE (clause
)
12453 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
12454 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12455 gimplify_omp_ctxp
= ctx
->outer_context
;
12456 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
12457 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
12458 gimplify_omp_ctxp
= ctx
;
12459 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
12461 OMP_CLAUSE_DECL (nc
) = decl
;
12462 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
12463 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
12464 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
12465 OMP_CLAUSE_CHAIN (clause
) = nc
;
12468 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
12470 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
12472 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
12473 OMP_CLAUSE_DECL (nc
) = decl
;
12474 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
12475 OMP_CLAUSE_CHAIN (nc
) = chain
;
12476 OMP_CLAUSE_CHAIN (clause
) = nc
;
12477 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12478 gimplify_omp_ctxp
= ctx
->outer_context
;
12479 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
12480 (ctx
->region_type
& ORT_ACC
) != 0);
12481 gimplify_omp_ctxp
= ctx
;
12484 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12485 gimplify_omp_ctxp
= ctx
->outer_context
;
12486 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
12487 in simd. Those are only added for the local vars inside of simd body
12488 and they don't need to be e.g. default constructible. */
12489 if (code
!= OMP_CLAUSE_PRIVATE
|| ctx
->region_type
!= ORT_SIMD
)
12490 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
,
12491 (ctx
->region_type
& ORT_ACC
) != 0);
12492 if (gimplify_omp_ctxp
)
12493 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
12494 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
12495 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
12496 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
12498 gimplify_omp_ctxp
= ctx
;
12503 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
12504 enum tree_code code
)
12506 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12507 tree
*orig_list_p
= list_p
;
12509 bool has_inscan_reductions
= false;
12513 struct gimplify_omp_ctx
*octx
;
12514 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
12515 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
12519 struct walk_stmt_info wi
;
12520 memset (&wi
, 0, sizeof (wi
));
12521 walk_gimple_seq (body
, omp_find_stores_stmt
,
12522 omp_find_stores_op
, &wi
);
12526 if (ctx
->add_safelen1
)
12528 /* If there are VLAs in the body of simd loop, prevent
12530 gcc_assert (ctx
->region_type
== ORT_SIMD
);
12531 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
12532 OMP_CLAUSE_SAFELEN_EXPR (c
) = integer_one_node
;
12533 OMP_CLAUSE_CHAIN (c
) = *list_p
;
12535 list_p
= &OMP_CLAUSE_CHAIN (c
);
12538 if (ctx
->region_type
== ORT_WORKSHARE
12539 && ctx
->outer_context
12540 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
)
12542 for (c
= ctx
->outer_context
->clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
12543 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12544 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
12546 decl
= OMP_CLAUSE_DECL (c
);
12548 = splay_tree_lookup (ctx
->outer_context
->variables
,
12549 (splay_tree_key
) decl
);
12550 gcc_checking_assert (!splay_tree_lookup (ctx
->variables
,
12551 (splay_tree_key
) decl
));
12552 omp_add_variable (ctx
, decl
, n
->value
);
12553 tree c2
= copy_node (c
);
12554 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
12556 if ((n
->value
& GOVD_FIRSTPRIVATE
) == 0)
12558 c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12559 OMP_CLAUSE_FIRSTPRIVATE
);
12560 OMP_CLAUSE_DECL (c2
) = decl
;
12561 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
12566 tree attach_list
= NULL_TREE
;
12567 tree
*attach_tail
= &attach_list
;
12569 while ((c
= *list_p
) != NULL
)
12572 bool remove
= false;
12573 bool move_attach
= false;
12575 switch (OMP_CLAUSE_CODE (c
))
12577 case OMP_CLAUSE_FIRSTPRIVATE
:
12578 if ((ctx
->region_type
& ORT_TARGET
)
12579 && (ctx
->region_type
& ORT_ACC
) == 0
12580 && TYPE_ATOMIC (strip_array_types
12581 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
12583 error_at (OMP_CLAUSE_LOCATION (c
),
12584 "%<_Atomic%> %qD in %<firstprivate%> clause on "
12585 "%<target%> construct", OMP_CLAUSE_DECL (c
));
12589 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
12591 decl
= OMP_CLAUSE_DECL (c
);
12592 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12593 if ((n
->value
& GOVD_MAP
) != 0)
12598 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c
) = 0;
12599 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
) = 0;
12602 case OMP_CLAUSE_PRIVATE
:
12603 case OMP_CLAUSE_SHARED
:
12604 case OMP_CLAUSE_LINEAR
:
12605 decl
= OMP_CLAUSE_DECL (c
);
12606 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12607 remove
= !(n
->value
& GOVD_SEEN
);
12608 if ((n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0
12609 && code
== OMP_PARALLEL
12610 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
12614 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
12615 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
12616 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
12618 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
12619 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
12621 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
12622 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
12624 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
12627 n
->value
|= GOVD_WRITTEN
;
12628 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
12629 && (n
->value
& GOVD_WRITTEN
) == 0
12631 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
12632 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
12633 else if (DECL_P (decl
)
12634 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
12635 && (n
->value
& GOVD_WRITTEN
) != 0)
12636 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
12637 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
12638 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
12639 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
12642 n
->value
&= ~GOVD_EXPLICIT
;
12645 case OMP_CLAUSE_LASTPRIVATE
:
12646 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
12647 accurately reflect the presence of a FIRSTPRIVATE clause. */
12648 decl
= OMP_CLAUSE_DECL (c
);
12649 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12650 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
12651 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
12652 if (code
== OMP_DISTRIBUTE
12653 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
12656 error_at (OMP_CLAUSE_LOCATION (c
),
12657 "same variable used in %<firstprivate%> and "
12658 "%<lastprivate%> clauses on %<distribute%> "
12662 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12664 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
12665 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
12666 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) && code
== OMP_PARALLEL
)
12670 case OMP_CLAUSE_ALIGNED
:
12671 decl
= OMP_CLAUSE_DECL (c
);
12672 if (!is_global_var (decl
))
12674 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12675 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
12676 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
12678 struct gimplify_omp_ctx
*octx
;
12680 && (n
->value
& (GOVD_DATA_SHARE_CLASS
12681 & ~GOVD_FIRSTPRIVATE
)))
12684 for (octx
= ctx
->outer_context
; octx
;
12685 octx
= octx
->outer_context
)
12687 n
= splay_tree_lookup (octx
->variables
,
12688 (splay_tree_key
) decl
);
12691 if (n
->value
& GOVD_LOCAL
)
12693 /* We have to avoid assigning a shared variable
12694 to itself when trying to add
12695 __builtin_assume_aligned. */
12696 if (n
->value
& GOVD_SHARED
)
12704 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
12706 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12707 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
12712 case OMP_CLAUSE_HAS_DEVICE_ADDR
:
12713 decl
= OMP_CLAUSE_DECL (c
);
12714 while (TREE_CODE (decl
) == INDIRECT_REF
12715 || TREE_CODE (decl
) == ARRAY_REF
)
12716 decl
= TREE_OPERAND (decl
, 0);
12717 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12718 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
12721 case OMP_CLAUSE_IS_DEVICE_PTR
:
12722 case OMP_CLAUSE_NONTEMPORAL
:
12723 decl
= OMP_CLAUSE_DECL (c
);
12724 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12725 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
12728 case OMP_CLAUSE_MAP
:
12729 if (code
== OMP_TARGET_EXIT_DATA
12730 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
12735 /* If we have a target region, we can push all the attaches to the
12736 end of the list (we may have standalone "attach" operations
12737 synthesized for GOMP_MAP_STRUCT nodes that must be processed after
12738 the attachment point AND the pointed-to block have been mapped).
12739 If we have something else, e.g. "enter data", we need to keep
12740 "attach" nodes together with the previous node they attach to so
12741 that separate "exit data" operations work properly (see
12742 libgomp/target.c). */
12743 if ((ctx
->region_type
& ORT_TARGET
) != 0
12744 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
12745 || (OMP_CLAUSE_MAP_KIND (c
)
12746 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION
)))
12747 move_attach
= true;
12748 decl
= OMP_CLAUSE_DECL (c
);
12749 /* Data clauses associated with reductions must be
12750 compatible with present_or_copy. Warn and adjust the clause
12751 if that is not the case. */
12752 if (ctx
->region_type
== ORT_ACC_PARALLEL
12753 || ctx
->region_type
== ORT_ACC_SERIAL
)
12755 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
12759 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
12761 if (n
&& (n
->value
& GOVD_REDUCTION
))
12763 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
12765 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
12766 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
12767 && kind
!= GOMP_MAP_FORCE_PRESENT
12768 && kind
!= GOMP_MAP_POINTER
)
12770 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
12771 "incompatible data clause with reduction "
12772 "on %qE; promoting to %<present_or_copy%>",
12774 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
12778 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
12779 && (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
))
12784 if (!DECL_P (decl
))
12786 if ((ctx
->region_type
& ORT_TARGET
) != 0
12787 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
12789 if (TREE_CODE (decl
) == INDIRECT_REF
12790 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
12791 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
12792 == REFERENCE_TYPE
))
12793 decl
= TREE_OPERAND (decl
, 0);
12794 if (TREE_CODE (decl
) == COMPONENT_REF
)
12796 while (TREE_CODE (decl
) == COMPONENT_REF
)
12797 decl
= TREE_OPERAND (decl
, 0);
12800 n
= splay_tree_lookup (ctx
->variables
,
12801 (splay_tree_key
) decl
);
12802 if (!(n
->value
& GOVD_SEEN
))
12809 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12810 if ((ctx
->region_type
& ORT_TARGET
) != 0
12811 && !(n
->value
& GOVD_SEEN
)
12812 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
12813 && (!is_global_var (decl
)
12814 || !lookup_attribute ("omp declare target link",
12815 DECL_ATTRIBUTES (decl
))))
12818 /* For struct element mapping, if struct is never referenced
12819 in target block and none of the mapping has always modifier,
12820 remove all the struct element mappings, which immediately
12821 follow the GOMP_MAP_STRUCT map clause. */
12822 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
12824 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
12826 OMP_CLAUSE_CHAIN (c
)
12827 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
12830 else if (DECL_SIZE (decl
)
12831 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
12832 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
12833 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
12834 && (OMP_CLAUSE_MAP_KIND (c
)
12835 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
12837 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
12838 for these, TREE_CODE (DECL_SIZE (decl)) will always be
12840 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
12842 tree decl2
= DECL_VALUE_EXPR (decl
);
12843 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
12844 decl2
= TREE_OPERAND (decl2
, 0);
12845 gcc_assert (DECL_P (decl2
));
12846 tree mem
= build_simple_mem_ref (decl2
);
12847 OMP_CLAUSE_DECL (c
) = mem
;
12848 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
12849 if (ctx
->outer_context
)
12851 omp_notice_variable (ctx
->outer_context
, decl2
, true);
12852 omp_notice_variable (ctx
->outer_context
,
12853 OMP_CLAUSE_SIZE (c
), true);
12855 if (((ctx
->region_type
& ORT_TARGET
) != 0
12856 || !ctx
->target_firstprivatize_array_bases
)
12857 && ((n
->value
& GOVD_SEEN
) == 0
12858 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
12860 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12862 OMP_CLAUSE_DECL (nc
) = decl
;
12863 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
12864 if (ctx
->target_firstprivatize_array_bases
)
12865 OMP_CLAUSE_SET_MAP_KIND (nc
,
12866 GOMP_MAP_FIRSTPRIVATE_POINTER
);
12868 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
12869 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
12870 OMP_CLAUSE_CHAIN (c
) = nc
;
12876 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
12877 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
12878 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
12879 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
12884 case OMP_CLAUSE_TO
:
12885 case OMP_CLAUSE_FROM
:
12886 case OMP_CLAUSE__CACHE_
:
12887 decl
= OMP_CLAUSE_DECL (c
);
12888 if (!DECL_P (decl
))
12890 if (DECL_SIZE (decl
)
12891 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
12893 tree decl2
= DECL_VALUE_EXPR (decl
);
12894 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
12895 decl2
= TREE_OPERAND (decl2
, 0);
12896 gcc_assert (DECL_P (decl2
));
12897 tree mem
= build_simple_mem_ref (decl2
);
12898 OMP_CLAUSE_DECL (c
) = mem
;
12899 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
12900 if (ctx
->outer_context
)
12902 omp_notice_variable (ctx
->outer_context
, decl2
, true);
12903 omp_notice_variable (ctx
->outer_context
,
12904 OMP_CLAUSE_SIZE (c
), true);
12907 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
12908 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
12911 case OMP_CLAUSE_REDUCTION
:
12912 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
12914 decl
= OMP_CLAUSE_DECL (c
);
12915 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12916 if ((n
->value
& GOVD_REDUCTION_INSCAN
) == 0)
12919 error_at (OMP_CLAUSE_LOCATION (c
),
12920 "%qD specified in %<inscan%> %<reduction%> clause "
12921 "but not in %<scan%> directive clause", decl
);
12924 has_inscan_reductions
= true;
12927 case OMP_CLAUSE_IN_REDUCTION
:
12928 case OMP_CLAUSE_TASK_REDUCTION
:
12929 decl
= OMP_CLAUSE_DECL (c
);
12930 /* OpenACC reductions need a present_or_copy data clause.
12931 Add one if necessary. Emit error when the reduction is private. */
12932 if (ctx
->region_type
== ORT_ACC_PARALLEL
12933 || ctx
->region_type
== ORT_ACC_SERIAL
)
12935 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12936 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
12939 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
12940 "reduction on %qE", DECL_NAME (decl
));
12942 else if ((n
->value
& GOVD_MAP
) == 0)
12944 tree next
= OMP_CLAUSE_CHAIN (c
);
12945 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
12946 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
12947 OMP_CLAUSE_DECL (nc
) = decl
;
12948 OMP_CLAUSE_CHAIN (c
) = nc
;
12949 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
12954 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
12955 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
12957 nc
= OMP_CLAUSE_CHAIN (nc
);
12959 OMP_CLAUSE_CHAIN (nc
) = next
;
12960 n
->value
|= GOVD_MAP
;
12964 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
12965 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
12968 case OMP_CLAUSE_ALLOCATE
:
12969 decl
= OMP_CLAUSE_DECL (c
);
12970 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
12971 if (n
!= NULL
&& !(n
->value
& GOVD_SEEN
))
12973 if ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
| GOVD_LINEAR
))
12975 && (n
->value
& (GOVD_REDUCTION
| GOVD_LASTPRIVATE
)) == 0)
12979 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
12980 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)) != INTEGER_CST
12981 && ((ctx
->region_type
& (ORT_PARALLEL
| ORT_TARGET
)) != 0
12982 || (ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASK
12983 || (ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
))
12985 tree allocator
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
12986 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) allocator
);
12989 enum omp_clause_default_kind default_kind
12990 = ctx
->default_kind
;
12991 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
12992 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
12994 ctx
->default_kind
= default_kind
;
12997 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
13002 case OMP_CLAUSE_COPYIN
:
13003 case OMP_CLAUSE_COPYPRIVATE
:
13004 case OMP_CLAUSE_IF
:
13005 case OMP_CLAUSE_NUM_THREADS
:
13006 case OMP_CLAUSE_NUM_TEAMS
:
13007 case OMP_CLAUSE_THREAD_LIMIT
:
13008 case OMP_CLAUSE_DIST_SCHEDULE
:
13009 case OMP_CLAUSE_DEVICE
:
13010 case OMP_CLAUSE_SCHEDULE
:
13011 case OMP_CLAUSE_NOWAIT
:
13012 case OMP_CLAUSE_ORDERED
:
13013 case OMP_CLAUSE_DEFAULT
:
13014 case OMP_CLAUSE_UNTIED
:
13015 case OMP_CLAUSE_COLLAPSE
:
13016 case OMP_CLAUSE_FINAL
:
13017 case OMP_CLAUSE_MERGEABLE
:
13018 case OMP_CLAUSE_PROC_BIND
:
13019 case OMP_CLAUSE_SAFELEN
:
13020 case OMP_CLAUSE_SIMDLEN
:
13021 case OMP_CLAUSE_DEPEND
:
13022 case OMP_CLAUSE_DOACROSS
:
13023 case OMP_CLAUSE_PRIORITY
:
13024 case OMP_CLAUSE_GRAINSIZE
:
13025 case OMP_CLAUSE_NUM_TASKS
:
13026 case OMP_CLAUSE_NOGROUP
:
13027 case OMP_CLAUSE_THREADS
:
13028 case OMP_CLAUSE_SIMD
:
13029 case OMP_CLAUSE_FILTER
:
13030 case OMP_CLAUSE_HINT
:
13031 case OMP_CLAUSE_DEFAULTMAP
:
13032 case OMP_CLAUSE_ORDER
:
13033 case OMP_CLAUSE_BIND
:
13034 case OMP_CLAUSE_DETACH
:
13035 case OMP_CLAUSE_USE_DEVICE_PTR
:
13036 case OMP_CLAUSE_USE_DEVICE_ADDR
:
13037 case OMP_CLAUSE_ASYNC
:
13038 case OMP_CLAUSE_WAIT
:
13039 case OMP_CLAUSE_INDEPENDENT
:
13040 case OMP_CLAUSE_NUM_GANGS
:
13041 case OMP_CLAUSE_NUM_WORKERS
:
13042 case OMP_CLAUSE_VECTOR_LENGTH
:
13043 case OMP_CLAUSE_GANG
:
13044 case OMP_CLAUSE_WORKER
:
13045 case OMP_CLAUSE_VECTOR
:
13046 case OMP_CLAUSE_AUTO
:
13047 case OMP_CLAUSE_SEQ
:
13048 case OMP_CLAUSE_TILE
:
13049 case OMP_CLAUSE_IF_PRESENT
:
13050 case OMP_CLAUSE_FINALIZE
:
13051 case OMP_CLAUSE_INCLUSIVE
:
13052 case OMP_CLAUSE_EXCLUSIVE
:
13055 case OMP_CLAUSE_NOHOST
:
13057 gcc_unreachable ();
13061 *list_p
= OMP_CLAUSE_CHAIN (c
);
13062 else if (move_attach
)
13064 /* Remove attach node from here, separate out into its own list. */
13066 *list_p
= OMP_CLAUSE_CHAIN (c
);
13067 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
13068 attach_tail
= &OMP_CLAUSE_CHAIN (c
);
13071 list_p
= &OMP_CLAUSE_CHAIN (c
);
13074 /* Splice attach nodes at the end of the list. */
13077 *list_p
= attach_list
;
13078 list_p
= attach_tail
;
13081 /* Add in any implicit data sharing. */
13082 struct gimplify_adjust_omp_clauses_data data
;
13083 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0)
13085 /* OpenMP. Implicit clauses are added at the start of the clause list,
13086 but after any non-map clauses. */
13087 tree
*implicit_add_list_p
= orig_list_p
;
13088 while (*implicit_add_list_p
13089 && OMP_CLAUSE_CODE (*implicit_add_list_p
) != OMP_CLAUSE_MAP
)
13090 implicit_add_list_p
= &OMP_CLAUSE_CHAIN (*implicit_add_list_p
);
13091 data
.list_p
= implicit_add_list_p
;
13095 data
.list_p
= list_p
;
13096 data
.pre_p
= pre_p
;
13097 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
13099 if (has_inscan_reductions
)
13100 for (c
= *orig_list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
13101 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
13102 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
13104 error_at (OMP_CLAUSE_LOCATION (c
),
13105 "%<inscan%> %<reduction%> clause used together with "
13106 "%<linear%> clause for a variable other than loop "
13111 gimplify_omp_ctxp
= ctx
->outer_context
;
13112 delete_omp_context (ctx
);
13115 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
13116 -1 if unknown yet (simd is involved, won't be known until vectorization)
13117 and 1 if they do. If SCORES is non-NULL, it should point to an array
13118 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
13119 of the CONSTRUCTS (position -1 if it will never match) followed by
13120 number of constructs in the OpenMP context construct trait. If the
13121 score depends on whether it will be in a declare simd clone or not,
13122 the function returns 2 and there will be two sets of the scores, the first
13123 one for the case that it is not in a declare simd clone, the other
13124 that it is in a declare simd clone. */
13127 omp_construct_selector_matches (enum tree_code
*constructs
, int nconstructs
,
13130 int matched
= 0, cnt
= 0;
13131 bool simd_seen
= false;
13132 bool target_seen
= false;
13133 int declare_simd_cnt
= -1;
13134 auto_vec
<enum tree_code
, 16> codes
;
13135 for (struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
; ctx
;)
13137 if (((ctx
->region_type
& ORT_PARALLEL
) && ctx
->code
== OMP_PARALLEL
)
13138 || ((ctx
->region_type
& (ORT_TARGET
| ORT_IMPLICIT_TARGET
| ORT_ACC
))
13139 == ORT_TARGET
&& ctx
->code
== OMP_TARGET
)
13140 || ((ctx
->region_type
& ORT_TEAMS
) && ctx
->code
== OMP_TEAMS
)
13141 || (ctx
->region_type
== ORT_WORKSHARE
&& ctx
->code
== OMP_FOR
)
13142 || (ctx
->region_type
== ORT_SIMD
13143 && ctx
->code
== OMP_SIMD
13144 && !omp_find_clause (ctx
->clauses
, OMP_CLAUSE_BIND
)))
13148 codes
.safe_push (ctx
->code
);
13149 else if (matched
< nconstructs
&& ctx
->code
== constructs
[matched
])
13151 if (ctx
->code
== OMP_SIMD
)
13159 if (ctx
->code
== OMP_TARGET
)
13161 if (scores
== NULL
)
13162 return matched
< nconstructs
? 0 : simd_seen
? -1 : 1;
13163 target_seen
= true;
13167 else if (ctx
->region_type
== ORT_WORKSHARE
13168 && ctx
->code
== OMP_LOOP
13169 && ctx
->outer_context
13170 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
13171 && ctx
->outer_context
->outer_context
13172 && ctx
->outer_context
->outer_context
->code
== OMP_LOOP
13173 && ctx
->outer_context
->outer_context
->distribute
)
13174 ctx
= ctx
->outer_context
->outer_context
;
13175 ctx
= ctx
->outer_context
;
13178 && lookup_attribute ("omp declare simd",
13179 DECL_ATTRIBUTES (current_function_decl
)))
13181 /* Declare simd is a maybe case, it is supposed to be added only to the
13182 omp-simd-clone.cc added clones and not to the base function. */
13183 declare_simd_cnt
= cnt
++;
13185 codes
.safe_push (OMP_SIMD
);
13187 && constructs
[0] == OMP_SIMD
)
13189 gcc_assert (matched
== 0);
13191 if (++matched
== nconstructs
)
13195 if (tree attr
= lookup_attribute ("omp declare variant variant",
13196 DECL_ATTRIBUTES (current_function_decl
)))
13198 enum tree_code variant_constructs
[5];
13199 int variant_nconstructs
= 0;
13201 variant_nconstructs
13202 = omp_constructor_traits_to_codes (TREE_VALUE (attr
),
13203 variant_constructs
);
13204 for (int i
= 0; i
< variant_nconstructs
; i
++)
13208 codes
.safe_push (variant_constructs
[i
]);
13209 else if (matched
< nconstructs
13210 && variant_constructs
[i
] == constructs
[matched
])
13212 if (variant_constructs
[i
] == OMP_SIMD
)
13223 && lookup_attribute ("omp declare target block",
13224 DECL_ATTRIBUTES (current_function_decl
)))
13227 codes
.safe_push (OMP_TARGET
);
13228 else if (matched
< nconstructs
&& constructs
[matched
] == OMP_TARGET
)
13233 for (int pass
= 0; pass
< (declare_simd_cnt
== -1 ? 1 : 2); pass
++)
13235 int j
= codes
.length () - 1;
13236 for (int i
= nconstructs
- 1; i
>= 0; i
--)
13239 && (pass
!= 0 || declare_simd_cnt
!= j
)
13240 && constructs
[i
] != codes
[j
])
13242 if (pass
== 0 && declare_simd_cnt
!= -1 && j
> declare_simd_cnt
)
13247 *scores
++ = ((pass
== 0 && declare_simd_cnt
!= -1)
13248 ? codes
.length () - 1 : codes
.length ());
13250 return declare_simd_cnt
== -1 ? 1 : 2;
13252 if (matched
== nconstructs
)
13253 return simd_seen
? -1 : 1;
13257 /* Gimplify OACC_CACHE. */
13260 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
13262 tree expr
= *expr_p
;
13264 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
13266 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
13269 /* TODO: Do something sensible with this information. */
13271 *expr_p
= NULL_TREE
;
13274 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
13275 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
13276 kind. The entry kind will replace the one in CLAUSE, while the exit
13277 kind will be used in a new omp_clause and returned to the caller. */
13280 gimplify_oacc_declare_1 (tree clause
)
13282 HOST_WIDE_INT kind
, new_op
;
13286 kind
= OMP_CLAUSE_MAP_KIND (clause
);
13290 case GOMP_MAP_ALLOC
:
13291 new_op
= GOMP_MAP_RELEASE
;
13295 case GOMP_MAP_FROM
:
13296 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
13297 new_op
= GOMP_MAP_FROM
;
13301 case GOMP_MAP_TOFROM
:
13302 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
13303 new_op
= GOMP_MAP_FROM
;
13307 case GOMP_MAP_DEVICE_RESIDENT
:
13308 case GOMP_MAP_FORCE_DEVICEPTR
:
13309 case GOMP_MAP_FORCE_PRESENT
:
13310 case GOMP_MAP_LINK
:
13311 case GOMP_MAP_POINTER
:
13316 gcc_unreachable ();
13322 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
13323 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
13324 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
13330 /* Gimplify OACC_DECLARE. */
13333 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
13335 tree expr
= *expr_p
;
13337 tree clauses
, t
, decl
;
13339 clauses
= OACC_DECLARE_CLAUSES (expr
);
13341 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
13342 gimplify_adjust_omp_clauses (pre_p
, NULL
, &clauses
, OACC_DECLARE
);
13344 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
13346 decl
= OMP_CLAUSE_DECL (t
);
13348 if (TREE_CODE (decl
) == MEM_REF
)
13349 decl
= TREE_OPERAND (decl
, 0);
13351 if (VAR_P (decl
) && !is_oacc_declared (decl
))
13353 tree attr
= get_identifier ("oacc declare target");
13354 DECL_ATTRIBUTES (decl
) = tree_cons (attr
, NULL_TREE
,
13355 DECL_ATTRIBUTES (decl
));
13359 && !is_global_var (decl
)
13360 && DECL_CONTEXT (decl
) == current_function_decl
)
13362 tree c
= gimplify_oacc_declare_1 (t
);
13365 if (oacc_declare_returns
== NULL
)
13366 oacc_declare_returns
= new hash_map
<tree
, tree
>;
13368 oacc_declare_returns
->put (decl
, c
);
13372 if (gimplify_omp_ctxp
)
13373 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
13376 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
13379 gimplify_seq_add_stmt (pre_p
, stmt
);
13381 *expr_p
= NULL_TREE
;
13384 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
13385 gimplification of the body, as well as scanning the body for used
13386 variables. We need to do this scan now, because variable-sized
13387 decls will be decomposed during gimplification. */
13390 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
13392 tree expr
= *expr_p
;
13394 gimple_seq body
= NULL
;
13396 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
13397 OMP_PARALLEL_COMBINED (expr
)
13398 ? ORT_COMBINED_PARALLEL
13399 : ORT_PARALLEL
, OMP_PARALLEL
);
13401 push_gimplify_context ();
13403 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
13404 if (gimple_code (g
) == GIMPLE_BIND
)
13405 pop_gimplify_context (g
);
13407 pop_gimplify_context (NULL
);
13409 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
13412 g
= gimple_build_omp_parallel (body
,
13413 OMP_PARALLEL_CLAUSES (expr
),
13414 NULL_TREE
, NULL_TREE
);
13415 if (OMP_PARALLEL_COMBINED (expr
))
13416 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
13417 gimplify_seq_add_stmt (pre_p
, g
);
13418 *expr_p
= NULL_TREE
;
13421 /* Gimplify the contents of an OMP_TASK statement. This involves
13422 gimplification of the body, as well as scanning the body for used
13423 variables. We need to do this scan now, because variable-sized
13424 decls will be decomposed during gimplification. */
13427 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
13429 tree expr
= *expr_p
;
13431 gimple_seq body
= NULL
;
13432 bool nowait
= false;
13433 bool has_depend
= false;
13435 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
13437 for (tree c
= OMP_TASK_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13438 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
13441 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET
)
13443 error_at (OMP_CLAUSE_LOCATION (c
),
13444 "%<mutexinoutset%> kind in %<depend%> clause on a "
13445 "%<taskwait%> construct");
13449 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NOWAIT
)
13451 if (nowait
&& !has_depend
)
13453 error_at (EXPR_LOCATION (expr
),
13454 "%<taskwait%> construct with %<nowait%> clause but no "
13455 "%<depend%> clauses");
13456 *expr_p
= NULL_TREE
;
13461 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
13462 omp_find_clause (OMP_TASK_CLAUSES (expr
),
13464 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
13466 if (OMP_TASK_BODY (expr
))
13468 push_gimplify_context ();
13470 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
13471 if (gimple_code (g
) == GIMPLE_BIND
)
13472 pop_gimplify_context (g
);
13474 pop_gimplify_context (NULL
);
13477 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
13480 g
= gimple_build_omp_task (body
,
13481 OMP_TASK_CLAUSES (expr
),
13482 NULL_TREE
, NULL_TREE
,
13483 NULL_TREE
, NULL_TREE
, NULL_TREE
);
13484 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
13485 gimple_omp_task_set_taskwait_p (g
, true);
13486 gimplify_seq_add_stmt (pre_p
, g
);
13487 *expr_p
= NULL_TREE
;
13490 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
13491 force it into a temporary initialized in PRE_P and add firstprivate clause
13492 to ORIG_FOR_STMT. */
13495 gimplify_omp_taskloop_expr (tree type
, tree
*tp
, gimple_seq
*pre_p
,
13496 tree orig_for_stmt
)
13498 if (*tp
== NULL
|| is_gimple_constant (*tp
))
13501 *tp
= get_initialized_tmp_var (*tp
, pre_p
, NULL
, false);
13502 /* Reference to pointer conversion is considered useless,
13503 but is significant for firstprivate clause. Force it
13506 && TREE_CODE (type
) == POINTER_TYPE
13507 && TREE_CODE (TREE_TYPE (*tp
)) == REFERENCE_TYPE
)
13509 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
13510 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
, *tp
);
13511 gimplify_and_add (m
, pre_p
);
13515 tree c
= build_omp_clause (input_location
, OMP_CLAUSE_FIRSTPRIVATE
);
13516 OMP_CLAUSE_DECL (c
) = *tp
;
13517 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
13518 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
13521 /* Helper function of gimplify_omp_for, find OMP_ORDERED with
13522 null OMP_ORDERED_BODY inside of OMP_FOR's body. */
13525 find_standalone_omp_ordered (tree
*tp
, int *walk_subtrees
, void *)
13527 switch (TREE_CODE (*tp
))
13530 if (OMP_ORDERED_BODY (*tp
) == NULL_TREE
)
13536 *walk_subtrees
= 0;
13544 /* Gimplify the gross structure of an OMP_FOR statement. */
13546 static enum gimplify_status
13547 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
13549 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
13550 enum gimplify_status ret
= GS_ALL_DONE
;
13551 enum gimplify_status tret
;
13553 gimple_seq for_body
, for_pre_body
;
13555 bitmap has_decl_expr
= NULL
;
13556 enum omp_region_type ort
= ORT_WORKSHARE
;
13557 bool openacc
= TREE_CODE (*expr_p
) == OACC_LOOP
;
13559 orig_for_stmt
= for_stmt
= *expr_p
;
13561 bool loop_p
= (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_BIND
)
13563 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
13565 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
13566 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
13567 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
13568 find_combined_omp_for
, data
, NULL
);
13569 if (inner_for_stmt
== NULL_TREE
)
13571 gcc_assert (seen_error ());
13572 *expr_p
= NULL_TREE
;
13575 if (data
[2] && OMP_FOR_PRE_BODY (*data
[2]))
13577 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data
[2]),
13578 &OMP_FOR_PRE_BODY (for_stmt
));
13579 OMP_FOR_PRE_BODY (*data
[2]) = NULL_TREE
;
13581 if (OMP_FOR_PRE_BODY (inner_for_stmt
))
13583 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt
),
13584 &OMP_FOR_PRE_BODY (for_stmt
));
13585 OMP_FOR_PRE_BODY (inner_for_stmt
) = NULL_TREE
;
13590 /* We have some statements or variable declarations in between
13591 the composite construct directives. Move them around the
13594 for (i
= 0; i
< 3; i
++)
13598 if (i
< 2 && data
[i
+ 1] == &OMP_BODY (t
))
13599 data
[i
+ 1] = data
[i
];
13600 *data
[i
] = OMP_BODY (t
);
13601 tree body
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
13602 NULL_TREE
, make_node (BLOCK
));
13603 OMP_BODY (t
) = body
;
13604 append_to_statement_list_force (inner_for_stmt
,
13605 &BIND_EXPR_BODY (body
));
13607 data
[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body
)));
13608 gcc_assert (*data
[3] == inner_for_stmt
);
13613 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
13615 && OMP_FOR_ORIG_DECLS (inner_for_stmt
)
13616 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
13618 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
13621 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
13622 /* Class iterators aren't allowed on OMP_SIMD, so the only
13623 case we need to solve is distribute parallel for. They are
13624 allowed on the loop construct, but that is already handled
13625 in gimplify_omp_loop. */
13626 gcc_assert (TREE_CODE (inner_for_stmt
) == OMP_FOR
13627 && TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
13629 tree orig_decl
= TREE_PURPOSE (orig
);
13630 tree last
= TREE_VALUE (orig
);
13632 for (pc
= &OMP_FOR_CLAUSES (inner_for_stmt
);
13633 *pc
; pc
= &OMP_CLAUSE_CHAIN (*pc
))
13634 if ((OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
13635 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LASTPRIVATE
)
13636 && OMP_CLAUSE_DECL (*pc
) == orig_decl
)
13638 if (*pc
== NULL_TREE
)
13641 for (spc
= &OMP_PARALLEL_CLAUSES (*data
[1]);
13642 *spc
; spc
= &OMP_CLAUSE_CHAIN (*spc
))
13643 if (OMP_CLAUSE_CODE (*spc
) == OMP_CLAUSE_PRIVATE
13644 && OMP_CLAUSE_DECL (*spc
) == orig_decl
)
13649 *spc
= OMP_CLAUSE_CHAIN (c
);
13650 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
13654 if (*pc
== NULL_TREE
)
13656 else if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
)
13658 /* private clause will appear only on inner_for_stmt.
13659 Change it into firstprivate, and add private clause
13661 tree c
= copy_node (*pc
);
13662 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
13663 OMP_FOR_CLAUSES (for_stmt
) = c
;
13664 OMP_CLAUSE_CODE (*pc
) = OMP_CLAUSE_FIRSTPRIVATE
;
13665 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
13669 /* lastprivate clause will appear on both inner_for_stmt
13670 and for_stmt. Add firstprivate clause to
13672 tree c
= build_omp_clause (OMP_CLAUSE_LOCATION (*pc
),
13673 OMP_CLAUSE_FIRSTPRIVATE
);
13674 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (*pc
);
13675 OMP_CLAUSE_CHAIN (c
) = *pc
;
13677 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
13679 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
13680 OMP_CLAUSE_FIRSTPRIVATE
);
13681 OMP_CLAUSE_DECL (c
) = last
;
13682 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
13683 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
13684 c
= build_omp_clause (UNKNOWN_LOCATION
,
13685 *pc
? OMP_CLAUSE_SHARED
13686 : OMP_CLAUSE_FIRSTPRIVATE
);
13687 OMP_CLAUSE_DECL (c
) = orig_decl
;
13688 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
13689 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
13691 /* Similarly, take care of C++ range for temporaries, those should
13692 be firstprivate on OMP_PARALLEL if any. */
13694 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
13695 if (OMP_FOR_ORIG_DECLS (inner_for_stmt
)
13696 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
13698 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
13702 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
13703 tree v
= TREE_CHAIN (orig
);
13704 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
13705 OMP_CLAUSE_FIRSTPRIVATE
);
13706 /* First add firstprivate clause for the __for_end artificial
13708 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 1);
13709 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
13711 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
13712 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
13713 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
13714 if (TREE_VEC_ELT (v
, 0))
13716 /* And now the same for __for_range artificial decl if it
13718 c
= build_omp_clause (UNKNOWN_LOCATION
,
13719 OMP_CLAUSE_FIRSTPRIVATE
);
13720 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 0);
13721 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
13723 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
13724 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
13725 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
13730 switch (TREE_CODE (for_stmt
))
13733 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
))
13735 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
13736 OMP_CLAUSE_SCHEDULE
))
13737 error_at (EXPR_LOCATION (for_stmt
),
13738 "%qs clause may not appear on non-rectangular %qs",
13739 "schedule", lang_GNU_Fortran () ? "do" : "for");
13740 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
))
13741 error_at (EXPR_LOCATION (for_stmt
),
13742 "%qs clause may not appear on non-rectangular %qs",
13743 "ordered", lang_GNU_Fortran () ? "do" : "for");
13746 case OMP_DISTRIBUTE
:
13747 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
)
13748 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
13749 OMP_CLAUSE_DIST_SCHEDULE
))
13750 error_at (EXPR_LOCATION (for_stmt
),
13751 "%qs clause may not appear on non-rectangular %qs",
13752 "dist_schedule", "distribute");
13758 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
))
13760 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
13761 OMP_CLAUSE_GRAINSIZE
))
13762 error_at (EXPR_LOCATION (for_stmt
),
13763 "%qs clause may not appear on non-rectangular %qs",
13764 "grainsize", "taskloop");
13765 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
13766 OMP_CLAUSE_NUM_TASKS
))
13767 error_at (EXPR_LOCATION (for_stmt
),
13768 "%qs clause may not appear on non-rectangular %qs",
13769 "num_tasks", "taskloop");
13771 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
13772 ort
= ORT_UNTIED_TASKLOOP
;
13774 ort
= ORT_TASKLOOP
;
13780 gcc_unreachable ();
13783 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
13784 clause for the IV. */
13785 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
13787 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
13788 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
13789 decl
= TREE_OPERAND (t
, 0);
13790 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13791 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
13792 && OMP_CLAUSE_DECL (c
) == decl
)
13794 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
13799 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
13800 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
13801 loop_p
&& TREE_CODE (for_stmt
) != OMP_SIMD
13802 ? OMP_LOOP
: TREE_CODE (for_stmt
));
13804 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
13805 gimplify_omp_ctxp
->distribute
= true;
13807 /* Handle OMP_FOR_INIT. */
13808 for_pre_body
= NULL
;
13809 if ((ort
== ORT_SIMD
13810 || (inner_for_stmt
&& TREE_CODE (inner_for_stmt
) == OMP_SIMD
))
13811 && OMP_FOR_PRE_BODY (for_stmt
))
13813 has_decl_expr
= BITMAP_ALLOC (NULL
);
13814 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
13815 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
13818 t
= OMP_FOR_PRE_BODY (for_stmt
);
13819 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
13821 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
13823 tree_stmt_iterator si
;
13824 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
13828 if (TREE_CODE (t
) == DECL_EXPR
13829 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
13830 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
13834 if (OMP_FOR_PRE_BODY (for_stmt
))
13836 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
13837 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
13840 struct gimplify_omp_ctx ctx
;
13841 memset (&ctx
, 0, sizeof (ctx
));
13842 ctx
.region_type
= ORT_NONE
;
13843 gimplify_omp_ctxp
= &ctx
;
13844 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
13845 gimplify_omp_ctxp
= NULL
;
13848 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
13850 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
13851 for_stmt
= inner_for_stmt
;
13853 /* For taskloop, need to gimplify the start, end and step before the
13854 taskloop, outside of the taskloop omp context. */
13855 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
13857 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
13859 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
13860 gimple_seq
*for_pre_p
= (gimple_seq_empty_p (for_pre_body
)
13861 ? pre_p
: &for_pre_body
);
13862 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
13863 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
13865 tree v
= TREE_OPERAND (t
, 1);
13866 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
13867 for_pre_p
, orig_for_stmt
);
13868 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
13869 for_pre_p
, orig_for_stmt
);
13872 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
13875 /* Handle OMP_FOR_COND. */
13876 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
13877 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
13879 tree v
= TREE_OPERAND (t
, 1);
13880 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
13881 for_pre_p
, orig_for_stmt
);
13882 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
13883 for_pre_p
, orig_for_stmt
);
13886 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
13889 /* Handle OMP_FOR_INCR. */
13890 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
13891 if (TREE_CODE (t
) == MODIFY_EXPR
)
13893 decl
= TREE_OPERAND (t
, 0);
13894 t
= TREE_OPERAND (t
, 1);
13895 tree
*tp
= &TREE_OPERAND (t
, 1);
13896 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
13897 tp
= &TREE_OPERAND (t
, 0);
13899 gimplify_omp_taskloop_expr (NULL_TREE
, tp
, for_pre_p
,
13904 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
13908 if (orig_for_stmt
!= for_stmt
)
13909 gimplify_omp_ctxp
->combined_loop
= true;
13912 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
13913 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
13914 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
13915 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
13917 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
13918 bool is_doacross
= false;
13919 if (c
&& walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt
),
13920 find_standalone_omp_ordered
, NULL
))
13922 OMP_CLAUSE_ORDERED_DOACROSS (c
) = 1;
13923 is_doacross
= true;
13924 int len
= TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
));
13925 gimplify_omp_ctxp
->loop_iter_var
.create (len
* 2);
13926 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
13927 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LINEAR
)
13929 error_at (OMP_CLAUSE_LOCATION (*pc
),
13930 "%<linear%> clause may not be specified together "
13931 "with %<ordered%> clause if stand-alone %<ordered%> "
13932 "construct is nested in it");
13933 *pc
= OMP_CLAUSE_CHAIN (*pc
);
13936 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13938 int collapse
= 1, tile
= 0;
13939 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
13941 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
13942 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
13944 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
13945 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ALLOCATE
);
13946 hash_set
<tree
> *allocate_uids
= NULL
;
13949 allocate_uids
= new hash_set
<tree
>;
13950 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
13951 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
)
13952 allocate_uids
->add (OMP_CLAUSE_DECL (c
));
13954 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
13956 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
13957 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
13958 decl
= TREE_OPERAND (t
, 0);
13959 gcc_assert (DECL_P (decl
));
13960 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
13961 || POINTER_TYPE_P (TREE_TYPE (decl
)));
13964 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
13966 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
13967 if (TREE_CODE (orig_decl
) == TREE_LIST
)
13969 orig_decl
= TREE_PURPOSE (orig_decl
);
13973 gimplify_omp_ctxp
->loop_iter_var
.quick_push (orig_decl
);
13976 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
13977 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
13980 if (for_stmt
== orig_for_stmt
)
13982 tree orig_decl
= decl
;
13983 if (OMP_FOR_ORIG_DECLS (for_stmt
))
13985 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
13986 if (TREE_CODE (orig_decl
) == TREE_LIST
)
13988 orig_decl
= TREE_PURPOSE (orig_decl
);
13993 if (is_global_var (orig_decl
) && DECL_THREAD_LOCAL_P (orig_decl
))
13994 error_at (EXPR_LOCATION (for_stmt
),
13995 "threadprivate iteration variable %qD", orig_decl
);
13998 /* Make sure the iteration variable is private. */
13999 tree c
= NULL_TREE
;
14000 tree c2
= NULL_TREE
;
14001 if (orig_for_stmt
!= for_stmt
)
14003 /* Preserve this information until we gimplify the inner simd. */
14005 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
14006 TREE_PRIVATE (t
) = 1;
14008 else if (ort
== ORT_SIMD
)
14010 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
14011 (splay_tree_key
) decl
);
14012 omp_is_private (gimplify_omp_ctxp
, decl
,
14013 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
14015 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
14017 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
14018 if (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
)
14019 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
14020 OMP_CLAUSE_LASTPRIVATE
);
14021 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
14022 OMP_CLAUSE_LASTPRIVATE
))
14023 if (OMP_CLAUSE_DECL (c3
) == decl
)
14025 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
14026 "conditional %<lastprivate%> on loop "
14027 "iterator %qD ignored", decl
);
14028 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
14029 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
14032 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1 && !loop_p
)
14034 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
14035 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
14036 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
14038 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
14039 || TREE_PRIVATE (t
))
14041 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
14042 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
14044 struct gimplify_omp_ctx
*outer
14045 = gimplify_omp_ctxp
->outer_context
;
14046 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
14048 if (outer
->region_type
== ORT_WORKSHARE
14049 && outer
->combined_loop
)
14051 n
= splay_tree_lookup (outer
->variables
,
14052 (splay_tree_key
)decl
);
14053 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
14055 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
14056 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
14060 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
14062 && octx
->region_type
== ORT_COMBINED_PARALLEL
14063 && octx
->outer_context
14064 && (octx
->outer_context
->region_type
14066 && octx
->outer_context
->combined_loop
)
14068 octx
= octx
->outer_context
;
14069 n
= splay_tree_lookup (octx
->variables
,
14070 (splay_tree_key
)decl
);
14071 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
14073 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
14074 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
14081 OMP_CLAUSE_DECL (c
) = decl
;
14082 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
14083 OMP_FOR_CLAUSES (for_stmt
) = c
;
14084 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
14085 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
14086 omp_lastprivate_for_combined_outer_constructs (outer
, decl
,
14093 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
14094 if (TREE_PRIVATE (t
))
14095 lastprivate
= false;
14096 if (loop_p
&& OMP_FOR_ORIG_DECLS (for_stmt
))
14098 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
14099 if (TREE_CODE (elt
) == TREE_LIST
&& TREE_PURPOSE (elt
))
14100 lastprivate
= false;
14103 struct gimplify_omp_ctx
*outer
14104 = gimplify_omp_ctxp
->outer_context
;
14105 if (outer
&& lastprivate
)
14106 omp_lastprivate_for_combined_outer_constructs (outer
, decl
,
14109 c
= build_omp_clause (input_location
,
14110 lastprivate
? OMP_CLAUSE_LASTPRIVATE
14111 : OMP_CLAUSE_PRIVATE
);
14112 OMP_CLAUSE_DECL (c
) = decl
;
14113 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
14114 OMP_FOR_CLAUSES (for_stmt
) = c
;
14115 omp_add_variable (gimplify_omp_ctxp
, decl
,
14116 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
14117 | GOVD_EXPLICIT
| GOVD_SEEN
);
14121 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
14123 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
14124 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
14125 (splay_tree_key
) decl
);
14126 if (n
&& (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
))
14127 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
14128 OMP_CLAUSE_LASTPRIVATE
);
14129 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
14130 OMP_CLAUSE_LASTPRIVATE
))
14131 if (OMP_CLAUSE_DECL (c3
) == decl
)
14133 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
14134 "conditional %<lastprivate%> on loop "
14135 "iterator %qD ignored", decl
);
14136 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
14137 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
14141 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
14143 /* If DECL is not a gimple register, create a temporary variable to act
14144 as an iteration counter. This is valid, since DECL cannot be
14145 modified in the body of the loop. Similarly for any iteration vars
14146 in simd with collapse > 1 where the iterator vars must be
14147 lastprivate. And similarly for vars mentioned in allocate clauses. */
14148 if (orig_for_stmt
!= for_stmt
)
14150 else if (!is_gimple_reg (decl
)
14151 || (ort
== ORT_SIMD
14152 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1)
14153 || (allocate_uids
&& allocate_uids
->contains (decl
)))
14155 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
14156 /* Make sure omp_add_variable is not called on it prematurely.
14157 We call it ourselves a few lines later. */
14158 gimplify_omp_ctxp
= NULL
;
14159 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
14160 gimplify_omp_ctxp
= ctx
;
14161 TREE_OPERAND (t
, 0) = var
;
14163 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
14165 if (ort
== ORT_SIMD
14166 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
14168 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
14169 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
14170 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
14171 OMP_CLAUSE_DECL (c2
) = var
;
14172 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
14173 OMP_FOR_CLAUSES (for_stmt
) = c2
;
14174 omp_add_variable (gimplify_omp_ctxp
, var
,
14175 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
14176 if (c
== NULL_TREE
)
14183 omp_add_variable (gimplify_omp_ctxp
, var
,
14184 GOVD_PRIVATE
| GOVD_SEEN
);
14189 gimplify_omp_ctxp
->in_for_exprs
= true;
14190 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
14192 tree lb
= TREE_OPERAND (t
, 1);
14193 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 1), &for_pre_body
, NULL
,
14194 is_gimple_val
, fb_rvalue
, false);
14195 ret
= MIN (ret
, tret
);
14196 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 2), &for_pre_body
, NULL
,
14197 is_gimple_val
, fb_rvalue
, false);
14200 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
14201 is_gimple_val
, fb_rvalue
, false);
14202 gimplify_omp_ctxp
->in_for_exprs
= false;
14203 ret
= MIN (ret
, tret
);
14204 if (ret
== GS_ERROR
)
14207 /* Handle OMP_FOR_COND. */
14208 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
14209 gcc_assert (COMPARISON_CLASS_P (t
));
14210 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
14212 gimplify_omp_ctxp
->in_for_exprs
= true;
14213 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
14215 tree ub
= TREE_OPERAND (t
, 1);
14216 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 1), &for_pre_body
, NULL
,
14217 is_gimple_val
, fb_rvalue
, false);
14218 ret
= MIN (ret
, tret
);
14219 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 2), &for_pre_body
, NULL
,
14220 is_gimple_val
, fb_rvalue
, false);
14223 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
14224 is_gimple_val
, fb_rvalue
, false);
14225 gimplify_omp_ctxp
->in_for_exprs
= false;
14226 ret
= MIN (ret
, tret
);
14228 /* Handle OMP_FOR_INCR. */
14229 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
14230 switch (TREE_CODE (t
))
14232 case PREINCREMENT_EXPR
:
14233 case POSTINCREMENT_EXPR
:
14235 tree decl
= TREE_OPERAND (t
, 0);
14236 /* c_omp_for_incr_canonicalize_ptr() should have been
14237 called to massage things appropriately. */
14238 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
14240 if (orig_for_stmt
!= for_stmt
)
14242 t
= build_int_cst (TREE_TYPE (decl
), 1);
14244 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
14245 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
14246 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
14247 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
14251 case PREDECREMENT_EXPR
:
14252 case POSTDECREMENT_EXPR
:
14253 /* c_omp_for_incr_canonicalize_ptr() should have been
14254 called to massage things appropriately. */
14255 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
14256 if (orig_for_stmt
!= for_stmt
)
14258 t
= build_int_cst (TREE_TYPE (decl
), -1);
14260 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
14261 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
14262 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
14263 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
14267 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
14268 TREE_OPERAND (t
, 0) = var
;
14270 t
= TREE_OPERAND (t
, 1);
14271 switch (TREE_CODE (t
))
14274 if (TREE_OPERAND (t
, 1) == decl
)
14276 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
14277 TREE_OPERAND (t
, 0) = var
;
14283 case POINTER_PLUS_EXPR
:
14284 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
14285 TREE_OPERAND (t
, 0) = var
;
14288 gcc_unreachable ();
14291 gimplify_omp_ctxp
->in_for_exprs
= true;
14292 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
14293 is_gimple_val
, fb_rvalue
, false);
14294 ret
= MIN (ret
, tret
);
14297 tree step
= TREE_OPERAND (t
, 1);
14298 tree stept
= TREE_TYPE (decl
);
14299 if (POINTER_TYPE_P (stept
))
14301 step
= fold_convert (stept
, step
);
14302 if (TREE_CODE (t
) == MINUS_EXPR
)
14303 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
14304 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
14305 if (step
!= TREE_OPERAND (t
, 1))
14307 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
14308 &for_pre_body
, NULL
,
14309 is_gimple_val
, fb_rvalue
, false);
14310 ret
= MIN (ret
, tret
);
14313 gimplify_omp_ctxp
->in_for_exprs
= false;
14317 gcc_unreachable ();
14323 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
14326 if ((var
!= decl
|| collapse
> 1 || tile
) && orig_for_stmt
== for_stmt
)
14328 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
14329 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
14330 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
14331 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
14332 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
14333 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
14334 && OMP_CLAUSE_DECL (c
) == decl
)
14336 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
14340 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
14341 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
14342 gcc_assert (TREE_OPERAND (t
, 0) == var
);
14343 t
= TREE_OPERAND (t
, 1);
14344 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
14345 || TREE_CODE (t
) == MINUS_EXPR
14346 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
14347 gcc_assert (TREE_OPERAND (t
, 0) == var
);
14348 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
14349 is_doacross
? var
: decl
,
14350 TREE_OPERAND (t
, 1));
14353 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
14354 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
14356 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
14357 push_gimplify_context ();
14358 gimplify_assign (decl
, t
, seq
);
14359 gimple
*bind
= NULL
;
14360 if (gimplify_ctxp
->temps
)
14362 bind
= gimple_build_bind (NULL_TREE
, *seq
, NULL_TREE
);
14364 gimplify_seq_add_stmt (seq
, bind
);
14366 pop_gimplify_context (bind
);
14369 if (OMP_FOR_NON_RECTANGULAR (for_stmt
) && var
!= decl
)
14370 for (int j
= i
+ 1; j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
14372 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
14373 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
14374 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
14375 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
14376 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
14377 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
14378 gcc_assert (COMPARISON_CLASS_P (t
));
14379 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
14380 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
14381 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
14385 BITMAP_FREE (has_decl_expr
);
14386 delete allocate_uids
;
14388 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
14389 || (loop_p
&& orig_for_stmt
== for_stmt
))
14391 push_gimplify_context ();
14392 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
14394 OMP_FOR_BODY (orig_for_stmt
)
14395 = build3 (BIND_EXPR
, void_type_node
, NULL
,
14396 OMP_FOR_BODY (orig_for_stmt
), NULL
);
14397 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
14401 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
14404 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
14405 || (loop_p
&& orig_for_stmt
== for_stmt
))
14407 if (gimple_code (g
) == GIMPLE_BIND
)
14408 pop_gimplify_context (g
);
14410 pop_gimplify_context (NULL
);
14413 if (orig_for_stmt
!= for_stmt
)
14414 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
14416 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
14417 decl
= TREE_OPERAND (t
, 0);
14418 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
14419 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
14420 gimplify_omp_ctxp
= ctx
->outer_context
;
14421 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
14422 gimplify_omp_ctxp
= ctx
;
14423 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
14424 TREE_OPERAND (t
, 0) = var
;
14425 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
14426 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
14427 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
14428 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
14429 for (int j
= i
+ 1;
14430 j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
14432 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
14433 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
14434 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
14435 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
14437 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
14438 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
14440 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
14441 gcc_assert (COMPARISON_CLASS_P (t
));
14442 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
14443 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
14445 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
14446 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
14451 gimplify_adjust_omp_clauses (pre_p
, for_body
,
14452 &OMP_FOR_CLAUSES (orig_for_stmt
),
14453 TREE_CODE (orig_for_stmt
));
14456 switch (TREE_CODE (orig_for_stmt
))
14458 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
14459 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
14460 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
14461 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
14462 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
14464 gcc_unreachable ();
14466 if (loop_p
&& kind
== GF_OMP_FOR_KIND_SIMD
)
14468 gimplify_seq_add_seq (pre_p
, for_pre_body
);
14469 for_pre_body
= NULL
;
14471 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
14472 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
14474 if (orig_for_stmt
!= for_stmt
)
14475 gimple_omp_for_set_combined_p (gfor
, true);
14476 if (gimplify_omp_ctxp
14477 && (gimplify_omp_ctxp
->combined_loop
14478 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
14479 && gimplify_omp_ctxp
->outer_context
14480 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
14482 gimple_omp_for_set_combined_into_p (gfor
, true);
14483 if (gimplify_omp_ctxp
->combined_loop
)
14484 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
14486 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
14489 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
14491 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
14492 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
14493 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
14494 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
14495 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
14496 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
14497 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
14498 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
14501 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
14502 constructs with GIMPLE_OMP_TASK sandwiched in between them.
14503 The outer taskloop stands for computing the number of iterations,
14504 counts for collapsed loops and holding taskloop specific clauses.
14505 The task construct stands for the effect of data sharing on the
14506 explicit task it creates and the inner taskloop stands for expansion
14507 of the static loop inside of the explicit task construct. */
14508 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
14510 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
14511 tree task_clauses
= NULL_TREE
;
14512 tree c
= *gfor_clauses_ptr
;
14513 tree
*gtask_clauses_ptr
= &task_clauses
;
14514 tree outer_for_clauses
= NULL_TREE
;
14515 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
14516 bitmap lastprivate_uids
= NULL
;
14517 if (omp_find_clause (c
, OMP_CLAUSE_ALLOCATE
))
14519 c
= omp_find_clause (c
, OMP_CLAUSE_LASTPRIVATE
);
14522 lastprivate_uids
= BITMAP_ALLOC (NULL
);
14523 for (; c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
14524 OMP_CLAUSE_LASTPRIVATE
))
14525 bitmap_set_bit (lastprivate_uids
,
14526 DECL_UID (OMP_CLAUSE_DECL (c
)));
14528 c
= *gfor_clauses_ptr
;
14530 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
14531 switch (OMP_CLAUSE_CODE (c
))
14533 /* These clauses are allowed on task, move them there. */
14534 case OMP_CLAUSE_SHARED
:
14535 case OMP_CLAUSE_FIRSTPRIVATE
:
14536 case OMP_CLAUSE_DEFAULT
:
14537 case OMP_CLAUSE_IF
:
14538 case OMP_CLAUSE_UNTIED
:
14539 case OMP_CLAUSE_FINAL
:
14540 case OMP_CLAUSE_MERGEABLE
:
14541 case OMP_CLAUSE_PRIORITY
:
14542 case OMP_CLAUSE_REDUCTION
:
14543 case OMP_CLAUSE_IN_REDUCTION
:
14544 *gtask_clauses_ptr
= c
;
14545 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14547 case OMP_CLAUSE_PRIVATE
:
14548 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
14550 /* We want private on outer for and firstprivate
14553 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
14554 OMP_CLAUSE_FIRSTPRIVATE
);
14555 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
14556 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
14558 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
14559 *gforo_clauses_ptr
= c
;
14560 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14564 *gtask_clauses_ptr
= c
;
14565 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14568 /* These clauses go into outer taskloop clauses. */
14569 case OMP_CLAUSE_GRAINSIZE
:
14570 case OMP_CLAUSE_NUM_TASKS
:
14571 case OMP_CLAUSE_NOGROUP
:
14572 *gforo_clauses_ptr
= c
;
14573 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14575 /* Collapse clause we duplicate on both taskloops. */
14576 case OMP_CLAUSE_COLLAPSE
:
14577 *gfor_clauses_ptr
= c
;
14578 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14579 *gforo_clauses_ptr
= copy_node (c
);
14580 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
14582 /* For lastprivate, keep the clause on inner taskloop, and add
14583 a shared clause on task. If the same decl is also firstprivate,
14584 add also firstprivate clause on the inner taskloop. */
14585 case OMP_CLAUSE_LASTPRIVATE
:
14586 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
14588 /* For taskloop C++ lastprivate IVs, we want:
14589 1) private on outer taskloop
14590 2) firstprivate and shared on task
14591 3) lastprivate on inner taskloop */
14593 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
14594 OMP_CLAUSE_FIRSTPRIVATE
);
14595 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
14596 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
14598 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
14599 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
14600 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
14601 OMP_CLAUSE_PRIVATE
);
14602 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
14603 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
14604 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
14605 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
14607 *gfor_clauses_ptr
= c
;
14608 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14610 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
14611 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
14612 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
14613 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
14615 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
14617 /* Allocate clause we duplicate on task and inner taskloop
14618 if the decl is lastprivate, otherwise just put on task. */
14619 case OMP_CLAUSE_ALLOCATE
:
14620 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
14621 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
14623 /* Additionally, put firstprivate clause on task
14624 for the allocator if it is not constant. */
14626 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
14627 OMP_CLAUSE_FIRSTPRIVATE
);
14628 OMP_CLAUSE_DECL (*gtask_clauses_ptr
)
14629 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
14630 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
14632 if (lastprivate_uids
14633 && bitmap_bit_p (lastprivate_uids
,
14634 DECL_UID (OMP_CLAUSE_DECL (c
))))
14636 *gfor_clauses_ptr
= c
;
14637 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14638 *gtask_clauses_ptr
= copy_node (c
);
14639 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
14643 *gtask_clauses_ptr
= c
;
14644 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
14648 gcc_unreachable ();
14650 *gfor_clauses_ptr
= NULL_TREE
;
14651 *gtask_clauses_ptr
= NULL_TREE
;
14652 *gforo_clauses_ptr
= NULL_TREE
;
14653 BITMAP_FREE (lastprivate_uids
);
14654 gimple_set_location (gfor
, input_location
);
14655 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
14656 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
14657 NULL_TREE
, NULL_TREE
, NULL_TREE
);
14658 gimple_set_location (g
, input_location
);
14659 gimple_omp_task_set_taskloop_p (g
, true);
14660 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
14662 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
14663 gimple_omp_for_collapse (gfor
),
14664 gimple_omp_for_pre_body (gfor
));
14665 gimple_omp_for_set_pre_body (gfor
, NULL
);
14666 gimple_omp_for_set_combined_p (gforo
, true);
14667 gimple_omp_for_set_combined_into_p (gfor
, true);
14668 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
14670 tree type
= TREE_TYPE (gimple_omp_for_index (gfor
, i
));
14671 tree v
= create_tmp_var (type
);
14672 gimple_omp_for_set_index (gforo
, i
, v
);
14673 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
14674 gimple_omp_for_set_initial (gforo
, i
, t
);
14675 gimple_omp_for_set_cond (gforo
, i
,
14676 gimple_omp_for_cond (gfor
, i
));
14677 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
14678 gimple_omp_for_set_final (gforo
, i
, t
);
14679 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
14680 gcc_assert (TREE_OPERAND (t
, 0) == gimple_omp_for_index (gfor
, i
));
14681 TREE_OPERAND (t
, 0) = v
;
14682 gimple_omp_for_set_incr (gforo
, i
, t
);
14683 t
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
14684 OMP_CLAUSE_DECL (t
) = v
;
14685 OMP_CLAUSE_CHAIN (t
) = gimple_omp_for_clauses (gforo
);
14686 gimple_omp_for_set_clauses (gforo
, t
);
14687 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
14689 tree
*p1
= NULL
, *p2
= NULL
;
14690 t
= gimple_omp_for_initial (gforo
, i
);
14691 if (TREE_CODE (t
) == TREE_VEC
)
14692 p1
= &TREE_VEC_ELT (t
, 0);
14693 t
= gimple_omp_for_final (gforo
, i
);
14694 if (TREE_CODE (t
) == TREE_VEC
)
14697 p2
= &TREE_VEC_ELT (t
, 0);
14699 p1
= &TREE_VEC_ELT (t
, 0);
14704 for (j
= 0; j
< i
; j
++)
14705 if (*p1
== gimple_omp_for_index (gfor
, j
))
14707 *p1
= gimple_omp_for_index (gforo
, j
);
14712 gcc_assert (j
< i
);
14716 gimplify_seq_add_stmt (pre_p
, gforo
);
14719 gimplify_seq_add_stmt (pre_p
, gfor
);
14721 if (TREE_CODE (orig_for_stmt
) == OMP_FOR
)
14723 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
14724 unsigned lastprivate_conditional
= 0;
14726 && (ctx
->region_type
== ORT_TARGET_DATA
14727 || ctx
->region_type
== ORT_TASKGROUP
))
14728 ctx
= ctx
->outer_context
;
14729 if (ctx
&& (ctx
->region_type
& ORT_PARALLEL
) != 0)
14730 for (tree c
= gimple_omp_for_clauses (gfor
);
14731 c
; c
= OMP_CLAUSE_CHAIN (c
))
14732 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
14733 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
14734 ++lastprivate_conditional
;
14735 if (lastprivate_conditional
)
14737 struct omp_for_data fd
;
14738 omp_extract_for_data (gfor
, &fd
, NULL
);
14739 tree type
= build_array_type_nelts (unsigned_type_for (fd
.iter_type
),
14740 lastprivate_conditional
);
14741 tree var
= create_tmp_var_raw (type
);
14742 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
14743 OMP_CLAUSE_DECL (c
) = var
;
14744 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
14745 gimple_omp_for_set_clauses (gfor
, c
);
14746 omp_add_variable (ctx
, var
, GOVD_CONDTEMP
| GOVD_SEEN
);
14749 else if (TREE_CODE (orig_for_stmt
) == OMP_SIMD
)
14751 unsigned lastprivate_conditional
= 0;
14752 for (tree c
= gimple_omp_for_clauses (gfor
); c
; c
= OMP_CLAUSE_CHAIN (c
))
14753 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
14754 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
14755 ++lastprivate_conditional
;
14756 if (lastprivate_conditional
)
14758 struct omp_for_data fd
;
14759 omp_extract_for_data (gfor
, &fd
, NULL
);
14760 tree type
= unsigned_type_for (fd
.iter_type
);
14761 while (lastprivate_conditional
--)
14763 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
14764 OMP_CLAUSE__CONDTEMP_
);
14765 OMP_CLAUSE_DECL (c
) = create_tmp_var (type
);
14766 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
14767 gimple_omp_for_set_clauses (gfor
, c
);
14772 if (ret
!= GS_ALL_DONE
)
14774 *expr_p
= NULL_TREE
;
14775 return GS_ALL_DONE
;
14778 /* Helper for gimplify_omp_loop, called through walk_tree. */
/* NOTE(review): this file region is a corrupted extraction — original
   source lines are split across rows, the upstream line numbers (14778,
   14781, ...) are fused into the text, and several lines were dropped
   (the `static tree` return type at ~14780, the opening `VAR_P (*tp)`
   test at ~14783, braces, and the trailing `return NULL_TREE;`).
   Do not hand-edit tokens here; restore from the upstream gimplify.cc.  */
/* walk_tree callback: for each local VAR_DECL whose DECL_CONTEXT is
   still NULL_TREE and that is not a global, push it onto the
   vec<tree> passed via DATA and parent it to current_function_decl
   (so the later tree-copying in gimplify_omp_loop has a context;
   the caller resets DECL_CONTEXT afterwards — see lines 15074-15075).  */
14781 note_no_context_vars (tree
*tp
, int *, void *data
)
14784 && DECL_CONTEXT (*tp
) == NULL_TREE
14785 && !is_global_var (*tp
))
14787 vec
<tree
> *d
= (vec
<tree
> *) data
;
14788 d
->safe_push (*tp
);
14789 DECL_CONTEXT (*tp
) = current_function_decl
;
14794 /* Gimplify the gross structure of an OMP_LOOP statement. */
/* NOTE(review): corrupted extraction — statements are split across rows
   and many lines (braces, `break;`, default clauses, parts of calls)
   were dropped by the extractor.  The comments below describe only what
   the visible fragments establish; restore the block from upstream
   gimplify.cc before editing any code.  */
/* Overall visible flow:
   1) Ensure an ORDER clause exists (default built at 14810 if absent).
   2) Ensure a BIND clause exists; its kind is derived from the
      enclosing gimplify_omp_ctx region (teams/parallel/thread), with
      an error if no bind can be determined outside any OpenMP
      construct (14835-14837).
   3) Validate bind(parallel)/bind(teams) nesting, downgrading to
      bind(thread) with an error where invalid (14859-14862, 14879-14882).
   4) Sanity-check reduction/lastprivate clauses (inscan and task
      modifiers rejected; lastprivate must name a loop iterator,
      14932-14935).
   5) Rewrite the OMP_LOOP as an OMP_SIMD (14946) wrapped, depending on
      the bind kind (`last` = 0/1/2), in OMP_FOR / OMP_DISTRIBUTE and
      OMP_PARALLEL nodes with clauses copied/split per clause code.
   6) Finally re-gimplify the rewritten expression (15093).  */
14796 static enum gimplify_status
14797 gimplify_omp_loop (tree
*expr_p
, gimple_seq
*pre_p
)
14799 tree for_stmt
= *expr_p
;
14800 tree clauses
= OMP_FOR_CLAUSES (for_stmt
);
14801 struct gimplify_omp_ctx
*octx
= gimplify_omp_ctxp
;
14802 enum omp_clause_bind_kind kind
= OMP_CLAUSE_BIND_THREAD
;
14805 /* If order is not present, the behavior is as if order(concurrent)
14807 tree order
= omp_find_clause (clauses
, OMP_CLAUSE_ORDER
);
14808 if (order
== NULL_TREE
)
14810 order
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_ORDER
);
14811 OMP_CLAUSE_CHAIN (order
) = clauses
;
14812 OMP_FOR_CLAUSES (for_stmt
) = clauses
= order
;
/* Default the bind kind from the innermost enclosing region when the
   clause is absent (teams > parallel > thread).  */
14815 tree bind
= omp_find_clause (clauses
, OMP_CLAUSE_BIND
);
14816 if (bind
== NULL_TREE
)
14818 if (!flag_openmp
) /* flag_openmp_simd */
14820 else if (octx
&& (octx
->region_type
& ORT_TEAMS
) != 0)
14821 kind
= OMP_CLAUSE_BIND_TEAMS
;
14822 else if (octx
&& (octx
->region_type
& ORT_PARALLEL
) != 0)
14823 kind
= OMP_CLAUSE_BIND_PARALLEL
;
14826 for (; octx
; octx
= octx
->outer_context
)
14828 if ((octx
->region_type
& ORT_ACC
) != 0
14829 || octx
->region_type
== ORT_NONE
14830 || octx
->region_type
== ORT_IMPLICIT_TARGET
)
14834 if (octx
== NULL
&& !in_omp_construct
)
14835 error_at (EXPR_LOCATION (for_stmt
),
14836 "%<bind%> clause not specified on a %<loop%> "
14837 "construct not nested inside another OpenMP construct");
14839 bind
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_BIND
);
14840 OMP_CLAUSE_CHAIN (bind
) = clauses
;
14841 OMP_CLAUSE_BIND_KIND (bind
) = kind
;
14842 OMP_FOR_CLAUSES (for_stmt
) = bind
;
/* Validate an explicit (or just-built) bind clause against the
   enclosing regions, demoting invalid kinds to bind(thread).  */
14845 switch (OMP_CLAUSE_BIND_KIND (bind
))
14847 case OMP_CLAUSE_BIND_THREAD
:
14849 case OMP_CLAUSE_BIND_PARALLEL
:
14850 if (!flag_openmp
) /* flag_openmp_simd */
14852 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
14855 for (; octx
; octx
= octx
->outer_context
)
14856 if (octx
->region_type
== ORT_SIMD
14857 && omp_find_clause (octx
->clauses
, OMP_CLAUSE_BIND
) == NULL_TREE
)
14859 error_at (EXPR_LOCATION (for_stmt
),
14860 "%<bind(parallel)%> on a %<loop%> construct nested "
14861 "inside %<simd%> construct");
14862 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
14865 kind
= OMP_CLAUSE_BIND_PARALLEL
;
14867 case OMP_CLAUSE_BIND_TEAMS
:
14868 if (!flag_openmp
) /* flag_openmp_simd */
14870 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
14874 && octx
->region_type
!= ORT_IMPLICIT_TARGET
14875 && octx
->region_type
!= ORT_NONE
14876 && (octx
->region_type
& ORT_TEAMS
) == 0)
14877 || in_omp_construct
)
14879 error_at (EXPR_LOCATION (for_stmt
),
14880 "%<bind(teams)%> on a %<loop%> region not strictly "
14881 "nested inside of a %<teams%> region");
14882 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
14885 kind
= OMP_CLAUSE_BIND_TEAMS
;
14888 gcc_unreachable ();
/* Sanity-check reduction and lastprivate clauses on the loop
   construct; invalid lastprivate clauses are spliced out of the
   chain (14936) rather than kept.  */
14891 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
14892 switch (OMP_CLAUSE_CODE (*pc
))
14894 case OMP_CLAUSE_REDUCTION
:
14895 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc
))
14897 error_at (OMP_CLAUSE_LOCATION (*pc
),
14898 "%<inscan%> %<reduction%> clause on "
14899 "%qs construct", "loop");
14900 OMP_CLAUSE_REDUCTION_INSCAN (*pc
) = 0;
14902 if (OMP_CLAUSE_REDUCTION_TASK (*pc
))
14904 error_at (OMP_CLAUSE_LOCATION (*pc
),
14905 "invalid %<task%> reduction modifier on construct "
14906 "other than %<parallel%>, %qs or %<sections%>",
14907 lang_GNU_Fortran () ? "do" : "for");
14908 OMP_CLAUSE_REDUCTION_TASK (*pc
) = 0;
14910 pc
= &OMP_CLAUSE_CHAIN (*pc
);
14912 case OMP_CLAUSE_LASTPRIVATE
:
14913 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
14915 tree t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
14916 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
14917 if (OMP_CLAUSE_DECL (*pc
) == TREE_OPERAND (t
, 0))
14919 if (OMP_FOR_ORIG_DECLS (for_stmt
)
14920 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
14922 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
14925 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
14926 if (OMP_CLAUSE_DECL (*pc
) == TREE_PURPOSE (orig
))
14930 if (i
== TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)))
14932 error_at (OMP_CLAUSE_LOCATION (*pc
),
14933 "%<lastprivate%> clause on a %<loop%> construct refers "
14934 "to a variable %qD which is not the loop iterator",
14935 OMP_CLAUSE_DECL (*pc
));
14936 *pc
= OMP_CLAUSE_CHAIN (*pc
);
14939 pc
= &OMP_CLAUSE_CHAIN (*pc
);
14942 pc
= &OMP_CLAUSE_CHAIN (*pc
);
/* The loop construct is lowered as an OMP_SIMD, wrapped — per bind
   kind — in 0, 1 (parallel+for) or 2 (teams: +distribute) extra
   construct passes below.  */
14946 TREE_SET_CODE (for_stmt
, OMP_SIMD
);
14951 case OMP_CLAUSE_BIND_THREAD
: last
= 0; break;
14952 case OMP_CLAUSE_BIND_PARALLEL
: last
= 1; break;
14953 case OMP_CLAUSE_BIND_TEAMS
: last
= 2; break;
14955 for (int pass
= 1; pass
<= last
; pass
++)
/* Pass body (visible fragments): wrap the current *expr_p in a
   BIND_EXPR inside a combined OMP_PARALLEL, forwarding firstprivate
   clauses for original decls, then in an OMP_FOR (pass 1) or
   OMP_DISTRIBUTE (pass 2), copying/splitting each clause of the
   original construct as appropriate for that level.  */
14959 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
,
14960 make_node (BLOCK
));
14961 append_to_statement_list (*expr_p
, &BIND_EXPR_BODY (bind
));
14962 *expr_p
= make_node (OMP_PARALLEL
);
14963 TREE_TYPE (*expr_p
) = void_type_node
;
14964 OMP_PARALLEL_BODY (*expr_p
) = bind
;
14965 OMP_PARALLEL_COMBINED (*expr_p
) = 1;
14966 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (for_stmt
));
14967 tree
*pc
= &OMP_PARALLEL_CLAUSES (*expr_p
);
14968 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
14969 if (OMP_FOR_ORIG_DECLS (for_stmt
)
14970 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
))
14973 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
14974 if (TREE_PURPOSE (elt
) && TREE_VALUE (elt
))
14976 *pc
= build_omp_clause (UNKNOWN_LOCATION
,
14977 OMP_CLAUSE_FIRSTPRIVATE
);
14978 OMP_CLAUSE_DECL (*pc
) = TREE_VALUE (elt
);
14979 pc
= &OMP_CLAUSE_CHAIN (*pc
);
14983 tree t
= make_node (pass
== 2 ? OMP_DISTRIBUTE
: OMP_FOR
);
14984 tree
*pc
= &OMP_FOR_CLAUSES (t
);
14985 TREE_TYPE (t
) = void_type_node
;
14986 OMP_FOR_BODY (t
) = *expr_p
;
14987 SET_EXPR_LOCATION (t
, EXPR_LOCATION (for_stmt
));
14988 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
14989 switch (OMP_CLAUSE_CODE (c
))
14991 case OMP_CLAUSE_BIND
:
14992 case OMP_CLAUSE_ORDER
:
14993 case OMP_CLAUSE_COLLAPSE
:
14994 *pc
= copy_node (c
);
14995 pc
= &OMP_CLAUSE_CHAIN (*pc
);
14997 case OMP_CLAUSE_PRIVATE
:
14998 case OMP_CLAUSE_FIRSTPRIVATE
:
14999 /* Only needed on innermost. */
15001 case OMP_CLAUSE_LASTPRIVATE
:
15002 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
) && pass
!= last
)
15004 *pc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
15005 OMP_CLAUSE_FIRSTPRIVATE
);
15006 OMP_CLAUSE_DECL (*pc
) = OMP_CLAUSE_DECL (c
);
15007 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
15008 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15010 *pc
= copy_node (c
);
15011 OMP_CLAUSE_LASTPRIVATE_STMT (*pc
) = NULL_TREE
;
15012 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
15013 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
15016 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc
) = 1;
15018 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
15019 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc
) = 0;
15021 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15023 case OMP_CLAUSE_REDUCTION
:
/* Reductions are deep-copied: when a language-specific placeholder
   exists, its INIT/MERGE trees are remapped through copy_tree_body_r
   with a local copy_body_data (`id`), using note_no_context_vars to
   temporarily parent context-less locals; otherwise plain
   unshare_expr suffices (15080-15083).  */
15024 *pc
= copy_node (c
);
15025 OMP_CLAUSE_DECL (*pc
) = unshare_expr (OMP_CLAUSE_DECL (c
));
15026 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
15027 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
))
15029 auto_vec
<tree
> no_context_vars
;
15030 int walk_subtrees
= 0;
15031 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
15032 &walk_subtrees
, &no_context_vars
);
15033 if (tree p
= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
))
15034 note_no_context_vars (&p
, &walk_subtrees
, &no_context_vars
);
15035 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c
),
15036 note_no_context_vars
,
15038 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c
),
15039 note_no_context_vars
,
15042 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
)
15043 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
));
15044 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
15045 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
)
15046 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
));
15048 hash_map
<tree
, tree
> decl_map
;
15049 decl_map
.put (OMP_CLAUSE_DECL (c
), OMP_CLAUSE_DECL (c
));
15050 decl_map
.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
15051 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
));
15052 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
15053 decl_map
.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
15054 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
));
15057 memset (&id
, 0, sizeof (id
));
15058 id
.src_fn
= current_function_decl
;
15059 id
.dst_fn
= current_function_decl
;
15060 id
.src_cfun
= cfun
;
15061 id
.decl_map
= &decl_map
;
15062 id
.copy_decl
= copy_decl_no_change
;
15063 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
15064 id
.transform_new_cfg
= true;
15065 id
.transform_return_to_modify
= false;
15067 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc
), copy_tree_body_r
,
15069 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc
), copy_tree_body_r
,
/* Undo the temporary DECL_CONTEXT set by note_no_context_vars, on
   both the originals and their copies.  */
15072 for (tree d
: no_context_vars
)
15074 DECL_CONTEXT (d
) = NULL_TREE
;
15075 DECL_CONTEXT (*decl_map
.get (d
)) = NULL_TREE
;
15080 OMP_CLAUSE_REDUCTION_INIT (*pc
)
15081 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c
));
15082 OMP_CLAUSE_REDUCTION_MERGE (*pc
)
15083 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c
));
15085 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15088 gcc_unreachable ();
15093 return gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_stmt
, fb_none
);
15097 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
15098 of OMP_TARGET's body. */
/* NOTE(review): corrupted extraction — the `static tree` return type,
   the case label(s) that return *tp (presumably OMP_TEAMS — confirm
   against upstream), braces and the final return were dropped.
   walk_tree callback: subtree walking is disabled by default (15103)
   and re-enabled only for STATEMENT_LIST nodes (15109-15110), so the
   search descends through statement lists only.  */
15101 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
15103 *walk_subtrees
= 0;
15104 switch (TREE_CODE (*tp
))
15109 case STATEMENT_LIST
:
15110 *walk_subtrees
= 1;
15118 /* Helper function of optimize_target_teams, determine if the expression
15119 can be computed safely before the target construct on the host. */
/* NOTE(review): corrupted extraction — case labels for the decl and
   TARGET_EXPR arms, several `return NULL_TREE;` / `return *tp;` lines,
   and braces were dropped.  Restore from upstream before editing.
   walk_tree callback used as a "reject" predicate: it inspects decls
   (must be integral, non-volatile, no value-expr, not thread-local,
   and visible on the target per the splay-tree lookup at 15154 and the
   GOVD_* flag tests at 15158-15167), allows a whitelist of integral
   arithmetic codes (15184-15206) and comparisons (15211), and rejects
   everything else.  */
15122 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
15128 *walk_subtrees
= 0;
15131 switch (TREE_CODE (*tp
))
15136 *walk_subtrees
= 0;
15137 if (error_operand_p (*tp
)
15138 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
15139 || DECL_HAS_VALUE_EXPR_P (*tp
)
15140 || DECL_THREAD_LOCAL_P (*tp
)
15141 || TREE_SIDE_EFFECTS (*tp
)
15142 || TREE_THIS_VOLATILE (*tp
))
15144 if (is_global_var (*tp
)
15145 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
15146 || lookup_attribute ("omp declare target link",
15147 DECL_ATTRIBUTES (*tp
))))
15150 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
15151 && !is_global_var (*tp
)
15152 && decl_function_context (*tp
) == current_function_decl
)
15154 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
15155 (splay_tree_key
) *tp
);
15158 if (gimplify_omp_ctxp
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
15162 else if (n
->value
& GOVD_LOCAL
)
15164 else if (n
->value
& GOVD_FIRSTPRIVATE
)
15166 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
15167 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
15171 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
15175 if (TARGET_EXPR_INITIAL (*tp
)
15176 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
15178 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
15179 walk_subtrees
, NULL
);
15180 /* Allow some reasonable subset of integral arithmetics. */
15184 case TRUNC_DIV_EXPR
:
15185 case CEIL_DIV_EXPR
:
15186 case FLOOR_DIV_EXPR
:
15187 case ROUND_DIV_EXPR
:
15188 case TRUNC_MOD_EXPR
:
15189 case CEIL_MOD_EXPR
:
15190 case FLOOR_MOD_EXPR
:
15191 case ROUND_MOD_EXPR
:
15193 case EXACT_DIV_EXPR
:
15204 case NON_LVALUE_EXPR
:
15206 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
15209 /* And disallow anything else, except for comparisons. */
15211 if (COMPARISON_CLASS_P (*tp
))
15217 /* Try to determine if the num_teams and/or thread_limit expressions
15218 can have their values determined already before entering the
15220 INTEGER_CSTs trivially are,
15221 integral decls that are firstprivate (explicitly or implicitly)
15222 or explicitly map(always, to:) or map(always, tofrom:) on the target
15223 region too, and expressions involving simple arithmetics on those
15224 too, function calls are not ok, dereferencing something neither etc.
15225 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
15226 EXPR based on what we find:
15227 0 stands for clause not specified at all, use implementation default
15228 -1 stands for value that can't be determined easily before entering
15229 the target construct.
15230 If teams construct is not present at all, use 1 for num_teams
15231 and 0 for thread_limit (only one team is involved, and the thread
15232 limit is implementation defined. */
/* NOTE(review): corrupted extraction — declarations of `c`, `p`,
   `expr`, early returns, braces and some continuation lines were
   dropped.  Do not hand-edit tokens; restore from upstream.
   Visible flow: find the OMP_TEAMS in the target body with
   find_omp_teams (15238); walk its NUM_TEAMS / THREAD_LIMIT clauses,
   classifying each expression via computable_teams_clause and
   gimplifying computable ones in the outer context (note the
   save/restore of gimplify_omp_ctxp around gimplify_expr at
   15267-15277 and 15304-15312, including on the failure path);
   finally attach THREAD_LIMIT (if absent) and NUM_TEAMS clauses
   to the OMP_TARGET (15316-15327).  */
15235 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
15237 tree body
= OMP_BODY (target
);
15238 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
15239 tree num_teams_lower
= NULL_TREE
;
15240 tree num_teams_upper
= integer_zero_node
;
15241 tree thread_limit
= integer_zero_node
;
15242 location_t num_teams_loc
= EXPR_LOCATION (target
);
15243 location_t thread_limit_loc
= EXPR_LOCATION (target
);
15245 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
15247 if (teams
== NULL_TREE
)
15248 num_teams_upper
= integer_one_node
;
15250 for (c
= OMP_TEAMS_CLAUSES (teams
); c
; c
= OMP_CLAUSE_CHAIN (c
))
15252 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
15254 p
= &num_teams_upper
;
15255 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
15256 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
))
15258 expr
= OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
);
15259 if (TREE_CODE (expr
) == INTEGER_CST
)
15260 num_teams_lower
= expr
;
15261 else if (walk_tree (&expr
, computable_teams_clause
,
15263 num_teams_lower
= integer_minus_one_node
;
15266 num_teams_lower
= expr
;
15267 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
15268 if (gimplify_expr (&num_teams_lower
, pre_p
, NULL
,
15269 is_gimple_val
, fb_rvalue
, false)
15272 gimplify_omp_ctxp
= target_ctx
;
15273 num_teams_lower
= integer_minus_one_node
;
15277 gimplify_omp_ctxp
= target_ctx
;
15278 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
15279 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
)
15285 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
15288 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
/* Shared tail for both clause kinds: *p already points at the value
   slot (num_teams_upper or thread_limit) to fill in.  */
15292 expr
= OMP_CLAUSE_OPERAND (c
, 0);
15293 if (TREE_CODE (expr
) == INTEGER_CST
)
15298 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
15300 *p
= integer_minus_one_node
;
15304 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
15305 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
15308 gimplify_omp_ctxp
= target_ctx
;
15309 *p
= integer_minus_one_node
;
15312 gimplify_omp_ctxp
= target_ctx
;
15313 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
15314 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
15316 if (!omp_find_clause (OMP_TARGET_CLAUSES (target
), OMP_CLAUSE_THREAD_LIMIT
))
15318 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
15319 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
15320 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
15321 OMP_TARGET_CLAUSES (target
) = c
;
15323 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
15324 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c
) = num_teams_upper
;
15325 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c
) = num_teams_lower
;
15326 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
15327 OMP_TARGET_CLAUSES (target
) = c
;
15330 /* Gimplify the gross structure of several OMP constructs. */
/* NOTE(review): corrupted extraction — the `static void` return type,
   several case labels (OMP_SECTIONS, OMP_SINGLE, OMP_SCOPE, OMP_TARGET,
   OMP_TEAMS, OACC_* — inferable from the ort assignments but dropped),
   `break;` statements and braces are missing.  Restore from upstream.
   Visible flow: map the tree code to an omp_region_type (15344-15374),
   scan clauses, run optimize_target_teams for OMP_TARGET (15386),
   gimplify the body — inside a fresh gimplify context for target/
   host-teams regions (15390-15395), wrapping target-data bodies in a
   GIMPLE_TRY_FINALLY that calls the matching *_END_DATA builtin
   (15398-15417) — then adjust clauses and build the final GIMPLE
   statement per construct (15426-15494).  */
15333 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
15335 tree expr
= *expr_p
;
15337 gimple_seq body
= NULL
;
15338 enum omp_region_type ort
;
15340 switch (TREE_CODE (expr
))
15344 ort
= ORT_WORKSHARE
;
15347 ort
= ORT_TASKGROUP
;
15350 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
15353 ort
= ORT_ACC_KERNELS
;
15355 case OACC_PARALLEL
:
15356 ort
= ORT_ACC_PARALLEL
;
15359 ort
= ORT_ACC_SERIAL
;
15362 ort
= ORT_ACC_DATA
;
15364 case OMP_TARGET_DATA
:
15365 ort
= ORT_TARGET_DATA
;
15368 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
15369 if (gimplify_omp_ctxp
== NULL
15370 || gimplify_omp_ctxp
->region_type
== ORT_IMPLICIT_TARGET
)
15371 ort
= (enum omp_region_type
) (ort
| ORT_HOST_TEAMS
);
15373 case OACC_HOST_DATA
:
15374 ort
= ORT_ACC_HOST_DATA
;
15377 gcc_unreachable ();
/* Non-OpenACC constructs reset in_omp_construct for the duration of
   clause scanning / body gimplification; restored at 15424.  */
15380 bool save_in_omp_construct
= in_omp_construct
;
15381 if ((ort
& ORT_ACC
) == 0)
15382 in_omp_construct
= false;
15383 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
15385 if (TREE_CODE (expr
) == OMP_TARGET
)
15386 optimize_target_teams (expr
, pre_p
);
15387 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
15388 || (ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
15390 push_gimplify_context ();
15391 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
15392 if (gimple_code (g
) == GIMPLE_BIND
)
15393 pop_gimplify_context (g
);
15395 pop_gimplify_context (NULL
);
15396 if ((ort
& ORT_TARGET_DATA
) != 0)
15398 enum built_in_function end_ix
;
15399 switch (TREE_CODE (expr
))
15402 case OACC_HOST_DATA
:
15403 end_ix
= BUILT_IN_GOACC_DATA_END
;
15405 case OMP_TARGET_DATA
:
15406 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
15409 gcc_unreachable ();
15411 tree fn
= builtin_decl_explicit (end_ix
);
15412 g
= gimple_build_call (fn
, 0);
15413 gimple_seq cleanup
= NULL
;
15414 gimple_seq_add_stmt (&cleanup
, g
);
15415 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
15417 gimple_seq_add_stmt (&body
, g
);
15421 gimplify_and_add (OMP_BODY (expr
), &body
);
15422 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
15424 in_omp_construct
= save_in_omp_construct
;
15426 switch (TREE_CODE (expr
))
15429 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
15430 OMP_CLAUSES (expr
));
15432 case OACC_HOST_DATA
:
15433 if (omp_find_clause (OMP_CLAUSES (expr
), OMP_CLAUSE_IF_PRESENT
))
15435 for (tree c
= OMP_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
15436 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
15437 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
) = 1;
15440 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
15441 OMP_CLAUSES (expr
));
15444 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
15445 OMP_CLAUSES (expr
));
15447 case OACC_PARALLEL
:
15448 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
15449 OMP_CLAUSES (expr
));
15452 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_SERIAL
,
15453 OMP_CLAUSES (expr
));
15456 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
15459 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
15462 stmt
= gimple_build_omp_scope (body
, OMP_CLAUSES (expr
));
15465 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
15466 OMP_CLAUSES (expr
));
15468 case OMP_TARGET_DATA
:
15469 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
15470 to be evaluated before the use_device_{ptr,addr} clauses if they
15471 refer to the same variables. */
15473 tree use_device_clauses
;
15474 tree
*pc
, *uc
= &use_device_clauses
;
15475 for (pc
= &OMP_CLAUSES (expr
); *pc
; )
15476 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_PTR
15477 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
15480 *pc
= OMP_CLAUSE_CHAIN (*pc
);
15481 uc
= &OMP_CLAUSE_CHAIN (*uc
);
15484 pc
= &OMP_CLAUSE_CHAIN (*pc
);
15486 *pc
= use_device_clauses
;
15487 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
15488 OMP_CLAUSES (expr
));
15492 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
15493 if ((ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
15494 gimple_omp_teams_set_host (as_a
<gomp_teams
*> (stmt
), true);
15497 gcc_unreachable ();
15500 gimplify_seq_add_stmt (pre_p
, stmt
);
15501 *expr_p
= NULL_TREE
;
15504 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
15505 target update constructs. */
/* NOTE(review): corrupted extraction — the `static void` return type,
   the declaration of `kind`/`stmt`, several case labels (OACC_UPDATE
   at ~15525 — inferable from the GF_OMP_TARGET_KIND_OACC_UPDATE
   assignment), `break;` statements and braces were dropped.
   Visible flow: map the construct to a GF_OMP_TARGET_KIND_* (15517-
   15536), scan/adjust the standalone clauses, then rewrite map kinds:
   for OACC_UPDATE with if_present, FORCE_TO/FORCE_FROM become TO/FROM
   (15555-15559); for OACC_EXIT_DATA with finalize, FROM becomes
   FORCE_FROM, RELEASE becomes DELETE, DETACH becomes FORCE_DETACH
   (15576-15597), asserting grouping invariants for TO_PSET/POINTER
   (15594).  Finally a bodyless GIMPLE_OMP_TARGET is emitted (15607).  */
15508 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
15510 tree expr
= *expr_p
;
15513 enum omp_region_type ort
= ORT_WORKSHARE
;
15515 switch (TREE_CODE (expr
))
15517 case OACC_ENTER_DATA
:
15518 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_DATA
;
15521 case OACC_EXIT_DATA
:
15522 kind
= GF_OMP_TARGET_KIND_OACC_EXIT_DATA
;
15526 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
15529 case OMP_TARGET_UPDATE
:
15530 kind
= GF_OMP_TARGET_KIND_UPDATE
;
15532 case OMP_TARGET_ENTER_DATA
:
15533 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
15535 case OMP_TARGET_EXIT_DATA
:
15536 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
15539 gcc_unreachable ();
15541 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
15542 ort
, TREE_CODE (expr
));
15543 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
15545 if (TREE_CODE (expr
) == OACC_UPDATE
15546 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
15547 OMP_CLAUSE_IF_PRESENT
))
15549 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
15551 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
15552 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
15553 switch (OMP_CLAUSE_MAP_KIND (c
))
15555 case GOMP_MAP_FORCE_TO
:
15556 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
15558 case GOMP_MAP_FORCE_FROM
:
15559 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FROM
);
15565 else if (TREE_CODE (expr
) == OACC_EXIT_DATA
15566 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
15567 OMP_CLAUSE_FINALIZE
))
15569 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
15571 bool have_clause
= false;
15572 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
15573 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
15574 switch (OMP_CLAUSE_MAP_KIND (c
))
15576 case GOMP_MAP_FROM
:
15577 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_FROM
);
15578 have_clause
= true;
15580 case GOMP_MAP_RELEASE
:
15581 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_DELETE
);
15582 have_clause
= true;
15584 case GOMP_MAP_TO_PSET
:
15585 /* Fortran arrays with descriptors must map that descriptor when
15586 doing standalone "attach" operations (in OpenACC). In that
15587 case GOMP_MAP_TO_PSET appears by itself with no preceding
15588 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
15590 case GOMP_MAP_POINTER
:
15591 /* TODO PR92929: we may see these here, but they'll always follow
15592 one of the clauses above, and will be handled by libgomp as
15593 one group, so no handling required here. */
15594 gcc_assert (have_clause
);
15596 case GOMP_MAP_DETACH
:
15597 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_DETACH
);
15598 have_clause
= false;
15600 case GOMP_MAP_STRUCT
:
15601 have_clause
= false;
15604 gcc_unreachable ();
15607 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
15609 gimplify_seq_add_stmt (pre_p
, stmt
);
15610 *expr_p
= NULL_TREE
;
15613 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
15614 stabilized the lhs of the atomic operation as *ADDR. Return true if
15615 EXPR is this stabilized form. */
/* NOTE(review): corrupted extraction — the `static bool` return type,
   braces, and the trailing return statements (presumably
   `return true;` after the 15643 test and a final `return false;` —
   confirm against upstream) were dropped.
   Visible logic: strip useless type conversions, then either
   (a) EXPR is INDIRECT_REF: peel matching conversion layers off both
   EXPR and ADDR in lockstep (15628-15635) and compare either by
   pointer identity or via matching ADDR_EXPR operands (15639-15641),
   or (b) compare EXPR directly against ADDR's ADDR_EXPR operand
   (15643).  */
15618 goa_lhs_expr_p (tree expr
, tree addr
)
15620 /* Also include casts to other type variants. The C front end is fond
15621 of adding these for e.g. volatile variables. This is like
15622 STRIP_TYPE_NOPS but includes the main variant lookup. */
15623 STRIP_USELESS_TYPE_CONVERSION (expr
);
15625 if (TREE_CODE (expr
) == INDIRECT_REF
)
15627 expr
= TREE_OPERAND (expr
, 0);
15628 while (expr
!= addr
15629 && (CONVERT_EXPR_P (expr
)
15630 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
15631 && TREE_CODE (expr
) == TREE_CODE (addr
)
15632 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
15634 expr
= TREE_OPERAND (expr
, 0);
15635 addr
= TREE_OPERAND (addr
, 0);
15639 return (TREE_CODE (addr
) == ADDR_EXPR
15640 && TREE_CODE (expr
) == ADDR_EXPR
15641 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
15643 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
15648 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
15649 expression does not involve the lhs, evaluate it into a temporary.
15650 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
15651 or -1 if an error was encountered. */
15654 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
15655 tree lhs_var
, tree
&target_expr
, bool rhs
, int depth
)
15657 tree expr
= *expr_p
;
15660 if (goa_lhs_expr_p (expr
, lhs_addr
))
15666 if (is_gimple_val (expr
))
15669 /* Maximum depth of lhs in expression is for the
15670 __builtin_clear_padding (...), __builtin_clear_padding (...),
15671 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
15675 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
15678 case tcc_comparison
:
15679 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
15680 lhs_var
, target_expr
, true, depth
);
15683 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
15684 lhs_var
, target_expr
, true, depth
);
15686 case tcc_expression
:
15687 switch (TREE_CODE (expr
))
15689 case TRUTH_ANDIF_EXPR
:
15690 case TRUTH_ORIF_EXPR
:
15691 case TRUTH_AND_EXPR
:
15692 case TRUTH_OR_EXPR
:
15693 case TRUTH_XOR_EXPR
:
15694 case BIT_INSERT_EXPR
:
15695 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
15696 lhs_addr
, lhs_var
, target_expr
, true,
15699 case TRUTH_NOT_EXPR
:
15700 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
15701 lhs_addr
, lhs_var
, target_expr
, true,
15705 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
, lhs_var
,
15706 target_expr
, true, depth
))
15708 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
15709 lhs_addr
, lhs_var
, target_expr
, true,
15711 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
15712 lhs_addr
, lhs_var
, target_expr
, false,
15717 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
, lhs_var
,
15718 target_expr
, true, depth
))
15720 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
15721 lhs_addr
, lhs_var
, target_expr
, false,
15724 case COMPOUND_EXPR
:
15725 /* Break out any preevaluations from cp_build_modify_expr. */
15726 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
15727 expr
= TREE_OPERAND (expr
, 1))
15729 /* Special-case __builtin_clear_padding call before
15730 __builtin_memcmp. */
15731 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
)
15733 tree fndecl
= get_callee_fndecl (TREE_OPERAND (expr
, 0));
15735 && fndecl_built_in_p (fndecl
, BUILT_IN_CLEAR_PADDING
)
15736 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
15738 || goa_stabilize_expr (&TREE_OPERAND (expr
, 0), NULL
,
15740 target_expr
, true, depth
)))
15744 saw_lhs
= goa_stabilize_expr (&TREE_OPERAND (expr
, 0),
15745 pre_p
, lhs_addr
, lhs_var
,
15746 target_expr
, true, depth
);
15747 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1),
15748 pre_p
, lhs_addr
, lhs_var
,
15749 target_expr
, rhs
, depth
);
15755 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
15758 return goa_stabilize_expr (&expr
, pre_p
, lhs_addr
, lhs_var
,
15759 target_expr
, rhs
, depth
);
15761 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
,
15762 target_expr
, rhs
, depth
);
15764 if (!goa_stabilize_expr (&TREE_OPERAND (expr
, 0), NULL
, lhs_addr
,
15765 lhs_var
, target_expr
, true, depth
))
15767 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
15768 lhs_addr
, lhs_var
, target_expr
, true,
15770 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
15771 lhs_addr
, lhs_var
, target_expr
, true,
15773 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 2), pre_p
,
15774 lhs_addr
, lhs_var
, target_expr
, true,
15778 if (TARGET_EXPR_INITIAL (expr
))
15780 if (pre_p
&& !goa_stabilize_expr (expr_p
, NULL
, lhs_addr
,
15781 lhs_var
, target_expr
, true,
15784 if (expr
== target_expr
)
15788 saw_lhs
= goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr
),
15789 pre_p
, lhs_addr
, lhs_var
,
15790 target_expr
, true, depth
);
15791 if (saw_lhs
&& target_expr
== NULL_TREE
&& pre_p
)
15792 target_expr
= expr
;
15800 case tcc_reference
:
15801 if (TREE_CODE (expr
) == BIT_FIELD_REF
15802 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
15803 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
15804 lhs_addr
, lhs_var
, target_expr
, true,
15808 if (TREE_CODE (expr
) == CALL_EXPR
)
15810 if (tree fndecl
= get_callee_fndecl (expr
))
15811 if (fndecl_built_in_p (fndecl
, BUILT_IN_CLEAR_PADDING
)
15812 || fndecl_built_in_p (fndecl
, BUILT_IN_MEMCMP
))
15814 int nargs
= call_expr_nargs (expr
);
15815 for (int i
= 0; i
< nargs
; i
++)
15816 saw_lhs
|= goa_stabilize_expr (&CALL_EXPR_ARG (expr
, i
),
15817 pre_p
, lhs_addr
, lhs_var
,
15818 target_expr
, true, depth
);
15827 if (saw_lhs
== 0 && pre_p
)
15829 enum gimplify_status gs
;
15830 if (TREE_CODE (expr
) == CALL_EXPR
&& VOID_TYPE_P (TREE_TYPE (expr
)))
15832 gimplify_stmt (&expr
, pre_p
);
15836 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
15838 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
);
15839 if (gs
!= GS_ALL_DONE
)
15846 /* Gimplify an OMP_ATOMIC statement. */
15848 static enum gimplify_status
15849 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
15851 tree addr
= TREE_OPERAND (*expr_p
, 0);
15852 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
15853 ? NULL
: TREE_OPERAND (*expr_p
, 1);
15854 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
15856 gomp_atomic_load
*loadstmt
;
15857 gomp_atomic_store
*storestmt
;
15858 tree target_expr
= NULL_TREE
;
15860 tmp_load
= create_tmp_reg (type
);
15862 && goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
, target_expr
,
15866 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
15870 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
,
15871 OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
15872 gimplify_seq_add_stmt (pre_p
, loadstmt
);
15875 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
15876 representatives. Use BIT_FIELD_REF on the lhs instead. */
15878 if (TREE_CODE (rhs
) == COND_EXPR
)
15879 rhsarg
= TREE_OPERAND (rhs
, 1);
15880 if (TREE_CODE (rhsarg
) == BIT_INSERT_EXPR
15881 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load
)))
15883 tree bitpos
= TREE_OPERAND (rhsarg
, 2);
15884 tree op1
= TREE_OPERAND (rhsarg
, 1);
15886 tree tmp_store
= tmp_load
;
15887 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_CAPTURE_OLD
)
15888 tmp_store
= get_initialized_tmp_var (tmp_load
, pre_p
);
15889 if (INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
15890 bitsize
= bitsize_int (TYPE_PRECISION (TREE_TYPE (op1
)));
15892 bitsize
= TYPE_SIZE (TREE_TYPE (op1
));
15893 gcc_assert (TREE_OPERAND (rhsarg
, 0) == tmp_load
);
15894 tree t
= build2_loc (EXPR_LOCATION (rhsarg
),
15895 MODIFY_EXPR
, void_type_node
,
15896 build3_loc (EXPR_LOCATION (rhsarg
),
15897 BIT_FIELD_REF
, TREE_TYPE (op1
),
15898 tmp_store
, bitsize
, bitpos
), op1
);
15899 if (TREE_CODE (rhs
) == COND_EXPR
)
15900 t
= build3_loc (EXPR_LOCATION (rhs
), COND_EXPR
, void_type_node
,
15901 TREE_OPERAND (rhs
, 0), t
, void_node
);
15902 gimplify_and_add (t
, pre_p
);
15905 bool save_allow_rhs_cond_expr
= gimplify_ctxp
->allow_rhs_cond_expr
;
15906 if (TREE_CODE (rhs
) == COND_EXPR
)
15907 gimplify_ctxp
->allow_rhs_cond_expr
= true;
15908 enum gimplify_status gs
= gimplify_expr (&rhs
, pre_p
, NULL
,
15909 is_gimple_val
, fb_rvalue
);
15910 gimplify_ctxp
->allow_rhs_cond_expr
= save_allow_rhs_cond_expr
;
15911 if (gs
!= GS_ALL_DONE
)
15915 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
15918 = gimple_build_omp_atomic_store (rhs
, OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
15919 if (TREE_CODE (*expr_p
) != OMP_ATOMIC_READ
&& OMP_ATOMIC_WEAK (*expr_p
))
15921 gimple_omp_atomic_set_weak (loadstmt
);
15922 gimple_omp_atomic_set_weak (storestmt
);
15924 gimplify_seq_add_stmt (pre_p
, storestmt
);
15925 switch (TREE_CODE (*expr_p
))
15927 case OMP_ATOMIC_READ
:
15928 case OMP_ATOMIC_CAPTURE_OLD
:
15929 *expr_p
= tmp_load
;
15930 gimple_omp_atomic_set_need_value (loadstmt
);
15932 case OMP_ATOMIC_CAPTURE_NEW
:
15934 gimple_omp_atomic_set_need_value (storestmt
);
15941 return GS_ALL_DONE
;
15944 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
15945 body, and adding some EH bits. */
15947 static enum gimplify_status
15948 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
15950 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
15952 gtransaction
*trans_stmt
;
15953 gimple_seq body
= NULL
;
15956 /* Wrap the transaction body in a BIND_EXPR so we have a context
15957 where to put decls for OMP. */
15958 if (TREE_CODE (tbody
) != BIND_EXPR
)
15960 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
15961 TREE_SIDE_EFFECTS (bind
) = 1;
15962 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
15963 TRANSACTION_EXPR_BODY (expr
) = bind
;
15966 push_gimplify_context ();
15967 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
15969 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
15970 pop_gimplify_context (body_stmt
);
15972 trans_stmt
= gimple_build_transaction (body
);
15973 if (TRANSACTION_EXPR_OUTER (expr
))
15974 subcode
= GTMA_IS_OUTER
;
15975 else if (TRANSACTION_EXPR_RELAXED (expr
))
15976 subcode
= GTMA_IS_RELAXED
;
15977 gimple_transaction_set_subcode (trans_stmt
, subcode
);
15979 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
15987 *expr_p
= NULL_TREE
;
15988 return GS_ALL_DONE
;
15991 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
15992 is the OMP_BODY of the original EXPR (which has already been
15993 gimplified so it's not present in the EXPR).
15995 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
15998 gimplify_omp_ordered (tree expr
, gimple_seq body
)
16003 tree source_c
= NULL_TREE
;
16004 tree sink_c
= NULL_TREE
;
16006 if (gimplify_omp_ctxp
)
16008 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
16009 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
16010 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ())
16012 error_at (OMP_CLAUSE_LOCATION (c
),
16013 "%<ordered%> construct with %qs clause must be "
16014 "closely nested inside a loop with %<ordered%> clause",
16015 OMP_CLAUSE_DOACROSS_DEPEND (c
) ? "depend" : "doacross");
16018 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
16019 && OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SINK
)
16023 if (OMP_CLAUSE_DECL (c
) == NULL_TREE
)
16024 continue; /* omp_cur_iteration - 1 */
16025 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
16026 decls
&& TREE_CODE (decls
) == TREE_LIST
;
16027 decls
= TREE_CHAIN (decls
), ++i
)
16028 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
16030 else if (TREE_VALUE (decls
)
16031 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
16033 error_at (OMP_CLAUSE_LOCATION (c
),
16034 "variable %qE is not an iteration "
16035 "of outermost loop %d, expected %qE",
16036 TREE_VALUE (decls
), i
+ 1,
16037 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
16043 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
16044 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
16046 error_at (OMP_CLAUSE_LOCATION (c
),
16047 "number of variables in %qs clause with "
16048 "%<sink%> modifier does not match number of "
16049 "iteration variables",
16050 OMP_CLAUSE_DOACROSS_DEPEND (c
)
16051 ? "depend" : "doacross");
16055 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DOACROSS
16056 && OMP_CLAUSE_DOACROSS_KIND (c
) == OMP_CLAUSE_DOACROSS_SOURCE
)
16060 error_at (OMP_CLAUSE_LOCATION (c
),
16061 "more than one %qs clause with %<source%> "
16062 "modifier on an %<ordered%> construct",
16063 OMP_CLAUSE_DOACROSS_DEPEND (source_c
)
16064 ? "depend" : "doacross");
16071 if (source_c
&& sink_c
)
16073 error_at (OMP_CLAUSE_LOCATION (source_c
),
16074 "%qs clause with %<source%> modifier specified "
16075 "together with %qs clauses with %<sink%> modifier "
16076 "on the same construct",
16077 OMP_CLAUSE_DOACROSS_DEPEND (source_c
) ? "depend" : "doacross",
16078 OMP_CLAUSE_DOACROSS_DEPEND (sink_c
) ? "depend" : "doacross");
16083 return gimple_build_nop ();
16084 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
16087 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
16088 expression produces a value to be used as an operand inside a GIMPLE
16089 statement, the value will be stored back in *EXPR_P. This value will
16090 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
16091 an SSA_NAME. The corresponding sequence of GIMPLE statements is
16092 emitted in PRE_P and POST_P.
16094 Additionally, this process may overwrite parts of the input
16095 expression during gimplification. Ideally, it should be
16096 possible to do non-destructive gimplification.
16098 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
16099 the expression needs to evaluate to a value to be used as
16100 an operand in a GIMPLE statement, this value will be stored in
16101 *EXPR_P on exit. This happens when the caller specifies one
16102 of fb_lvalue or fb_rvalue fallback flags.
16104 PRE_P will contain the sequence of GIMPLE statements corresponding
16105 to the evaluation of EXPR and all the side-effects that must
16106 be executed before the main expression. On exit, the last
16107 statement of PRE_P is the core statement being gimplified. For
16108 instance, when gimplifying 'if (++a)' the last statement in
16109 PRE_P will be 'if (t.1)' where t.1 is the result of
16110 pre-incrementing 'a'.
16112 POST_P will contain the sequence of GIMPLE statements corresponding
16113 to the evaluation of all the side-effects that must be executed
16114 after the main expression. If this is NULL, the post
16115 side-effects are stored at the end of PRE_P.
16117 The reason why the output is split in two is to handle post
16118 side-effects explicitly. In some cases, an expression may have
16119 inner and outer post side-effects which need to be emitted in
16120 an order different from the one given by the recursive
16121 traversal. For instance, for the expression (*p--)++ the post
16122 side-effects of '--' must actually occur *after* the post
16123 side-effects of '++'. However, gimplification will first visit
16124 the inner expression, so if a separate POST sequence was not
16125 used, the resulting sequence would be:
16132 However, the post-decrement operation in line #2 must not be
16133 evaluated until after the store to *p at line #4, so the
16134 correct sequence should be:
16141 So, by specifying a separate post queue, it is possible
16142 to emit the post side-effects in the correct order.
16143 If POST_P is NULL, an internal queue will be used. Before
16144 returning to the caller, the sequence POST_P is appended to
16145 the main output sequence PRE_P.
16147 GIMPLE_TEST_F points to a function that takes a tree T and
16148 returns nonzero if T is in the GIMPLE form requested by the
16149 caller. The GIMPLE predicates are in gimple.cc.
16151 FALLBACK tells the function what sort of a temporary we want if
16152 gimplification cannot produce an expression that complies with
16155 fb_none means that no temporary should be generated
16156 fb_rvalue means that an rvalue is OK to generate
16157 fb_lvalue means that an lvalue is OK to generate
16158 fb_either means that either is OK, but an lvalue is preferable.
16159 fb_mayfail means that gimplification may fail (in which case
16160 GS_ERROR will be returned)
16162 The return value is either GS_ERROR or GS_ALL_DONE, since this
16163 function iterates until EXPR is completely gimplified or an error
16166 enum gimplify_status
16167 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
16168 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
16171 gimple_seq internal_pre
= NULL
;
16172 gimple_seq internal_post
= NULL
;
16175 location_t saved_location
;
16176 enum gimplify_status ret
;
16177 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
16180 save_expr
= *expr_p
;
16181 if (save_expr
== NULL_TREE
)
16182 return GS_ALL_DONE
;
16184 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
16185 is_statement
= gimple_test_f
== is_gimple_stmt
;
16187 gcc_assert (pre_p
);
16189 /* Consistency checks. */
16190 if (gimple_test_f
== is_gimple_reg
)
16191 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
16192 else if (gimple_test_f
== is_gimple_val
16193 || gimple_test_f
== is_gimple_call_addr
16194 || gimple_test_f
== is_gimple_condexpr_for_cond
16195 || gimple_test_f
== is_gimple_mem_rhs
16196 || gimple_test_f
== is_gimple_mem_rhs_or_call
16197 || gimple_test_f
== is_gimple_reg_rhs
16198 || gimple_test_f
== is_gimple_reg_rhs_or_call
16199 || gimple_test_f
== is_gimple_asm_val
16200 || gimple_test_f
== is_gimple_mem_ref_addr
)
16201 gcc_assert (fallback
& fb_rvalue
);
16202 else if (gimple_test_f
== is_gimple_min_lval
16203 || gimple_test_f
== is_gimple_lvalue
)
16204 gcc_assert (fallback
& fb_lvalue
);
16205 else if (gimple_test_f
== is_gimple_addressable
)
16206 gcc_assert (fallback
& fb_either
);
16207 else if (gimple_test_f
== is_gimple_stmt
)
16208 gcc_assert (fallback
== fb_none
);
16211 /* We should have recognized the GIMPLE_TEST_F predicate to
16212 know what kind of fallback to use in case a temporary is
16213 needed to hold the value or address of *EXPR_P. */
16214 gcc_unreachable ();
16217 /* We used to check the predicate here and return immediately if it
16218 succeeds. This is wrong; the design is for gimplification to be
16219 idempotent, and for the predicates to only test for valid forms, not
16220 whether they are fully simplified. */
16222 pre_p
= &internal_pre
;
16224 if (post_p
== NULL
)
16225 post_p
= &internal_post
;
16227 /* Remember the last statements added to PRE_P and POST_P. Every
16228 new statement added by the gimplification helpers needs to be
16229 annotated with location information. To centralize the
16230 responsibility, we remember the last statement that had been
16231 added to both queues before gimplifying *EXPR_P. If
16232 gimplification produces new statements in PRE_P and POST_P, those
16233 statements will be annotated with the same location information
16235 pre_last_gsi
= gsi_last (*pre_p
);
16236 post_last_gsi
= gsi_last (*post_p
);
16238 saved_location
= input_location
;
16239 if (save_expr
!= error_mark_node
16240 && EXPR_HAS_LOCATION (*expr_p
))
16241 input_location
= EXPR_LOCATION (*expr_p
);
16243 /* Loop over the specific gimplifiers until the toplevel node
16244 remains the same. */
16247 /* Strip away as many useless type conversions as possible
16248 at the toplevel. */
16249 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
16251 /* Remember the expr. */
16252 save_expr
= *expr_p
;
16254 /* Die, die, die, my darling. */
16255 if (error_operand_p (save_expr
))
16261 /* Do any language-specific gimplification. */
16262 ret
= ((enum gimplify_status
)
16263 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
16266 if (*expr_p
== NULL_TREE
)
16268 if (*expr_p
!= save_expr
)
16271 else if (ret
!= GS_UNHANDLED
)
16274 /* Make sure that all the cases set 'ret' appropriately. */
16275 ret
= GS_UNHANDLED
;
16276 switch (TREE_CODE (*expr_p
))
16278 /* First deal with the special cases. */
16280 case POSTINCREMENT_EXPR
:
16281 case POSTDECREMENT_EXPR
:
16282 case PREINCREMENT_EXPR
:
16283 case PREDECREMENT_EXPR
:
16284 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
16285 fallback
!= fb_none
,
16286 TREE_TYPE (*expr_p
));
16289 case VIEW_CONVERT_EXPR
:
16290 if ((fallback
& fb_rvalue
)
16291 && is_gimple_reg_type (TREE_TYPE (*expr_p
))
16292 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
16294 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
16295 post_p
, is_gimple_val
, fb_rvalue
);
16296 recalculate_side_effects (*expr_p
);
16302 case ARRAY_RANGE_REF
:
16303 case REALPART_EXPR
:
16304 case IMAGPART_EXPR
:
16305 case COMPONENT_REF
:
16306 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
16307 fallback
? fallback
: fb_rvalue
);
16311 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
16313 /* C99 code may assign to an array in a structure value of a
16314 conditional expression, and this has undefined behavior
16315 only on execution, so create a temporary if an lvalue is
16317 if (fallback
== fb_lvalue
)
16319 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
16320 mark_addressable (*expr_p
);
16326 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
16328 /* C99 code may assign to an array in a structure returned
16329 from a function, and this has undefined behavior only on
16330 execution, so create a temporary if an lvalue is
16332 if (fallback
== fb_lvalue
)
16334 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
16335 mark_addressable (*expr_p
);
16341 gcc_unreachable ();
16343 case COMPOUND_EXPR
:
16344 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
16347 case COMPOUND_LITERAL_EXPR
:
16348 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
16349 gimple_test_f
, fallback
);
16354 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
16355 fallback
!= fb_none
);
16358 case TRUTH_ANDIF_EXPR
:
16359 case TRUTH_ORIF_EXPR
:
16361 /* Preserve the original type of the expression and the
16362 source location of the outer expression. */
16363 tree org_type
= TREE_TYPE (*expr_p
);
16364 *expr_p
= gimple_boolify (*expr_p
);
16365 *expr_p
= build3_loc (input_location
, COND_EXPR
,
16369 org_type
, boolean_true_node
),
16372 org_type
, boolean_false_node
));
16377 case TRUTH_NOT_EXPR
:
16379 tree type
= TREE_TYPE (*expr_p
);
16380 /* The parsers are careful to generate TRUTH_NOT_EXPR
16381 only with operands that are always zero or one.
16382 We do not fold here but handle the only interesting case
16383 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
16384 *expr_p
= gimple_boolify (*expr_p
);
16385 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
16386 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
16387 TREE_TYPE (*expr_p
),
16388 TREE_OPERAND (*expr_p
, 0));
16390 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
16391 TREE_TYPE (*expr_p
),
16392 TREE_OPERAND (*expr_p
, 0),
16393 build_int_cst (TREE_TYPE (*expr_p
), 1));
16394 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
16395 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
16401 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
16404 case ANNOTATE_EXPR
:
16406 tree cond
= TREE_OPERAND (*expr_p
, 0);
16407 tree kind
= TREE_OPERAND (*expr_p
, 1);
16408 tree data
= TREE_OPERAND (*expr_p
, 2);
16409 tree type
= TREE_TYPE (cond
);
16410 if (!INTEGRAL_TYPE_P (type
))
16416 tree tmp
= create_tmp_var (type
);
16417 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
16419 = gimple_build_call_internal (IFN_ANNOTATE
, 3, cond
, kind
, data
);
16420 gimple_call_set_lhs (call
, tmp
);
16421 gimplify_seq_add_stmt (pre_p
, call
);
16428 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
16432 if (IS_EMPTY_STMT (*expr_p
))
16438 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
16439 || fallback
== fb_none
)
16441 /* Just strip a conversion to void (or in void context) and
16443 *expr_p
= TREE_OPERAND (*expr_p
, 0);
16448 ret
= gimplify_conversion (expr_p
);
16449 if (ret
== GS_ERROR
)
16451 if (*expr_p
!= save_expr
)
16455 case FIX_TRUNC_EXPR
:
16456 /* unary_expr: ... | '(' cast ')' val | ... */
16457 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
16458 is_gimple_val
, fb_rvalue
);
16459 recalculate_side_effects (*expr_p
);
16464 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
16465 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
16466 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
16468 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
16469 if (*expr_p
!= save_expr
)
16475 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
16476 is_gimple_reg
, fb_rvalue
);
16477 if (ret
== GS_ERROR
)
16480 recalculate_side_effects (*expr_p
);
16481 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
16482 TREE_TYPE (*expr_p
),
16483 TREE_OPERAND (*expr_p
, 0),
16484 build_int_cst (saved_ptr_type
, 0));
16485 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
16486 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
16491 /* We arrive here through the various re-gimplifcation paths. */
16493 /* First try re-folding the whole thing. */
16494 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
16495 TREE_OPERAND (*expr_p
, 0),
16496 TREE_OPERAND (*expr_p
, 1));
16499 REF_REVERSE_STORAGE_ORDER (tmp
)
16500 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
16502 recalculate_side_effects (*expr_p
);
16506 /* Avoid re-gimplifying the address operand if it is already
16507 in suitable form. Re-gimplifying would mark the address
16508 operand addressable. Always gimplify when not in SSA form
16509 as we still may have to gimplify decls with value-exprs. */
16510 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
16511 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
16513 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
16514 is_gimple_mem_ref_addr
, fb_rvalue
);
16515 if (ret
== GS_ERROR
)
16518 recalculate_side_effects (*expr_p
);
16522 /* Constants need not be gimplified. */
16529 /* Drop the overflow flag on constants, we do not want
16530 that in the GIMPLE IL. */
16531 if (TREE_OVERFLOW_P (*expr_p
))
16532 *expr_p
= drop_tree_overflow (*expr_p
);
16537 /* If we require an lvalue, such as for ADDR_EXPR, retain the
16538 CONST_DECL node. Otherwise the decl is replaceable by its
16540 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
16541 if (fallback
& fb_lvalue
)
16545 *expr_p
= DECL_INITIAL (*expr_p
);
16551 ret
= gimplify_decl_expr (expr_p
, pre_p
);
16555 ret
= gimplify_bind_expr (expr_p
, pre_p
);
16559 ret
= gimplify_loop_expr (expr_p
, pre_p
);
16563 ret
= gimplify_switch_expr (expr_p
, pre_p
);
16567 ret
= gimplify_exit_expr (expr_p
);
16571 /* If the target is not LABEL, then it is a computed jump
16572 and the target needs to be gimplified. */
16573 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
16575 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
16576 NULL
, is_gimple_val
, fb_rvalue
);
16577 if (ret
== GS_ERROR
)
16580 gimplify_seq_add_stmt (pre_p
,
16581 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
16586 gimplify_seq_add_stmt (pre_p
,
16587 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
16588 PREDICT_EXPR_OUTCOME (*expr_p
)));
16593 ret
= gimplify_label_expr (expr_p
, pre_p
);
16594 label
= LABEL_EXPR_LABEL (*expr_p
);
16595 gcc_assert (decl_function_context (label
) == current_function_decl
);
16597 /* If the label is used in a goto statement, or address of the label
16598 is taken, we need to unpoison all variables that were seen so far.
16599 Doing so would prevent us from reporting a false positives. */
16600 if (asan_poisoned_variables
16601 && asan_used_labels
!= NULL
16602 && asan_used_labels
->contains (label
)
16603 && !gimplify_omp_ctxp
)
16604 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
16607 case CASE_LABEL_EXPR
:
16608 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
16610 if (gimplify_ctxp
->live_switch_vars
)
16611 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
16616 ret
= gimplify_return_expr (*expr_p
, pre_p
);
16620 /* Don't reduce this in place; let gimplify_init_constructor work its
16621 magic. Buf if we're just elaborating this for side effects, just
16622 gimplify any element that has side-effects. */
16623 if (fallback
== fb_none
)
16625 unsigned HOST_WIDE_INT ix
;
16627 tree temp
= NULL_TREE
;
16628 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
16629 if (TREE_SIDE_EFFECTS (val
))
16630 append_to_statement_list (val
, &temp
);
16633 ret
= temp
? GS_OK
: GS_ALL_DONE
;
16635 /* C99 code may assign to an array in a constructed
16636 structure or union, and this has undefined behavior only
16637 on execution, so create a temporary if an lvalue is
16639 else if (fallback
== fb_lvalue
)
16641 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
16642 mark_addressable (*expr_p
);
16649 /* The following are special cases that are not handled by the
16650 original GIMPLE grammar. */
16652 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
16655 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
16658 case BIT_FIELD_REF
:
16659 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
16660 post_p
, is_gimple_lvalue
, fb_either
);
16661 recalculate_side_effects (*expr_p
);
16664 case TARGET_MEM_REF
:
16666 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
16668 if (TMR_BASE (*expr_p
))
16669 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
16670 post_p
, is_gimple_mem_ref_addr
, fb_either
);
16671 if (TMR_INDEX (*expr_p
))
16672 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
16673 post_p
, is_gimple_val
, fb_rvalue
);
16674 if (TMR_INDEX2 (*expr_p
))
16675 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
16676 post_p
, is_gimple_val
, fb_rvalue
);
16677 /* TMR_STEP and TMR_OFFSET are always integer constants. */
16678 ret
= MIN (r0
, r1
);
16682 case NON_LVALUE_EXPR
:
16683 /* This should have been stripped above. */
16684 gcc_unreachable ();
16687 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
16690 case TRY_FINALLY_EXPR
:
16691 case TRY_CATCH_EXPR
:
16693 gimple_seq eval
, cleanup
;
16696 /* Calls to destructors are generated automatically in FINALLY/CATCH
16697 block. They should have location as UNKNOWN_LOCATION. However,
16698 gimplify_call_expr will reset these call stmts to input_location
16699 if it finds stmt's location is unknown. To prevent resetting for
16700 destructors, we set the input_location to unknown.
16701 Note that this only affects the destructor calls in FINALLY/CATCH
16702 block, and will automatically reset to its original value by the
16703 end of gimplify_expr. */
16704 input_location
= UNKNOWN_LOCATION
;
16705 eval
= cleanup
= NULL
;
16706 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
16707 if (TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
16708 && TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == EH_ELSE_EXPR
)
16710 gimple_seq n
= NULL
, e
= NULL
;
16711 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
16713 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
16715 if (!gimple_seq_empty_p (n
) && !gimple_seq_empty_p (e
))
16717 geh_else
*stmt
= gimple_build_eh_else (n
, e
);
16718 gimple_seq_add_stmt (&cleanup
, stmt
);
16722 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
16723 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
16724 if (gimple_seq_empty_p (cleanup
))
16726 gimple_seq_add_seq (pre_p
, eval
);
16730 try_
= gimple_build_try (eval
, cleanup
,
16731 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
16732 ? GIMPLE_TRY_FINALLY
16733 : GIMPLE_TRY_CATCH
);
16734 if (EXPR_HAS_LOCATION (save_expr
))
16735 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
16736 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
16737 gimple_set_location (try_
, saved_location
);
16738 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
16739 gimple_try_set_catch_is_cleanup (try_
,
16740 TRY_CATCH_IS_CLEANUP (*expr_p
));
16741 gimplify_seq_add_stmt (pre_p
, try_
);
16746 case CLEANUP_POINT_EXPR
:
16747 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
16751 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
16757 gimple_seq handler
= NULL
;
16758 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
16759 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
16760 gimplify_seq_add_stmt (pre_p
, c
);
16765 case EH_FILTER_EXPR
:
16768 gimple_seq failure
= NULL
;
16770 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
16771 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
16772 copy_warning (ehf
, *expr_p
);
16773 gimplify_seq_add_stmt (pre_p
, ehf
);
16780 enum gimplify_status r0
, r1
;
16781 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
16782 post_p
, is_gimple_val
, fb_rvalue
);
16783 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
16784 post_p
, is_gimple_val
, fb_rvalue
);
16785 TREE_SIDE_EFFECTS (*expr_p
) = 0;
16786 ret
= MIN (r0
, r1
);
16791 /* We get here when taking the address of a label. We mark
16792 the label as "forced"; meaning it can never be removed and
16793 it is a potential target for any computed goto. */
16794 FORCED_LABEL (*expr_p
) = 1;
16798 case STATEMENT_LIST
:
16799 ret
= gimplify_statement_list (expr_p
, pre_p
);
16802 case WITH_SIZE_EXPR
:
16804 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
16805 post_p
== &internal_post
? NULL
: post_p
,
16806 gimple_test_f
, fallback
);
16807 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
16808 is_gimple_val
, fb_rvalue
);
16815 ret
= gimplify_var_or_parm_decl (expr_p
);
16819 /* When within an OMP context, notice uses of variables. */
16820 if (gimplify_omp_ctxp
)
16821 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
16825 case DEBUG_EXPR_DECL
:
16826 gcc_unreachable ();
16828 case DEBUG_BEGIN_STMT
:
16829 gimplify_seq_add_stmt (pre_p
,
16830 gimple_build_debug_begin_stmt
16831 (TREE_BLOCK (*expr_p
),
16832 EXPR_LOCATION (*expr_p
)));
16838 /* Allow callbacks into the gimplifier during optimization. */
16843 gimplify_omp_parallel (expr_p
, pre_p
);
16848 gimplify_omp_task (expr_p
, pre_p
);
16854 /* Temporarily disable into_ssa, as scan_omp_simd
16855 which calls copy_gimple_seq_and_replace_locals can't deal
16856 with SSA_NAMEs defined outside of the body properly. */
16857 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
16858 gimplify_ctxp
->into_ssa
= false;
16859 ret
= gimplify_omp_for (expr_p
, pre_p
);
16860 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
16865 case OMP_DISTRIBUTE
:
16868 ret
= gimplify_omp_for (expr_p
, pre_p
);
16872 ret
= gimplify_omp_loop (expr_p
, pre_p
);
16876 gimplify_oacc_cache (expr_p
, pre_p
);
16881 gimplify_oacc_declare (expr_p
, pre_p
);
16885 case OACC_HOST_DATA
:
16888 case OACC_PARALLEL
:
16894 case OMP_TARGET_DATA
:
16896 gimplify_omp_workshare (expr_p
, pre_p
);
16900 case OACC_ENTER_DATA
:
16901 case OACC_EXIT_DATA
:
16903 case OMP_TARGET_UPDATE
:
16904 case OMP_TARGET_ENTER_DATA
:
16905 case OMP_TARGET_EXIT_DATA
:
16906 gimplify_omp_target_update (expr_p
, pre_p
);
16917 gimple_seq body
= NULL
;
16919 bool saved_in_omp_construct
= in_omp_construct
;
16921 in_omp_construct
= true;
16922 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
16923 in_omp_construct
= saved_in_omp_construct
;
16924 switch (TREE_CODE (*expr_p
))
16927 g
= gimple_build_omp_section (body
);
16930 g
= gimple_build_omp_master (body
);
16933 g
= gimplify_omp_ordered (*expr_p
, body
);
16934 if (OMP_BODY (*expr_p
) == NULL_TREE
16935 && gimple_code (g
) == GIMPLE_OMP_ORDERED
)
16936 gimple_omp_ordered_standalone (g
);
16939 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p
),
16940 pre_p
, ORT_WORKSHARE
, OMP_MASKED
);
16941 gimplify_adjust_omp_clauses (pre_p
, body
,
16942 &OMP_MASKED_CLAUSES (*expr_p
),
16944 g
= gimple_build_omp_masked (body
,
16945 OMP_MASKED_CLAUSES (*expr_p
));
16948 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
16949 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
16950 gimplify_adjust_omp_clauses (pre_p
, body
,
16951 &OMP_CRITICAL_CLAUSES (*expr_p
),
16953 g
= gimple_build_omp_critical (body
,
16954 OMP_CRITICAL_NAME (*expr_p
),
16955 OMP_CRITICAL_CLAUSES (*expr_p
));
16958 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p
),
16959 pre_p
, ORT_WORKSHARE
, OMP_SCAN
);
16960 gimplify_adjust_omp_clauses (pre_p
, body
,
16961 &OMP_SCAN_CLAUSES (*expr_p
),
16963 g
= gimple_build_omp_scan (body
, OMP_SCAN_CLAUSES (*expr_p
));
16966 gcc_unreachable ();
16968 gimplify_seq_add_stmt (pre_p
, g
);
16973 case OMP_TASKGROUP
:
16975 gimple_seq body
= NULL
;
16977 tree
*pclauses
= &OMP_TASKGROUP_CLAUSES (*expr_p
);
16978 bool saved_in_omp_construct
= in_omp_construct
;
16979 gimplify_scan_omp_clauses (pclauses
, pre_p
, ORT_TASKGROUP
,
16981 gimplify_adjust_omp_clauses (pre_p
, NULL
, pclauses
, OMP_TASKGROUP
);
16983 in_omp_construct
= true;
16984 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
16985 in_omp_construct
= saved_in_omp_construct
;
16986 gimple_seq cleanup
= NULL
;
16987 tree fn
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
16988 gimple
*g
= gimple_build_call (fn
, 0);
16989 gimple_seq_add_stmt (&cleanup
, g
);
16990 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
16992 gimple_seq_add_stmt (&body
, g
);
16993 g
= gimple_build_omp_taskgroup (body
, *pclauses
);
16994 gimplify_seq_add_stmt (pre_p
, g
);
17000 case OMP_ATOMIC_READ
:
17001 case OMP_ATOMIC_CAPTURE_OLD
:
17002 case OMP_ATOMIC_CAPTURE_NEW
:
17003 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
17006 case TRANSACTION_EXPR
:
17007 ret
= gimplify_transaction (expr_p
, pre_p
);
17010 case TRUTH_AND_EXPR
:
17011 case TRUTH_OR_EXPR
:
17012 case TRUTH_XOR_EXPR
:
17014 tree orig_type
= TREE_TYPE (*expr_p
);
17015 tree new_type
, xop0
, xop1
;
17016 *expr_p
= gimple_boolify (*expr_p
);
17017 new_type
= TREE_TYPE (*expr_p
);
17018 if (!useless_type_conversion_p (orig_type
, new_type
))
17020 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
17025 /* Boolified binary truth expressions are semantically equivalent
17026 to bitwise binary expressions. Canonicalize them to the
17027 bitwise variant. */
17028 switch (TREE_CODE (*expr_p
))
17030 case TRUTH_AND_EXPR
:
17031 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
17033 case TRUTH_OR_EXPR
:
17034 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
17036 case TRUTH_XOR_EXPR
:
17037 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
17042 /* Now make sure that operands have compatible type to
17043 expression's new_type. */
17044 xop0
= TREE_OPERAND (*expr_p
, 0);
17045 xop1
= TREE_OPERAND (*expr_p
, 1);
17046 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
17047 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
17050 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
17051 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
17054 /* Continue classified as tcc_binary. */
17058 case VEC_COND_EXPR
:
17061 case VEC_PERM_EXPR
:
17062 /* Classified as tcc_expression. */
17065 case BIT_INSERT_EXPR
:
17066 /* Argument 3 is a constant. */
17069 case POINTER_PLUS_EXPR
:
17071 enum gimplify_status r0
, r1
;
17072 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
17073 post_p
, is_gimple_val
, fb_rvalue
);
17074 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
17075 post_p
, is_gimple_val
, fb_rvalue
);
17076 recalculate_side_effects (*expr_p
);
17077 ret
= MIN (r0
, r1
);
17082 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
17084 case tcc_comparison
:
17085 /* Handle comparison of objects of non scalar mode aggregates
17086 with a call to memcmp. It would be nice to only have to do
17087 this for variable-sized objects, but then we'd have to allow
17088 the same nest of reference nodes we allow for MODIFY_EXPR and
17089 that's too complex.
17091 Compare scalar mode aggregates as scalar mode values. Using
17092 memcmp for them would be very inefficient at best, and is
17093 plain wrong if bitfields are involved. */
17095 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
17097 /* Vector comparisons need no boolification. */
17098 if (TREE_CODE (type
) == VECTOR_TYPE
)
17100 else if (!AGGREGATE_TYPE_P (type
))
17102 tree org_type
= TREE_TYPE (*expr_p
);
17103 *expr_p
= gimple_boolify (*expr_p
);
17104 if (!useless_type_conversion_p (org_type
,
17105 TREE_TYPE (*expr_p
)))
17107 *expr_p
= fold_convert_loc (input_location
,
17108 org_type
, *expr_p
);
17114 else if (TYPE_MODE (type
) != BLKmode
)
17115 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
17117 ret
= gimplify_variable_sized_compare (expr_p
);
17122 /* If *EXPR_P does not need to be special-cased, handle it
17123 according to its class. */
17125 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
17126 post_p
, is_gimple_val
, fb_rvalue
);
17132 enum gimplify_status r0
, r1
;
17134 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
17135 post_p
, is_gimple_val
, fb_rvalue
);
17136 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
17137 post_p
, is_gimple_val
, fb_rvalue
);
17139 ret
= MIN (r0
, r1
);
17145 enum gimplify_status r0
, r1
, r2
;
17147 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
17148 post_p
, is_gimple_val
, fb_rvalue
);
17149 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
17150 post_p
, is_gimple_val
, fb_rvalue
);
17151 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
17152 post_p
, is_gimple_val
, fb_rvalue
);
17154 ret
= MIN (MIN (r0
, r1
), r2
);
17158 case tcc_declaration
:
17161 goto dont_recalculate
;
17164 gcc_unreachable ();
17167 recalculate_side_effects (*expr_p
);
17173 gcc_assert (*expr_p
|| ret
!= GS_OK
);
17175 while (ret
== GS_OK
);
17177 /* If we encountered an error_mark somewhere nested inside, either
17178 stub out the statement or propagate the error back out. */
17179 if (ret
== GS_ERROR
)
17186 /* This was only valid as a return value from the langhook, which
17187 we handled. Make sure it doesn't escape from any other context. */
17188 gcc_assert (ret
!= GS_UNHANDLED
);
17190 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
17192 /* We aren't looking for a value, and we don't have a valid
17193 statement. If it doesn't have side-effects, throw it away.
17194 We can also get here with code such as "*&&L;", where L is
17195 a LABEL_DECL that is marked as FORCED_LABEL. */
17196 if (TREE_CODE (*expr_p
) == LABEL_DECL
17197 || !TREE_SIDE_EFFECTS (*expr_p
))
17199 else if (!TREE_THIS_VOLATILE (*expr_p
))
17201 /* This is probably a _REF that contains something nested that
17202 has side effects. Recurse through the operands to find it. */
17203 enum tree_code code
= TREE_CODE (*expr_p
);
17207 case COMPONENT_REF
:
17208 case REALPART_EXPR
:
17209 case IMAGPART_EXPR
:
17210 case VIEW_CONVERT_EXPR
:
17211 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
17212 gimple_test_f
, fallback
);
17216 case ARRAY_RANGE_REF
:
17217 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
17218 gimple_test_f
, fallback
);
17219 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
17220 gimple_test_f
, fallback
);
17224 /* Anything else with side-effects must be converted to
17225 a valid statement before we get here. */
17226 gcc_unreachable ();
17231 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
17232 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
17233 && !is_empty_type (TREE_TYPE (*expr_p
)))
17235 /* Historically, the compiler has treated a bare reference
17236 to a non-BLKmode volatile lvalue as forcing a load. */
17237 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
17239 /* Normally, we do not want to create a temporary for a
17240 TREE_ADDRESSABLE type because such a type should not be
17241 copied by bitwise-assignment. However, we make an
17242 exception here, as all we are doing here is ensuring that
17243 we read the bytes that make up the type. We use
17244 create_tmp_var_raw because create_tmp_var will abort when
17245 given a TREE_ADDRESSABLE type. */
17246 tree tmp
= create_tmp_var_raw (type
, "vol");
17247 gimple_add_tmp_var (tmp
);
17248 gimplify_assign (tmp
, *expr_p
, pre_p
);
17252 /* We can't do anything useful with a volatile reference to
17253 an incomplete type, so just throw it away. Likewise for
17254 a BLKmode type, since any implicit inner load should
17255 already have been turned into an explicit one by the
17256 gimplification process. */
17260 /* If we are gimplifying at the statement level, we're done. Tack
17261 everything together and return. */
17262 if (fallback
== fb_none
|| is_statement
)
17264 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
17265 it out for GC to reclaim it. */
17266 *expr_p
= NULL_TREE
;
17268 if (!gimple_seq_empty_p (internal_pre
)
17269 || !gimple_seq_empty_p (internal_post
))
17271 gimplify_seq_add_seq (&internal_pre
, internal_post
);
17272 gimplify_seq_add_seq (pre_p
, internal_pre
);
17275 /* The result of gimplifying *EXPR_P is going to be the last few
17276 statements in *PRE_P and *POST_P. Add location information
17277 to all the statements that were added by the gimplification
17279 if (!gimple_seq_empty_p (*pre_p
))
17280 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
17282 if (!gimple_seq_empty_p (*post_p
))
17283 annotate_all_with_location_after (*post_p
, post_last_gsi
,
17289 #ifdef ENABLE_GIMPLE_CHECKING
17292 enum tree_code code
= TREE_CODE (*expr_p
);
17293 /* These expressions should already be in gimple IR form. */
17294 gcc_assert (code
!= MODIFY_EXPR
17295 && code
!= ASM_EXPR
17296 && code
!= BIND_EXPR
17297 && code
!= CATCH_EXPR
17298 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
17299 && code
!= EH_FILTER_EXPR
17300 && code
!= GOTO_EXPR
17301 && code
!= LABEL_EXPR
17302 && code
!= LOOP_EXPR
17303 && code
!= SWITCH_EXPR
17304 && code
!= TRY_FINALLY_EXPR
17305 && code
!= EH_ELSE_EXPR
17306 && code
!= OACC_PARALLEL
17307 && code
!= OACC_KERNELS
17308 && code
!= OACC_SERIAL
17309 && code
!= OACC_DATA
17310 && code
!= OACC_HOST_DATA
17311 && code
!= OACC_DECLARE
17312 && code
!= OACC_UPDATE
17313 && code
!= OACC_ENTER_DATA
17314 && code
!= OACC_EXIT_DATA
17315 && code
!= OACC_CACHE
17316 && code
!= OMP_CRITICAL
17318 && code
!= OACC_LOOP
17319 && code
!= OMP_MASTER
17320 && code
!= OMP_MASKED
17321 && code
!= OMP_TASKGROUP
17322 && code
!= OMP_ORDERED
17323 && code
!= OMP_PARALLEL
17324 && code
!= OMP_SCAN
17325 && code
!= OMP_SECTIONS
17326 && code
!= OMP_SECTION
17327 && code
!= OMP_SINGLE
17328 && code
!= OMP_SCOPE
);
17332 /* Otherwise we're gimplifying a subexpression, so the resulting
17333 value is interesting. If it's a valid operand that matches
17334 GIMPLE_TEST_F, we're done. Unless we are handling some
17335 post-effects internally; if that's the case, we need to copy into
17336 a temporary before adding the post-effects to POST_P. */
17337 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
17340 /* Otherwise, we need to create a new temporary for the gimplified
17343 /* We can't return an lvalue if we have an internal postqueue. The
17344 object the lvalue refers to would (probably) be modified by the
17345 postqueue; we need to copy the value out first, which means an
17347 if ((fallback
& fb_lvalue
)
17348 && gimple_seq_empty_p (internal_post
)
17349 && is_gimple_addressable (*expr_p
))
17351 /* An lvalue will do. Take the address of the expression, store it
17352 in a temporary, and replace the expression with an INDIRECT_REF of
17354 tree ref_alias_type
= reference_alias_ptr_type (*expr_p
);
17355 unsigned int ref_align
= get_object_alignment (*expr_p
);
17356 tree ref_type
= TREE_TYPE (*expr_p
);
17357 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
17358 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
17359 if (TYPE_ALIGN (ref_type
) != ref_align
)
17360 ref_type
= build_aligned_type (ref_type
, ref_align
);
17361 *expr_p
= build2 (MEM_REF
, ref_type
,
17362 tmp
, build_zero_cst (ref_alias_type
));
17364 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
17366 /* An rvalue will do. Assign the gimplified expression into a
17367 new temporary TMP and replace the original expression with
17368 TMP. First, make sure that the expression has a type so that
17369 it can be assigned into a temporary. */
17370 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
17371 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
17375 #ifdef ENABLE_GIMPLE_CHECKING
17376 if (!(fallback
& fb_mayfail
))
17378 fprintf (stderr
, "gimplification failed:\n");
17379 print_generic_expr (stderr
, *expr_p
);
17380 debug_tree (*expr_p
);
17381 internal_error ("gimplification failed");
17384 gcc_assert (fallback
& fb_mayfail
);
17386 /* If this is an asm statement, and the user asked for the
17387 impossible, don't die. Fail and let gimplify_asm_expr
17393 /* Make sure the temporary matches our predicate. */
17394 gcc_assert ((*gimple_test_f
) (*expr_p
));
17396 if (!gimple_seq_empty_p (internal_post
))
17398 annotate_all_with_location (internal_post
, input_location
);
17399 gimplify_seq_add_seq (pre_p
, internal_post
);
17403 input_location
= saved_location
;
17407 /* Like gimplify_expr but make sure the gimplified result is not itself
17408 a SSA name (but a decl if it were). Temporaries required by
17409 evaluating *EXPR_P may be still SSA names. */
17411 static enum gimplify_status
17412 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
17413 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
17416 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
17417 gimple_test_f
, fallback
);
17419 && TREE_CODE (*expr_p
) == SSA_NAME
)
17420 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
17424 /* Look through TYPE for variable-sized objects and gimplify each such
17425 size that we find. Add to LIST_P any statements generated. */
/* NOTE(review): this extract elides several physical lines of the original
   (case labels, braces, 'break's and the 'tree t' declaration); only the
   visible tokens are preserved below, byte-for-byte.  */
17428 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
/* Nothing to do for a missing or erroneous type.  */
17430 if (type
== NULL
|| type
== error_mark_node
)
/* ignored_p: type is named by an artificial, debugger-ignored TYPE_DECL;
   used below to decide whether VLA bound variables must be kept visible.  */
17433 const bool ignored_p
17435 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
17436 && DECL_IGNORED_P (TYPE_NAME (type
));
17439 /* We first do the main variant, then copy into any other variants. */
17440 type
= TYPE_MAIN_VARIANT (type
);
17442 /* Avoid infinite recursion. */
17443 if (TYPE_SIZES_GIMPLIFIED (type
))
/* Mark before recursing so self-referential types terminate.  */
17446 TYPE_SIZES_GIMPLIFIED (type
) = 1;
/* Dispatch on type class: scalar types gimplify their bounds, arrays
   recurse on element and domain, aggregates walk their fields.  */
17448 switch (TREE_CODE (type
))
17451 case ENUMERAL_TYPE
:
17454 case FIXED_POINT_TYPE
:
17455 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
17456 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
/* Propagate the (possibly replaced) bounds to every other variant.  */
17458 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
17460 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
17461 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
17466 /* These types may not have declarations, so handle them here. */
17467 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
17468 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
17469 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
17470 with assigned stack slots, for -O1+ -g they should be tracked
17473 && TYPE_DOMAIN (type
)
17474 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
17476 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
17477 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
17478 DECL_IGNORED_P (t
) = 0;
17479 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
17480 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
17481 DECL_IGNORED_P (t
) = 0;
17487 case QUAL_UNION_TYPE
:
/* Gimplify offset and size of every FIELD_DECL, then recurse into the
   field's own type.  */
17488 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
17489 if (TREE_CODE (field
) == FIELD_DECL
)
17491 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
17492 /* Likewise, ensure variable offsets aren't removed. */
17494 && (t
= DECL_FIELD_OFFSET (field
))
17496 && DECL_ARTIFICIAL (t
))
17497 DECL_IGNORED_P (t
) = 0;
17498 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
17499 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
17500 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
17505 case REFERENCE_TYPE
:
17506 /* We used to recurse on the pointed-to type here, which turned out to
17507 be incorrect because its definition might refer to variables not
17508 yet initialized at this point if a forward declaration is involved.
17510 It was actually useful for anonymous pointed-to types to ensure
17511 that the sizes evaluation dominates every possible later use of the
17512 values. Restricting to such types here would be safe since there
17513 is no possible forward declaration around, but would introduce an
17514 undesirable middle-end semantic to anonymity. We then defer to
17515 front-ends the responsibility of ensuring that the sizes are
17516 evaluated both early and late enough, e.g. by attaching artificial
17517 type declarations to the tree. */
/* Finally gimplify the type's own size/size-unit and mirror the results
   (and the gimplified flag) into every variant.  */
17524 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
17525 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
17527 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
17529 TYPE_SIZE (t
) = TYPE_SIZE (type
);
17530 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
17531 TYPE_SIZES_GIMPLIFIED (t
) = 1;
17535 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
17536 a size or position, has had all of its SAVE_EXPRs evaluated.
17537 We add any required statements to *STMT_P. */
17540 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
17542 tree expr
= *expr_p
;
17544 /* We don't do anything if the value isn't there, is constant, or contains
17545 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
17546 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
17547 will want to replace it with a new variable, but that will cause problems
17548 if this type is from outside the function. It's OK to have that here. */
17549 if (expr
== NULL_TREE
17550 || is_gimple_constant (expr
)
17551 || TREE_CODE (expr
) == VAR_DECL
17552 || CONTAINS_PLACEHOLDER_P (expr
))
17555 *expr_p
= unshare_expr (expr
);
17557 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
17558 if the def vanishes. */
17559 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
17561 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
17562 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
17563 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
17564 if (is_gimple_constant (*expr_p
))
17565 *expr_p
= get_initialized_tmp_var (*expr_p
, stmt_p
, NULL
, false);
17568 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
17569 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
17570 is true, also gimplify the parameters. */
/* NOTE(review): several physical lines of the original are elided by this
   extract (return type, some braces and 'seq = NULL;'); the visible tokens
   are preserved byte-for-byte below.  */
17573 gimplify_body (tree fndecl
, bool do_parms
)
17575 location_t saved_location
= input_location
;
17576 gimple_seq parm_stmts
, parm_cleanup
= NULL
, seq
;
17577 gimple
*outer_stmt
;
17580 timevar_push (TV_TREE_GIMPLIFY
);
17582 init_tree_ssa (cfun
);
17584 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
17586 default_rtl_profile ();
/* No gimplify context may be live yet; push a fresh one (in SSA mode).  */
17588 gcc_assert (gimplify_ctxp
== NULL
);
17589 push_gimplify_context (true);
/* For OpenACC/OpenMP, functions marked "omp declare target" get an
   implicit target context for the whole body.  */
17591 if (flag_openacc
|| flag_openmp
)
17593 gcc_assert (gimplify_omp_ctxp
== NULL
);
17594 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
17595 gimplify_omp_ctxp
= new_omp_context (ORT_IMPLICIT_TARGET
);
17598 /* Unshare most shared trees in the body and in that of any nested functions.
17599 It would seem we don't have to do this for nested functions because
17600 they are supposed to be output and then the outer function gimplified
17601 first, but the g++ front end doesn't always do it that way. */
17602 unshare_body (fndecl
);
17603 unvisit_body (fndecl
);
17605 /* Make sure input_location isn't set to something weird. */
17606 input_location
= DECL_SOURCE_LOCATION (fndecl
);
17608 /* Resolve callee-copies. This has to be done before processing
17609 the body so that DECL_VALUE_EXPR gets processed correctly. */
17610 parm_stmts
= do_parms
? gimplify_parameters (&parm_cleanup
) : NULL
;
17612 /* Gimplify the function's body. */
17614 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
17615 outer_stmt
= gimple_seq_first_nondebug_stmt (seq
);
/* An empty body still needs one statement; use a no-op.  */
17618 outer_stmt
= gimple_build_nop ();
17619 gimplify_seq_add_stmt (&seq
, outer_stmt
);
17622 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
17623 not the case, wrap everything in a GIMPLE_BIND to make it so. */
17624 if (gimple_code (outer_stmt
) == GIMPLE_BIND
17625 && (gimple_seq_first_nondebug_stmt (seq
)
17626 == gimple_seq_last_nondebug_stmt (seq
)))
17628 outer_bind
= as_a
<gbind
*> (outer_stmt
);
/* If debug stmts surround the single real bind, splice them into the
   bind's body so the bind becomes the sole top-level statement.  */
17629 if (gimple_seq_first_stmt (seq
) != outer_stmt
17630 || gimple_seq_last_stmt (seq
) != outer_stmt
)
17632 /* If there are debug stmts before or after outer_stmt, move them
17633 inside of outer_bind body. */
17634 gimple_stmt_iterator gsi
= gsi_for_stmt (outer_stmt
, &seq
);
17635 gimple_seq second_seq
= NULL
;
17636 if (gimple_seq_first_stmt (seq
) != outer_stmt
17637 && gimple_seq_last_stmt (seq
) != outer_stmt
)
17639 second_seq
= gsi_split_seq_after (gsi
);
17640 gsi_remove (&gsi
, false);
17642 else if (gimple_seq_first_stmt (seq
) != outer_stmt
)
17643 gsi_remove (&gsi
, false);
17646 gsi_remove (&gsi
, false);
17650 gimple_seq_add_seq_without_update (&seq
,
17651 gimple_bind_body (outer_bind
));
17652 gimple_seq_add_seq_without_update (&seq
, second_seq
);
17653 gimple_bind_set_body (outer_bind
, seq
);
/* Otherwise wrap the whole sequence in a new bind.  */
17657 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
/* The GENERIC body is consumed; drop it so it can be GC'd.  */
17659 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
17661 /* If we had callee-copies statements, insert them at the beginning
17662 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
17663 if (!gimple_seq_empty_p (parm_stmts
))
17667 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
/* Parameter cleanups (if any) run on every exit via TRY_FINALLY.  */
17670 gtry
*g
= gimple_build_try (parm_stmts
, parm_cleanup
,
17671 GIMPLE_TRY_FINALLY
);
17673 gimple_seq_add_stmt (&parm_stmts
, g
);
17675 gimple_bind_set_body (outer_bind
, parm_stmts
);
17677 for (parm
= DECL_ARGUMENTS (current_function_decl
);
17678 parm
; parm
= DECL_CHAIN (parm
))
17679 if (DECL_HAS_VALUE_EXPR_P (parm
))
17681 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
17682 DECL_IGNORED_P (parm
) = 0;
/* Tear down any OMP context created above (or by the body).  */
17686 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
17687 && gimplify_omp_ctxp
)
17689 delete_omp_context (gimplify_omp_ctxp
);
17690 gimplify_omp_ctxp
= NULL
;
17693 pop_gimplify_context (outer_bind
);
17694 gcc_assert (gimplify_ctxp
== NULL
);
/* Self-check the produced GIMPLE when checking is enabled.  */
17696 if (flag_checking
&& !seen_error ())
17697 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
17699 timevar_pop (TV_TREE_GIMPLIFY
);
17700 input_location
= saved_location
;
17705 typedef char *char_p
; /* For DEF_VEC_P. */
17707 /* Return whether we should exclude FNDECL from instrumentation. */
17710 flag_instrument_functions_exclude_p (tree fndecl
)
17714 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
17715 if (v
&& v
->length () > 0)
17721 name
= lang_hooks
.decl_printable_name (fndecl
, 1);
17722 FOR_EACH_VEC_ELT (*v
, i
, s
)
17723 if (strstr (name
, s
) != NULL
)
17727 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
17728 if (v
&& v
->length () > 0)
17734 name
= DECL_SOURCE_FILE (fndecl
);
17735 FOR_EACH_VEC_ELT (*v
, i
, s
)
17736 if (strstr (name
, s
) != NULL
)
17743 /* Build a call to the instrumentation function FNCODE and add it to SEQ.
17744 If COND_VAR is not NULL, it is a boolean variable guarding the call to
17745 the instrumentation function. IF STMT is not NULL, it is a statement
17746 to be executed just before the call to the instrumentation function. */
/* NOTE(review): the extract elides the guards around the COND_VAR and STMT
   uses and the second argument of gimple_build_predict; visible tokens are
   preserved byte-for-byte.  */
17749 build_instrumentation_call (gimple_seq
*seq
, enum built_in_function fncode
,
17750 tree cond_var
, gimple
*stmt
)
17752 /* The instrumentation hooks aren't going to call the instrumented
17753 function and the address they receive is expected to be matchable
17754 against symbol addresses. Make sure we don't create a trampoline,
17755 in case the current function is nested. */
17756 tree this_fn_addr
= build_fold_addr_expr (current_function_decl
);
17757 TREE_NO_TRAMPOLINE (this_fn_addr
) = 1;
/* When COND_VAR guards the call, branch around the instrumentation:
   the hook runs only on the label_true path (predicted cold).  */
17759 tree label_true
, label_false
;
17762 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
17763 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
17764 gcond
*cond
= gimple_build_cond (EQ_EXPR
, cond_var
, boolean_false_node
,
17765 label_true
, label_false
);
17766 gimplify_seq_add_stmt (seq
, cond
);
17767 gimplify_seq_add_stmt (seq
, gimple_build_label (label_true
));
17768 gimplify_seq_add_stmt (seq
, gimple_build_predict (PRED_COLD_LABEL
,
/* Optional pre-call statement supplied by the caller.  */
17773 gimplify_seq_add_stmt (seq
, stmt
);
/* tmp_var = __builtin_return_address (0); the hook receives the call
   site of the instrumented function.  */
17775 tree x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
17776 gcall
*call
= gimple_build_call (x
, 1, integer_zero_node
);
17777 tree tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
17778 gimple_call_set_lhs (call
, tmp_var
);
17779 gimplify_seq_add_stmt (seq
, call
);
/* FNCODE (this_fn_addr, return_addr) — the actual instrumentation hook.  */
17780 x
= builtin_decl_implicit (fncode
);
17781 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
17782 gimplify_seq_add_stmt (seq
, call
);
/* Join point for the guarded (COND_VAR) case.  */
17785 gimplify_seq_add_stmt (seq
, gimple_build_label (label_false
));
17788 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
17789 node for the function we want to gimplify.
17791 Return the sequence of GIMPLE statements corresponding to the body
/* NOTE(review): this extract elides the return type, local declarations
   (bind, seq, first_var, assign, cond_var) and several control lines;
   the visible tokens are preserved byte-for-byte below.  */
17795 gimplify_function_tree (tree fndecl
)
/* Must not have been gimplified already.  */
17800 gcc_assert (!gimple_body (fndecl
));
17802 if (DECL_STRUCT_FUNCTION (fndecl
))
17803 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
17805 push_struct_function (fndecl
);
17807 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
17809 cfun
->curr_properties
|= PROP_gimple_lva
;
/* Track poisoned variables for ASan use-after-scope only while the body
   is being gimplified.  */
17811 if (asan_sanitize_use_after_scope ())
17812 asan_poisoned_variables
= new hash_set
<tree
> ();
17813 bind
= gimplify_body (fndecl
, true);
17814 if (asan_poisoned_variables
)
17816 delete asan_poisoned_variables
;
17817 asan_poisoned_variables
= NULL
;
17820 /* The tree body of the function is no longer needed, replace it
17821 with the new GIMPLE body. */
17823 gimple_seq_add_stmt (&seq
, bind
);
17824 gimple_set_body (fndecl
, seq
);
17826 /* If we're instrumenting function entry/exit, then prepend the call to
17827 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
17828 catch the exit hook. */
17829 /* ??? Add some way to ignore exceptions for this TFE. */
17830 if (flag_instrument_function_entry_exit
17831 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
17832 /* Do not instrument extern inline functions. */
17833 && !(DECL_DECLARED_INLINE_P (fndecl
)
17834 && DECL_EXTERNAL (fndecl
)
17835 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
17836 && !flag_instrument_functions_exclude_p (fndecl
))
17838 gimple_seq body
= NULL
, cleanup
= NULL
;
17842 /* If -finstrument-functions-once is specified, generate:
17844 static volatile bool C.0 = false;
17851 [call profiling enter function]
17854 without specific protection for data races. */
17855 if (flag_instrument_function_entry_exit
> 1)
/* first_var is the static "already called" flag; cond_var caches its
   value so the enter/exit hooks fire at most once per program run.  */
17858 = build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
17860 create_tmp_var_name ("C"),
17861 boolean_type_node
);
17862 DECL_ARTIFICIAL (first_var
) = 1;
17863 DECL_IGNORED_P (first_var
) = 1;
17864 TREE_STATIC (first_var
) = 1;
17865 TREE_THIS_VOLATILE (first_var
) = 1;
17866 TREE_USED (first_var
) = 1;
17867 DECL_INITIAL (first_var
) = boolean_false_node
;
17868 varpool_node::add (first_var
);
17870 cond_var
= create_tmp_var (boolean_type_node
, "tmp_called");
17871 assign
= gimple_build_assign (cond_var
, first_var
);
17872 gimplify_seq_add_stmt (&body
, assign
);
17874 assign
= gimple_build_assign (first_var
, boolean_true_node
);
/* Without -finstrument-functions-once, no guard is needed.  */
17879 cond_var
= NULL_TREE
;
17883 build_instrumentation_call (&body
, BUILT_IN_PROFILE_FUNC_ENTER
,
17886 /* If -finstrument-functions-once is specified, generate:
17889 [call profiling exit function]
17891 without specific protection for data races. */
17892 build_instrumentation_call (&cleanup
, BUILT_IN_PROFILE_FUNC_EXIT
,
/* Run the exit hook on every exit path via TRY_FINALLY.  */
17895 gimple
*tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
17896 gimplify_seq_add_stmt (&body
, tf
);
17897 gbind
*new_bind
= gimple_build_bind (NULL
, body
, NULL
);
17899 /* Replace the current function body with the body
17900 wrapped in the try/finally TF. */
17902 gimple_seq_add_stmt (&seq
, new_bind
);
17903 gimple_set_body (fndecl
, seq
);
/* For TSan, additionally wrap the body so __tsan_func_exit runs on
   every exit path.  */
17907 if (sanitize_flags_p (SANITIZE_THREAD
)
17908 && param_tsan_instrument_func_entry_exit
)
17910 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
17911 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
17912 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
17913 /* Replace the current function body with the body
17914 wrapped in the try/finally TF. */
17916 gimple_seq_add_stmt (&seq
, new_bind
);
17917 gimple_set_body (fndecl
, seq
);
/* The GENERIC body is fully consumed; the function is now GIMPLE.  */
17920 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
17921 cfun
->curr_properties
|= PROP_gimple_any
;
17925 dump_function (TDI_gimple
, fndecl
);
17928 /* Return a dummy expression of type TYPE in order to keep going after an
17932 dummy_object (tree type
)
17934 tree t
= build_int_cst (build_pointer_type (type
), 0);
17935 return build2 (MEM_REF
, type
, t
, t
);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR to lower; PRE_P receives any statements
   that must run before it.  POST_P is unused.  On success the expression
   is replaced by an IFN_VA_ARG internal call to be expanded later.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  /* Emit the follow-up hint only once per compilation.  */
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* TAG carries the type being read; APTAG the type of the va_list
     argument.  Both are zero constants used only for their types.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
18019 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
18021 DST/SRC are the destination and source respectively. You can pass
18022 ungimplified trees in DST or SRC, in which case they will be
18023 converted to a gimple operand if necessary.
18025 This function returns the newly created GIMPLE_ASSIGN tuple. */
18028 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
18030 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
18031 gimplify_and_add (t
, seq_p
);
18033 return gimple_seq_last_stmt (*seq_p
);
18037 gimplify_hasher::hash (const elt_t
*p
)
18040 return iterative_hash_expr (t
, 0);
18044 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
18048 enum tree_code code
= TREE_CODE (t1
);
18050 if (TREE_CODE (t2
) != code
18051 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
18054 if (!operand_equal_p (t1
, t2
, 0))
18057 /* Only allow them to compare equal if they also hash equal; otherwise
18058 results are nondeterminate, and we fail bootstrap comparison. */
18059 gcc_checking_assert (hash (p1
) == hash (p2
));