1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2021 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
40 #include "fold-const.h"
45 #include "gimple-fold.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
56 #include "omp-general.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "stringpool.h"
68 #include "omp-offload.h"
70 #include "tree-nested.h"
72 /* Hash set of poisoned variables in a bind expr. */
73 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
75 enum gimplify_omp_var_data
78 GOVD_EXPLICIT
= 0x000002,
79 GOVD_SHARED
= 0x000004,
80 GOVD_PRIVATE
= 0x000008,
81 GOVD_FIRSTPRIVATE
= 0x000010,
82 GOVD_LASTPRIVATE
= 0x000020,
83 GOVD_REDUCTION
= 0x000040,
86 GOVD_DEBUG_PRIVATE
= 0x000200,
87 GOVD_PRIVATE_OUTER_REF
= 0x000400,
88 GOVD_LINEAR
= 0x000800,
89 GOVD_ALIGNED
= 0x001000,
91 /* Flag for GOVD_MAP: don't copy back. */
92 GOVD_MAP_TO_ONLY
= 0x002000,
94 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
95 GOVD_LINEAR_LASTPRIVATE_NO_OUTER
= 0x004000,
97 GOVD_MAP_0LEN_ARRAY
= 0x008000,
99 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
100 GOVD_MAP_ALWAYS_TO
= 0x010000,
102 /* Flag for shared vars that are or might be stored to in the region. */
103 GOVD_WRITTEN
= 0x020000,
105 /* Flag for GOVD_MAP, if it is a forced mapping. */
106 GOVD_MAP_FORCE
= 0x040000,
108 /* Flag for GOVD_MAP: must be present already. */
109 GOVD_MAP_FORCE_PRESENT
= 0x080000,
111 /* Flag for GOVD_MAP: only allocate. */
112 GOVD_MAP_ALLOC_ONLY
= 0x100000,
114 /* Flag for GOVD_MAP: only copy back. */
115 GOVD_MAP_FROM_ONLY
= 0x200000,
117 GOVD_NONTEMPORAL
= 0x400000,
119 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
120 GOVD_LASTPRIVATE_CONDITIONAL
= 0x800000,
122 GOVD_CONDTEMP
= 0x1000000,
124 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
125 GOVD_REDUCTION_INSCAN
= 0x2000000,
127 /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
129 GOVD_MAP_HAS_ATTACHMENTS
= 0x4000000,
131 /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT. */
132 GOVD_FIRSTPRIVATE_IMPLICIT
= 0x8000000,
134 GOVD_DATA_SHARE_CLASS
= (GOVD_SHARED
| GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
135 | GOVD_LASTPRIVATE
| GOVD_REDUCTION
| GOVD_LINEAR
142 ORT_WORKSHARE
= 0x00,
143 ORT_TASKGROUP
= 0x01,
147 ORT_COMBINED_PARALLEL
= ORT_PARALLEL
| 1,
150 ORT_UNTIED_TASK
= ORT_TASK
| 1,
151 ORT_TASKLOOP
= ORT_TASK
| 2,
152 ORT_UNTIED_TASKLOOP
= ORT_UNTIED_TASK
| 2,
155 ORT_COMBINED_TEAMS
= ORT_TEAMS
| 1,
156 ORT_HOST_TEAMS
= ORT_TEAMS
| 2,
157 ORT_COMBINED_HOST_TEAMS
= ORT_COMBINED_TEAMS
| 2,
160 ORT_TARGET_DATA
= 0x40,
162 /* Data region with offloading. */
164 ORT_COMBINED_TARGET
= ORT_TARGET
| 1,
165 ORT_IMPLICIT_TARGET
= ORT_TARGET
| 2,
167 /* OpenACC variants. */
168 ORT_ACC
= 0x100, /* A generic OpenACC region. */
169 ORT_ACC_DATA
= ORT_ACC
| ORT_TARGET_DATA
, /* Data construct. */
170 ORT_ACC_PARALLEL
= ORT_ACC
| ORT_TARGET
, /* Parallel construct */
171 ORT_ACC_KERNELS
= ORT_ACC
| ORT_TARGET
| 2, /* Kernels construct. */
172 ORT_ACC_SERIAL
= ORT_ACC
| ORT_TARGET
| 4, /* Serial construct. */
173 ORT_ACC_HOST_DATA
= ORT_ACC
| ORT_TARGET_DATA
| 2, /* Host data. */
175 /* Dummy OpenMP region, used to disable expansion of
176 DECL_VALUE_EXPRs in taskloop pre body. */
180 /* Gimplify hashtable helper. */
182 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
184 static inline hashval_t
hash (const elt_t
*);
185 static inline bool equal (const elt_t
*, const elt_t
*);
190 struct gimplify_ctx
*prev_context
;
192 vec
<gbind
*> bind_expr_stack
;
194 gimple_seq conditional_cleanups
;
198 vec
<tree
> case_labels
;
199 hash_set
<tree
> *live_switch_vars
;
200 /* The formal temporary table. Should this be persistent? */
201 hash_table
<gimplify_hasher
> *temp_htab
;
204 unsigned into_ssa
: 1;
205 unsigned allow_rhs_cond_expr
: 1;
206 unsigned in_cleanup_point_expr
: 1;
207 unsigned keep_stack
: 1;
208 unsigned save_stack
: 1;
209 unsigned in_switch_expr
: 1;
212 enum gimplify_defaultmap_kind
215 GDMK_SCALAR_TARGET
, /* w/ Fortran's target attr, implicit mapping, only. */
221 struct gimplify_omp_ctx
223 struct gimplify_omp_ctx
*outer_context
;
224 splay_tree variables
;
225 hash_set
<tree
> *privatized_types
;
227 /* Iteration variables in an OMP_FOR. */
228 vec
<tree
> loop_iter_var
;
230 enum omp_clause_default_kind default_kind
;
231 enum omp_region_type region_type
;
235 bool target_firstprivatize_array_bases
;
237 bool order_concurrent
;
/* Current (innermost) gimplification context, and the innermost enclosing
   OpenMP/OpenACC region context, if any.  */
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
/* True while gimplifying the body of an OpenMP construct.  */
static bool in_omp_construct;
247 /* Forward declaration. */
248 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
249 static hash_map
<tree
, tree
> *oacc_declare_returns
;
250 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
251 bool (*) (tree
), fallback_t
, bool);
253 /* Shorter alias name for the above function for use in gimplify.c
257 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
259 gimple_seq_add_stmt_without_update (seq_p
, gs
);
262 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
263 NULL, a new sequence is allocated. This function is
264 similar to gimple_seq_add_seq, but does not scan the operands.
265 During gimplification, we need to manipulate statement sequences
266 before the def/use vectors have been constructed. */
269 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
271 gimple_stmt_iterator si
;
276 si
= gsi_last (*dst_p
);
277 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
281 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
282 and popping gimplify contexts. */
284 static struct gimplify_ctx
*ctx_pool
= NULL
;
286 /* Return a gimplify context struct from the pool. */
288 static inline struct gimplify_ctx
*
291 struct gimplify_ctx
* c
= ctx_pool
;
294 ctx_pool
= c
->prev_context
;
296 c
= XNEW (struct gimplify_ctx
);
298 memset (c
, '\0', sizeof (*c
));
302 /* Put gimplify context C back into the pool. */
305 ctx_free (struct gimplify_ctx
*c
)
307 c
->prev_context
= ctx_pool
;
311 /* Free allocated ctx stack memory. */
314 free_gimplify_stack (void)
316 struct gimplify_ctx
*c
;
318 while ((c
= ctx_pool
))
320 ctx_pool
= c
->prev_context
;
326 /* Set up a context for the gimplifier. */
329 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
331 struct gimplify_ctx
*c
= ctx_alloc ();
333 c
->prev_context
= gimplify_ctxp
;
335 gimplify_ctxp
->into_ssa
= in_ssa
;
336 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
339 /* Tear down a context for the gimplifier. If BODY is non-null, then
340 put the temporaries into the outer BIND_EXPR. Otherwise, put them
343 BODY is not a sequence, but the first tuple in a sequence. */
346 pop_gimplify_context (gimple
*body
)
348 struct gimplify_ctx
*c
= gimplify_ctxp
;
351 && (!c
->bind_expr_stack
.exists ()
352 || c
->bind_expr_stack
.is_empty ()));
353 c
->bind_expr_stack
.release ();
354 gimplify_ctxp
= c
->prev_context
;
357 declare_vars (c
->temps
, body
, false);
359 record_vars (c
->temps
);
366 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
369 gimple_push_bind_expr (gbind
*bind_stmt
)
371 gimplify_ctxp
->bind_expr_stack
.reserve (8);
372 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
375 /* Pop the first element off the stack of bindings. */
378 gimple_pop_bind_expr (void)
380 gimplify_ctxp
->bind_expr_stack
.pop ();
383 /* Return the first element of the stack of bindings. */
386 gimple_current_bind_expr (void)
388 return gimplify_ctxp
->bind_expr_stack
.last ();
391 /* Return the stack of bindings created during gimplification. */
394 gimple_bind_expr_stack (void)
396 return gimplify_ctxp
->bind_expr_stack
;
399 /* Return true iff there is a COND_EXPR between us and the innermost
400 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
403 gimple_conditional_context (void)
405 return gimplify_ctxp
->conditions
> 0;
408 /* Note that we've entered a COND_EXPR. */
411 gimple_push_condition (void)
413 #ifdef ENABLE_GIMPLE_CHECKING
414 if (gimplify_ctxp
->conditions
== 0)
415 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
417 ++(gimplify_ctxp
->conditions
);
420 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
421 now, add any conditional cleanups we've seen to the prequeue. */
424 gimple_pop_condition (gimple_seq
*pre_p
)
426 int conds
= --(gimplify_ctxp
->conditions
);
428 gcc_assert (conds
>= 0);
431 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
432 gimplify_ctxp
->conditional_cleanups
= NULL
;
436 /* A stable comparison routine for use with splay trees and DECLs. */
439 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
444 return DECL_UID (a
) - DECL_UID (b
);
447 /* Create a new omp construct that deals with variable remapping. */
449 static struct gimplify_omp_ctx
*
450 new_omp_context (enum omp_region_type region_type
)
452 struct gimplify_omp_ctx
*c
;
454 c
= XCNEW (struct gimplify_omp_ctx
);
455 c
->outer_context
= gimplify_omp_ctxp
;
456 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
457 c
->privatized_types
= new hash_set
<tree
>;
458 c
->location
= input_location
;
459 c
->region_type
= region_type
;
460 if ((region_type
& ORT_TASK
) == 0)
461 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
463 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
464 c
->defaultmap
[GDMK_SCALAR
] = GOVD_MAP
;
465 c
->defaultmap
[GDMK_SCALAR_TARGET
] = GOVD_MAP
;
466 c
->defaultmap
[GDMK_AGGREGATE
] = GOVD_MAP
;
467 c
->defaultmap
[GDMK_ALLOCATABLE
] = GOVD_MAP
;
468 c
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
;
473 /* Destroy an omp construct that deals with variable remapping. */
476 delete_omp_context (struct gimplify_omp_ctx
*c
)
478 splay_tree_delete (c
->variables
);
479 delete c
->privatized_types
;
480 c
->loop_iter_var
.release ();
484 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
485 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
487 /* Both gimplify the statement T and append it to *SEQ_P. This function
488 behaves exactly as gimplify_stmt, but you don't have to pass T as a
492 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
494 gimplify_stmt (&t
, seq_p
);
497 /* Gimplify statement T into sequence *SEQ_P, and return the first
498 tuple in the sequence of generated tuples for this statement.
499 Return NULL if gimplifying T produced no tuples. */
502 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
504 gimple_stmt_iterator last
= gsi_last (*seq_p
);
506 gimplify_and_add (t
, seq_p
);
508 if (!gsi_end_p (last
))
511 return gsi_stmt (last
);
514 return gimple_seq_first_stmt (*seq_p
);
517 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
518 LHS, or for a call argument. */
521 is_gimple_mem_rhs (tree t
)
523 /* If we're dealing with a renamable type, either source or dest must be
524 a renamed variable. */
525 if (is_gimple_reg_type (TREE_TYPE (t
)))
526 return is_gimple_val (t
);
528 return is_gimple_val (t
) || is_gimple_lvalue (t
);
531 /* Return true if T is a CALL_EXPR or an expression that can be
532 assigned to a temporary. Note that this predicate should only be
533 used during gimplification. See the rationale for this in
534 gimplify_modify_expr. */
537 is_gimple_reg_rhs_or_call (tree t
)
539 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
540 || TREE_CODE (t
) == CALL_EXPR
);
543 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
544 this predicate should only be used during gimplification. See the
545 rationale for this in gimplify_modify_expr. */
548 is_gimple_mem_rhs_or_call (tree t
)
550 /* If we're dealing with a renamable type, either source or dest must be
551 a renamed variable. */
552 if (is_gimple_reg_type (TREE_TYPE (t
)))
553 return is_gimple_val (t
);
555 return (is_gimple_val (t
)
556 || is_gimple_lvalue (t
)
557 || TREE_CLOBBER_P (t
)
558 || TREE_CODE (t
) == CALL_EXPR
);
561 /* Create a temporary with a name derived from VAL. Subroutine of
562 lookup_tmp_var; nobody else should call this function. */
565 create_tmp_from_val (tree val
)
567 /* Drop all qualifiers and address-space information from the value type. */
568 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
569 tree var
= create_tmp_var (type
, get_name (val
));
573 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
574 an existing expression temporary. */
577 lookup_tmp_var (tree val
, bool is_formal
)
581 /* If not optimizing, never really reuse a temporary. local-alloc
582 won't allocate any variable that is used in more than one basic
583 block, which means it will go into memory, causing much extra
584 work in reload and final and poorer code generation, outweighing
585 the extra memory allocation here. */
586 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
587 ret
= create_tmp_from_val (val
);
594 if (!gimplify_ctxp
->temp_htab
)
595 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
596 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
599 elt_p
= XNEW (elt_t
);
601 elt_p
->temp
= ret
= create_tmp_from_val (val
);
614 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
617 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
618 bool is_formal
, bool allow_ssa
)
622 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
623 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
624 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
628 && gimplify_ctxp
->into_ssa
629 && is_gimple_reg_type (TREE_TYPE (val
)))
631 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
632 if (! gimple_in_ssa_p (cfun
))
634 const char *name
= get_name (val
);
636 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
640 t
= lookup_tmp_var (val
, is_formal
);
642 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
644 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
646 /* gimplify_modify_expr might want to reduce this further. */
647 gimplify_and_add (mod
, pre_p
);
653 /* Return a formal temporary variable initialized with VAL. PRE_P is as
654 in gimplify_expr. Only use this function if:
656 1) The value of the unfactored expression represented by VAL will not
657 change between the initialization and use of the temporary, and
658 2) The temporary will not be otherwise modified.
660 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
661 and #2 means it is inappropriate for && temps.
663 For other cases, use get_initialized_tmp_var instead. */
666 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
668 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true);
671 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
672 are as in gimplify_expr. */
675 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
,
676 gimple_seq
*post_p
/* = NULL */,
677 bool allow_ssa
/* = true */)
679 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
);
682 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
683 generate debug info for them; otherwise don't. */
686 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
693 gbind
*scope
= as_a
<gbind
*> (gs
);
695 temps
= nreverse (last
);
697 block
= gimple_bind_block (scope
);
698 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
699 if (!block
|| !debug_info
)
701 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
702 gimple_bind_set_vars (scope
, temps
);
706 /* We need to attach the nodes both to the BIND_EXPR and to its
707 associated BLOCK for debugging purposes. The key point here
708 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
709 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
710 if (BLOCK_VARS (block
))
711 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
714 gimple_bind_set_vars (scope
,
715 chainon (gimple_bind_vars (scope
), temps
));
716 BLOCK_VARS (block
) = temps
;
722 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
723 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
724 no such upper bound can be obtained. */
727 force_constant_size (tree var
)
729 /* The only attempt we make is by querying the maximum size of objects
730 of the variable's type. */
732 HOST_WIDE_INT max_size
;
734 gcc_assert (VAR_P (var
));
736 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
738 gcc_assert (max_size
>= 0);
741 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
743 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
746 /* Push the temporary variable TMP into the current binding. */
749 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
751 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
753 /* Later processing assumes that the object size is constant, which might
754 not be true at this point. Force the use of a constant upper bound in
756 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
757 force_constant_size (tmp
);
759 DECL_CONTEXT (tmp
) = fn
->decl
;
760 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
762 record_vars_into (tmp
, fn
->decl
);
765 /* Push the temporary variable TMP into the current binding. */
768 gimple_add_tmp_var (tree tmp
)
770 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
772 /* Later processing assumes that the object size is constant, which might
773 not be true at this point. Force the use of a constant upper bound in
775 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
776 force_constant_size (tmp
);
778 DECL_CONTEXT (tmp
) = current_function_decl
;
779 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
783 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
784 gimplify_ctxp
->temps
= tmp
;
786 /* Mark temporaries local within the nearest enclosing parallel. */
787 if (gimplify_omp_ctxp
)
789 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
790 int flag
= GOVD_LOCAL
| GOVD_SEEN
;
792 && (ctx
->region_type
== ORT_WORKSHARE
793 || ctx
->region_type
== ORT_TASKGROUP
794 || ctx
->region_type
== ORT_SIMD
795 || ctx
->region_type
== ORT_ACC
))
797 if (ctx
->region_type
== ORT_SIMD
798 && TREE_ADDRESSABLE (tmp
)
799 && !TREE_STATIC (tmp
))
801 if (TREE_CODE (DECL_SIZE_UNIT (tmp
)) != INTEGER_CST
)
802 ctx
->add_safelen1
= true;
803 else if (ctx
->in_for_exprs
)
806 flag
= GOVD_PRIVATE
| GOVD_SEEN
;
809 ctx
= ctx
->outer_context
;
812 omp_add_variable (ctx
, tmp
, flag
);
821 /* This case is for nested functions. We need to expose the locals
823 body_seq
= gimple_body (current_function_decl
);
824 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
830 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
831 nodes that are referenced more than once in GENERIC functions. This is
832 necessary because gimplification (translation into GIMPLE) is performed
833 by modifying tree nodes in-place, so gimplication of a shared node in a
834 first context could generate an invalid GIMPLE form in a second context.
836 This is achieved with a simple mark/copy/unmark algorithm that walks the
837 GENERIC representation top-down, marks nodes with TREE_VISITED the first
838 time it encounters them, duplicates them if they already have TREE_VISITED
839 set, and finally removes the TREE_VISITED marks it has set.
841 The algorithm works only at the function level, i.e. it generates a GENERIC
842 representation of a function with no nodes shared within the function when
843 passed a GENERIC function (except for nodes that are allowed to be shared).
845 At the global level, it is also necessary to unshare tree nodes that are
846 referenced in more than one function, for the same aforementioned reason.
847 This requires some cooperation from the front-end. There are 2 strategies:
849 1. Manual unsharing. The front-end needs to call unshare_expr on every
850 expression that might end up being shared across functions.
852 2. Deep unsharing. This is an extension of regular unsharing. Instead
853 of calling unshare_expr on expressions that might be shared across
854 functions, the front-end pre-marks them with TREE_VISITED. This will
855 ensure that they are unshared on the first reference within functions
856 when the regular unsharing algorithm runs. The counterpart is that
857 this algorithm must look deeper than for manual unsharing, which is
858 specified by LANG_HOOKS_DEEP_UNSHARING.
860 If there are only few specific cases of node sharing across functions, it is
861 probably easier for a front-end to unshare the expressions manually. On the
862 contrary, if the expressions generated at the global level are as widespread
863 as expressions generated within functions, deep unsharing is very likely the
866 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
867 These nodes model computations that must be done once. If we were to
868 unshare something like SAVE_EXPR(i++), the gimplification process would
869 create wrong code. However, if DATA is non-null, it must hold a pointer
870 set that is used to unshare the subtrees of these nodes. */
873 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
876 enum tree_code code
= TREE_CODE (t
);
878 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
879 copy their subtrees if we can make sure to do it only once. */
880 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
882 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
888 /* Stop at types, decls, constants like copy_tree_r. */
889 else if (TREE_CODE_CLASS (code
) == tcc_type
890 || TREE_CODE_CLASS (code
) == tcc_declaration
891 || TREE_CODE_CLASS (code
) == tcc_constant
)
894 /* Cope with the statement expression extension. */
895 else if (code
== STATEMENT_LIST
)
898 /* Leave the bulk of the work to copy_tree_r itself. */
900 copy_tree_r (tp
, walk_subtrees
, NULL
);
905 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
906 If *TP has been visited already, then *TP is deeply copied by calling
907 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
910 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
913 enum tree_code code
= TREE_CODE (t
);
915 /* Skip types, decls, and constants. But we do want to look at their
916 types and the bounds of types. Mark them as visited so we properly
917 unmark their subtrees on the unmark pass. If we've already seen them,
918 don't look down further. */
919 if (TREE_CODE_CLASS (code
) == tcc_type
920 || TREE_CODE_CLASS (code
) == tcc_declaration
921 || TREE_CODE_CLASS (code
) == tcc_constant
)
923 if (TREE_VISITED (t
))
926 TREE_VISITED (t
) = 1;
929 /* If this node has been visited already, unshare it and don't look
931 else if (TREE_VISITED (t
))
933 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
937 /* Otherwise, mark the node as visited and keep looking. */
939 TREE_VISITED (t
) = 1;
944 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
945 copy_if_shared_r callback unmodified. */
948 copy_if_shared (tree
*tp
, void *data
)
950 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
953 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
954 any nested functions. */
957 unshare_body (tree fndecl
)
959 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
960 /* If the language requires deep unsharing, we need a pointer set to make
961 sure we don't repeatedly unshare subtrees of unshareable nodes. */
962 hash_set
<tree
> *visited
963 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
965 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
966 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
967 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
972 for (cgn
= first_nested_function (cgn
); cgn
;
973 cgn
= next_nested_function (cgn
))
974 unshare_body (cgn
->decl
);
977 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
978 Subtrees are walked until the first unvisited node is encountered. */
981 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
985 /* If this node has been visited, unmark it and keep looking. */
986 if (TREE_VISITED (t
))
987 TREE_VISITED (t
) = 0;
989 /* Otherwise, don't look any deeper. */
996 /* Unmark the visited trees rooted at *TP. */
999 unmark_visited (tree
*tp
)
1001 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
1004 /* Likewise, but mark all trees as not visited. */
1007 unvisit_body (tree fndecl
)
1009 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
1011 unmark_visited (&DECL_SAVED_TREE (fndecl
));
1012 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
1013 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
1016 for (cgn
= first_nested_function (cgn
);
1017 cgn
; cgn
= next_nested_function (cgn
))
1018 unvisit_body (cgn
->decl
);
1021 /* Unconditionally make an unshared copy of EXPR. This is used when using
1022 stored expressions which span multiple functions, such as BINFO_VTABLE,
1023 as the normal unsharing process can't tell that they're shared. */
1026 unshare_expr (tree expr
)
1028 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1032 /* Worker for unshare_expr_without_location. */
1035 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
1038 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
1044 /* Similar to unshare_expr but also prune all expression locations
1048 unshare_expr_without_location (tree expr
)
1050 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1052 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
1056 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1057 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1058 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1059 EXPR is the location of the EXPR. */
1062 rexpr_location (tree expr
, location_t or_else
= UNKNOWN_LOCATION
)
1067 if (EXPR_HAS_LOCATION (expr
))
1068 return EXPR_LOCATION (expr
);
1070 if (TREE_CODE (expr
) != STATEMENT_LIST
)
1073 tree_stmt_iterator i
= tsi_start (expr
);
1076 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
1082 if (!found
|| !tsi_one_before_end_p (i
))
1085 return rexpr_location (tsi_stmt (i
), or_else
);
1088 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1089 rexpr_location for the potential recursion. */
1092 rexpr_has_location (tree expr
)
1094 return rexpr_location (expr
) != UNKNOWN_LOCATION
;
1098 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1099 contain statements and have a value. Assign its value to a temporary
1100 and give it void_type_node. Return the temporary, or NULL_TREE if
1101 WRAPPER was already void. */
1104 voidify_wrapper_expr (tree wrapper
, tree temp
)
1106 tree type
= TREE_TYPE (wrapper
);
1107 if (type
&& !VOID_TYPE_P (type
))
1111 /* Set p to point to the body of the wrapper. Loop until we find
1112 something that isn't a wrapper. */
1113 for (p
= &wrapper
; p
&& *p
; )
1115 switch (TREE_CODE (*p
))
1118 TREE_SIDE_EFFECTS (*p
) = 1;
1119 TREE_TYPE (*p
) = void_type_node
;
1120 /* For a BIND_EXPR, the body is operand 1. */
1121 p
= &BIND_EXPR_BODY (*p
);
1124 case CLEANUP_POINT_EXPR
:
1125 case TRY_FINALLY_EXPR
:
1126 case TRY_CATCH_EXPR
:
1127 TREE_SIDE_EFFECTS (*p
) = 1;
1128 TREE_TYPE (*p
) = void_type_node
;
1129 p
= &TREE_OPERAND (*p
, 0);
1132 case STATEMENT_LIST
:
1134 tree_stmt_iterator i
= tsi_last (*p
);
1135 TREE_SIDE_EFFECTS (*p
) = 1;
1136 TREE_TYPE (*p
) = void_type_node
;
1137 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1142 /* Advance to the last statement. Set all container types to
1144 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1146 TREE_SIDE_EFFECTS (*p
) = 1;
1147 TREE_TYPE (*p
) = void_type_node
;
1151 case TRANSACTION_EXPR
:
1152 TREE_SIDE_EFFECTS (*p
) = 1;
1153 TREE_TYPE (*p
) = void_type_node
;
1154 p
= &TRANSACTION_EXPR_BODY (*p
);
1158 /* Assume that any tree upon which voidify_wrapper_expr is
1159 directly called is a wrapper, and that its body is op0. */
1162 TREE_SIDE_EFFECTS (*p
) = 1;
1163 TREE_TYPE (*p
) = void_type_node
;
1164 p
= &TREE_OPERAND (*p
, 0);
1172 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1176 /* The wrapper is on the RHS of an assignment that we're pushing
1178 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1179 || TREE_CODE (temp
) == MODIFY_EXPR
);
1180 TREE_OPERAND (temp
, 1) = *p
;
1185 temp
= create_tmp_var (type
, "retval");
1186 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1195 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1196 a temporary through which they communicate. */
1199 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1203 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1204 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1205 gimple_call_set_lhs (*save
, tmp_var
);
1208 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1212 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1215 build_asan_poison_call_expr (tree decl
)
1217 /* Do not poison variables that have size equal to zero. */
1218 tree unit_size
= DECL_SIZE_UNIT (decl
);
1219 if (zerop (unit_size
))
1222 tree base
= build_fold_addr_expr (decl
);
1224 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1226 build_int_cst (integer_type_node
,
1231 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1232 on POISON flag, shadow memory of a DECL variable. The call will be
1233 put on location identified by IT iterator, where BEFORE flag drives
1234 position where the stmt will be put. */
1237 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1240 tree unit_size
= DECL_SIZE_UNIT (decl
);
1241 tree base
= build_fold_addr_expr (decl
);
1243 /* Do not poison variables that have size equal to zero. */
1244 if (zerop (unit_size
))
1247 /* It's necessary to have all stack variables aligned to ASAN granularity
1249 gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
1250 unsigned shadow_granularity
1251 = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE
: ASAN_SHADOW_GRANULARITY
;
1252 if (DECL_ALIGN_UNIT (decl
) <= shadow_granularity
)
1253 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* shadow_granularity
);
1255 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1258 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1259 build_int_cst (integer_type_node
, flags
),
1263 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1265 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1268 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1269 either poisons or unpoisons a DECL. Created statement is appended
1270 to SEQ_P gimple sequence. */
1273 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1275 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1276 bool before
= false;
1281 asan_poison_variable (decl
, poison
, &it
, before
);
1284 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1287 sort_by_decl_uid (const void *a
, const void *b
)
1289 const tree
*t1
= (const tree
*)a
;
1290 const tree
*t2
= (const tree
*)b
;
1292 int uid1
= DECL_UID (*t1
);
1293 int uid2
= DECL_UID (*t2
);
1297 else if (uid1
> uid2
)
1303 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1304 depending on POISON flag. Created statement is appended
1305 to SEQ_P gimple sequence. */
1308 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1310 unsigned c
= variables
->elements ();
1314 auto_vec
<tree
> sorted_variables (c
);
1316 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1317 it
!= variables
->end (); ++it
)
1318 sorted_variables
.safe_push (*it
);
1320 sorted_variables
.qsort (sort_by_decl_uid
);
1324 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1326 asan_poison_variable (var
, poison
, seq_p
);
1328 /* Add use_after_scope_memory attribute for the variable in order
1329 to prevent re-written into SSA. */
1330 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1331 DECL_ATTRIBUTES (var
)))
1332 DECL_ATTRIBUTES (var
)
1333 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1335 DECL_ATTRIBUTES (var
));
1339 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1341 static enum gimplify_status
1342 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1344 tree bind_expr
= *expr_p
;
1345 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1346 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1349 gimple_seq body
, cleanup
;
1351 location_t start_locus
= 0, end_locus
= 0;
1352 tree ret_clauses
= NULL
;
1354 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1356 /* Mark variables seen in this bind expr. */
1357 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1361 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1363 /* Mark variable as local. */
1364 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
))
1366 if (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1367 || splay_tree_lookup (ctx
->variables
,
1368 (splay_tree_key
) t
) == NULL
)
1370 int flag
= GOVD_LOCAL
;
1371 if (ctx
->region_type
== ORT_SIMD
1372 && TREE_ADDRESSABLE (t
)
1373 && !TREE_STATIC (t
))
1375 if (TREE_CODE (DECL_SIZE_UNIT (t
)) != INTEGER_CST
)
1376 ctx
->add_safelen1
= true;
1378 flag
= GOVD_PRIVATE
;
1380 omp_add_variable (ctx
, t
, flag
| GOVD_SEEN
);
1382 /* Static locals inside of target construct or offloaded
1383 routines need to be "omp declare target". */
1384 if (TREE_STATIC (t
))
1385 for (; ctx
; ctx
= ctx
->outer_context
)
1386 if ((ctx
->region_type
& ORT_TARGET
) != 0)
1388 if (!lookup_attribute ("omp declare target",
1389 DECL_ATTRIBUTES (t
)))
1391 tree id
= get_identifier ("omp declare target");
1393 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (t
));
1394 varpool_node
*node
= varpool_node::get (t
);
1397 node
->offloadable
= 1;
1398 if (ENABLE_OFFLOADING
&& !DECL_EXTERNAL (t
))
1400 g
->have_offload
= true;
1402 vec_safe_push (offload_vars
, t
);
1410 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1412 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1413 cfun
->has_local_explicit_reg_vars
= true;
1417 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1418 BIND_EXPR_BLOCK (bind_expr
));
1419 gimple_push_bind_expr (bind_stmt
);
1421 gimplify_ctxp
->keep_stack
= false;
1422 gimplify_ctxp
->save_stack
= false;
1424 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1426 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1427 gimple_bind_set_body (bind_stmt
, body
);
1429 /* Source location wise, the cleanup code (stack_restore and clobbers)
1430 belongs to the end of the block, so propagate what we have. The
1431 stack_save operation belongs to the beginning of block, which we can
1432 infer from the bind_expr directly if the block has no explicit
1434 if (BIND_EXPR_BLOCK (bind_expr
))
1436 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1437 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1439 if (start_locus
== 0)
1440 start_locus
= EXPR_LOCATION (bind_expr
);
1445 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1446 the stack space allocated to the VLAs. */
1447 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1449 gcall
*stack_restore
;
1451 /* Save stack on entry and restore it on exit. Add a try_finally
1452 block to achieve this. */
1453 build_stack_save_restore (&stack_save
, &stack_restore
);
1455 gimple_set_location (stack_save
, start_locus
);
1456 gimple_set_location (stack_restore
, end_locus
);
1458 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1461 /* Add clobbers for all variables that go out of scope. */
1462 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1465 && !is_global_var (t
)
1466 && DECL_CONTEXT (t
) == current_function_decl
)
1468 if (!DECL_HARD_REGISTER (t
)
1469 && !TREE_THIS_VOLATILE (t
)
1470 && !DECL_HAS_VALUE_EXPR_P (t
)
1471 /* Only care for variables that have to be in memory. Others
1472 will be rewritten into SSA names, hence moved to the
1474 && !is_gimple_reg (t
)
1475 && flag_stack_reuse
!= SR_NONE
)
1477 tree clobber
= build_clobber (TREE_TYPE (t
));
1478 gimple
*clobber_stmt
;
1479 clobber_stmt
= gimple_build_assign (t
, clobber
);
1480 gimple_set_location (clobber_stmt
, end_locus
);
1481 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1484 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1487 if (DECL_HAS_VALUE_EXPR_P (key
))
1489 key
= DECL_VALUE_EXPR (key
);
1490 if (TREE_CODE (key
) == INDIRECT_REF
)
1491 key
= TREE_OPERAND (key
, 0);
1493 tree
*c
= oacc_declare_returns
->get (key
);
1497 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1499 ret_clauses
= unshare_expr (*c
);
1501 oacc_declare_returns
->remove (key
);
1503 if (oacc_declare_returns
->is_empty ())
1505 delete oacc_declare_returns
;
1506 oacc_declare_returns
= NULL
;
1512 if (asan_poisoned_variables
!= NULL
1513 && asan_poisoned_variables
->contains (t
))
1515 asan_poisoned_variables
->remove (t
);
1516 asan_poison_variable (t
, true, &cleanup
);
1519 if (gimplify_ctxp
->live_switch_vars
!= NULL
1520 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1521 gimplify_ctxp
->live_switch_vars
->remove (t
);
1527 gimple_stmt_iterator si
= gsi_start (cleanup
);
1529 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1531 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1537 gimple_seq new_body
;
1540 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1541 GIMPLE_TRY_FINALLY
);
1544 gimplify_seq_add_stmt (&new_body
, stack_save
);
1545 gimplify_seq_add_stmt (&new_body
, gs
);
1546 gimple_bind_set_body (bind_stmt
, new_body
);
1549 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1550 if (!gimplify_ctxp
->keep_stack
)
1551 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1552 gimplify_ctxp
->save_stack
= old_save_stack
;
1554 gimple_pop_bind_expr ();
1556 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1564 *expr_p
= NULL_TREE
;
1568 /* Maybe add early return predict statement to PRE_P sequence. */
1571 maybe_add_early_return_predict_stmt (gimple_seq
*pre_p
)
1573 /* If we are not in a conditional context, add PREDICT statement. */
1574 if (gimple_conditional_context ())
1576 gimple
*predict
= gimple_build_predict (PRED_TREE_EARLY_RETURN
,
1578 gimplify_seq_add_stmt (pre_p
, predict
);
1582 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1583 GIMPLE value, it is assigned to a new temporary and the statement is
1584 re-written to return the temporary.
1586 PRE_P points to the sequence where side effects that must happen before
1587 STMT should be stored. */
1589 static enum gimplify_status
1590 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1593 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1594 tree result_decl
, result
;
1596 if (ret_expr
== error_mark_node
)
1600 || TREE_CODE (ret_expr
) == RESULT_DECL
)
1602 maybe_add_early_return_predict_stmt (pre_p
);
1603 greturn
*ret
= gimple_build_return (ret_expr
);
1604 copy_warning (ret
, stmt
);
1605 gimplify_seq_add_stmt (pre_p
, ret
);
1609 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1610 result_decl
= NULL_TREE
;
1611 else if (TREE_CODE (ret_expr
) == COMPOUND_EXPR
)
1613 /* Used in C++ for handling EH cleanup of the return value if a local
1614 cleanup throws. Assume the front-end knows what it's doing. */
1615 result_decl
= DECL_RESULT (current_function_decl
);
1616 /* But crash if we end up trying to modify ret_expr below. */
1617 ret_expr
= NULL_TREE
;
1621 result_decl
= TREE_OPERAND (ret_expr
, 0);
1623 /* See through a return by reference. */
1624 if (TREE_CODE (result_decl
) == INDIRECT_REF
)
1625 result_decl
= TREE_OPERAND (result_decl
, 0);
1627 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1628 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1629 && TREE_CODE (result_decl
) == RESULT_DECL
);
1632 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1633 Recall that aggregate_value_p is FALSE for any aggregate type that is
1634 returned in registers. If we're returning values in registers, then
1635 we don't want to extend the lifetime of the RESULT_DECL, particularly
1636 across another call. In addition, for those aggregates for which
1637 hard_function_value generates a PARALLEL, we'll die during normal
1638 expansion of structure assignments; there's special code in expand_return
1639 to handle this case that does not exist in expand_expr. */
1642 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1644 if (!poly_int_tree_p (DECL_SIZE (result_decl
)))
1646 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1647 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1648 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1649 should be effectively allocated by the caller, i.e. all calls to
1650 this function must be subject to the Return Slot Optimization. */
1651 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1652 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1654 result
= result_decl
;
1656 else if (gimplify_ctxp
->return_temp
)
1657 result
= gimplify_ctxp
->return_temp
;
1660 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1662 /* ??? With complex control flow (usually involving abnormal edges),
1663 we can wind up warning about an uninitialized value for this. Due
1664 to how this variable is constructed and initialized, this is never
1665 true. Give up and never warn. */
1666 suppress_warning (result
, OPT_Wuninitialized
);
1668 gimplify_ctxp
->return_temp
= result
;
1671 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1672 Then gimplify the whole thing. */
1673 if (result
!= result_decl
)
1674 TREE_OPERAND (ret_expr
, 0) = result
;
1676 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1678 maybe_add_early_return_predict_stmt (pre_p
);
1679 ret
= gimple_build_return (result
);
1680 copy_warning (ret
, stmt
);
1681 gimplify_seq_add_stmt (pre_p
, ret
);
1686 /* Gimplify a variable-length array DECL. */
1689 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1691 /* This is a variable-sized decl. Simplify its size and mark it
1692 for deferred expansion. */
1693 tree t
, addr
, ptr_type
;
1695 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1696 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1698 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1699 if (DECL_HAS_VALUE_EXPR_P (decl
))
1702 /* All occurrences of this decl in final gimplified code will be
1703 replaced by indirection. Setting DECL_VALUE_EXPR does two
1704 things: First, it lets the rest of the gimplifier know what
1705 replacement to use. Second, it lets the debug info know
1706 where to find the value. */
1707 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1708 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1709 DECL_IGNORED_P (addr
) = 0;
1710 t
= build_fold_indirect_ref (addr
);
1711 TREE_THIS_NOTRAP (t
) = 1;
1712 SET_DECL_VALUE_EXPR (decl
, t
);
1713 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1715 t
= build_alloca_call_expr (DECL_SIZE_UNIT (decl
), DECL_ALIGN (decl
),
1716 max_int_size_in_bytes (TREE_TYPE (decl
)));
1717 /* The call has been built for a variable-sized object. */
1718 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1719 t
= fold_convert (ptr_type
, t
);
1720 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1722 gimplify_and_add (t
, seq_p
);
1724 /* Record the dynamic allocation associated with DECL if requested. */
1725 if (flag_callgraph_info
& CALLGRAPH_INFO_DYNAMIC_ALLOC
)
1726 record_dynamic_alloc (decl
);
1729 /* A helper function to be called via walk_tree. Mark all labels under *TP
1730 as being forced. To be called for DECL_INITIAL of static variables. */
1733 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1737 if (TREE_CODE (*tp
) == LABEL_DECL
)
1739 FORCED_LABEL (*tp
) = 1;
1740 cfun
->has_forced_label_in_static
= 1;
1746 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1747 and initialization explicit. */
1749 static enum gimplify_status
1750 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
1752 tree stmt
= *stmt_p
;
1753 tree decl
= DECL_EXPR_DECL (stmt
);
1755 *stmt_p
= NULL_TREE
;
1757 if (TREE_TYPE (decl
) == error_mark_node
)
1760 if ((TREE_CODE (decl
) == TYPE_DECL
1762 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
1764 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
1765 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1766 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
1769 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1770 in case its size expressions contain problematic nodes like CALL_EXPR. */
1771 if (TREE_CODE (decl
) == TYPE_DECL
1772 && DECL_ORIGINAL_TYPE (decl
)
1773 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
1775 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
1776 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
1777 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
1780 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
1782 tree init
= DECL_INITIAL (decl
);
1783 bool is_vla
= false;
1786 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl
), &size
)
1787 || (!TREE_STATIC (decl
)
1788 && flag_stack_check
== GENERIC_STACK_CHECK
1790 (unsigned HOST_WIDE_INT
) STACK_CHECK_MAX_VAR_SIZE
)))
1792 gimplify_vla_decl (decl
, seq_p
);
1796 if (asan_poisoned_variables
1798 && TREE_ADDRESSABLE (decl
)
1799 && !TREE_STATIC (decl
)
1800 && !DECL_HAS_VALUE_EXPR_P (decl
)
1801 && DECL_ALIGN (decl
) <= MAX_SUPPORTED_STACK_ALIGNMENT
1802 && dbg_cnt (asan_use_after_scope
)
1803 && !gimplify_omp_ctxp
1804 /* GNAT introduces temporaries to hold return values of calls in
1805 initializers of variables defined in other units, so the
1806 declaration of the variable is discarded completely. We do not
1807 want to issue poison calls for such dropped variables. */
1808 && (DECL_SEEN_IN_BIND_EXPR_P (decl
)
1809 || (DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)))
1811 asan_poisoned_variables
->add (decl
);
1812 asan_poison_variable (decl
, false, seq_p
);
1813 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
1814 gimplify_ctxp
->live_switch_vars
->add (decl
);
1817 /* Some front ends do not explicitly declare all anonymous
1818 artificial variables. We compensate here by declaring the
1819 variables, though it would be better if the front ends would
1820 explicitly declare them. */
1821 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
1822 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
1823 gimple_add_tmp_var (decl
);
1825 if (init
&& init
!= error_mark_node
)
1827 if (!TREE_STATIC (decl
))
1829 DECL_INITIAL (decl
) = NULL_TREE
;
1830 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
1831 gimplify_and_add (init
, seq_p
);
1833 /* Clear TREE_READONLY if we really have an initialization. */
1834 if (!DECL_INITIAL (decl
) && !omp_is_reference (decl
))
1835 TREE_READONLY (decl
) = 0;
1838 /* We must still examine initializers for static variables
1839 as they may contain a label address. */
1840 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
1847 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1848 and replacing the LOOP_EXPR with goto, but if the loop contains an
1849 EXIT_EXPR, we need to append a label for it to jump to. */
1851 static enum gimplify_status
1852 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1854 tree saved_label
= gimplify_ctxp
->exit_label
;
1855 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
1857 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
1859 gimplify_ctxp
->exit_label
= NULL_TREE
;
1861 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
1863 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
1865 if (gimplify_ctxp
->exit_label
)
1866 gimplify_seq_add_stmt (pre_p
,
1867 gimple_build_label (gimplify_ctxp
->exit_label
));
1869 gimplify_ctxp
->exit_label
= saved_label
;
1875 /* Gimplify a statement list onto a sequence. These may be created either
1876 by an enlightened front-end, or by shortcut_cond_expr. */
1878 static enum gimplify_status
1879 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
1881 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
1883 tree_stmt_iterator i
= tsi_start (*expr_p
);
1885 while (!tsi_end_p (i
))
1887 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
1900 /* Callback for walk_gimple_seq. */
1903 warn_switch_unreachable_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
1904 struct walk_stmt_info
*wi
)
1906 gimple
*stmt
= gsi_stmt (*gsi_p
);
1908 *handled_ops_p
= true;
1909 switch (gimple_code (stmt
))
1912 /* A compiler-generated cleanup or a user-written try block.
1913 If it's empty, don't dive into it--that would result in
1914 worse location info. */
1915 if (gimple_try_eval (stmt
) == NULL
)
1918 return integer_zero_node
;
1923 case GIMPLE_EH_FILTER
:
1924 case GIMPLE_TRANSACTION
:
1925 /* Walk the sub-statements. */
1926 *handled_ops_p
= false;
1930 /* Ignore these. We may generate them before declarations that
1931 are never executed. If there's something to warn about,
1932 there will be non-debug stmts too, and we'll catch those. */
1936 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
1938 *handled_ops_p
= false;
1943 /* Save the first "real" statement (not a decl/lexical scope/...). */
1945 return integer_zero_node
;
1950 /* Possibly warn about unreachable statements between switch's controlling
1951 expression and the first case. SEQ is the body of a switch expression. */
1954 maybe_warn_switch_unreachable (gimple_seq seq
)
1956 if (!warn_switch_unreachable
1957 /* This warning doesn't play well with Fortran when optimizations
1959 || lang_GNU_Fortran ()
1963 struct walk_stmt_info wi
;
1964 memset (&wi
, 0, sizeof (wi
));
1965 walk_gimple_seq (seq
, warn_switch_unreachable_r
, NULL
, &wi
);
1966 gimple
*stmt
= (gimple
*) wi
.info
;
1968 if (stmt
&& gimple_code (stmt
) != GIMPLE_LABEL
)
1970 if (gimple_code (stmt
) == GIMPLE_GOTO
1971 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
1972 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
1973 /* Don't warn for compiler-generated gotos. These occur
1974 in Duff's devices, for example. */;
1976 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
1977 "statement will never be executed");
1982 /* A label entry that pairs label and a location. */
1989 /* Find LABEL in vector of label entries VEC. */
1991 static struct label_entry
*
1992 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
1995 struct label_entry
*l
;
1997 FOR_EACH_VEC_ELT (*vec
, i
, l
)
1998 if (l
->label
== label
)
2003 /* Return true if LABEL, a LABEL_DECL, represents a case label
2004 in a vector of labels CASES. */
2007 case_label_p (const vec
<tree
> *cases
, tree label
)
2012 FOR_EACH_VEC_ELT (*cases
, i
, l
)
2013 if (CASE_LABEL (l
) == label
)
2018 /* Find the last nondebug statement in a scope STMT. */
2021 last_stmt_in_scope (gimple
*stmt
)
2026 switch (gimple_code (stmt
))
2030 gbind
*bind
= as_a
<gbind
*> (stmt
);
2031 stmt
= gimple_seq_last_nondebug_stmt (gimple_bind_body (bind
));
2032 return last_stmt_in_scope (stmt
);
2037 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
2038 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt
));
2039 gimple
*last_eval
= last_stmt_in_scope (stmt
);
2040 if (gimple_stmt_may_fallthru (last_eval
)
2041 && (last_eval
== NULL
2042 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
2043 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
2045 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt
));
2046 return last_stmt_in_scope (stmt
);
2060 /* Collect interesting labels in LABELS and return the statement preceding
2061 another case label, or a user-defined label. Store a location useful
2062 to give warnings at *PREVLOC (usually the location of the returned
2063 statement or of its surrounding scope). */
2066 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
2067 auto_vec
<struct label_entry
> *labels
,
2068 location_t
*prevloc
)
2070 gimple
*prev
= NULL
;
2072 *prevloc
= UNKNOWN_LOCATION
;
2075 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
)
2077 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2078 which starts on a GIMPLE_SWITCH and ends with a break label.
2079 Handle that as a single statement that can fall through. */
2080 gbind
*bind
= as_a
<gbind
*> (gsi_stmt (*gsi_p
));
2081 gimple
*first
= gimple_seq_first_stmt (gimple_bind_body (bind
));
2082 gimple
*last
= gimple_seq_last_stmt (gimple_bind_body (bind
));
2084 && gimple_code (first
) == GIMPLE_SWITCH
2085 && gimple_code (last
) == GIMPLE_LABEL
)
2087 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2088 if (SWITCH_BREAK_LABEL_P (label
))
2096 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
2097 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
2099 /* Nested scope. Only look at the last statement of
2100 the innermost scope. */
2101 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
2102 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
2106 /* It might be a label without a location. Use the
2107 location of the scope then. */
2108 if (!gimple_has_location (prev
))
2109 *prevloc
= bind_loc
;
2115 /* Ifs are tricky. */
2116 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
2118 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
2119 tree false_lab
= gimple_cond_false_label (cond_stmt
);
2120 location_t if_loc
= gimple_location (cond_stmt
);
2123 if (i > 1) goto <D.2259>; else goto D;
2124 we can't do much with the else-branch. */
2125 if (!DECL_ARTIFICIAL (false_lab
))
2128 /* Go on until the false label, then one step back. */
2129 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
2131 gimple
*stmt
= gsi_stmt (*gsi_p
);
2132 if (gimple_code (stmt
) == GIMPLE_LABEL
2133 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
2137 /* Not found? Oops. */
2138 if (gsi_end_p (*gsi_p
))
2141 struct label_entry l
= { false_lab
, if_loc
};
2142 labels
->safe_push (l
);
2144 /* Go to the last statement of the then branch. */
2147 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2153 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
2154 && !gimple_has_location (gsi_stmt (*gsi_p
)))
2156 /* Look at the statement before, it might be
2157 attribute fallthrough, in which case don't warn. */
2159 bool fallthru_before_dest
2160 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
2162 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
2163 if (!fallthru_before_dest
)
2165 struct label_entry l
= { goto_dest
, if_loc
};
2166 labels
->safe_push (l
);
2169 /* And move back. */
2173 /* Remember the last statement. Skip labels that are of no interest
2175 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2177 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
2178 if (find_label_entry (labels
, label
))
2179 prev
= gsi_stmt (*gsi_p
);
2181 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
2183 else if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_PREDICT
)
2185 else if (!is_gimple_debug (gsi_stmt (*gsi_p
)))
2186 prev
= gsi_stmt (*gsi_p
);
2189 while (!gsi_end_p (*gsi_p
)
2190 /* Stop if we find a case or a user-defined label. */
2191 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
2192 || !gimple_has_location (gsi_stmt (*gsi_p
))));
2194 if (prev
&& gimple_has_location (prev
))
2195 *prevloc
= gimple_location (prev
);
2199 /* Return true if the switch fallthough warning should occur. LABEL is
2200 the label statement that we're falling through to. */
2203 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
2205 gimple_stmt_iterator gsi
= *gsi_p
;
2207 /* Don't warn if the label is marked with a "falls through" comment. */
2208 if (FALLTHROUGH_LABEL_P (label
))
2211 /* Don't warn for non-case labels followed by a statement:
2216 as these are likely intentional. */
2217 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
2220 while (!gsi_end_p (gsi
)
2221 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2222 && (l
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi
))))
2223 && !case_label_p (&gimplify_ctxp
->case_labels
, l
))
2224 gsi_next_nondebug (&gsi
);
2225 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2229 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2230 immediately breaks. */
2233 /* Skip all immediately following labels. */
2234 while (!gsi_end_p (gsi
)
2235 && (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2236 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_PREDICT
))
2237 gsi_next_nondebug (&gsi
);
2239 /* { ... something; default:; } */
2241 /* { ... something; default: break; } or
2242 { ... something; default: goto L; } */
2243 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2244 /* { ... something; default: return; } */
2245 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
2251 /* Callback for walk_gimple_seq. */
2254 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2255 struct walk_stmt_info
*)
2257 gimple
*stmt
= gsi_stmt (*gsi_p
);
2259 *handled_ops_p
= true;
2260 switch (gimple_code (stmt
))
2265 case GIMPLE_EH_FILTER
:
2266 case GIMPLE_TRANSACTION
:
2267 /* Walk the sub-statements. */
2268 *handled_ops_p
= false;
2271 /* Find a sequence of form:
2278 and possibly warn. */
2281 /* Found a label. Skip all immediately following labels. */
2282 while (!gsi_end_p (*gsi_p
)
2283 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2284 gsi_next_nondebug (gsi_p
);
2286 /* There might be no more statements. */
2287 if (gsi_end_p (*gsi_p
))
2288 return integer_zero_node
;
2290 /* Vector of labels that fall through. */
2291 auto_vec
<struct label_entry
> labels
;
2293 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
, &prevloc
);
2295 /* There might be no more statements. */
2296 if (gsi_end_p (*gsi_p
))
2297 return integer_zero_node
;
2299 gimple
*next
= gsi_stmt (*gsi_p
);
2301 /* If what follows is a label, then we may have a fallthrough. */
2302 if (gimple_code (next
) == GIMPLE_LABEL
2303 && gimple_has_location (next
)
2304 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2307 struct label_entry
*l
;
2308 bool warned_p
= false;
2309 auto_diagnostic_group d
;
2310 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
2312 else if (gimple_code (prev
) == GIMPLE_LABEL
2313 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2314 && (l
= find_label_entry (&labels
, label
)))
2315 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2316 "this statement may fall through");
2317 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2318 /* Try to be clever and don't warn when the statement
2319 can't actually fall through. */
2320 && gimple_stmt_may_fallthru (prev
)
2321 && prevloc
!= UNKNOWN_LOCATION
)
2322 warned_p
= warning_at (prevloc
,
2323 OPT_Wimplicit_fallthrough_
,
2324 "this statement may fall through");
2326 inform (gimple_location (next
), "here");
2328 /* Mark this label as processed so as to prevent multiple
2329 warnings in nested switches. */
2330 FALLTHROUGH_LABEL_P (label
) = true;
2332 /* So that next warn_implicit_fallthrough_r will start looking for
2333 a new sequence starting with this label. */
2344 /* Warn when a switch case falls through. */
2347 maybe_warn_implicit_fallthrough (gimple_seq seq
)
2349 if (!warn_implicit_fallthrough
)
2352 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2355 || lang_GNU_OBJC ()))
2358 struct walk_stmt_info wi
;
2359 memset (&wi
, 0, sizeof (wi
));
2360 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
2363 /* Callback for walk_gimple_seq. */
2366 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2367 struct walk_stmt_info
*wi
)
2369 gimple
*stmt
= gsi_stmt (*gsi_p
);
2371 *handled_ops_p
= true;
2372 switch (gimple_code (stmt
))
2377 case GIMPLE_EH_FILTER
:
2378 case GIMPLE_TRANSACTION
:
2379 /* Walk the sub-statements. */
2380 *handled_ops_p
= false;
2383 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2385 gsi_remove (gsi_p
, true);
2386 if (gsi_end_p (*gsi_p
))
2388 *static_cast<location_t
*>(wi
->info
) = gimple_location (stmt
);
2389 return integer_zero_node
;
2393 location_t loc
= gimple_location (stmt
);
2395 gimple_stmt_iterator gsi2
= *gsi_p
;
2396 stmt
= gsi_stmt (gsi2
);
2397 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2399 /* Go on until the artificial label. */
2400 tree goto_dest
= gimple_goto_dest (stmt
);
2401 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2403 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2404 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2409 /* Not found? Stop. */
2410 if (gsi_end_p (gsi2
))
2413 /* Look one past it. */
2417 /* We're looking for a case label or default label here. */
2418 while (!gsi_end_p (gsi2
))
2420 stmt
= gsi_stmt (gsi2
);
2421 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2423 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2424 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2430 else if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2432 else if (!is_gimple_debug (stmt
))
2433 /* Anything else is not expected. */
2438 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2439 "a case label or default label");
2448 /* Expand all FALLTHROUGH () calls in SEQ. */
2451 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2453 struct walk_stmt_info wi
;
2455 memset (&wi
, 0, sizeof (wi
));
2456 wi
.info
= (void *) &loc
;
2457 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2458 if (wi
.callback_result
== integer_zero_node
)
2459 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2460 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2461 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2462 "a case label or default label");
2466 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
/* Lowers a GENERIC SWITCH_EXPR into a GIMPLE_SWITCH followed by the
   gimplified body, collecting the case labels seen while gimplifying the
   body via gimplify_ctxp->case_labels.
   NOTE(review): this extract has dropped lines (e.g. the declaration of
   LABELS used below, several returns and braces); the comments here only
   describe what the visible code shows.  */
2469 static enum gimplify_status
2470 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2472 tree switch_expr
= *expr_p
;
2473 gimple_seq switch_body_seq
= NULL
;
2474 enum gimplify_status ret
;
/* The controlling expression's type; fall back to the type of the
   SWITCH_COND operand when the SWITCH_EXPR itself carries none.  */
2475 tree index_type
= TREE_TYPE (switch_expr
);
2476 if (index_type
== NULL_TREE
)
2477 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
/* Gimplify the condition to a GIMPLE value first; side effects go to
   PRE_P.  */
2479 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2481 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2484 if (SWITCH_BODY (switch_expr
))
2487 vec
<tree
> saved_labels
;
2488 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2489 tree default_case
= NULL_TREE
;
2490 gswitch
*switch_stmt
;
2492 /* Save old labels, get new ones from body, then restore the old
2493 labels. Save all the things from the switch body to append after. */
2494 saved_labels
= gimplify_ctxp
->case_labels
;
2495 gimplify_ctxp
->case_labels
.create (8);
2497 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2498 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2499 tree_code body_type
= TREE_CODE (SWITCH_BODY (switch_expr
));
2500 if (body_type
== BIND_EXPR
|| body_type
== STATEMENT_LIST
)
2501 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2503 gimplify_ctxp
->live_switch_vars
= NULL
;
/* Track nesting so only the outermost switch runs expand_FALLTHROUGH
   below.  */
2505 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2506 gimplify_ctxp
->in_switch_expr
= true;
2508 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2510 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2511 maybe_warn_switch_unreachable (switch_body_seq
);
2512 maybe_warn_implicit_fallthrough (switch_body_seq
);
2513 /* Only do this for the outermost GIMPLE_SWITCH. */
2514 if (!gimplify_ctxp
->in_switch_expr
)
2515 expand_FALLTHROUGH (&switch_body_seq
);
2517 labels
= gimplify_ctxp
->case_labels
;
2518 gimplify_ctxp
->case_labels
= saved_labels
;
/* The live_switch_vars set must have been drained by the body's
   gimplification before we restore the outer one.  */
2520 if (gimplify_ctxp
->live_switch_vars
)
2522 gcc_assert (gimplify_ctxp
->live_switch_vars
->is_empty ());
2523 delete gimplify_ctxp
->live_switch_vars
;
2525 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2527 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2530 bool add_bind
= false;
/* No default label was found: synthesize an artificial one so the
   GIMPLE_SWITCH is well-formed.  */
2533 glabel
*new_default
;
2536 = build_case_label (NULL_TREE
, NULL_TREE
,
2537 create_artificial_label (UNKNOWN_LOCATION
));
2538 if (old_in_switch_expr
)
2540 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case
)) = 1;
2543 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2544 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2546 else if (old_in_switch_expr
)
2548 gimple
*last
= gimple_seq_last_stmt (switch_body_seq
);
2549 if (last
&& gimple_code (last
) == GIMPLE_LABEL
)
2551 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2552 if (SWITCH_BREAK_LABEL_P (label
))
2557 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2558 default_case
, labels
);
2559 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2560 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2561 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2562 so that we can easily find the start and end of the switch
2566 gimple_seq bind_body
= NULL
;
2567 gimplify_seq_add_stmt (&bind_body
, switch_stmt
);
2568 gimple_seq_add_seq (&bind_body
, switch_body_seq
);
2569 gbind
*bind
= gimple_build_bind (NULL_TREE
, bind_body
, NULL_TREE
);
2570 gimple_set_location (bind
, EXPR_LOCATION (switch_expr
));
2571 gimplify_seq_add_stmt (pre_p
, bind
);
/* Otherwise emit the switch statement and its body directly into
   PRE_P.  */
2575 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
2576 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
2586 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
/* Emits a GIMPLE_LABEL for the LABEL_EXPR's LABEL_DECL into PRE_P, and,
   when the label carries a "cold" or "hot" attribute, a matching
   GIMPLE_PREDICT hint.  */
2588 static enum gimplify_status
2589 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
/* Labels may only be referenced within the function that declared
   them.  */
2591 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
== current_function_decl
);
2594 tree label
= LABEL_EXPR_LABEL (*expr_p
);
2595 glabel
*label_stmt
= gimple_build_label (label
);
2596 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2597 gimplify_seq_add_stmt (pre_p
, label_stmt
);
/* Translate the "cold"/"hot" label attributes into branch-prediction
   statements.  NOTE(review): the trailing arguments of the
   gimple_build_predict calls are missing from this extract.  */
2599 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2600 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2602 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2603 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2609 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
/* Emits a GIMPLE_LABEL for the case label into PRE_P and records the
   CASE_LABEL_EXPR in the innermost gimplify context that is collecting
   case labels (i.e. the enclosing switch).
   NOTE(review): the declaration of LABEL_STMT is missing from this
   extract.  */
2611 static enum gimplify_status
2612 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2614 struct gimplify_ctx
*ctxp
;
2617 /* Invalid programs can play Duff's Device type games with, for example,
2618 #pragma omp parallel. At least in the C front end, we don't
2619 detect such invalid branches until after gimplification, in the
2620 diagnose_omp_blocks pass. */
/* Find the nearest context whose case_labels vec exists -- that is the
   switch this case belongs to.  */
2621 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
2622 if (ctxp
->case_labels
.exists ())
2625 tree label
= CASE_LABEL (*expr_p
);
2626 label_stmt
= gimple_build_label (label
);
2627 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2628 ctxp
->case_labels
.safe_push (*expr_p
);
2629 gimplify_seq_add_stmt (pre_p
, label_stmt
);
/* As in gimplify_label_expr, honor "cold"/"hot" attributes with
   prediction hints.  NOTE(review): trailing arguments of the
   gimple_build_predict calls are missing from this extract.  */
2631 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2632 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2634 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2635 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2641 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
/* If LABEL_P is NULL there is no jump target and the caller gets no GOTO;
   if *LABEL_P is still NULL_TREE an artificial label is created and
   (presumably -- the assignment line is missing from this extract)
   stored back through LABEL_P before the GOTO_EXPR is built.  */
2645 build_and_jump (tree
*label_p
)
2647 if (label_p
== NULL
)
2648 /* If there's nowhere to jump, just fall through. */
2651 if (*label_p
== NULL_TREE
)
2653 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
2657 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
2660 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2661 This also involves building a label to jump to and communicating it to
2662 gimplify_loop_expr through gimplify_ctxp->exit_label. */
/* NOTE(review): the declaration of EXPR and the final store/return are
   missing from this extract.  */
2664 static enum gimplify_status
2665 gimplify_exit_expr (tree
*expr_p
)
/* Operand 0 of an EXIT_EXPR is the loop-exit condition.  */
2667 tree cond
= TREE_OPERAND (*expr_p
, 0);
/* Jump to the shared exit label of the enclosing loop context, guarded
   by COND.  */
2670 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
2671 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
2677 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2678 different from its canonical type, wrap the whole thing inside a
2679 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2682 The canonical type of a COMPONENT_REF is the type of the field being
2683 referenced--unless the field is a bit-field which can be read directly
2684 in a smaller mode, in which case the canonical type is the
2685 sign-appropriate type corresponding to that mode. */
/* NOTE(review): declarations of TYPE and TYPE_QUALS, plus the NOP_EXPR
   wrapping mentioned above, are missing from this extract.  */
2688 canonicalize_component_ref (tree
*expr_p
)
2690 tree expr
= *expr_p
;
2693 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
/* For integral types get_unwidened finds the narrower bit-field read
   mode; otherwise the canonical type is simply the field's type
   (operand 1 is the FIELD_DECL).  */
2695 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
2696 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
2698 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
2700 /* One could argue that all the stuff below is not necessary for
2701 the non-bitfield case and declare it a FE error if type
2702 adjustment would be needed. */
2703 if (TREE_TYPE (expr
) != type
)
2705 #ifdef ENABLE_TYPES_CHECKING
2706 tree old_type
= TREE_TYPE (expr
);
2710 /* We need to preserve qualifiers and propagate them from
/* ... the containing object (operand 0) onto the canonical type.  */
2712 type_quals
= TYPE_QUALS (type
)
2713 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
2714 if (TYPE_QUALS (type
) != type_quals
)
2715 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
2717 /* Set the type of the COMPONENT_REF to the underlying type. */
2718 TREE_TYPE (expr
) = type
;
2720 #ifdef ENABLE_TYPES_CHECKING
2721 /* It is now a FE error, if the conversion from the canonical
2722 type to the original expression type is not useless. */
2723 gcc_assert (useless_type_conversion_p (old_type
, type
));
2728 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2729 to foo, embed that change in the ADDR_EXPR by converting
2734 where L is the lower bound. For simplicity, only do this for constant
2736 The constraint is that the type of &array[L] is trivially convertible
/* i.e. rewrite (foo *)&array  as  &array[L] when all the bounds and
   sizes involved are INTEGER_CSTs.  Bails out (early returns are
   missing from this extract) whenever any precondition fails.  */
2740 canonicalize_addr_expr (tree
*expr_p
)
2742 tree expr
= *expr_p
;
2743 tree addr_expr
= TREE_OPERAND (expr
, 0);
2744 tree datype
, ddatype
, pddatype
;
2746 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2747 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
2748 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
2751 /* The addr_expr type should be a pointer to an array. */
2752 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
2753 if (TREE_CODE (datype
) != ARRAY_TYPE
)
2756 /* The pointer to element type shall be trivially convertible to
2757 the expression pointer type. */
2758 ddatype
= TREE_TYPE (datype
);
2759 pddatype
= build_pointer_type (ddatype
);
2760 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
2764 /* The lower bound and element sizes must be constant. */
2765 if (!TYPE_SIZE_UNIT (ddatype
)
2766 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
2767 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
2768 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
2771 /* All checks succeeded. Build a new node to merge the cast. */
/* &array[TYPE_MIN_VALUE], i.e. the address of the first element.  */
2772 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
2773 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
2774 NULL_TREE
, NULL_TREE
);
2775 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
2777 /* We can have stripped a required restrict qualifier above. */
2778 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
2779 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
2782 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2783 underneath as appropriate. */
/* Strips redundant conversions, canonicalizes COMPONENT_REF/ADDR_EXPR
   operands to expose more of them, forces aggregate-typed conversions
   into VIEW_CONVERT_EXPRs, and normalizes CONVERT_EXPR to NOP_EXPR.  */
2785 static enum gimplify_status
2786 gimplify_conversion (tree
*expr_p
)
2788 location_t loc
= EXPR_LOCATION (*expr_p
);
2789 gcc_assert (CONVERT_EXPR_P (*expr_p
));
2791 /* Then strip away all but the outermost conversion. */
2792 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
2794 /* And remove the outermost conversion if it's useless. */
2795 if (tree_ssa_useless_type_conversion (*expr_p
))
2796 *expr_p
= TREE_OPERAND (*expr_p
, 0);
2798 /* If we still have a conversion at the toplevel,
2799 then canonicalize some constructs. */
2800 if (CONVERT_EXPR_P (*expr_p
))
2802 tree sub
= TREE_OPERAND (*expr_p
, 0);
2804 /* If a NOP conversion is changing the type of a COMPONENT_REF
2805 expression, then canonicalize its type now in order to expose more
2806 redundant conversions. */
2807 if (TREE_CODE (sub
) == COMPONENT_REF
)
2808 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
2810 /* If a NOP conversion is changing a pointer to array of foo
2811 to a pointer to foo, embed that change in the ADDR_EXPR. */
2812 else if (TREE_CODE (sub
) == ADDR_EXPR
)
2813 canonicalize_addr_expr (expr_p
);
2816 /* If we have a conversion to a non-register type force the
2817 use of a VIEW_CONVERT_EXPR instead. */
2818 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
2819 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
2820 TREE_OPERAND (*expr_p
, 0));
2822 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2823 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
2824 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
2829 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2830 DECL_VALUE_EXPR, and it's worth re-examining things. */
/* NOTE(review): several condition/return lines are missing from this
   extract (e.g. the first operand of the leaked-local test).  */
2832 static enum gimplify_status
2833 gimplify_var_or_parm_decl (tree
*expr_p
)
2835 tree decl
= *expr_p
;
2837 /* ??? If this is a local variable, and it has not been seen in any
2838 outer BIND_EXPR, then it's probably the result of a duplicate
2839 declaration, for which we've already issued an error. It would
2840 be really nice if the front end wouldn't leak these at all.
2841 Currently the only known culprit is C++ destructors, as seen
2842 in g++.old-deja/g++.jason/binding.C. */
2844 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
2845 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
2846 && decl_function_context (decl
) == current_function_decl
)
/* Such a leaked decl is only tolerated after an error has already been
   reported.  */
2848 gcc_assert (seen_error ());
2852 /* When within an OMP context, notice uses of variables. */
2853 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
2856 /* If the decl is an alias for another expression, substitute it now. */
2857 if (DECL_HAS_VALUE_EXPR_P (decl
))
/* Unshare so later gimplification may modify the copy freely.  */
2859 *expr_p
= unshare_expr (DECL_VALUE_EXPR (decl
));
2866 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
/* Recomputes the flag bottom-up from T's operands for the expression
   classes handled below; pre/post inc/dec always have side effects.
   NOTE(review): the declaration of I and several break/default lines are
   missing from this extract.  */
2869 recalculate_side_effects (tree t
)
2871 enum tree_code code
= TREE_CODE (t
);
2872 int len
= TREE_OPERAND_LENGTH (t
);
2875 switch (TREE_CODE_CLASS (code
))
2877 case tcc_expression
:
2883 case PREDECREMENT_EXPR
:
2884 case PREINCREMENT_EXPR
:
2885 case POSTDECREMENT_EXPR
:
2886 case POSTINCREMENT_EXPR
:
2887 /* All of these have side-effects, no matter what their
2896 case tcc_comparison
: /* a comparison expression */
2897 case tcc_unary
: /* a unary arithmetic expression */
2898 case tcc_binary
: /* a binary arithmetic expression */
2899 case tcc_reference
: /* a reference */
2900 case tcc_vl_exp
: /* a function call */
/* Start from volatility, then OR in any operand's side effects.  */
2901 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
2902 for (i
= 0; i
< len
; ++i
)
2904 tree op
= TREE_OPERAND (t
, i
);
2905 if (op
&& TREE_SIDE_EFFECTS (op
))
2906 TREE_SIDE_EFFECTS (t
) = 1;
2911 /* No side-effects. */
2919 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2923 : min_lval '[' val ']'
2925 | compound_lval '[' val ']'
2926 | compound_lval '.' ID
2928 This is not part of the original SIMPLE definition, which separates
2929 array and member references, but it seems reasonable to handle them
2930 together. Also, this way we don't run into problems with union
2931 aliasing; gcc requires that for accesses through a union to alias, the
2932 union reference must be explicit, which was not always the case when we
2933 were splitting up array and member refs.
2935 PRE_P points to the sequence where side effects that must happen before
2936 *EXPR_P should be stored.
2938 POST_P points to the sequence where side effects that must happen after
2939 *EXPR_P should be stored. */
/* NOTE(review): this extract has dropped lines -- the declarations of P
   and I, several argument lines, and most closing braces are not
   visible; comments below describe only what the visible code shows.  */
2941 static enum gimplify_status
2942 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2943 fallback_t fallback
)
2946 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
2948 location_t loc
= EXPR_LOCATION (*expr_p
);
2949 tree expr
= *expr_p
;
2951 /* Create a stack of the subexpressions so later we can walk them in
2952 order from inner to outer. */
2953 auto_vec
<tree
, 10> expr_stack
;
2955 /* We can handle anything that get_inner_reference can deal with. */
2956 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
2959 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2960 if (TREE_CODE (*p
) == INDIRECT_REF
)
2961 *p
= fold_indirect_ref_loc (loc
, *p
);
2963 if (handled_component_p (*p
))
2965 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2966 additional COMPONENT_REFs. */
2967 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
2968 && gimplify_var_or_parm_decl (p
) == GS_OK
)
2973 expr_stack
.safe_push (*p
);
2976 gcc_assert (expr_stack
.length ());
2978 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2979 walked through and P points to the innermost expression.
2981 Java requires that we elaborated nodes in source order. That
2982 means we must gimplify the inner expression followed by each of
2983 the indices, in order. But we can't gimplify the inner
2984 expression until we deal with any variable bounds, sizes, or
2985 positions in order to deal with PLACEHOLDER_EXPRs.
2987 So we do this in three steps. First we deal with the annotations
2988 for any variables in the components, then we gimplify the base,
2989 then we gimplify any indices, from left to right. */
2990 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
2992 tree t
= expr_stack
[i
];
2994 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2996 /* Gimplify the low bound and element type size and put them into
2997 the ARRAY_REF. If these values are set, they have already been
2999 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
/* Operand 2 of an ARRAY_REF caches the low bound.  */
3001 tree low
= unshare_expr (array_ref_low_bound (t
));
3002 if (!is_gimple_min_invariant (low
))
3004 TREE_OPERAND (t
, 2) = low
;
3005 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
3006 post_p
, is_gimple_reg
,
/* RET accumulates the worst status seen over all sub-gimplifications.  */
3008 ret
= MIN (ret
, tret
);
3013 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3014 is_gimple_reg
, fb_rvalue
);
3015 ret
= MIN (ret
, tret
);
3018 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
/* Operand 3 caches the element size, stored pre-divided by the
   element alignment.  */
3020 tree elmt_size
= array_ref_element_size (t
);
3021 if (!is_gimple_min_invariant (elmt_size
))
3023 elmt_size
= unshare_expr (elmt_size
);
3024 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
3025 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
3027 /* Divide the element size by the alignment of the element
3029 elmt_size
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3032 TREE_OPERAND (t
, 3) = elmt_size
;
3033 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
3034 post_p
, is_gimple_reg
,
3036 ret
= MIN (ret
, tret
);
3041 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
3042 is_gimple_reg
, fb_rvalue
);
3043 ret
= MIN (ret
, tret
);
3046 else if (TREE_CODE (t
) == COMPONENT_REF
)
3048 /* Set the field offset into T and gimplify it. */
3049 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
/* Operand 2 of a COMPONENT_REF caches the field offset, stored
   pre-divided by its alignment.  */
3051 tree offset
= component_ref_field_offset (t
);
3052 if (!is_gimple_min_invariant (offset
))
3054 offset
= unshare_expr (offset
);
3055 tree field
= TREE_OPERAND (t
, 1);
3057 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
3059 /* Divide the offset by its alignment. */
3060 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3063 TREE_OPERAND (t
, 2) = offset
;
3064 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
3065 post_p
, is_gimple_reg
,
3067 ret
= MIN (ret
, tret
);
3072 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3073 is_gimple_reg
, fb_rvalue
);
3074 ret
= MIN (ret
, tret
);
3079 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3080 so as to match the min_lval predicate. Failure to do so may result
3081 in the creation of large aggregate temporaries. */
3082 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
3083 fallback
| fb_lvalue
);
3084 ret
= MIN (ret
, tret
);
3086 /* And finally, the indices and operands of ARRAY_REF. During this
3087 loop we also remove any useless conversions. */
3088 for (; expr_stack
.length () > 0; )
3090 tree t
= expr_stack
.pop ();
3092 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3094 /* Gimplify the dimension. */
3095 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1)))
3097 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
3098 is_gimple_val
, fb_rvalue
);
3099 ret
= MIN (ret
, tret
);
3103 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
3105 /* The innermost expression P may have originally had
3106 TREE_SIDE_EFFECTS set which would have caused all the outer
3107 expressions in *EXPR_P leading to P to also have had
3108 TREE_SIDE_EFFECTS set. */
3109 recalculate_side_effects (t
);
3112 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3113 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
3115 canonicalize_component_ref (expr_p
);
3118 expr_stack
.release ();
/* If nothing changed, we must report GS_ALL_DONE -- sanity-check that.  */
3120 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
3125 /* Gimplify the self modifying expression pointed to by EXPR_P
3128 PRE_P points to the list where side effects that must happen before
3129 *EXPR_P should be stored.
3131 POST_P points to the list where side effects that must happen after
3132 *EXPR_P should be stored.
3134 WANT_VALUE is nonzero iff we want to use the value of this expression
3135 in another expression.
3137 ARITH_TYPE is the type the computation should be performed in. */
/* Handles PRE/POST INCREMENT/DECREMENT_EXPR.  NOTE(review): this extract
   has dropped lines -- e.g. the declaration of POSTFIX, the assignment
   of LHS in the prefix path, and several returns/braces.  */
3139 enum gimplify_status
3140 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3141 bool want_value
, tree arith_type
)
3143 enum tree_code code
;
3144 tree lhs
, lvalue
, rhs
, t1
;
3145 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
3147 enum tree_code arith_code
;
3148 enum gimplify_status ret
;
3149 location_t loc
= EXPR_LOCATION (*expr_p
);
3151 code
= TREE_CODE (*expr_p
);
3153 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
3154 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
3156 /* Prefix or postfix? */
3157 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
3158 /* Faster to treat as prefix if result is not used. */
3159 postfix
= want_value
;
3163 /* For postfix, make sure the inner expression's post side effects
3164 are executed after side effects from this expression. */
3168 /* Add or subtract? */
3169 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
3170 arith_code
= PLUS_EXPR
;
3172 arith_code
= MINUS_EXPR
;
3174 /* Gimplify the LHS into a GIMPLE lvalue. */
3175 lvalue
= TREE_OPERAND (*expr_p
, 0);
3176 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
3177 if (ret
== GS_ERROR
)
3180 /* Extract the operands to the arithmetic operation. */
3182 rhs
= TREE_OPERAND (*expr_p
, 1);
3184 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3185 that as the result value and in the postqueue operation. */
3188 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
3189 if (ret
== GS_ERROR
)
/* Snapshot the pre-modification value in a temporary -- that is what a
   postfix expression yields.  */
3192 lhs
= get_initialized_tmp_var (lhs
, pre_p
);
3195 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3196 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
3198 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
/* POINTER_PLUS_EXPR has no MINUS counterpart, so negate the offset for
   a decrement.  */
3199 if (arith_code
== MINUS_EXPR
)
3200 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
3201 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
/* Non-pointer case: do the arithmetic in ARITH_TYPE, then convert back
   to the expression's type.  */
3204 t1
= fold_convert (TREE_TYPE (*expr_p
),
3205 fold_build2 (arith_code
, arith_type
,
3206 fold_convert (arith_type
, lhs
),
3207 fold_convert (arith_type
, rhs
)));
/* Postfix: the store happens in PRE_P, and the inner post queue is
   flushed to the caller's queue.  */
3211 gimplify_assign (lvalue
, t1
, pre_p
);
3212 gimplify_seq_add_seq (orig_post_p
, post
);
/* Prefix: leave a MODIFY_EXPR for the caller to gimplify.  */
3218 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
3223 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
/* NOTE(review): the declaration of SIZE and the early-return lines are
   missing from this extract.  */
3226 maybe_with_size_expr (tree
*expr_p
)
3228 tree expr
= *expr_p
;
3229 tree type
= TREE_TYPE (expr
);
3232 /* If we've already wrapped this or the type is error_mark_node, we can't do
3234 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
3235 || type
== error_mark_node
)
3238 /* If the size isn't known or is a constant, we have nothing to do. */
3239 size
= TYPE_SIZE_UNIT (type
);
3240 if (!size
|| poly_int_tree_p (size
))
3243 /* Otherwise, make a WITH_SIZE_EXPR. */
3244 size
= unshare_expr (size
);
/* Resolve any PLACEHOLDER_EXPRs in the size against EXPR itself.  */
3245 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
3246 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
3249 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3250 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3251 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3252 gimplified to an SSA name. */
/* NOTE(review): the ALLOW_SSA parameter line, the declaration of FB, and
   the body of the TARGET_EXPR-stripping branch are missing from this
   extract.  */
3254 enum gimplify_status
3255 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
/* TEST/FB select the gimplification predicate and fallback below.  */
3258 bool (*test
) (tree
);
3261 /* In general, we allow lvalues for function arguments to avoid
3262 extra overhead of copying large aggregates out of even larger
3263 aggregates into temporaries only to copy the temporaries to
3264 the argument list. Make optimizers happy by pulling out to
3265 temporaries those types that fit in registers. */
3266 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3267 test
= is_gimple_val
, fb
= fb_rvalue
;
3270 test
= is_gimple_lvalue
, fb
= fb_either
;
3271 /* Also strip a TARGET_EXPR that would force an extra copy. */
3272 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3274 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
3276 && !VOID_TYPE_P (TREE_TYPE (init
)))
3281 /* If this is a variable sized type, we must remember the size. */
3282 maybe_with_size_expr (arg_p
);
3284 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3285 /* Make sure arguments have the same location as the function call
3287 protected_set_expr_location (*arg_p
, call_location
);
3289 /* There is a sequence point before a function call. Side effects in
3290 the argument list must occur before the actual call. So, when
3291 gimplifying arguments, force gimplify_expr to use an internal
3292 post queue which is then appended to the end of PRE_P. */
3293 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
3296 /* Don't fold inside offloading or taskreg regions: it can break code by
3297 adding decl references that weren't in the source. We'll do it during
3298 omplower pass instead. */
/* Folds the statement at GSI unless an enclosing OMP context or a
   builtin call makes folding premature.  NOTE(review): the early-return
   lines inside the loop are missing from this extract.  */
3301 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3303 struct gimplify_omp_ctx
*ctx
;
/* Walk outward through enclosing OMP contexts looking for regions in
   which folding must be deferred.  */
3304 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3305 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3307 else if ((ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
3309 /* Delay folding of builtins until the IL is in consistent state
3310 so the diagnostic machinery can do a better job. */
3311 if (gimple_call_builtin_p (gsi_stmt (*gsi
)))
3313 return fold_stmt (gsi
);
3316 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3317 WANT_VALUE is true if the result of the call is desired. */
3319 static enum gimplify_status
3320 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3322 tree fndecl
, parms
, p
, fnptrtype
;
3323 enum gimplify_status ret
;
3326 bool builtin_va_start_p
= false;
3327 location_t loc
= EXPR_LOCATION (*expr_p
);
3329 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3331 /* For reliable diagnostics during inlining, it is necessary that
3332 every call_expr be annotated with file and line. */
3333 if (! EXPR_HAS_LOCATION (*expr_p
))
3334 SET_EXPR_LOCATION (*expr_p
, input_location
);
3336 /* Gimplify internal functions created in the FEs. */
3337 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3342 nargs
= call_expr_nargs (*expr_p
);
3343 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3344 auto_vec
<tree
> vargs (nargs
);
3346 for (i
= 0; i
< nargs
; i
++)
3348 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3349 EXPR_LOCATION (*expr_p
));
3350 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3353 gcall
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3354 gimple_call_set_nothrow (call
, TREE_NOTHROW (*expr_p
));
3355 gimplify_seq_add_stmt (pre_p
, call
);
3359 /* This may be a call to a builtin function.
3361 Builtin function calls may be transformed into different
3362 (and more efficient) builtin function calls under certain
3363 circumstances. Unfortunately, gimplification can muck things
3364 up enough that the builtin expanders are not aware that certain
3365 transformations are still valid.
3367 So we attempt transformation/gimplification of the call before
3368 we gimplify the CALL_EXPR. At this time we do not manage to
3369 transform all calls in the same manner as the expanders do, but
3370 we do transform most of them. */
3371 fndecl
= get_callee_fndecl (*expr_p
);
3372 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3373 switch (DECL_FUNCTION_CODE (fndecl
))
3375 CASE_BUILT_IN_ALLOCA
:
3376 /* If the call has been built for a variable-sized object, then we
3377 want to restore the stack level when the enclosing BIND_EXPR is
3378 exited to reclaim the allocated space; otherwise, we precisely
3379 need to do the opposite and preserve the latest stack level. */
3380 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3381 gimplify_ctxp
->save_stack
= true;
3383 gimplify_ctxp
->keep_stack
= true;
3386 case BUILT_IN_VA_START
:
3388 builtin_va_start_p
= TRUE
;
3389 if (call_expr_nargs (*expr_p
) < 2)
3391 error ("too few arguments to function %<va_start%>");
3392 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3396 if (fold_builtin_next_arg (*expr_p
, true))
3398 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3404 case BUILT_IN_EH_RETURN
:
3405 cfun
->calls_eh_return
= true;
3408 case BUILT_IN_CLEAR_PADDING
:
3409 if (call_expr_nargs (*expr_p
) == 1)
3411 /* Remember the original type of the argument in an internal
3412 dummy second argument, as in GIMPLE pointer conversions are
3414 p
= CALL_EXPR_ARG (*expr_p
, 0);
3416 = build_call_expr_loc (EXPR_LOCATION (*expr_p
), fndecl
, 2, p
,
3417 build_zero_cst (TREE_TYPE (p
)));
3425 if (fndecl
&& fndecl_built_in_p (fndecl
))
3427 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3428 if (new_tree
&& new_tree
!= *expr_p
)
3430 /* There was a transformation of this call which computes the
3431 same value, but in a more efficient way. Return and try
3438 /* Remember the original function pointer type. */
3439 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
3444 && (cfun
->curr_properties
& PROP_gimple_any
) == 0)
3446 tree variant
= omp_resolve_declare_variant (fndecl
);
3447 if (variant
!= fndecl
)
3448 CALL_EXPR_FN (*expr_p
) = build1 (ADDR_EXPR
, fnptrtype
, variant
);
3451 /* There is a sequence point before the call, so any side effects in
3452 the calling expression must occur before the actual call. Force
3453 gimplify_expr to use an internal post queue. */
3454 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3455 is_gimple_call_addr
, fb_rvalue
);
3457 nargs
= call_expr_nargs (*expr_p
);
3459 /* Get argument types for verification. */
3460 fndecl
= get_callee_fndecl (*expr_p
);
3463 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3465 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3467 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3468 p
= DECL_ARGUMENTS (fndecl
);
3473 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3476 /* If the last argument is __builtin_va_arg_pack () and it is not
3477 passed as a named argument, decrease the number of CALL_EXPR
3478 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3481 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3483 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3484 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3487 && fndecl_built_in_p (last_arg_fndecl
, BUILT_IN_VA_ARG_PACK
))
3489 tree call
= *expr_p
;
3492 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
3493 CALL_EXPR_FN (call
),
3494 nargs
, CALL_EXPR_ARGP (call
));
3496 /* Copy all CALL_EXPR flags, location and block, except
3497 CALL_EXPR_VA_ARG_PACK flag. */
3498 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
3499 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
3500 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
3501 = CALL_EXPR_RETURN_SLOT_OPT (call
);
3502 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
3503 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
3505 /* Set CALL_EXPR_VA_ARG_PACK. */
3506 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
3510 /* If the call returns twice then after building the CFG the call
3511 argument computations will no longer dominate the call because
3512 we add an abnormal incoming edge to the call. So do not use SSA
3514 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
3516 /* Gimplify the function arguments. */
3519 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
3520 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
3521 PUSH_ARGS_REVERSED
? i
-- : i
++)
3523 enum gimplify_status t
;
3525 /* Avoid gimplifying the second argument to va_start, which needs to
3526 be the plain PARM_DECL. */
3527 if ((i
!= 1) || !builtin_va_start_p
)
3529 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3530 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3538 /* Gimplify the static chain. */
3539 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
3541 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
3542 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
3545 enum gimplify_status t
;
3546 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
3547 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3553 /* Verify the function result. */
3554 if (want_value
&& fndecl
3555 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
3557 error_at (loc
, "using result of function returning %<void%>");
3561 /* Try this again in case gimplification exposed something. */
3562 if (ret
!= GS_ERROR
)
3564 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3566 if (new_tree
&& new_tree
!= *expr_p
)
3568 /* There was a transformation of this call which computes the
3569 same value, but in a more efficient way. Return and try
3577 *expr_p
= error_mark_node
;
3581 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3582 decl. This allows us to eliminate redundant or useless
3583 calls to "const" functions. */
3584 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
3586 int flags
= call_expr_flags (*expr_p
);
3587 if (flags
& (ECF_CONST
| ECF_PURE
)
3588 /* An infinite loop is considered a side effect. */
3589 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
3590 TREE_SIDE_EFFECTS (*expr_p
) = 0;
3593 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3594 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3595 form and delegate the creation of a GIMPLE_CALL to
3596 gimplify_modify_expr. This is always possible because when
3597 WANT_VALUE is true, the caller wants the result of this call into
3598 a temporary, which means that we will emit an INIT_EXPR in
3599 internal_get_tmp_var which will then be handled by
3600 gimplify_modify_expr. */
3603 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3604 have to do is replicate it as a GIMPLE_CALL tuple. */
3605 gimple_stmt_iterator gsi
;
3606 call
= gimple_build_call_from_tree (*expr_p
, fnptrtype
);
3607 notice_special_calls (call
);
3608 gimplify_seq_add_stmt (pre_p
, call
);
3609 gsi
= gsi_last (*pre_p
);
3610 maybe_fold_stmt (&gsi
);
3611 *expr_p
= NULL_TREE
;
3614 /* Remember the original function type. */
3615 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
3616 CALL_EXPR_FN (*expr_p
));
3621 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3622 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3624 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3625 condition is true or false, respectively. If null, we should generate
3626 our own to skip over the evaluation of this specific expression.
3628 LOCUS is the source location of the COND_EXPR.
3630 This function is the tree equivalent of do_jump.
3632 shortcut_cond_r should only be called by shortcut_cond_expr. */
3635 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
3638 tree local_label
= NULL_TREE
;
3639 tree t
, expr
= NULL
;
3641 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3642 retain the shortcut semantics. Just insert the gotos here;
3643 shortcut_cond_expr will append the real blocks later. */
3644 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3646 location_t new_locus
;
3648 /* Turn if (a && b) into
3650 if (a); else goto no;
3651 if (b) goto yes; else goto no;
3654 if (false_label_p
== NULL
)
3655 false_label_p
= &local_label
;
3657 /* Keep the original source location on the first 'if'. */
3658 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
3659 append_to_statement_list (t
, &expr
);
3661 /* Set the source location of the && on the second 'if'. */
3662 new_locus
= rexpr_location (pred
, locus
);
3663 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3665 append_to_statement_list (t
, &expr
);
3667 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3669 location_t new_locus
;
3671 /* Turn if (a || b) into
3674 if (b) goto yes; else goto no;
3677 if (true_label_p
== NULL
)
3678 true_label_p
= &local_label
;
3680 /* Keep the original source location on the first 'if'. */
3681 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
3682 append_to_statement_list (t
, &expr
);
3684 /* Set the source location of the || on the second 'if'. */
3685 new_locus
= rexpr_location (pred
, locus
);
3686 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3688 append_to_statement_list (t
, &expr
);
3690 else if (TREE_CODE (pred
) == COND_EXPR
3691 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
3692 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
3694 location_t new_locus
;
3696 /* As long as we're messing with gotos, turn if (a ? b : c) into
3698 if (b) goto yes; else goto no;
3700 if (c) goto yes; else goto no;
3702 Don't do this if one of the arms has void type, which can happen
3703 in C++ when the arm is throw. */
3705 /* Keep the original source location on the first 'if'. Set the source
3706 location of the ? on the second 'if'. */
3707 new_locus
= rexpr_location (pred
, locus
);
3708 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
3709 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
3710 false_label_p
, locus
),
3711 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
3712 false_label_p
, new_locus
));
3716 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
3717 build_and_jump (true_label_p
),
3718 build_and_jump (false_label_p
));
3719 SET_EXPR_LOCATION (expr
, locus
);
3724 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
3725 append_to_statement_list (t
, &expr
);
3731 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3732 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3733 statement, if it is the last one. Otherwise, return NULL. */
/* Return the trailing GOTO_EXPR of EXPR, per the comment block above:
   EXPR itself when it is a GOTO_EXPR, else the recursive result on the
   last statement of a STATEMENT_LIST after skipping leading
   DEBUG_BEGIN_STMTs.
   NOTE(review): lossy extraction -- the `static tree' return-type line,
   braces, the early `return' statements and the tsi_next advance inside
   the while loop are elided here; annotations below describe the intent
   of the visible tests only.  */
3736 find_goto (tree expr
)
/* A bare GOTO_EXPR is the answer (elided branch presumably returns
   EXPR itself -- confirm against the full source).  */
3741 if (TREE_CODE (expr
) == GOTO_EXPR
)
/* Only a STATEMENT_LIST can hide a trailing goto.  */
3744 if (TREE_CODE (expr
) != STATEMENT_LIST
)
3747 tree_stmt_iterator i
= tsi_start (expr
);
/* Skip debug begin markers at the head of the list.  */
3749 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
/* Per the header comment, only recurse when the statement reached is
   the last one in the list.  */
3752 if (!tsi_one_before_end_p (i
))
3755 return find_goto (tsi_stmt (i
));
3758 /* Same as find_goto, except that it returns NULL if the destination
3759 is not a LABEL_DECL. */
/* Same as find_goto (see comment above), but filtered: only accept a
   goto whose GOTO_DESTINATION is a LABEL_DECL.
   NOTE(review): lossy extraction -- return type, braces and the
   `return dest;' / `return NULL_TREE;' lines are elided here.  */
3762 find_goto_label (tree expr
)
3764 tree dest
= find_goto (expr
);
/* Computed gotos have a non-LABEL_DECL destination; those are rejected
   per the comment block above this function.  */
3765 if (dest
&& TREE_CODE (GOTO_DESTINATION (dest
)) == LABEL_DECL
)
3770 /* Given a conditional expression EXPR with short-circuit boolean
3771 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3772 predicate apart into the equivalent sequence of conditionals. */
/* Break the short-circuit (&&/||) predicate of COND_EXPR EXPR into an
   equivalent sequence of simple conditionals and gotos (see comment
   block above).
   NOTE(review): lossy extraction -- return type, braces, several `if'/
   `else' framing lines and all `return' statements are elided in this
   chunk; the comments below annotate only what the visible lines show.  */
3775 shortcut_cond_expr (tree expr
)
3777 tree pred
= TREE_OPERAND (expr
, 0);
3778 tree then_
= TREE_OPERAND (expr
, 1);
3779 tree else_
= TREE_OPERAND (expr
, 2);
3780 tree true_label
, false_label
, end_label
, t
;
3782 tree
*false_label_p
;
3783 bool emit_end
, emit_false
, jump_over_else
;
/* Whether each arm contains code worth emitting.  */
3784 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3785 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3787 /* First do simple transformations. */
3790 /* If there is no 'else', turn
3793 if (a) if (b) then c. */
3794 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3796 /* Keep the original source location on the first 'if'. */
3797 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
/* Rewrite EXPR in place to test only the RHS of the &&, recurse on it,
   then wrap the result under a test of the LHS.  */
3798 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3799 /* Set the source location of the && on the second 'if'. */
3800 if (rexpr_has_location (pred
))
3801 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
3802 then_
= shortcut_cond_expr (expr
);
3803 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3804 pred
= TREE_OPERAND (pred
, 0);
3805 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
3806 SET_EXPR_LOCATION (expr
, locus
);
3812 /* If there is no 'then', turn
3815 if (a); else if (b); else d. */
3816 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3818 /* Keep the original source location on the first 'if'. */
3819 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
/* Mirror of the && loop above, peeling || terms into nested elses.  */
3820 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3821 /* Set the source location of the || on the second 'if'. */
3822 if (rexpr_has_location (pred
))
3823 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
3824 else_
= shortcut_cond_expr (expr
);
3825 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3826 pred
= TREE_OPERAND (pred
, 0);
3827 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
3828 SET_EXPR_LOCATION (expr
, locus
);
3832 /* If we're done, great. */
3833 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
3834 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
3837 /* Otherwise we need to mess with gotos. Change
3840 if (a); else goto no;
3843 and recursively gimplify the condition. */
3845 true_label
= false_label
= end_label
= NULL_TREE
;
3847 /* If our arms just jump somewhere, hijack those labels so we don't
3848 generate jumps to jumps. */
3850 if (tree then_goto
= find_goto_label (then_
))
3852 true_label
= GOTO_DESTINATION (then_goto
);
3857 if (tree else_goto
= find_goto_label (else_
))
3859 false_label
= GOTO_DESTINATION (else_goto
);
3864 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3866 true_label_p
= &true_label
;
3868 true_label_p
= NULL
;
3870 /* The 'else' branch also needs a label if it contains interesting code. */
3871 if (false_label
|| else_se
)
3872 false_label_p
= &false_label
;
3874 false_label_p
= NULL
;
3876 /* If there was nothing else in our arms, just forward the label(s). */
3877 if (!then_se
&& !else_se
)
3878 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3879 EXPR_LOC_OR_LOC (expr
, input_location
));
3881 /* If our last subexpression already has a terminal label, reuse it. */
/* NOTE(review): the `if (else_se)' / `else if (then_se)' framing lines
   around these two expr_last calls are elided in this extraction.  */
3883 t
= expr_last (else_
);
3885 t
= expr_last (then_
);
3888 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
3889 end_label
= LABEL_EXPR_LABEL (t
);
3891 /* If we don't care about jumping to the 'else' branch, jump to the end
3892 if the condition is false. */
3894 false_label_p
= &end_label
;
3896 /* We only want to emit these labels if we aren't hijacking them. */
3897 emit_end
= (end_label
== NULL_TREE
);
3898 emit_false
= (false_label
== NULL_TREE
);
3900 /* We only emit the jump over the else clause if we have to--if the
3901 then clause may fall through. Otherwise we can wind up with a
3902 useless jump and a useless label at the end of gimplified code,
3903 which will cause us to think that this conditional as a whole
3904 falls through even if it doesn't. If we then inline a function
3905 which ends with such a condition, that can cause us to issue an
3906 inappropriate warning about control reaching the end of a
3907 non-void function. */
3908 jump_over_else
= block_may_fallthru (then_
);
3910 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3911 EXPR_LOC_OR_LOC (expr
, input_location
));
/* Assemble the lowered sequence: condition, then-arm, optional jump
   over the else, optional false label, else-arm, optional end label.  */
3914 append_to_statement_list (pred
, &expr
);
3916 append_to_statement_list (then_
, &expr
);
3921 tree last
= expr_last (expr
);
3922 t
= build_and_jump (&end_label
);
3923 if (rexpr_has_location (last
))
3924 SET_EXPR_LOCATION (t
, rexpr_location (last
));
3925 append_to_statement_list (t
, &expr
);
3929 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
3930 append_to_statement_list (t
, &expr
);
3932 append_to_statement_list (else_
, &expr
);
3934 if (emit_end
&& end_label
)
3936 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
3937 append_to_statement_list (t
, &expr
);
3943 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
/* Force EXPR to have BOOLEAN_TYPE for use in a boolean context (see the
   comment line above): truth expressions and comparisons get their type
   rewritten in place; anything already boolean is fold-converted.
   NOTE(review): lossy extraction -- return type, braces, several `case'
   labels, `break's and `return expr;' lines are elided here.  */
3946 gimple_boolify (tree expr
)
3948 tree type
= TREE_TYPE (expr
);
3949 location_t loc
= EXPR_LOCATION (expr
);
/* Special-case `call () != 0' so we can look through the call.  */
3951 if (TREE_CODE (expr
) == NE_EXPR
3952 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
3953 && integer_zerop (TREE_OPERAND (expr
, 1)))
3955 tree call
= TREE_OPERAND (expr
, 0);
3956 tree fn
= get_callee_fndecl (call
);
3958 /* For __builtin_expect ((long) (x), y) recurse into x as well
3959 if x is truth_value_p. */
3961 && fndecl_built_in_p (fn
, BUILT_IN_EXPECT
)
3962 && call_expr_nargs (call
) == 2)
3964 tree arg
= CALL_EXPR_ARG (call
, 0);
/* Strip a widening NOP so the truth test sees the real operand.  */
3967 if (TREE_CODE (arg
) == NOP_EXPR
3968 && TREE_TYPE (arg
) == TREE_TYPE (call
))
3969 arg
= TREE_OPERAND (arg
, 0);
3970 if (truth_value_p (TREE_CODE (arg
)))
3972 arg
= gimple_boolify (arg
);
3973 CALL_EXPR_ARG (call
, 0)
3974 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
3980 switch (TREE_CODE (expr
))
3982 case TRUTH_AND_EXPR
:
3984 case TRUTH_XOR_EXPR
:
3985 case TRUTH_ANDIF_EXPR
:
3986 case TRUTH_ORIF_EXPR
:
3987 /* Also boolify the arguments of truth exprs. */
3988 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
/* Fall through to boolify operand 0 as well.  */
3991 case TRUTH_NOT_EXPR
:
3992 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3994 /* These expressions always produce boolean results. */
3995 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3996 TREE_TYPE (expr
) = boolean_type_node
;
/* ANNOTATE_EXPR (elided `case' label here): boolify the annotated
   condition for the loop-annotation kinds.  */
4000 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
4002 case annot_expr_ivdep_kind
:
4003 case annot_expr_unroll_kind
:
4004 case annot_expr_no_vector_kind
:
4005 case annot_expr_vector_kind
:
4006 case annot_expr_parallel_kind
:
4007 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4008 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4009 TREE_TYPE (expr
) = boolean_type_node
;
/* Default case of the outer switch (label elided).  */
4016 if (COMPARISON_CLASS_P (expr
))
4018 /* These expressions always produce boolean results. */
4019 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4020 TREE_TYPE (expr
) = boolean_type_node
;
4023 /* Other expressions that get here must have boolean values, but
4024 might need to be converted to the appropriate mode. */
4025 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
4027 return fold_convert_loc (loc
, boolean_type_node
, expr
);
4031 /* Given a conditional expression *EXPR_P without side effects, gimplify
4032 its operands. New statements are inserted to PRE_P. */
/* Gimplify the operands of a side-effect-free COND_EXPR *EXPR_P in
   place (see the comment block above); side-effect statements go to
   PRE_P.  Returns the worst gimplify_status seen across the three
   operands.
   NOTE(review): lossy extraction -- the braces delimiting the function
   body are elided here.  */
4034 static enum gimplify_status
4035 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
4037 tree expr
= *expr_p
, cond
;
4038 enum gimplify_status ret
, tret
;
4039 enum tree_code code
;
4041 cond
= gimple_boolify (COND_EXPR_COND (expr
));
4043 /* We need to handle && and || specially, as their gimplification
4044 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4045 code
= TREE_CODE (cond
);
4046 if (code
== TRUTH_ANDIF_EXPR
)
4047 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
4048 else if (code
== TRUTH_ORIF_EXPR
)
4049 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
4050 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_condexpr
, fb_rvalue
);
4051 COND_EXPR_COND (*expr_p
) = cond
;
/* Both arms must become gimple values; accumulate the worst status.  */
4053 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
4054 is_gimple_val
, fb_rvalue
);
4055 ret
= MIN (ret
, tret
);
4056 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
4057 is_gimple_val
, fb_rvalue
);
4059 return MIN (ret
, tret
);
4062 /* Return true if evaluating EXPR could trap.
4063 EXPR is GENERIC, while tree_could_trap_p can be called
/* Conservatively decide whether evaluating GENERIC expression EXPR
   could trap, by checking EXPR itself with tree_could_trap_p and then
   recursing over all operands (see comment block above).
   NOTE(review): lossy extraction -- the return type, braces and the
   `return' statements after each test are elided here.  */
4067 generic_expr_could_trap_p (tree expr
)
/* A null or gimple-value expression cannot trap.  */
4071 if (!expr
|| is_gimple_val (expr
))
4074 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
/* Otherwise recurse into every operand.  */
4077 n
= TREE_OPERAND_LENGTH (expr
);
4078 for (i
= 0; i
< n
; i
++)
4079 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
4085 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4094 The second form is used when *EXPR_P is of type void.
4096 PRE_P points to the list where side effects that must happen before
4097 *EXPR_P should be stored. */
/* Gimplify conditional expression *EXPR_P (see comment block above):
   value-producing COND_EXPRs are rewritten through a temporary, the
   predicate is boolified and short-circuit forms broken apart, and the
   result is lowered to a GIMPLE_COND with explicit labels appended to
   PRE_P.
   NOTE(review): lossy extraction -- braces, `else' lines, several
   `if (...)' framing lines, trailing call arguments (e.g. the &arm2 and
   label_false arguments) and the `return' statements are elided here;
   the comments below annotate only what the visible lines establish.  */
4099 static enum gimplify_status
4100 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
4102 tree expr
= *expr_p
;
4103 tree type
= TREE_TYPE (expr
);
4104 location_t loc
= EXPR_LOCATION (expr
);
4105 tree tmp
, arm1
, arm2
;
4106 enum gimplify_status ret
;
4107 tree label_true
, label_false
, label_cont
;
4108 bool have_then_clause_p
, have_else_clause_p
;
4110 enum tree_code pred_code
;
4111 gimple_seq seq
= NULL
;
4113 /* If this COND_EXPR has a value, copy the values into a temporary within
4115 if (!VOID_TYPE_P (type
))
4117 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
4120 /* If either an rvalue is ok or we do not require an lvalue, create the
4121 temporary. But we cannot do that if the type is addressable. */
4122 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
4123 && !TREE_ADDRESSABLE (type
))
4125 if (gimplify_ctxp
->allow_rhs_cond_expr
4126 /* If either branch has side effects or could trap, it can't be
4127 evaluated unconditionally. */
4128 && !TREE_SIDE_EFFECTS (then_
)
4129 && !generic_expr_could_trap_p (then_
)
4130 && !TREE_SIDE_EFFECTS (else_
)
4131 && !generic_expr_could_trap_p (else_
))
4132 return gimplify_pure_cond_expr (expr_p
, pre_p
);
4134 tmp
= create_tmp_var (type
, "iftmp");
4138 /* Otherwise, only create and copy references to the values. */
/* Lvalue case: rebuild the COND_EXPR over pointers to the arms and
   dereference the temporary afterwards.  */
4141 type
= build_pointer_type (type
);
4143 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4144 then_
= build_fold_addr_expr_loc (loc
, then_
);
4146 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4147 else_
= build_fold_addr_expr_loc (loc
, else_
);
4150 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
4152 tmp
= create_tmp_var (type
, "iftmp");
4153 result
= build_simple_mem_ref_loc (loc
, tmp
);
4156 /* Build the new then clause, `tmp = then_;'. But don't build the
4157 assignment if the value is void; in C++ it can be if it's a throw. */
4158 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4159 TREE_OPERAND (expr
, 1) = build2 (INIT_EXPR
, type
, tmp
, then_
);
4161 /* Similarly, build the new else clause, `tmp = else_;'. */
4162 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4163 TREE_OPERAND (expr
, 2) = build2 (INIT_EXPR
, type
, tmp
, else_
);
4165 TREE_TYPE (expr
) = void_type_node
;
4166 recalculate_side_effects (expr
);
4168 /* Move the COND_EXPR to the prequeue. */
4169 gimplify_stmt (&expr
, pre_p
);
4175 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4176 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
4177 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
4178 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
4180 /* Make sure the condition has BOOLEAN_TYPE. */
4181 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4183 /* Break apart && and || conditions. */
4184 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
4185 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
4187 expr
= shortcut_cond_expr (expr
);
4189 if (expr
!= *expr_p
)
4193 /* We can't rely on gimplify_expr to re-gimplify the expanded
4194 form properly, as cleanups might cause the target labels to be
4195 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4196 set up a conditional context. */
4197 gimple_push_condition ();
4198 gimplify_stmt (expr_p
, &seq
);
4199 gimple_pop_condition (pre_p
);
4200 gimple_seq_add_seq (pre_p
, seq
);
4206 /* Now do the normal gimplification. */
4208 /* Gimplify condition. */
4209 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
,
4210 is_gimple_condexpr_for_cond
, fb_rvalue
);
4211 if (ret
== GS_ERROR
)
4213 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
4215 gimple_push_condition ();
4217 have_then_clause_p
= have_else_clause_p
= false;
/* Hijack a bare `goto' then-arm as the true label when it targets the
   current function (guarding `if' line elided).  */
4218 label_true
= find_goto_label (TREE_OPERAND (expr
, 1));
4220 && DECL_CONTEXT (GOTO_DESTINATION (label_true
)) == current_function_decl
4221 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4222 have different locations, otherwise we end up with incorrect
4223 location information on the branches. */
4225 || !EXPR_HAS_LOCATION (expr
)
4226 || !rexpr_has_location (label_true
)
4227 || EXPR_LOCATION (expr
) == rexpr_location (label_true
)))
4229 have_then_clause_p
= true;
4230 label_true
= GOTO_DESTINATION (label_true
);
4233 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
/* Same hijacking logic for the else-arm.  */
4234 label_false
= find_goto_label (TREE_OPERAND (expr
, 2));
4236 && DECL_CONTEXT (GOTO_DESTINATION (label_false
)) == current_function_decl
4237 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4238 have different locations, otherwise we end up with incorrect
4239 location information on the branches. */
4241 || !EXPR_HAS_LOCATION (expr
)
4242 || !rexpr_has_location (label_false
)
4243 || EXPR_LOCATION (expr
) == rexpr_location (label_false
)))
4245 have_else_clause_p
= true;
4246 label_false
= GOTO_DESTINATION (label_false
);
4249 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
/* Emit the GIMPLE_COND itself and try folding it immediately.  */
4251 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
4253 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
4255 gimple_set_location (cond_stmt
, EXPR_LOCATION (expr
));
4256 copy_warning (cond_stmt
, COND_EXPR_COND (expr
));
4257 gimplify_seq_add_stmt (&seq
, cond_stmt
);
4258 gimple_stmt_iterator gsi
= gsi_last (seq
);
4259 maybe_fold_stmt (&gsi
);
4261 label_cont
= NULL_TREE
;
4262 if (!have_then_clause_p
)
4264 /* For if (...) {} else { code; } put label_true after
4266 if (TREE_OPERAND (expr
, 1) == NULL_TREE
4267 && !have_else_clause_p
4268 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
4269 label_cont
= label_true
;
4272 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
4273 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
4274 /* For if (...) { code; } else {} or
4275 if (...) { code; } else goto label; or
4276 if (...) { code; return; } else { ... }
4277 label_cont isn't needed. */
4278 if (!have_else_clause_p
4279 && TREE_OPERAND (expr
, 2) != NULL_TREE
4280 && gimple_seq_may_fallthru (seq
))
4283 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
4285 g
= gimple_build_goto (label_cont
);
4287 /* GIMPLE_COND's are very low level; they have embedded
4288 gotos. This particular embedded goto should not be marked
4289 with the location of the original COND_EXPR, as it would
4290 correspond to the COND_EXPR's condition, not the ELSE or the
4291 THEN arms. To avoid marking it with the wrong location, flag
4292 it as "no location". */
4293 gimple_set_do_not_emit_location (g
);
4295 gimplify_seq_add_stmt (&seq
, g
);
4299 if (!have_else_clause_p
)
4301 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
4302 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
/* Emit the continue label if one was needed (guarding `if' elided).  */
4305 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
4307 gimple_pop_condition (pre_p
);
4308 gimple_seq_add_seq (pre_p
, seq
);
4310 if (ret
== GS_ERROR
)
4312 else if (have_then_clause_p
|| have_else_clause_p
)
4316 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4317 expr
= TREE_OPERAND (expr
, 0);
4318 gimplify_stmt (&expr
, pre_p
);
4325 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4326 to be marked addressable.
4328 We cannot rely on such an expression being directly markable if a temporary
4329 has been created by the gimplification. In this case, we create another
4330 temporary and initialize it with a copy, which will become a store after we
4331 mark it addressable. This can happen if the front-end passed us something
4332 that it could not mark addressable yet, like a Fortran pass-by-reference
4333 parameter (int) floatvar. */
/* Prepare *EXPR_P to be marked addressable (see comment block above):
   if its base resolves to a gimple register, copy it into a new
   temporary that is forced out of registers, emitting the copy to SEQ_P.
   NOTE(review): lossy extraction -- return type, braces and the final
   `*expr_p = var;' style store are elided here.  */
4336 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
/* Walk down component refs to the base object.  */
4338 while (handled_component_p (*expr_p
))
4339 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4340 if (is_gimple_reg (*expr_p
))
4342 /* Do not allow an SSA name as the temporary. */
4343 tree var
= get_initialized_tmp_var (*expr_p
, seq_p
, NULL
, false);
4344 DECL_NOT_GIMPLE_REG_P (var
) = 1;
4349 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4350 a call to __builtin_memcpy. */
/* Replace the MODIFY_EXPR at *EXPR_P by a call to __builtin_memcpy of
   SIZE bytes (see comment block above), appending statements to SEQ_P.
   When WANT_VALUE, the lhs is re-expressed as a dereference of the
   memcpy result.
   NOTE(review): lossy extraction -- the trailing `gimple_seq *seq_p'
   parameter line, braces, the `gimple *gs' declaration, the
   `if (want_value)' framing and the GS_* return statements are elided
   here.  */
4352 static enum gimplify_status
4353 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4356 tree t
, to
, to_ptr
, from
, from_ptr
;
4358 location_t loc
= EXPR_LOCATION (*expr_p
);
4360 to
= TREE_OPERAND (*expr_p
, 0);
4361 from
= TREE_OPERAND (*expr_p
, 1);
4363 /* Mark the RHS addressable. Beware that it may not be possible to do so
4364 directly if a temporary has been created by the gimplification. */
4365 prepare_gimple_addressable (&from
, seq_p
);
4367 mark_addressable (from
);
4368 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4369 gimplify_arg (&from_ptr
, seq_p
, loc
);
4371 mark_addressable (to
);
4372 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4373 gimplify_arg (&to_ptr
, seq_p
, loc
);
4375 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4377 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
/* NOTE(review): the guard selecting when alloca_for_var is set is
   elided in this extraction -- confirm against the full source.  */
4378 gimple_call_set_alloca_for_var (gs
, true);
4382 /* tmp = memcpy() */
4383 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4384 gimple_call_set_lhs (gs
, t
);
4385 gimplify_seq_add_stmt (seq_p
, gs
);
4387 *expr_p
= build_simple_mem_ref (t
);
/* !want_value path (framing elided): just emit the call.  */
4391 gimplify_seq_add_stmt (seq_p
, gs
);
4396 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4397 a call to __builtin_memset. In this case we know that the RHS is
4398 a CONSTRUCTOR with an empty element list. */
/* Replace the MODIFY_EXPR at *EXPR_P -- whose RHS is an empty
   CONSTRUCTOR -- by a zeroing __builtin_memset call of SIZE bytes (see
   comment block above), appending statements to SEQ_P.
   NOTE(review): lossy extraction -- the trailing `gimple_seq *seq_p'
   parameter line, braces, the `gimple *gs' declaration, the
   `if (want_value)' framing and the GS_* return statements are elided
   here.  */
4400 static enum gimplify_status
4401 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4404 tree t
, from
, to
, to_ptr
;
4406 location_t loc
= EXPR_LOCATION (*expr_p
);
4408 /* Assert our assumptions, to abort instead of producing wrong code
4409 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4410 not be immediately exposed. */
4411 from
= TREE_OPERAND (*expr_p
, 1);
4412 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4413 from
= TREE_OPERAND (from
, 0);
4415 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4416 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4419 to
= TREE_OPERAND (*expr_p
, 0);
4421 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4422 gimplify_arg (&to_ptr
, seq_p
, loc
);
4423 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4425 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4429 /* tmp = memset() */
4430 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4431 gimple_call_set_lhs (gs
, t
);
4432 gimplify_seq_add_stmt (seq_p
, gs
);
4434 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
/* !want_value path (framing elided): just emit the call.  */
4438 gimplify_seq_add_stmt (seq_p
, gs
);
4443 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4444 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4445 assignment. Return non-null if we detect a potential overlap. */
/* Bookkeeping passed to gimplify_init_ctor_preeval_1 while scanning a
   CONSTRUCTOR for overlap with the lhs (see comment block above).
   NOTE(review): lossy extraction -- the braces and the
   `tree lhs_base_decl;' field line are elided here.  */
4447 struct gimplify_init_ctor_preeval_data
4449 /* The base decl of the lhs object. May be NULL, in which case we
4450 have to assume the lhs is indirect. */
4453 /* The alias set of the lhs object. */
4454 alias_set_type lhs_alias_set
;
/* walk_tree callback: return non-null (the overlapping tree) if *TP may
   overlap the lhs described by XDATA, per the comment block above this
   struct.
   NOTE(review): lossy extraction -- return type, braces, the `tree t =
   *tp;' line and all `return' statements are elided here.  */
4458 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4460 struct gimplify_init_ctor_preeval_data
*data
4461 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4464 /* If we find the base object, obviously we have overlap. */
4465 if (data
->lhs_base_decl
== t
)
4468 /* If the constructor component is indirect, determine if we have a
4469 potential overlap with the lhs. The only bits of information we
4470 have to go on at this point are addressability and alias sets. */
4471 if ((INDIRECT_REF_P (t
)
4472 || TREE_CODE (t
) == MEM_REF
)
4473 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4474 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
4477 /* If the constructor component is a call, determine if it can hide a
4478 potential overlap with the lhs through an INDIRECT_REF like above.
4479 ??? Ugh - this is completely broken. In fact this whole analysis
4480 doesn't look conservative. */
4481 if (TREE_CODE (t
) == CALL_EXPR
)
4483 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
/* Check every pointer parameter of the callee against the lhs alias
   set.  */
4485 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
4486 if (POINTER_TYPE_P (TREE_VALUE (type
))
4487 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4488 && alias_sets_conflict_p (data
->lhs_alias_set
,
4490 (TREE_TYPE (TREE_VALUE (type
)))))
/* Types and decls carry no further overlap information; prune the
   walk below them (assignment to *walk_subtrees elided).  */
4494 if (IS_TYPE_OR_DECL_P (t
))
4499 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4500 force values that overlap with the lhs (as described by *DATA)
4501 into temporaries. */
/* Pre-evaluate the CONSTRUCTOR element *EXPR_P, forcing values that may
   overlap the lhs (described by *DATA) into temporaries (see comment
   block above).  Side effects go to PRE_P/POST_P.
   NOTE(review): lossy extraction -- return type, braces and the early
   `return' statements after each test are elided here.  */
4504 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4505 struct gimplify_init_ctor_preeval_data
*data
)
4507 enum gimplify_status one
;
4509 /* If the value is constant, then there's nothing to pre-evaluate. */
4510 if (TREE_CONSTANT (*expr_p
))
4512 /* Ensure it does not have side effects, it might contain a reference to
4513 the object we're initializing. */
4514 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
4518 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4519 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
4522 /* Recurse for nested constructors. */
4523 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
4525 unsigned HOST_WIDE_INT ix
;
4526 constructor_elt
*ce
;
4527 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
4529 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
4530 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
4535 /* If this is a variable sized type, we must remember the size. */
4536 maybe_with_size_expr (expr_p
);
4538 /* Gimplify the constructor element to something appropriate for the rhs
4539 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4540 the gimplifier will consider this a store to memory. Doing this
4541 gimplification now means that we won't have to deal with complicated
4542 language-specific trees, nor trees like SAVE_EXPR that can induce
4543 exponential search behavior. */
4544 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
4545 if (one
== GS_ERROR
)
4551 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4552 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4553 always be true for all scalars, since is_gimple_mem_rhs insists on a
4554 temporary variable for them. */
4555 if (DECL_P (*expr_p
))
4558 /* If this is of variable size, we have no choice but to assume it doesn't
4559 overlap since we can't make a temporary for it. */
4560 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
4563 /* Otherwise, we must search for overlap ... */
4564 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
4567 /* ... and if found, force the value into a temporary. */
4568 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
4571 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4572 a RANGE_EXPR in a CONSTRUCTOR for an array.
4576 object[var] = value;
4583 We increment var _after_ the loop exit check because we might otherwise
4584 fail if upper == TYPE_MAX_VALUE (type for upper).
4586 Note that we never have to deal with SAVE_EXPRs here, because this has
4587 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4589 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
4590 gimple_seq
*, bool);
/* Emit, into PRE_P, a loop assigning VALUE to OBJECT[LOWER..UPPER] for
   a RANGE_EXPR constructor index (see the comment block above for the
   loop shape and the post-test increment rationale).
   NOTE(review): lossy extraction -- the `static void' line, braces, the
   trailing arguments of the recursive gimplify_init_ctor_eval call and
   the gimplify_expr status check continuation are elided here.  */
4593 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
4594 tree value
, tree array_elt_type
,
4595 gimple_seq
*pre_p
, bool cleared
)
4597 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
4598 tree var
, var_type
, cref
, tmp
;
4600 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
4601 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
4602 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
4604 /* Create and initialize the index variable. */
4605 var_type
= TREE_TYPE (upper
);
4606 var
= create_tmp_var (var_type
);
4607 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
4609 /* Add the loop entry label. */
4610 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
4612 /* Build the reference. */
4613 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4614 var
, NULL_TREE
, NULL_TREE
);
4616 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4617 the store. Otherwise just assign value to the reference. */
4619 if (TREE_CODE (value
) == CONSTRUCTOR
)
4620 /* NB we might have to call ourself recursively through
4621 gimplify_init_ctor_eval if the value is a constructor. */
4622 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4626 if (gimplify_expr (&value
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
4628 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
4631 /* We exit the loop when the index var is equal to the upper bound. */
4632 gimplify_seq_add_stmt (pre_p
,
4633 gimple_build_cond (EQ_EXPR
, var
, upper
,
4634 loop_exit_label
, fall_thru_label
));
4636 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
4638 /* Otherwise, increment the index var... */
4639 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
4640 fold_convert (var_type
, integer_one_node
));
4641 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
4643 /* ...and jump back to the loop entry. */
4644 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
4646 /* Add the loop exit label. */
4647 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
4650 /* A subroutine of gimplify_init_constructor. Generate individual
4651 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4652 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4653 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4657 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
4658 gimple_seq
*pre_p
, bool cleared
)
4660 tree array_elt_type
= NULL
;
4661 unsigned HOST_WIDE_INT ix
;
4662 tree purpose
, value
;
4664 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
4665 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
4667 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
4671 /* NULL values are created above for gimplification errors. */
4675 if (cleared
&& initializer_zerop (value
))
4678 /* ??? Here's to hoping the front end fills in all of the indices,
4679 so we don't have to figure out what's missing ourselves. */
4680 gcc_assert (purpose
);
4682 /* Skip zero-sized fields, unless value has side-effects. This can
4683 happen with calls to functions returning a empty type, which
4684 we shouldn't discard. As a number of downstream passes don't
4685 expect sets of empty type fields, we rely on the gimplification of
4686 the MODIFY_EXPR we make below to drop the assignment statement. */
4687 if (!TREE_SIDE_EFFECTS (value
)
4688 && TREE_CODE (purpose
) == FIELD_DECL
4689 && is_empty_type (TREE_TYPE (purpose
)))
4692 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4694 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4696 tree lower
= TREE_OPERAND (purpose
, 0);
4697 tree upper
= TREE_OPERAND (purpose
, 1);
4699 /* If the lower bound is equal to upper, just treat it as if
4700 upper was the index. */
4701 if (simple_cst_equal (lower
, upper
))
4705 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
4706 array_elt_type
, pre_p
, cleared
);
4713 /* Do not use bitsizetype for ARRAY_REF indices. */
4714 if (TYPE_DOMAIN (TREE_TYPE (object
)))
4716 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
4718 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4719 purpose
, NULL_TREE
, NULL_TREE
);
4723 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
4724 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
4725 unshare_expr (object
), purpose
, NULL_TREE
);
4728 if (TREE_CODE (value
) == CONSTRUCTOR
4729 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
4730 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4734 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
4735 gimplify_and_add (init
, pre_p
);
4741 /* Return the appropriate RHS predicate for this LHS. */
4744 rhs_predicate_for (tree lhs
)
4746 if (is_gimple_reg (lhs
))
4747 return is_gimple_reg_rhs_or_call
;
4749 return is_gimple_mem_rhs_or_call
;
4752 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4753 before the LHS has been gimplified. */
4755 static gimple_predicate
4756 initial_rhs_predicate_for (tree lhs
)
4758 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
4759 return is_gimple_reg_rhs_or_call
;
4761 return is_gimple_mem_rhs_or_call
;
4764 /* Gimplify a C99 compound literal expression. This just means adding
4765 the DECL_EXPR before the current statement and using its anonymous
4768 static enum gimplify_status
4769 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
4770 bool (*gimple_test_f
) (tree
),
4771 fallback_t fallback
)
4773 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
4774 tree decl
= DECL_EXPR_DECL (decl_s
);
4775 tree init
= DECL_INITIAL (decl
);
4776 /* Mark the decl as addressable if the compound literal
4777 expression is addressable now, otherwise it is marked too late
4778 after we gimplify the initialization expression. */
4779 if (TREE_ADDRESSABLE (*expr_p
))
4780 TREE_ADDRESSABLE (decl
) = 1;
4781 /* Otherwise, if we don't need an lvalue and have a literal directly
4782 substitute it. Check if it matches the gimple predicate, as
4783 otherwise we'd generate a new temporary, and we can as well just
4784 use the decl we already have. */
4785 else if (!TREE_ADDRESSABLE (decl
)
4786 && !TREE_THIS_VOLATILE (decl
)
4788 && (fallback
& fb_lvalue
) == 0
4789 && gimple_test_f (init
))
4795 /* If the decl is not addressable, then it is being used in some
4796 expression or on the right hand side of a statement, and it can
4797 be put into a readonly data section. */
4798 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
4799 TREE_READONLY (decl
) = 1;
4801 /* This decl isn't mentioned in the enclosing block, so add it to the
4802 list of temps. FIXME it seems a bit of a kludge to say that
4803 anonymous artificial vars aren't pushed, but everything else is. */
4804 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
4805 gimple_add_tmp_var (decl
);
4807 gimplify_and_add (decl_s
, pre_p
);
4812 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4813 return a new CONSTRUCTOR if something changed. */
4816 optimize_compound_literals_in_ctor (tree orig_ctor
)
4818 tree ctor
= orig_ctor
;
4819 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
4820 unsigned int idx
, num
= vec_safe_length (elts
);
4822 for (idx
= 0; idx
< num
; idx
++)
4824 tree value
= (*elts
)[idx
].value
;
4825 tree newval
= value
;
4826 if (TREE_CODE (value
) == CONSTRUCTOR
)
4827 newval
= optimize_compound_literals_in_ctor (value
);
4828 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
4830 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
4831 tree decl
= DECL_EXPR_DECL (decl_s
);
4832 tree init
= DECL_INITIAL (decl
);
4834 if (!TREE_ADDRESSABLE (value
)
4835 && !TREE_ADDRESSABLE (decl
)
4837 && TREE_CODE (init
) == CONSTRUCTOR
)
4838 newval
= optimize_compound_literals_in_ctor (init
);
4840 if (newval
== value
)
4843 if (ctor
== orig_ctor
)
4845 ctor
= copy_node (orig_ctor
);
4846 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
4847 elts
= CONSTRUCTOR_ELTS (ctor
);
4849 (*elts
)[idx
].value
= newval
;
4854 /* A subroutine of gimplify_modify_expr. Break out elements of a
4855 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4857 Note that we still need to clear any elements that don't have explicit
4858 initializers, so if not all elements are initialized we keep the
4859 original MODIFY_EXPR, we just remove all of the constructor elements.
4861 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4862 GS_ERROR if we would have to create a temporary when gimplifying
4863 this constructor. Otherwise, return GS_OK.
4865 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4867 static enum gimplify_status
4868 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4869 bool want_value
, bool notify_temp_creation
)
4871 tree object
, ctor
, type
;
4872 enum gimplify_status ret
;
4873 vec
<constructor_elt
, va_gc
> *elts
;
4875 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
4877 if (!notify_temp_creation
)
4879 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
4880 is_gimple_lvalue
, fb_lvalue
);
4881 if (ret
== GS_ERROR
)
4885 object
= TREE_OPERAND (*expr_p
, 0);
4886 ctor
= TREE_OPERAND (*expr_p
, 1)
4887 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
4888 type
= TREE_TYPE (ctor
);
4889 elts
= CONSTRUCTOR_ELTS (ctor
);
4892 switch (TREE_CODE (type
))
4896 case QUAL_UNION_TYPE
:
4899 /* Use readonly data for initializers of this or smaller size
4900 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4902 const HOST_WIDE_INT min_unique_size
= 64;
4903 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4904 is smaller than this, use readonly data. */
4905 const int unique_nonzero_ratio
= 8;
4906 /* True if a single access of the object must be ensured. This is the
4907 case if the target is volatile, the type is non-addressable and more
4908 than one field need to be assigned. */
4909 const bool ensure_single_access
4910 = TREE_THIS_VOLATILE (object
)
4911 && !TREE_ADDRESSABLE (type
)
4912 && vec_safe_length (elts
) > 1;
4913 struct gimplify_init_ctor_preeval_data preeval_data
;
4914 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
4915 HOST_WIDE_INT num_unique_nonzero_elements
;
4916 bool cleared
, complete_p
, valid_const_initializer
;
4918 /* Aggregate types must lower constructors to initialization of
4919 individual elements. The exception is that a CONSTRUCTOR node
4920 with no elements indicates zero-initialization of the whole. */
4921 if (vec_safe_is_empty (elts
))
4923 if (notify_temp_creation
)
4928 /* Fetch information about the constructor to direct later processing.
4929 We might want to make static versions of it in various cases, and
4930 can only do so if it known to be a valid constant initializer. */
4931 valid_const_initializer
4932 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
4933 &num_unique_nonzero_elements
,
4934 &num_ctor_elements
, &complete_p
);
4936 /* If a const aggregate variable is being initialized, then it
4937 should never be a lose to promote the variable to be static. */
4938 if (valid_const_initializer
4939 && num_nonzero_elements
> 1
4940 && TREE_READONLY (object
)
4942 && !DECL_REGISTER (object
)
4943 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
))
4944 /* For ctors that have many repeated nonzero elements
4945 represented through RANGE_EXPRs, prefer initializing
4946 those through runtime loops over copies of large amounts
4947 of data from readonly data section. */
4948 && (num_unique_nonzero_elements
4949 > num_nonzero_elements
/ unique_nonzero_ratio
4950 || ((unsigned HOST_WIDE_INT
) int_size_in_bytes (type
)
4951 <= (unsigned HOST_WIDE_INT
) min_unique_size
)))
4953 if (notify_temp_creation
)
4956 DECL_INITIAL (object
) = ctor
;
4957 TREE_STATIC (object
) = 1;
4958 if (!DECL_NAME (object
))
4959 DECL_NAME (object
) = create_tmp_var_name ("C");
4960 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
4962 /* ??? C++ doesn't automatically append a .<number> to the
4963 assembler name, and even when it does, it looks at FE private
4964 data structures to figure out what that number should be,
4965 which are not set for this variable. I suppose this is
4966 important for local statics for inline functions, which aren't
4967 "local" in the object file sense. So in order to get a unique
4968 TU-local symbol, we must invoke the lhd version now. */
4969 lhd_set_decl_assembler_name (object
);
4971 *expr_p
= NULL_TREE
;
4975 /* If there are "lots" of initialized elements, even discounting
4976 those that are not address constants (and thus *must* be
4977 computed at runtime), then partition the constructor into
4978 constant and non-constant parts. Block copy the constant
4979 parts in, then generate code for the non-constant parts. */
4980 /* TODO. There's code in cp/typeck.c to do this. */
4982 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
4983 /* store_constructor will ignore the clearing of variable-sized
4984 objects. Initializers for such objects must explicitly set
4985 every field that needs to be set. */
4987 else if (!complete_p
)
4988 /* If the constructor isn't complete, clear the whole object
4989 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4991 ??? This ought not to be needed. For any element not present
4992 in the initializer, we should simply set them to zero. Except
4993 we'd need to *find* the elements that are not present, and that
4994 requires trickery to avoid quadratic compile-time behavior in
4995 large cases or excessive memory use in small cases. */
4996 cleared
= !CONSTRUCTOR_NO_CLEARING (ctor
);
4997 else if (num_ctor_elements
- num_nonzero_elements
4998 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
4999 && num_nonzero_elements
< num_ctor_elements
/ 4)
5000 /* If there are "lots" of zeros, it's more efficient to clear
5001 the memory and then set the nonzero elements. */
5003 else if (ensure_single_access
&& num_nonzero_elements
== 0)
5004 /* If a single access to the target must be ensured and all elements
5005 are zero, then it's optimal to clear whatever their number. */
5010 /* If there are "lots" of initialized elements, and all of them
5011 are valid address constants, then the entire initializer can
5012 be dropped to memory, and then memcpy'd out. Don't do this
5013 for sparse arrays, though, as it's more efficient to follow
5014 the standard CONSTRUCTOR behavior of memset followed by
5015 individual element initialization. Also don't do this for small
5016 all-zero initializers (which aren't big enough to merit
5017 clearing), and don't try to make bitwise copies of
5018 TREE_ADDRESSABLE types. */
5019 if (valid_const_initializer
5021 && !(cleared
|| num_nonzero_elements
== 0)
5022 && !TREE_ADDRESSABLE (type
))
5024 HOST_WIDE_INT size
= int_size_in_bytes (type
);
5027 /* ??? We can still get unbounded array types, at least
5028 from the C++ front end. This seems wrong, but attempt
5029 to work around it for now. */
5032 size
= int_size_in_bytes (TREE_TYPE (object
));
5034 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
5037 /* Find the maximum alignment we can assume for the object. */
5038 /* ??? Make use of DECL_OFFSET_ALIGN. */
5039 if (DECL_P (object
))
5040 align
= DECL_ALIGN (object
);
5042 align
= TYPE_ALIGN (type
);
5044 /* Do a block move either if the size is so small as to make
5045 each individual move a sub-unit move on average, or if it
5046 is so large as to make individual moves inefficient. */
5048 && num_nonzero_elements
> 1
5049 /* For ctors that have many repeated nonzero elements
5050 represented through RANGE_EXPRs, prefer initializing
5051 those through runtime loops over copies of large amounts
5052 of data from readonly data section. */
5053 && (num_unique_nonzero_elements
5054 > num_nonzero_elements
/ unique_nonzero_ratio
5055 || size
<= min_unique_size
)
5056 && (size
< num_nonzero_elements
5057 || !can_move_by_pieces (size
, align
)))
5059 if (notify_temp_creation
)
5062 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
5063 ctor
= tree_output_constant_def (ctor
);
5064 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
5065 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
5066 TREE_OPERAND (*expr_p
, 1) = ctor
;
5068 /* This is no longer an assignment of a CONSTRUCTOR, but
5069 we still may have processing to do on the LHS. So
5070 pretend we didn't do anything here to let that happen. */
5071 return GS_UNHANDLED
;
5075 /* If a single access to the target must be ensured and there are
5076 nonzero elements or the zero elements are not assigned en masse,
5077 initialize the target from a temporary. */
5078 if (ensure_single_access
&& (num_nonzero_elements
> 0 || !cleared
))
5080 if (notify_temp_creation
)
5083 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
5084 TREE_OPERAND (*expr_p
, 0) = temp
;
5085 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
5087 build2 (MODIFY_EXPR
, void_type_node
,
5092 if (notify_temp_creation
)
5095 /* If there are nonzero elements and if needed, pre-evaluate to capture
5096 elements overlapping with the lhs into temporaries. We must do this
5097 before clearing to fetch the values before they are zeroed-out. */
5098 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
5100 preeval_data
.lhs_base_decl
= get_base_address (object
);
5101 if (!DECL_P (preeval_data
.lhs_base_decl
))
5102 preeval_data
.lhs_base_decl
= NULL
;
5103 preeval_data
.lhs_alias_set
= get_alias_set (object
);
5105 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
5106 pre_p
, post_p
, &preeval_data
);
5109 bool ctor_has_side_effects_p
5110 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
5114 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5115 Note that we still have to gimplify, in order to handle the
5116 case of variable sized types. Avoid shared tree structures. */
5117 CONSTRUCTOR_ELTS (ctor
) = NULL
;
5118 TREE_SIDE_EFFECTS (ctor
) = 0;
5119 object
= unshare_expr (object
);
5120 gimplify_stmt (expr_p
, pre_p
);
5123 /* If we have not block cleared the object, or if there are nonzero
5124 elements in the constructor, or if the constructor has side effects,
5125 add assignments to the individual scalar fields of the object. */
5127 || num_nonzero_elements
> 0
5128 || ctor_has_side_effects_p
)
5129 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
5131 *expr_p
= NULL_TREE
;
5139 if (notify_temp_creation
)
5142 /* Extract the real and imaginary parts out of the ctor. */
5143 gcc_assert (elts
->length () == 2);
5144 r
= (*elts
)[0].value
;
5145 i
= (*elts
)[1].value
;
5146 if (r
== NULL
|| i
== NULL
)
5148 tree zero
= build_zero_cst (TREE_TYPE (type
));
5155 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5156 represent creation of a complex value. */
5157 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
5159 ctor
= build_complex (type
, r
, i
);
5160 TREE_OPERAND (*expr_p
, 1) = ctor
;
5164 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
5165 TREE_OPERAND (*expr_p
, 1) = ctor
;
5166 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
5169 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
5177 unsigned HOST_WIDE_INT ix
;
5178 constructor_elt
*ce
;
5180 if (notify_temp_creation
)
5183 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5184 if (TREE_CONSTANT (ctor
))
5186 bool constant_p
= true;
5189 /* Even when ctor is constant, it might contain non-*_CST
5190 elements, such as addresses or trapping values like
5191 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5192 in VECTOR_CST nodes. */
5193 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
5194 if (!CONSTANT_CLASS_P (value
))
5202 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
5206 TREE_CONSTANT (ctor
) = 0;
5209 /* Vector types use CONSTRUCTOR all the way through gimple
5210 compilation as a general initializer. */
5211 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
5213 enum gimplify_status tret
;
5214 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
5216 if (tret
== GS_ERROR
)
5218 else if (TREE_STATIC (ctor
)
5219 && !initializer_constant_valid_p (ce
->value
,
5220 TREE_TYPE (ce
->value
)))
5221 TREE_STATIC (ctor
) = 0;
5223 recompute_constructor_flags (ctor
);
5224 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
5225 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
5230 /* So how did we get a CONSTRUCTOR for a scalar type? */
5234 if (ret
== GS_ERROR
)
5236 /* If we have gimplified both sides of the initializer but have
5237 not emitted an assignment, do so now. */
5240 tree lhs
= TREE_OPERAND (*expr_p
, 0);
5241 tree rhs
= TREE_OPERAND (*expr_p
, 1);
5242 if (want_value
&& object
== lhs
)
5243 lhs
= unshare_expr (lhs
);
5244 gassign
*init
= gimple_build_assign (lhs
, rhs
);
5245 gimplify_seq_add_stmt (pre_p
, init
);
5259 /* Given a pointer value OP0, return a simplified version of an
5260 indirection through OP0, or NULL_TREE if no simplification is
5261 possible. This may only be applied to a rhs of an expression.
5262 Note that the resulting type may be different from the type pointed
5263 to in the sense that it is still compatible from the langhooks
5267 gimple_fold_indirect_ref_rhs (tree t
)
5269 return gimple_fold_indirect_ref (t
);
5272 /* Subroutine of gimplify_modify_expr to do simplifications of
5273 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5274 something changes. */
5276 static enum gimplify_status
5277 gimplify_modify_expr_rhs (tree
*expr_p
, tree
*from_p
, tree
*to_p
,
5278 gimple_seq
*pre_p
, gimple_seq
*post_p
,
5281 enum gimplify_status ret
= GS_UNHANDLED
;
5287 switch (TREE_CODE (*from_p
))
5290 /* If we're assigning from a read-only variable initialized with
5291 a constructor and not volatile, do the direct assignment from
5292 the constructor, but only if the target is not volatile either
5293 since this latter assignment might end up being done on a per
5294 field basis. However, if the target is volatile and the type
5295 is aggregate and non-addressable, gimplify_init_constructor
5296 knows that it needs to ensure a single access to the target
5297 and it will return GS_OK only in this case. */
5298 if (TREE_READONLY (*from_p
)
5299 && DECL_INITIAL (*from_p
)
5300 && TREE_CODE (DECL_INITIAL (*from_p
)) == CONSTRUCTOR
5301 && !TREE_THIS_VOLATILE (*from_p
)
5302 && (!TREE_THIS_VOLATILE (*to_p
)
5303 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p
))
5304 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p
)))))
5306 tree old_from
= *from_p
;
5307 enum gimplify_status subret
;
5309 /* Move the constructor into the RHS. */
5310 *from_p
= unshare_expr (DECL_INITIAL (*from_p
));
5312 /* Let's see if gimplify_init_constructor will need to put
5314 subret
= gimplify_init_constructor (expr_p
, NULL
, NULL
,
5316 if (subret
== GS_ERROR
)
5318 /* If so, revert the change. */
5330 /* If we have code like
5334 where the type of "x" is a (possibly cv-qualified variant
5335 of "A"), treat the entire expression as identical to "x".
5336 This kind of code arises in C++ when an object is bound
5337 to a const reference, and if "x" is a TARGET_EXPR we want
5338 to take advantage of the optimization below. */
5339 bool volatile_p
= TREE_THIS_VOLATILE (*from_p
);
5340 tree t
= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p
, 0));
5343 if (TREE_THIS_VOLATILE (t
) != volatile_p
)
5346 t
= build_simple_mem_ref_loc (EXPR_LOCATION (*from_p
),
5347 build_fold_addr_expr (t
));
5348 if (REFERENCE_CLASS_P (t
))
5349 TREE_THIS_VOLATILE (t
) = volatile_p
;
5360 /* If we are initializing something from a TARGET_EXPR, strip the
5361 TARGET_EXPR and initialize it directly, if possible. This can't
5362 be done if the initializer is void, since that implies that the
5363 temporary is set in some non-trivial way.
5365 ??? What about code that pulls out the temp and uses it
5366 elsewhere? I think that such code never uses the TARGET_EXPR as
5367 an initializer. If I'm wrong, we'll die because the temp won't
5368 have any RTL. In that case, I guess we'll need to replace
5369 references somehow. */
5370 tree init
= TARGET_EXPR_INITIAL (*from_p
);
5373 && (TREE_CODE (*expr_p
) != MODIFY_EXPR
5374 || !TARGET_EXPR_NO_ELIDE (*from_p
))
5375 && !VOID_TYPE_P (TREE_TYPE (init
)))
5385 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5387 gimplify_compound_expr (from_p
, pre_p
, true);
5393 /* If we already made some changes, let the front end have a
5394 crack at this before we break it down. */
5395 if (ret
!= GS_UNHANDLED
)
5397 /* If we're initializing from a CONSTRUCTOR, break this into
5398 individual MODIFY_EXPRs. */
5399 return gimplify_init_constructor (expr_p
, pre_p
, post_p
, want_value
,
5403 /* If we're assigning to a non-register type, push the assignment
5404 down into the branches. This is mandatory for ADDRESSABLE types,
5405 since we cannot generate temporaries for such, but it saves a
5406 copy in other cases as well. */
5407 if (!is_gimple_reg_type (TREE_TYPE (*from_p
)))
5409 /* This code should mirror the code in gimplify_cond_expr. */
5410 enum tree_code code
= TREE_CODE (*expr_p
);
5411 tree cond
= *from_p
;
5412 tree result
= *to_p
;
5414 ret
= gimplify_expr (&result
, pre_p
, post_p
,
5415 is_gimple_lvalue
, fb_lvalue
);
5416 if (ret
!= GS_ERROR
)
5419 /* If we are going to write RESULT more than once, clear
5420 TREE_READONLY flag, otherwise we might incorrectly promote
5421 the variable to static const and initialize it at compile
5422 time in one of the branches. */
5424 && TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
5425 && TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5426 TREE_READONLY (result
) = 0;
5427 if (TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
)
5428 TREE_OPERAND (cond
, 1)
5429 = build2 (code
, void_type_node
, result
,
5430 TREE_OPERAND (cond
, 1));
5431 if (TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5432 TREE_OPERAND (cond
, 2)
5433 = build2 (code
, void_type_node
, unshare_expr (result
),
5434 TREE_OPERAND (cond
, 2));
5436 TREE_TYPE (cond
) = void_type_node
;
5437 recalculate_side_effects (cond
);
5441 gimplify_and_add (cond
, pre_p
);
5442 *expr_p
= unshare_expr (result
);
5451 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5452 return slot so that we don't generate a temporary. */
5453 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p
)
5454 && aggregate_value_p (*from_p
, *from_p
))
5458 if (!(rhs_predicate_for (*to_p
))(*from_p
))
5459 /* If we need a temporary, *to_p isn't accurate. */
5461 /* It's OK to use the return slot directly unless it's an NRV. */
5462 else if (TREE_CODE (*to_p
) == RESULT_DECL
5463 && DECL_NAME (*to_p
) == NULL_TREE
5464 && needs_to_live_in_memory (*to_p
))
5466 else if (is_gimple_reg_type (TREE_TYPE (*to_p
))
5467 || (DECL_P (*to_p
) && DECL_REGISTER (*to_p
)))
5468 /* Don't force regs into memory. */
5470 else if (TREE_CODE (*expr_p
) == INIT_EXPR
)
5471 /* It's OK to use the target directly if it's being
5474 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p
)))
5476 /* Always use the target and thus RSO for variable-sized types.
5477 GIMPLE cannot deal with a variable-sized assignment
5478 embedded in a call statement. */
5480 else if (TREE_CODE (*to_p
) != SSA_NAME
5481 && (!is_gimple_variable (*to_p
)
5482 || needs_to_live_in_memory (*to_p
)))
5483 /* Don't use the original target if it's already addressable;
5484 if its address escapes, and the called function uses the
5485 NRV optimization, a conforming program could see *to_p
5486 change before the called function returns; see c++/19317.
5487 When optimizing, the return_slot pass marks more functions
5488 as safe after we have escape info. */
5495 CALL_EXPR_RETURN_SLOT_OPT (*from_p
) = 1;
5496 mark_addressable (*to_p
);
5501 case WITH_SIZE_EXPR
:
5502 /* Likewise for calls that return an aggregate of non-constant size,
5503 since we would not be able to generate a temporary at all. */
5504 if (TREE_CODE (TREE_OPERAND (*from_p
, 0)) == CALL_EXPR
)
5506 *from_p
= TREE_OPERAND (*from_p
, 0);
5507 /* We don't change ret in this case because the
5508 WITH_SIZE_EXPR might have been added in
5509 gimplify_modify_expr, so returning GS_OK would lead to an
5515 /* If we're initializing from a container, push the initialization
5517 case CLEANUP_POINT_EXPR
:
5519 case STATEMENT_LIST
:
5521 tree wrap
= *from_p
;
5524 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_min_lval
,
5526 if (ret
!= GS_ERROR
)
5529 t
= voidify_wrapper_expr (wrap
, *expr_p
);
5530 gcc_assert (t
== *expr_p
);
5534 gimplify_and_add (wrap
, pre_p
);
5535 *expr_p
= unshare_expr (*to_p
);
5543 /* Pull out compound literal expressions from a NOP_EXPR.
5544 Those are created in the C FE to drop qualifiers during
5545 lvalue conversion. */
5546 if ((TREE_CODE (TREE_OPERAND (*from_p
, 0)) == COMPOUND_LITERAL_EXPR
)
5547 && tree_ssa_useless_type_conversion (*from_p
))
5549 *from_p
= TREE_OPERAND (*from_p
, 0);
5555 case COMPOUND_LITERAL_EXPR
:
5557 tree complit
= TREE_OPERAND (*expr_p
, 1);
5558 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (complit
);
5559 tree decl
= DECL_EXPR_DECL (decl_s
);
5560 tree init
= DECL_INITIAL (decl
);
5562 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5563 into struct T x = { 0, 1, 2 } if the address of the
5564 compound literal has never been taken. */
5565 if (!TREE_ADDRESSABLE (complit
)
5566 && !TREE_ADDRESSABLE (decl
)
5569 *expr_p
= copy_node (*expr_p
);
5570 TREE_OPERAND (*expr_p
, 1) = init
;
5585 /* Return true if T looks like a valid GIMPLE statement. */
5588 is_gimple_stmt (tree t
)
5590 const enum tree_code code
= TREE_CODE (t
);
5595 /* The only valid NOP_EXPR is the empty statement. */
5596 return IS_EMPTY_STMT (t
);
5600 /* These are only valid if they're void. */
5601 return TREE_TYPE (t
) == NULL
|| VOID_TYPE_P (TREE_TYPE (t
));
5607 case CASE_LABEL_EXPR
:
5608 case TRY_CATCH_EXPR
:
5609 case TRY_FINALLY_EXPR
:
5610 case EH_FILTER_EXPR
:
5613 case STATEMENT_LIST
:
5618 case OACC_HOST_DATA
:
5621 case OACC_ENTER_DATA
:
5622 case OACC_EXIT_DATA
:
5627 case OMP_DISTRIBUTE
:
5640 case OMP_TARGET_DATA
:
5641 case OMP_TARGET_UPDATE
:
5642 case OMP_TARGET_ENTER_DATA
:
5643 case OMP_TARGET_EXIT_DATA
:
5646 /* These are always void. */
5652 /* These are valid regardless of their type. */
5661 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5662 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5664 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5665 other, unmodified part of the complex object just before the total store.
5666 As a consequence, if the object is still uninitialized, an undefined value
5667 will be loaded into a register, which may result in a spurious exception
5668 if the register is floating-point and the value happens to be a signaling
5669 NaN for example. Then the fully-fledged complex operations lowering pass
5670 followed by a DCE pass are necessary in order to fix things up. */
5672 static enum gimplify_status
5673 gimplify_modify_expr_complex_part (tree
*expr_p
, gimple_seq
*pre_p
,
5676 enum tree_code code
, ocode
;
5677 tree lhs
, rhs
, new_rhs
, other
, realpart
, imagpart
;
5679 lhs
= TREE_OPERAND (*expr_p
, 0);
5680 rhs
= TREE_OPERAND (*expr_p
, 1);
5681 code
= TREE_CODE (lhs
);
5682 lhs
= TREE_OPERAND (lhs
, 0);
5684 ocode
= code
== REALPART_EXPR
? IMAGPART_EXPR
: REALPART_EXPR
;
5685 other
= build1 (ocode
, TREE_TYPE (rhs
), lhs
);
5686 suppress_warning (other
);
5687 other
= get_formal_tmp_var (other
, pre_p
);
5689 realpart
= code
== REALPART_EXPR
? rhs
: other
;
5690 imagpart
= code
== REALPART_EXPR
? other
: rhs
;
5692 if (TREE_CONSTANT (realpart
) && TREE_CONSTANT (imagpart
))
5693 new_rhs
= build_complex (TREE_TYPE (lhs
), realpart
, imagpart
);
5695 new_rhs
= build2 (COMPLEX_EXPR
, TREE_TYPE (lhs
), realpart
, imagpart
);
5697 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (lhs
, new_rhs
));
5698 *expr_p
= (want_value
) ? rhs
: NULL_TREE
;
5703 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5709 PRE_P points to the list where side effects that must happen before
5710 *EXPR_P should be stored.
5712 POST_P points to the list where side effects that must happen after
5713 *EXPR_P should be stored.
5715 WANT_VALUE is nonzero iff we want to use the value of this expression
5716 in another expression. */
5718 static enum gimplify_status
5719 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5722 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
5723 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
5724 enum gimplify_status ret
= GS_UNHANDLED
;
5726 location_t loc
= EXPR_LOCATION (*expr_p
);
5727 gimple_stmt_iterator gsi
;
5729 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
5730 || TREE_CODE (*expr_p
) == INIT_EXPR
);
5732 /* Trying to simplify a clobber using normal logic doesn't work,
5733 so handle it here. */
5734 if (TREE_CLOBBER_P (*from_p
))
5736 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5737 if (ret
== GS_ERROR
)
5739 gcc_assert (!want_value
);
5740 if (!VAR_P (*to_p
) && TREE_CODE (*to_p
) != MEM_REF
)
5742 tree addr
= get_initialized_tmp_var (build_fold_addr_expr (*to_p
),
5744 *to_p
= build_simple_mem_ref_loc (EXPR_LOCATION (*to_p
), addr
);
5746 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
5751 /* Insert pointer conversions required by the middle-end that are not
5752 required by the frontend. This fixes middle-end type checking for
5753 for example gcc.dg/redecl-6.c. */
5754 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
5756 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
5757 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
5758 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
5761 /* See if any simplifications can be done based on what the RHS is. */
5762 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5764 if (ret
!= GS_UNHANDLED
)
5767 /* For empty types only gimplify the left hand side and right hand
5768 side as statements and throw away the assignment. Do this after
5769 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5771 if (is_empty_type (TREE_TYPE (*from_p
))
5773 /* Don't do this for calls that return addressable types, expand_call
5774 relies on those having a lhs. */
5775 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p
))
5776 && TREE_CODE (*from_p
) == CALL_EXPR
))
5778 gimplify_stmt (from_p
, pre_p
);
5779 gimplify_stmt (to_p
, pre_p
);
5780 *expr_p
= NULL_TREE
;
5784 /* If the value being copied is of variable width, compute the length
5785 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5786 before gimplifying any of the operands so that we can resolve any
5787 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5788 the size of the expression to be copied, not of the destination, so
5789 that is what we must do here. */
5790 maybe_with_size_expr (from_p
);
5792 /* As a special case, we have to temporarily allow for assignments
5793 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5794 a toplevel statement, when gimplifying the GENERIC expression
5795 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5796 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5798 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5799 prevent gimplify_expr from trying to create a new temporary for
5800 foo's LHS, we tell it that it should only gimplify until it
5801 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5802 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5803 and all we need to do here is set 'a' to be its LHS. */
5805 /* Gimplify the RHS first for C++17 and bug 71104. */
5806 gimple_predicate initial_pred
= initial_rhs_predicate_for (*to_p
);
5807 ret
= gimplify_expr (from_p
, pre_p
, post_p
, initial_pred
, fb_rvalue
);
5808 if (ret
== GS_ERROR
)
5811 /* Then gimplify the LHS. */
5812 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5813 twice we have to make sure to gimplify into non-SSA as otherwise
5814 the abnormal edge added later will make those defs not dominate
5816 ??? Technically this applies only to the registers used in the
5817 resulting non-register *TO_P. */
5818 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
5820 && TREE_CODE (*from_p
) == CALL_EXPR
5821 && call_expr_flags (*from_p
) & ECF_RETURNS_TWICE
)
5822 gimplify_ctxp
->into_ssa
= false;
5823 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5824 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
5825 if (ret
== GS_ERROR
)
5828 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5829 guess for the predicate was wrong. */
5830 gimple_predicate final_pred
= rhs_predicate_for (*to_p
);
5831 if (final_pred
!= initial_pred
)
5833 ret
= gimplify_expr (from_p
, pre_p
, post_p
, final_pred
, fb_rvalue
);
5834 if (ret
== GS_ERROR
)
5838 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5839 size as argument to the call. */
5840 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5842 tree call
= TREE_OPERAND (*from_p
, 0);
5843 tree vlasize
= TREE_OPERAND (*from_p
, 1);
5845 if (TREE_CODE (call
) == CALL_EXPR
5846 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
5848 int nargs
= call_expr_nargs (call
);
5849 tree type
= TREE_TYPE (call
);
5850 tree ap
= CALL_EXPR_ARG (call
, 0);
5851 tree tag
= CALL_EXPR_ARG (call
, 1);
5852 tree aptag
= CALL_EXPR_ARG (call
, 2);
5853 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
5857 TREE_OPERAND (*from_p
, 0) = newcall
;
5861 /* Now see if the above changed *from_p to something we handle specially. */
5862 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5864 if (ret
!= GS_UNHANDLED
)
5867 /* If we've got a variable sized assignment between two lvalues (i.e. does
5868 not involve a call), then we can make things a bit more straightforward
5869 by converting the assignment to memcpy or memset. */
5870 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5872 tree from
= TREE_OPERAND (*from_p
, 0);
5873 tree size
= TREE_OPERAND (*from_p
, 1);
5875 if (TREE_CODE (from
) == CONSTRUCTOR
)
5876 return gimplify_modify_expr_to_memset (expr_p
, size
, want_value
, pre_p
);
5878 if (is_gimple_addressable (from
))
5881 return gimplify_modify_expr_to_memcpy (expr_p
, size
, want_value
,
5886 /* Transform partial stores to non-addressable complex variables into
5887 total stores. This allows us to use real instead of virtual operands
5888 for these variables, which improves optimization. */
5889 if ((TREE_CODE (*to_p
) == REALPART_EXPR
5890 || TREE_CODE (*to_p
) == IMAGPART_EXPR
)
5891 && is_gimple_reg (TREE_OPERAND (*to_p
, 0)))
5892 return gimplify_modify_expr_complex_part (expr_p
, pre_p
, want_value
);
5894 /* Try to alleviate the effects of the gimplification creating artificial
5895 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5896 make sure not to create DECL_DEBUG_EXPR links across functions. */
5897 if (!gimplify_ctxp
->into_ssa
5899 && DECL_IGNORED_P (*from_p
)
5901 && !DECL_IGNORED_P (*to_p
)
5902 && decl_function_context (*to_p
) == current_function_decl
5903 && decl_function_context (*from_p
) == current_function_decl
)
5905 if (!DECL_NAME (*from_p
) && DECL_NAME (*to_p
))
5907 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p
)));
5908 DECL_HAS_DEBUG_EXPR_P (*from_p
) = 1;
5909 SET_DECL_DEBUG_EXPR (*from_p
, *to_p
);
5912 if (want_value
&& TREE_THIS_VOLATILE (*to_p
))
5913 *from_p
= get_initialized_tmp_var (*from_p
, pre_p
, post_p
);
5915 if (TREE_CODE (*from_p
) == CALL_EXPR
)
5917 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5918 instead of a GIMPLE_ASSIGN. */
5920 if (CALL_EXPR_FN (*from_p
) == NULL_TREE
)
5922 /* Gimplify internal functions created in the FEs. */
5923 int nargs
= call_expr_nargs (*from_p
), i
;
5924 enum internal_fn ifn
= CALL_EXPR_IFN (*from_p
);
5925 auto_vec
<tree
> vargs (nargs
);
5927 for (i
= 0; i
< nargs
; i
++)
5929 gimplify_arg (&CALL_EXPR_ARG (*from_p
, i
), pre_p
,
5930 EXPR_LOCATION (*from_p
));
5931 vargs
.quick_push (CALL_EXPR_ARG (*from_p
, i
));
5933 call_stmt
= gimple_build_call_internal_vec (ifn
, vargs
);
5934 gimple_call_set_nothrow (call_stmt
, TREE_NOTHROW (*from_p
));
5935 gimple_set_location (call_stmt
, EXPR_LOCATION (*expr_p
));
5939 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*from_p
));
5940 CALL_EXPR_FN (*from_p
) = TREE_OPERAND (CALL_EXPR_FN (*from_p
), 0);
5941 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p
));
5942 tree fndecl
= get_callee_fndecl (*from_p
);
5944 && fndecl_built_in_p (fndecl
, BUILT_IN_EXPECT
)
5945 && call_expr_nargs (*from_p
) == 3)
5946 call_stmt
= gimple_build_call_internal (IFN_BUILTIN_EXPECT
, 3,
5947 CALL_EXPR_ARG (*from_p
, 0),
5948 CALL_EXPR_ARG (*from_p
, 1),
5949 CALL_EXPR_ARG (*from_p
, 2));
5952 call_stmt
= gimple_build_call_from_tree (*from_p
, fnptrtype
);
5955 notice_special_calls (call_stmt
);
5956 if (!gimple_call_noreturn_p (call_stmt
) || !should_remove_lhs_p (*to_p
))
5957 gimple_call_set_lhs (call_stmt
, *to_p
);
5958 else if (TREE_CODE (*to_p
) == SSA_NAME
)
5959 /* The above is somewhat premature, avoid ICEing later for a
5960 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5961 ??? This doesn't make it a default-def. */
5962 SSA_NAME_DEF_STMT (*to_p
) = gimple_build_nop ();
5968 assign
= gimple_build_assign (*to_p
, *from_p
);
5969 gimple_set_location (assign
, EXPR_LOCATION (*expr_p
));
5970 if (COMPARISON_CLASS_P (*from_p
))
5971 copy_warning (assign
, *from_p
);
5974 if (gimplify_ctxp
->into_ssa
&& is_gimple_reg (*to_p
))
5976 /* We should have got an SSA name from the start. */
5977 gcc_assert (TREE_CODE (*to_p
) == SSA_NAME
5978 || ! gimple_in_ssa_p (cfun
));
5981 gimplify_seq_add_stmt (pre_p
, assign
);
5982 gsi
= gsi_last (*pre_p
);
5983 maybe_fold_stmt (&gsi
);
5987 *expr_p
= TREE_THIS_VOLATILE (*to_p
) ? *from_p
: unshare_expr (*to_p
);
5996 /* Gimplify a comparison between two variable-sized objects. Do this
5997 with a call to BUILT_IN_MEMCMP. */
5999 static enum gimplify_status
6000 gimplify_variable_sized_compare (tree
*expr_p
)
6002 location_t loc
= EXPR_LOCATION (*expr_p
);
6003 tree op0
= TREE_OPERAND (*expr_p
, 0);
6004 tree op1
= TREE_OPERAND (*expr_p
, 1);
6005 tree t
, arg
, dest
, src
, expr
;
6007 arg
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
6008 arg
= unshare_expr (arg
);
6009 arg
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg
, op0
);
6010 src
= build_fold_addr_expr_loc (loc
, op1
);
6011 dest
= build_fold_addr_expr_loc (loc
, op0
);
6012 t
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
6013 t
= build_call_expr_loc (loc
, t
, 3, dest
, src
, arg
);
6016 = build2 (TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), t
, integer_zero_node
);
6017 SET_EXPR_LOCATION (expr
, loc
);
6023 /* Gimplify a comparison between two aggregate objects of integral scalar
6024 mode as a comparison between the bitwise equivalent scalar values. */
6026 static enum gimplify_status
6027 gimplify_scalar_mode_aggregate_compare (tree
*expr_p
)
6029 location_t loc
= EXPR_LOCATION (*expr_p
);
6030 tree op0
= TREE_OPERAND (*expr_p
, 0);
6031 tree op1
= TREE_OPERAND (*expr_p
, 1);
6033 tree type
= TREE_TYPE (op0
);
6034 tree scalar_type
= lang_hooks
.types
.type_for_mode (TYPE_MODE (type
), 1);
6036 op0
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op0
);
6037 op1
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op1
);
6040 = fold_build2_loc (loc
, TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), op0
, op1
);
6045 /* Gimplify an expression sequence. This function gimplifies each
6046 expression and rewrites the original expression with the last
6047 expression of the sequence in GIMPLE form.
6049 PRE_P points to the list where the side effects for all the
6050 expressions in the sequence will be emitted.
6052 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6054 static enum gimplify_status
6055 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
6061 tree
*sub_p
= &TREE_OPERAND (t
, 0);
6063 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
6064 gimplify_compound_expr (sub_p
, pre_p
, false);
6066 gimplify_stmt (sub_p
, pre_p
);
6068 t
= TREE_OPERAND (t
, 1);
6070 while (TREE_CODE (t
) == COMPOUND_EXPR
);
6077 gimplify_stmt (expr_p
, pre_p
);
6082 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6083 gimplify. After gimplification, EXPR_P will point to a new temporary
6084 that holds the original value of the SAVE_EXPR node.
6086 PRE_P points to the list where side effects that must happen before
6087 *EXPR_P should be stored. */
6089 static enum gimplify_status
6090 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6092 enum gimplify_status ret
= GS_ALL_DONE
;
6095 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
6096 val
= TREE_OPERAND (*expr_p
, 0);
6098 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6099 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
6101 /* The operand may be a void-valued expression. It is
6102 being executed only for its side-effects. */
6103 if (TREE_TYPE (val
) == void_type_node
)
6105 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
6106 is_gimple_stmt
, fb_none
);
6110 /* The temporary may not be an SSA name as later abnormal and EH
6111 control flow may invalidate use/def domination. When in SSA
6112 form then assume there are no such issues and SAVE_EXPRs only
6113 appear via GENERIC foldings. */
6114 val
= get_initialized_tmp_var (val
, pre_p
, post_p
,
6115 gimple_in_ssa_p (cfun
));
6117 TREE_OPERAND (*expr_p
, 0) = val
;
6118 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
6126 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6133 PRE_P points to the list where side effects that must happen before
6134 *EXPR_P should be stored.
6136 POST_P points to the list where side effects that must happen after
6137 *EXPR_P should be stored. */
6139 static enum gimplify_status
6140 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6142 tree expr
= *expr_p
;
6143 tree op0
= TREE_OPERAND (expr
, 0);
6144 enum gimplify_status ret
;
6145 location_t loc
= EXPR_LOCATION (*expr_p
);
6147 switch (TREE_CODE (op0
))
6151 /* Check if we are dealing with an expression of the form '&*ptr'.
6152 While the front end folds away '&*ptr' into 'ptr', these
6153 expressions may be generated internally by the compiler (e.g.,
6154 builtins like __builtin_va_end). */
6155 /* Caution: the silent array decomposition semantics we allow for
6156 ADDR_EXPR means we can't always discard the pair. */
6157 /* Gimplification of the ADDR_EXPR operand may drop
6158 cv-qualification conversions, so make sure we add them if
6161 tree op00
= TREE_OPERAND (op0
, 0);
6162 tree t_expr
= TREE_TYPE (expr
);
6163 tree t_op00
= TREE_TYPE (op00
);
6165 if (!useless_type_conversion_p (t_expr
, t_op00
))
6166 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
6172 case VIEW_CONVERT_EXPR
:
6173 /* Take the address of our operand and then convert it to the type of
6176 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6177 all clear. The impact of this transformation is even less clear. */
6179 /* If the operand is a useless conversion, look through it. Doing so
6180 guarantees that the ADDR_EXPR and its operand will remain of the
6182 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
6183 op0
= TREE_OPERAND (op0
, 0);
6185 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
6186 build_fold_addr_expr_loc (loc
,
6187 TREE_OPERAND (op0
, 0)));
6192 if (integer_zerop (TREE_OPERAND (op0
, 1)))
6193 goto do_indirect_ref
;
6198 /* If we see a call to a declared builtin or see its address
6199 being taken (we can unify those cases here) then we can mark
6200 the builtin for implicit generation by GCC. */
6201 if (TREE_CODE (op0
) == FUNCTION_DECL
6202 && fndecl_built_in_p (op0
, BUILT_IN_NORMAL
)
6203 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
6204 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
6206 /* We use fb_either here because the C frontend sometimes takes
6207 the address of a call that returns a struct; see
6208 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6209 the implied temporary explicit. */
6211 /* Make the operand addressable. */
6212 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
6213 is_gimple_addressable
, fb_either
);
6214 if (ret
== GS_ERROR
)
6217 /* Then mark it. Beware that it may not be possible to do so directly
6218 if a temporary has been created by the gimplification. */
6219 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
6221 op0
= TREE_OPERAND (expr
, 0);
6223 /* For various reasons, the gimplification of the expression
6224 may have made a new INDIRECT_REF. */
6225 if (TREE_CODE (op0
) == INDIRECT_REF
6226 || (TREE_CODE (op0
) == MEM_REF
6227 && integer_zerop (TREE_OPERAND (op0
, 1))))
6228 goto do_indirect_ref
;
6230 mark_addressable (TREE_OPERAND (expr
, 0));
6232 /* The FEs may end up building ADDR_EXPRs early on a decl with
6233 an incomplete type. Re-build ADDR_EXPRs in canonical form
6235 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
6236 *expr_p
= build_fold_addr_expr (op0
);
6238 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6239 recompute_tree_invariant_for_addr_expr (*expr_p
);
6241 /* If we re-built the ADDR_EXPR add a conversion to the original type
6243 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
6244 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
6252 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6253 value; output operands should be a gimple lvalue. */
6255 static enum gimplify_status
6256 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6260 const char **oconstraints
;
6263 const char *constraint
;
6264 bool allows_mem
, allows_reg
, is_inout
;
6265 enum gimplify_status ret
, tret
;
6267 vec
<tree
, va_gc
> *inputs
;
6268 vec
<tree
, va_gc
> *outputs
;
6269 vec
<tree
, va_gc
> *clobbers
;
6270 vec
<tree
, va_gc
> *labels
;
6274 noutputs
= list_length (ASM_OUTPUTS (expr
));
6275 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
6283 link_next
= NULL_TREE
;
6284 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
6287 size_t constraint_len
;
6289 link_next
= TREE_CHAIN (link
);
6293 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6294 constraint_len
= strlen (constraint
);
6295 if (constraint_len
== 0)
6298 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
6299 &allows_mem
, &allows_reg
, &is_inout
);
6306 /* If we can't make copies, we can only accept memory.
6307 Similarly for VLAs. */
6308 tree outtype
= TREE_TYPE (TREE_VALUE (link
));
6309 if (outtype
!= error_mark_node
6310 && (TREE_ADDRESSABLE (outtype
)
6311 || !COMPLETE_TYPE_P (outtype
)
6312 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype
))))
6318 error ("impossible constraint in %<asm%>");
6319 error ("non-memory output %d must stay in memory", i
);
6324 if (!allows_reg
&& allows_mem
)
6325 mark_addressable (TREE_VALUE (link
));
6327 tree orig
= TREE_VALUE (link
);
6328 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6329 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6330 fb_lvalue
| fb_mayfail
);
6331 if (tret
== GS_ERROR
)
6333 if (orig
!= error_mark_node
)
6334 error ("invalid lvalue in %<asm%> output %d", i
);
6338 /* If the constraint does not allow memory make sure we gimplify
6339 it to a register if it is not already but its base is. This
6340 happens for complex and vector components. */
6343 tree op
= TREE_VALUE (link
);
6344 if (! is_gimple_val (op
)
6345 && is_gimple_reg_type (TREE_TYPE (op
))
6346 && is_gimple_reg (get_base_address (op
)))
6348 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6352 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6353 tem
, unshare_expr (op
));
6354 gimplify_and_add (ass
, pre_p
);
6356 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6357 gimplify_and_add (ass
, post_p
);
6359 TREE_VALUE (link
) = tem
;
6364 vec_safe_push (outputs
, link
);
6365 TREE_CHAIN (link
) = NULL_TREE
;
6369 /* An input/output operand. To give the optimizers more
6370 flexibility, split it into separate input and output
6373 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6376 /* Turn the in/out constraint into an output constraint. */
6377 char *p
= xstrdup (constraint
);
6379 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
6381 /* And add a matching input constraint. */
6384 sprintf (buf
, "%u", i
);
6386 /* If there are multiple alternatives in the constraint,
6387 handle each of them individually. Those that allow register
6388 will be replaced with operand number, the others will stay
6390 if (strchr (p
, ',') != NULL
)
6392 size_t len
= 0, buflen
= strlen (buf
);
6393 char *beg
, *end
, *str
, *dst
;
6397 end
= strchr (beg
, ',');
6399 end
= strchr (beg
, '\0');
6400 if ((size_t) (end
- beg
) < buflen
)
6403 len
+= end
- beg
+ 1;
6410 str
= (char *) alloca (len
);
6411 for (beg
= p
+ 1, dst
= str
;;)
6414 bool mem_p
, reg_p
, inout_p
;
6416 end
= strchr (beg
, ',');
6421 parse_output_constraint (&tem
, i
, 0, 0,
6422 &mem_p
, ®_p
, &inout_p
);
6427 memcpy (dst
, buf
, buflen
);
6436 memcpy (dst
, beg
, len
);
6445 input
= build_string (dst
- str
, str
);
6448 input
= build_string (strlen (buf
), buf
);
6451 input
= build_string (constraint_len
- 1, constraint
+ 1);
6455 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
6456 unshare_expr (TREE_VALUE (link
)));
6457 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
6461 link_next
= NULL_TREE
;
6462 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
6464 link_next
= TREE_CHAIN (link
);
6465 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6466 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
6467 oconstraints
, &allows_mem
, &allows_reg
);
6469 /* If we can't make copies, we can only accept memory. */
6470 tree intype
= TREE_TYPE (TREE_VALUE (link
));
6471 if (intype
!= error_mark_node
6472 && (TREE_ADDRESSABLE (intype
)
6473 || !COMPLETE_TYPE_P (intype
)
6474 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype
))))
6480 error ("impossible constraint in %<asm%>");
6481 error ("non-memory input %d must stay in memory", i
);
6486 /* If the operand is a memory input, it should be an lvalue. */
6487 if (!allows_reg
&& allows_mem
)
6489 tree inputv
= TREE_VALUE (link
);
6490 STRIP_NOPS (inputv
);
6491 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
6492 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
6493 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
6494 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
6495 || TREE_CODE (inputv
) == MODIFY_EXPR
)
6496 TREE_VALUE (link
) = error_mark_node
;
6497 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6498 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
6499 if (tret
!= GS_ERROR
)
6501 /* Unlike output operands, memory inputs are not guaranteed
6502 to be lvalues by the FE, and while the expressions are
6503 marked addressable there, if it is e.g. a statement
6504 expression, temporaries in it might not end up being
6505 addressable. They might be already used in the IL and thus
6506 it is too late to make them addressable now though. */
6507 tree x
= TREE_VALUE (link
);
6508 while (handled_component_p (x
))
6509 x
= TREE_OPERAND (x
, 0);
6510 if (TREE_CODE (x
) == MEM_REF
6511 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
6512 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
6514 || TREE_CODE (x
) == PARM_DECL
6515 || TREE_CODE (x
) == RESULT_DECL
)
6516 && !TREE_ADDRESSABLE (x
)
6517 && is_gimple_reg (x
))
6519 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
6521 "memory input %d is not directly addressable",
6523 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
6526 mark_addressable (TREE_VALUE (link
));
6527 if (tret
== GS_ERROR
)
6529 if (inputv
!= error_mark_node
)
6530 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
6531 "memory input %d is not directly addressable", i
);
6537 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6538 is_gimple_asm_val
, fb_rvalue
);
6539 if (tret
== GS_ERROR
)
6543 TREE_CHAIN (link
) = NULL_TREE
;
6544 vec_safe_push (inputs
, link
);
6547 link_next
= NULL_TREE
;
6548 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
6550 link_next
= TREE_CHAIN (link
);
6551 TREE_CHAIN (link
) = NULL_TREE
;
6552 vec_safe_push (clobbers
, link
);
6555 link_next
= NULL_TREE
;
6556 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
6558 link_next
= TREE_CHAIN (link
);
6559 TREE_CHAIN (link
) = NULL_TREE
;
6560 vec_safe_push (labels
, link
);
6563 /* Do not add ASMs with errors to the gimple IL stream. */
6564 if (ret
!= GS_ERROR
)
6566 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
6567 inputs
, outputs
, clobbers
, labels
);
6569 gimple_asm_set_volatile (stmt
, ASM_VOLATILE_P (expr
) || noutputs
== 0);
6570 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
6571 gimple_asm_set_inline (stmt
, ASM_INLINE_P (expr
));
6573 gimplify_seq_add_stmt (pre_p
, stmt
);
6579 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6580 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6581 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6582 return to this function.
6584 FIXME should we complexify the prequeue handling instead? Or use flags
6585 for all the cleanups and let the optimizer tighten them up? The current
6586 code seems pretty fragile; it will break on a cleanup within any
6587 non-conditional nesting. But any such nesting would be broken, anyway;
6588 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6589 and continues out of it. We can do that at the RTL level, though, so
6590 having an optimizer to tighten up try/finally regions would be a Good
6593 static enum gimplify_status
6594 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
6596 gimple_stmt_iterator iter
;
6597 gimple_seq body_sequence
= NULL
;
6599 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
6601 /* We only care about the number of conditions between the innermost
6602 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6603 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6604 int old_conds
= gimplify_ctxp
->conditions
;
6605 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
6606 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
6607 gimplify_ctxp
->conditions
= 0;
6608 gimplify_ctxp
->conditional_cleanups
= NULL
;
6609 gimplify_ctxp
->in_cleanup_point_expr
= true;
6611 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
6613 gimplify_ctxp
->conditions
= old_conds
;
6614 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
6615 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
6617 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
6619 gimple
*wce
= gsi_stmt (iter
);
6621 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
6623 if (gsi_one_before_end_p (iter
))
6625 /* Note that gsi_insert_seq_before and gsi_remove do not
6626 scan operands, unlike some other sequence mutators. */
6627 if (!gimple_wce_cleanup_eh_only (wce
))
6628 gsi_insert_seq_before_without_update (&iter
,
6629 gimple_wce_cleanup (wce
),
6631 gsi_remove (&iter
, true);
6638 enum gimple_try_flags kind
;
6640 if (gimple_wce_cleanup_eh_only (wce
))
6641 kind
= GIMPLE_TRY_CATCH
;
6643 kind
= GIMPLE_TRY_FINALLY
;
6644 seq
= gsi_split_seq_after (iter
);
6646 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
6647 /* Do not use gsi_replace here, as it may scan operands.
6648 We want to do a simple structural modification only. */
6649 gsi_set_stmt (&iter
, gtry
);
6650 iter
= gsi_start (gtry
->eval
);
6657 gimplify_seq_add_seq (pre_p
, body_sequence
);
6670 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6671 is the cleanup action required. EH_ONLY is true if the cleanup should
6672 only be executed if an exception is thrown, not on normal exit.
6673 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6674 only valid for clobbers. */
6677 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
,
6678 bool force_uncond
= false)
6681 gimple_seq cleanup_stmts
= NULL
;
6683 /* Errors can result in improperly nested cleanups. Which results in
6684 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6688 if (gimple_conditional_context ())
6690 /* If we're in a conditional context, this is more complex. We only
6691 want to run the cleanup if we actually ran the initialization that
6692 necessitates it, but we want to run it after the end of the
6693 conditional context. So we wrap the try/finally around the
6694 condition and use a flag to determine whether or not to actually
6695 run the destructor. Thus
6699 becomes (approximately)
6703 if (test) { A::A(temp); flag = 1; val = f(temp); }
6706 if (flag) A::~A(temp);
6712 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6713 wce
= gimple_build_wce (cleanup_stmts
);
6714 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6718 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
6719 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
6720 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
6722 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
6723 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6724 wce
= gimple_build_wce (cleanup_stmts
);
6726 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
6727 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6728 gimplify_seq_add_stmt (pre_p
, ftrue
);
6730 /* Because of this manipulation, and the EH edges that jump
6731 threading cannot redirect, the temporary (VAR) will appear
6732 to be used uninitialized. Don't warn. */
6733 suppress_warning (var
, OPT_Wuninitialized
);
6738 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6739 wce
= gimple_build_wce (cleanup_stmts
);
6740 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
6741 gimplify_seq_add_stmt (pre_p
, wce
);
6745 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6747 static enum gimplify_status
6748 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6750 tree targ
= *expr_p
;
6751 tree temp
= TARGET_EXPR_SLOT (targ
);
6752 tree init
= TARGET_EXPR_INITIAL (targ
);
6753 enum gimplify_status ret
;
6755 bool unpoison_empty_seq
= false;
6756 gimple_stmt_iterator unpoison_it
;
6760 tree cleanup
= NULL_TREE
;
6762 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6763 to the temps list. Handle also variable length TARGET_EXPRs. */
6764 if (!poly_int_tree_p (DECL_SIZE (temp
)))
6766 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
6767 gimplify_type_sizes (TREE_TYPE (temp
), pre_p
);
6768 gimplify_vla_decl (temp
, pre_p
);
6772 /* Save location where we need to place unpoisoning. It's possible
6773 that a variable will be converted to needs_to_live_in_memory. */
6774 unpoison_it
= gsi_last (*pre_p
);
6775 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
6777 gimple_add_tmp_var (temp
);
6780 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6781 expression is supposed to initialize the slot. */
6782 if (VOID_TYPE_P (TREE_TYPE (init
)))
6783 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6786 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
6788 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6790 ggc_free (init_expr
);
6792 if (ret
== GS_ERROR
)
6794 /* PR c++/28266 Make sure this is expanded only once. */
6795 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6799 gimplify_and_add (init
, pre_p
);
6801 /* If needed, push the cleanup for the temp. */
6802 if (TARGET_EXPR_CLEANUP (targ
))
6804 if (CLEANUP_EH_ONLY (targ
))
6805 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
6806 CLEANUP_EH_ONLY (targ
), pre_p
);
6808 cleanup
= TARGET_EXPR_CLEANUP (targ
);
6811 /* Add a clobber for the temporary going out of scope, like
6812 gimplify_bind_expr. */
6813 if (gimplify_ctxp
->in_cleanup_point_expr
6814 && needs_to_live_in_memory (temp
))
6816 if (flag_stack_reuse
== SR_ALL
)
6818 tree clobber
= build_clobber (TREE_TYPE (temp
));
6819 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
6820 gimple_push_cleanup (temp
, clobber
, false, pre_p
, true);
6822 if (asan_poisoned_variables
6823 && DECL_ALIGN (temp
) <= MAX_SUPPORTED_STACK_ALIGNMENT
6824 && !TREE_STATIC (temp
)
6825 && dbg_cnt (asan_use_after_scope
)
6826 && !gimplify_omp_ctxp
)
6828 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
6831 if (unpoison_empty_seq
)
6832 unpoison_it
= gsi_start (*pre_p
);
6834 asan_poison_variable (temp
, false, &unpoison_it
,
6835 unpoison_empty_seq
);
6836 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
6841 gimple_push_cleanup (temp
, cleanup
, false, pre_p
);
6843 /* Only expand this once. */
6844 TREE_OPERAND (targ
, 3) = init
;
6845 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6848 /* We should have expanded this before. */
6849 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
6855 /* Gimplification of expression trees. */
6857 /* Gimplify an expression which appears at statement context. The
6858 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6859 NULL, a new sequence is allocated.
6861 Return true if we actually added a statement to the queue. */
6864 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
6866 gimple_seq_node last
;
6868 last
= gimple_seq_last (*seq_p
);
6869 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
6870 return last
!= gimple_seq_last (*seq_p
);
6873 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6874 to CTX. If entries already exist, force them to be some flavor of private.
6875 If there is no enclosing parallel, do nothing. */
6878 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
6882 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
6887 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6890 if (n
->value
& GOVD_SHARED
)
6891 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
6892 else if (n
->value
& GOVD_MAP
)
6893 n
->value
|= GOVD_MAP_TO_ONLY
;
6897 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
6899 if (ctx
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
6900 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6902 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
6904 else if (ctx
->region_type
!= ORT_WORKSHARE
6905 && ctx
->region_type
!= ORT_TASKGROUP
6906 && ctx
->region_type
!= ORT_SIMD
6907 && ctx
->region_type
!= ORT_ACC
6908 && !(ctx
->region_type
& ORT_TARGET_DATA
))
6909 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6911 ctx
= ctx
->outer_context
;
6916 /* Similarly for each of the type sizes of TYPE. */
6919 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
6921 if (type
== NULL
|| type
== error_mark_node
)
6923 type
= TYPE_MAIN_VARIANT (type
);
6925 if (ctx
->privatized_types
->add (type
))
6928 switch (TREE_CODE (type
))
6934 case FIXED_POINT_TYPE
:
6935 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
6936 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
6940 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6941 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
6946 case QUAL_UNION_TYPE
:
6949 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
6950 if (TREE_CODE (field
) == FIELD_DECL
)
6952 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
6953 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
6959 case REFERENCE_TYPE
:
6960 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6967 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
6968 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
6969 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
6972 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6975 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
6978 unsigned int nflags
;
6981 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
6984 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6985 there are constructors involved somewhere. Exception is a shared clause,
6986 there is nothing privatized in that case. */
6987 if ((flags
& GOVD_SHARED
) == 0
6988 && (TREE_ADDRESSABLE (TREE_TYPE (decl
))
6989 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
))))
6992 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6993 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
6995 /* We shouldn't be re-adding the decl with the same data
6997 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
6998 nflags
= n
->value
| flags
;
6999 /* The only combination of data sharing classes we should see is
7000 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7001 reduction variables to be used in data sharing clauses. */
7002 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
7003 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
7004 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
7005 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
7010 /* When adding a variable-sized variable, we have to handle all sorts
7011 of additional bits of data: the pointer replacement variable, and
7012 the parameters of the type. */
7013 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7015 /* Add the pointer replacement variable as PRIVATE if the variable
7016 replacement is private, else FIRSTPRIVATE since we'll need the
7017 address of the original variable either for SHARED, or for the
7018 copy into or out of the context. */
7019 if (!(flags
& GOVD_LOCAL
) && ctx
->region_type
!= ORT_TASKGROUP
)
7021 if (flags
& GOVD_MAP
)
7022 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
7023 else if (flags
& GOVD_PRIVATE
)
7024 nflags
= GOVD_PRIVATE
;
7025 else if (((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7026 && (flags
& GOVD_FIRSTPRIVATE
))
7027 || (ctx
->region_type
== ORT_TARGET_DATA
7028 && (flags
& GOVD_DATA_SHARE_CLASS
) == 0))
7029 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
7031 nflags
= GOVD_FIRSTPRIVATE
;
7032 nflags
|= flags
& GOVD_SEEN
;
7033 t
= DECL_VALUE_EXPR (decl
);
7034 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7035 t
= TREE_OPERAND (t
, 0);
7036 gcc_assert (DECL_P (t
));
7037 omp_add_variable (ctx
, t
, nflags
);
7040 /* Add all of the variable and type parameters (which should have
7041 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7042 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
7043 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
7044 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7046 /* The variable-sized variable itself is never SHARED, only some form
7047 of PRIVATE. The sharing would take place via the pointer variable
7048 which we remapped above. */
7049 if (flags
& GOVD_SHARED
)
7050 flags
= GOVD_SHARED
| GOVD_DEBUG_PRIVATE
7051 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
7053 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7054 alloca statement we generate for the variable, so make sure it
7055 is available. This isn't automatically needed for the SHARED
7056 case, since we won't be allocating local storage then.
7057 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7058 in this case omp_notice_variable will be called later
7059 on when it is gimplified. */
7060 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
7061 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
7062 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
7064 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
7065 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7067 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7069 /* Similar to the direct variable sized case above, we'll need the
7070 size of references being privatized. */
7071 if ((flags
& GOVD_SHARED
) == 0)
7073 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7074 if (t
&& DECL_P (t
))
7075 omp_notice_variable (ctx
, t
, true);
7082 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
7084 /* For reductions clauses in OpenACC loop directives, by default create a
7085 copy clause on the enclosing parallel construct for carrying back the
7087 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
7089 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
7092 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
7095 /* Ignore local variables and explicitly declared clauses. */
7096 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
7098 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
7100 /* According to the OpenACC spec, such a reduction variable
7101 should already have a copy map on a kernels construct,
7102 verify that here. */
7103 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
7104 && (n
->value
& GOVD_MAP
));
7106 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7108 /* Remove firstprivate and make it a copy map. */
7109 n
->value
&= ~GOVD_FIRSTPRIVATE
;
7110 n
->value
|= GOVD_MAP
;
7113 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7115 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
7116 GOVD_MAP
| GOVD_SEEN
);
7119 outer_ctx
= outer_ctx
->outer_context
;
7124 /* Notice a threadprivate variable DECL used in OMP context CTX.
7125 This just prints out diagnostics about threadprivate variable uses
7126 in untied tasks. If DECL2 is non-NULL, prevent this warning
7127 on that variable. */
7130 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
7134 struct gimplify_omp_ctx
*octx
;
7136 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
7137 if ((octx
->region_type
& ORT_TARGET
) != 0
7138 || octx
->order_concurrent
)
7140 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
7143 if (octx
->order_concurrent
)
7145 error ("threadprivate variable %qE used in a region with"
7146 " %<order(concurrent)%> clause", DECL_NAME (decl
));
7147 inform (octx
->location
, "enclosing region");
7151 error ("threadprivate variable %qE used in target region",
7153 inform (octx
->location
, "enclosing target region");
7155 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
7158 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
7161 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
7163 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7166 error ("threadprivate variable %qE used in untied task",
7168 inform (ctx
->location
, "enclosing task");
7169 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
7172 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
7176 /* Return true if global var DECL is device resident. */
7179 device_resident_p (tree decl
)
7181 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
7186 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
7188 tree c
= TREE_VALUE (t
);
7189 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
7196 /* Return true if DECL has an ACC DECLARE attribute. */
7199 is_oacc_declared (tree decl
)
7201 tree t
= TREE_CODE (decl
) == MEM_REF
? TREE_OPERAND (decl
, 0) : decl
;
7202 tree declared
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t
));
7203 return declared
!= NULL_TREE
;
7206 /* Determine outer default flags for DECL mentioned in an OMP region
7207 but not declared in an enclosing clause.
7209 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7210 remapped firstprivate instead of shared. To some extent this is
7211 addressed in omp_firstprivatize_type_sizes, but not
7215 omp_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
,
7216 bool in_code
, unsigned flags
)
7218 enum omp_clause_default_kind default_kind
= ctx
->default_kind
;
7219 enum omp_clause_default_kind kind
;
7221 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
7222 if (ctx
->region_type
& ORT_TASK
)
7224 tree detach_clause
= omp_find_clause (ctx
->clauses
, OMP_CLAUSE_DETACH
);
7226 /* The event-handle specified by a detach clause should always be firstprivate,
7227 regardless of the current default. */
7228 if (detach_clause
&& OMP_CLAUSE_DECL (detach_clause
) == decl
)
7229 kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
7231 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
7232 default_kind
= kind
;
7233 else if (VAR_P (decl
) && TREE_STATIC (decl
) && DECL_IN_CONSTANT_POOL (decl
))
7234 default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
7236 switch (default_kind
)
7238 case OMP_CLAUSE_DEFAULT_NONE
:
7242 if (ctx
->region_type
& ORT_PARALLEL
)
7244 else if ((ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
)
7246 else if (ctx
->region_type
& ORT_TASK
)
7248 else if (ctx
->region_type
& ORT_TEAMS
)
7253 error ("%qE not specified in enclosing %qs",
7254 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rtype
);
7255 inform (ctx
->location
, "enclosing %qs", rtype
);
7258 case OMP_CLAUSE_DEFAULT_SHARED
:
7259 flags
|= GOVD_SHARED
;
7261 case OMP_CLAUSE_DEFAULT_PRIVATE
:
7262 flags
|= GOVD_PRIVATE
;
7264 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
7265 flags
|= GOVD_FIRSTPRIVATE
;
7267 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
7268 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7269 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
7270 if (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
)
7272 omp_notice_variable (octx
, decl
, in_code
);
7273 for (; octx
; octx
= octx
->outer_context
)
7277 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
7278 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0
7279 && (n2
== NULL
|| (n2
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7281 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
7283 flags
|= GOVD_FIRSTPRIVATE
;
7286 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
7288 flags
|= GOVD_SHARED
;
7294 if (TREE_CODE (decl
) == PARM_DECL
7295 || (!is_global_var (decl
)
7296 && DECL_CONTEXT (decl
) == current_function_decl
))
7297 flags
|= GOVD_FIRSTPRIVATE
;
7299 flags
|= GOVD_SHARED
;
7311 /* Determine outer default flags for DECL mentioned in an OACC region
7312 but not declared in an enclosing clause. */
7315 oacc_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned flags
)
7318 bool on_device
= false;
7319 bool is_private
= false;
7320 bool declared
= is_oacc_declared (decl
);
7321 tree type
= TREE_TYPE (decl
);
7323 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7324 type
= TREE_TYPE (type
);
7326 /* For Fortran COMMON blocks, only used variables in those blocks are
7327 transfered and remapped. The block itself will have a private clause to
7328 avoid transfering the data twice.
7329 The hook evaluates to false by default. For a variable in Fortran's COMMON
7330 or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
7331 the variables in such a COMMON/EQUIVALENCE block shall be privatized not
7332 the whole block. For C++ and Fortran, it can also be true under certain
7333 other conditions, if DECL_HAS_VALUE_EXPR. */
7334 if (RECORD_OR_UNION_TYPE_P (type
))
7335 is_private
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7337 if ((ctx
->region_type
& (ORT_ACC_PARALLEL
| ORT_ACC_KERNELS
)) != 0
7338 && is_global_var (decl
)
7339 && device_resident_p (decl
)
7343 flags
|= GOVD_MAP_TO_ONLY
;
7346 switch (ctx
->region_type
)
7348 case ORT_ACC_KERNELS
:
7352 flags
|= GOVD_FIRSTPRIVATE
;
7353 else if (AGGREGATE_TYPE_P (type
))
7355 /* Aggregates default to 'present_or_copy', or 'present'. */
7356 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7359 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7362 /* Scalars default to 'copy'. */
7363 flags
|= GOVD_MAP
| GOVD_MAP_FORCE
;
7367 case ORT_ACC_PARALLEL
:
7368 case ORT_ACC_SERIAL
:
7369 rkind
= ctx
->region_type
== ORT_ACC_PARALLEL
? "parallel" : "serial";
7372 flags
|= GOVD_FIRSTPRIVATE
;
7373 else if (on_device
|| declared
)
7375 else if (AGGREGATE_TYPE_P (type
))
7377 /* Aggregates default to 'present_or_copy', or 'present'. */
7378 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7381 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7384 /* Scalars default to 'firstprivate'. */
7385 flags
|= GOVD_FIRSTPRIVATE
;
7393 if (DECL_ARTIFICIAL (decl
))
7394 ; /* We can get compiler-generated decls, and should not complain
7396 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_NONE
)
7398 error ("%qE not specified in enclosing OpenACC %qs construct",
7399 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rkind
);
7400 inform (ctx
->location
, "enclosing OpenACC %qs construct", rkind
);
7402 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_PRESENT
)
7403 ; /* Handled above. */
7405 gcc_checking_assert (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
);
7410 /* Record the fact that DECL was used within the OMP context CTX.
7411 IN_CODE is true when real code uses DECL, and false when we should
7412 merely emit default(none) errors. Return true if DECL is going to
7413 be remapped and thus DECL shouldn't be gimplified into its
7414 DECL_VALUE_EXPR (if any). */
7417 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
7420 unsigned flags
= in_code
? GOVD_SEEN
: 0;
7421 bool ret
= false, shared
;
7423 if (error_operand_p (decl
))
7426 if (ctx
->region_type
== ORT_NONE
)
7427 return lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7429 if (is_global_var (decl
))
7431 /* Threadprivate variables are predetermined. */
7432 if (DECL_THREAD_LOCAL_P (decl
))
7433 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
7435 if (DECL_HAS_VALUE_EXPR_P (decl
))
7437 if (ctx
->region_type
& ORT_ACC
)
7438 /* For OpenACC, defer expansion of value to avoid transfering
7439 privatized common block data instead of im-/explicitly transfered
7440 variables which are in common blocks. */
7444 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
7446 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
7447 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
7451 if (gimplify_omp_ctxp
->outer_context
== NULL
7453 && oacc_get_fn_attrib (current_function_decl
))
7455 location_t loc
= DECL_SOURCE_LOCATION (decl
);
7457 if (lookup_attribute ("omp declare target link",
7458 DECL_ATTRIBUTES (decl
)))
7461 "%qE with %<link%> clause used in %<routine%> function",
7465 else if (!lookup_attribute ("omp declare target",
7466 DECL_ATTRIBUTES (decl
)))
7469 "%qE requires a %<declare%> directive for use "
7470 "in a %<routine%> function", DECL_NAME (decl
));
7476 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7477 if ((ctx
->region_type
& ORT_TARGET
) != 0)
7479 if (ctx
->region_type
& ORT_ACC
)
7480 /* For OpenACC, as remarked above, defer expansion. */
7485 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7488 unsigned nflags
= flags
;
7489 if ((ctx
->region_type
& ORT_ACC
) == 0)
7491 bool is_declare_target
= false;
7492 if (is_global_var (decl
)
7493 && varpool_node::get_create (decl
)->offloadable
)
7495 struct gimplify_omp_ctx
*octx
;
7496 for (octx
= ctx
->outer_context
;
7497 octx
; octx
= octx
->outer_context
)
7499 n
= splay_tree_lookup (octx
->variables
,
7500 (splay_tree_key
)decl
);
7502 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
7503 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7506 is_declare_target
= octx
== NULL
;
7508 if (!is_declare_target
)
7511 enum omp_clause_defaultmap_kind kind
;
7512 if (lang_hooks
.decls
.omp_allocatable_p (decl
))
7513 gdmk
= GDMK_ALLOCATABLE
;
7514 else if (lang_hooks
.decls
.omp_scalar_target_p (decl
))
7515 gdmk
= GDMK_SCALAR_TARGET
;
7516 else if (lang_hooks
.decls
.omp_scalar_p (decl
, false))
7518 else if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
7519 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
7520 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
7522 gdmk
= GDMK_POINTER
;
7524 gdmk
= GDMK_AGGREGATE
;
7525 kind
= lang_hooks
.decls
.omp_predetermined_mapping (decl
);
7526 if (kind
!= OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
)
7528 if (kind
== OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
)
7529 nflags
|= GOVD_FIRSTPRIVATE
;
7530 else if (kind
== OMP_CLAUSE_DEFAULTMAP_TO
)
7531 nflags
|= GOVD_MAP
| GOVD_MAP_TO_ONLY
;
7535 else if (ctx
->defaultmap
[gdmk
] == 0)
7537 tree d
= lang_hooks
.decls
.omp_report_decl (decl
);
7538 error ("%qE not specified in enclosing %<target%>",
7540 inform (ctx
->location
, "enclosing %<target%>");
7542 else if (ctx
->defaultmap
[gdmk
]
7543 & (GOVD_MAP_0LEN_ARRAY
| GOVD_FIRSTPRIVATE
))
7544 nflags
|= ctx
->defaultmap
[gdmk
];
7547 gcc_assert (ctx
->defaultmap
[gdmk
] & GOVD_MAP
);
7548 nflags
|= ctx
->defaultmap
[gdmk
] & ~GOVD_MAP
;
7553 struct gimplify_omp_ctx
*octx
= ctx
->outer_context
;
7554 if ((ctx
->region_type
& ORT_ACC
) && octx
)
7556 /* Look in outer OpenACC contexts, to see if there's a
7557 data attribute for this variable. */
7558 omp_notice_variable (octx
, decl
, in_code
);
7560 for (; octx
; octx
= octx
->outer_context
)
7562 if (!(octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)))
7565 = splay_tree_lookup (octx
->variables
,
7566 (splay_tree_key
) decl
);
7569 if (octx
->region_type
== ORT_ACC_HOST_DATA
)
7570 error ("variable %qE declared in enclosing "
7571 "%<host_data%> region", DECL_NAME (decl
));
7573 if (octx
->region_type
== ORT_ACC_DATA
7574 && (n2
->value
& GOVD_MAP_0LEN_ARRAY
))
7575 nflags
|= GOVD_MAP_0LEN_ARRAY
;
7581 if ((nflags
& ~(GOVD_MAP_TO_ONLY
| GOVD_MAP_FROM_ONLY
7582 | GOVD_MAP_ALLOC_ONLY
)) == flags
)
7584 tree type
= TREE_TYPE (decl
);
7586 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
7587 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7588 type
= TREE_TYPE (type
);
7589 if (!lang_hooks
.types
.omp_mappable_type (type
))
7591 error ("%qD referenced in target region does not have "
7592 "a mappable type", decl
);
7593 nflags
|= GOVD_MAP
| GOVD_EXPLICIT
;
7597 if ((ctx
->region_type
& ORT_ACC
) != 0)
7598 nflags
= oacc_default_clause (ctx
, decl
, flags
);
7604 omp_add_variable (ctx
, decl
, nflags
);
7608 /* If nothing changed, there's nothing left to do. */
7609 if ((n
->value
& flags
) == flags
)
7619 if (ctx
->region_type
== ORT_WORKSHARE
7620 || ctx
->region_type
== ORT_TASKGROUP
7621 || ctx
->region_type
== ORT_SIMD
7622 || ctx
->region_type
== ORT_ACC
7623 || (ctx
->region_type
& ORT_TARGET_DATA
) != 0)
7626 flags
= omp_default_clause (ctx
, decl
, in_code
, flags
);
7628 if ((flags
& GOVD_PRIVATE
)
7629 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
7630 flags
|= GOVD_PRIVATE_OUTER_REF
;
7632 omp_add_variable (ctx
, decl
, flags
);
7634 shared
= (flags
& GOVD_SHARED
) != 0;
7635 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7639 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
7640 lb, b or incr expressions, those shouldn't be turned into simd arrays. */
7641 if (ctx
->region_type
== ORT_SIMD
7642 && ctx
->in_for_exprs
7643 && ((n
->value
& (GOVD_PRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
))
7645 flags
&= ~GOVD_SEEN
;
7647 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
7648 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
7649 && DECL_SIZE (decl
))
7651 if (TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7654 tree t
= DECL_VALUE_EXPR (decl
);
7655 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7656 t
= TREE_OPERAND (t
, 0);
7657 gcc_assert (DECL_P (t
));
7658 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7659 n2
->value
|= GOVD_SEEN
;
7661 else if (lang_hooks
.decls
.omp_privatize_by_reference (decl
)
7662 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
7663 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
7667 tree t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7668 gcc_assert (DECL_P (t
));
7669 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7671 omp_notice_variable (ctx
, t
, true);
7675 if (ctx
->region_type
& ORT_ACC
)
7676 /* For OpenACC, as remarked above, defer expansion. */
7679 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
7680 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7682 /* If nothing changed, there's nothing left to do. */
7683 if ((n
->value
& flags
) == flags
)
7689 /* If the variable is private in the current context, then we don't
7690 need to propagate anything to an outer context. */
7691 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
7693 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7694 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7696 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
7697 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7698 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7700 if (ctx
->outer_context
7701 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
7706 /* Verify that DECL is private within CTX. If there's specific information
7707 to the contrary in the innermost scope, generate an error. */
7710 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
7714 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7717 if (n
->value
& GOVD_SHARED
)
7719 if (ctx
== gimplify_omp_ctxp
)
7722 error ("iteration variable %qE is predetermined linear",
7725 error ("iteration variable %qE should be private",
7727 n
->value
= GOVD_PRIVATE
;
7733 else if ((n
->value
& GOVD_EXPLICIT
) != 0
7734 && (ctx
== gimplify_omp_ctxp
7735 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7736 && gimplify_omp_ctxp
->outer_context
== ctx
)))
7738 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
7739 error ("iteration variable %qE should not be firstprivate",
7741 else if ((n
->value
& GOVD_REDUCTION
) != 0)
7742 error ("iteration variable %qE should not be reduction",
7744 else if (simd
!= 1 && (n
->value
& GOVD_LINEAR
) != 0)
7745 error ("iteration variable %qE should not be linear",
7748 return (ctx
== gimplify_omp_ctxp
7749 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7750 && gimplify_omp_ctxp
->outer_context
== ctx
));
7753 if (ctx
->region_type
!= ORT_WORKSHARE
7754 && ctx
->region_type
!= ORT_TASKGROUP
7755 && ctx
->region_type
!= ORT_SIMD
7756 && ctx
->region_type
!= ORT_ACC
)
7758 else if (ctx
->outer_context
)
7759 return omp_is_private (ctx
->outer_context
, decl
, simd
);
7763 /* Return true if DECL is private within a parallel region
7764 that binds to the current construct's context or in parallel
7765 region's REDUCTION clause. */
7768 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
7774 ctx
= ctx
->outer_context
;
7777 if (is_global_var (decl
))
7780 /* References might be private, but might be shared too,
7781 when checking for copyprivate, assume they might be
7782 private, otherwise assume they might be shared. */
7786 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7789 /* Treat C++ privatized non-static data members outside
7790 of the privatization the same. */
7791 if (omp_member_access_dummy_var (decl
))
7797 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
7799 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7800 && (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7805 if ((n
->value
& GOVD_LOCAL
) != 0
7806 && omp_member_access_dummy_var (decl
))
7808 return (n
->value
& GOVD_SHARED
) == 0;
7811 while (ctx
->region_type
== ORT_WORKSHARE
7812 || ctx
->region_type
== ORT_TASKGROUP
7813 || ctx
->region_type
== ORT_SIMD
7814 || ctx
->region_type
== ORT_ACC
);
7818 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7821 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
7825 /* If this node has been visited, unmark it and keep looking. */
7826 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
7829 if (IS_TYPE_OR_DECL_P (t
))
7835 /* Gimplify the affinity clause but effectively ignore it.
7838 if ((step > 1) ? var <= end : var > end)
7839 locatator_var_expr; */
7842 gimplify_omp_affinity (tree
*list_p
, gimple_seq
*pre_p
)
7844 tree last_iter
= NULL_TREE
;
7845 tree last_bind
= NULL_TREE
;
7846 tree label
= NULL_TREE
;
7847 tree
*last_body
= NULL
;
7848 for (tree c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7849 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_AFFINITY
)
7851 tree t
= OMP_CLAUSE_DECL (c
);
7852 if (TREE_CODE (t
) == TREE_LIST
7854 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
7856 if (TREE_VALUE (t
) == null_pointer_node
)
7858 if (TREE_PURPOSE (t
) != last_iter
)
7862 append_to_statement_list (label
, last_body
);
7863 gimplify_and_add (last_bind
, pre_p
);
7864 last_bind
= NULL_TREE
;
7866 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
7868 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
7869 is_gimple_val
, fb_rvalue
) == GS_ERROR
7870 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
7871 is_gimple_val
, fb_rvalue
) == GS_ERROR
7872 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
7873 is_gimple_val
, fb_rvalue
) == GS_ERROR
7874 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
7875 is_gimple_val
, fb_rvalue
)
7879 last_iter
= TREE_PURPOSE (t
);
7880 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
7881 last_bind
= build3 (BIND_EXPR
, void_type_node
, BLOCK_VARS (block
),
7883 last_body
= &BIND_EXPR_BODY (last_bind
);
7884 tree cond
= NULL_TREE
;
7885 location_t loc
= OMP_CLAUSE_LOCATION (c
);
7886 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
7888 tree var
= TREE_VEC_ELT (it
, 0);
7889 tree begin
= TREE_VEC_ELT (it
, 1);
7890 tree end
= TREE_VEC_ELT (it
, 2);
7891 tree step
= TREE_VEC_ELT (it
, 3);
7892 loc
= DECL_SOURCE_LOCATION (var
);
7893 tree tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
7895 append_to_statement_list_force (tem
, last_body
);
7897 tree cond1
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
7898 step
, build_zero_cst (TREE_TYPE (step
)));
7899 tree cond2
= fold_build2_loc (loc
, LE_EXPR
, boolean_type_node
,
7901 tree cond3
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
7903 cond1
= fold_build3_loc (loc
, COND_EXPR
, boolean_type_node
,
7904 cond1
, cond2
, cond3
);
7906 cond
= fold_build2_loc (loc
, TRUTH_AND_EXPR
,
7907 boolean_type_node
, cond
, cond1
);
7911 tree cont_label
= create_artificial_label (loc
);
7912 label
= build1 (LABEL_EXPR
, void_type_node
, cont_label
);
7913 tree tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, cond
,
7915 build_and_jump (&cont_label
));
7916 append_to_statement_list_force (tem
, last_body
);
7918 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
7920 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
), 0),
7922 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
7924 if (error_operand_p (TREE_VALUE (t
)))
7926 append_to_statement_list_force (TREE_VALUE (t
), last_body
);
7927 TREE_VALUE (t
) = null_pointer_node
;
7933 append_to_statement_list (label
, last_body
);
7934 gimplify_and_add (last_bind
, pre_p
);
7935 last_bind
= NULL_TREE
;
7937 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
7939 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
7940 NULL
, is_gimple_val
, fb_rvalue
);
7941 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
7943 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
7945 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
7946 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
7948 gimplify_and_add (OMP_CLAUSE_DECL (c
), pre_p
);
7953 append_to_statement_list (label
, last_body
);
7954 gimplify_and_add (last_bind
, pre_p
);
7959 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7960 lower all the depend clauses by populating corresponding depend
7961 array. Returns 0 if there are no such depend clauses, or
7962 2 if all depend clauses should be removed, 1 otherwise. */
7965 gimplify_omp_depend (tree
*list_p
, gimple_seq
*pre_p
)
7969 size_t n
[4] = { 0, 0, 0, 0 };
7971 tree counts
[4] = { NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
};
7972 tree last_iter
= NULL_TREE
, last_count
= NULL_TREE
;
7974 location_t first_loc
= UNKNOWN_LOCATION
;
7976 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7977 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
7979 switch (OMP_CLAUSE_DEPEND_KIND (c
))
7981 case OMP_CLAUSE_DEPEND_IN
:
7984 case OMP_CLAUSE_DEPEND_OUT
:
7985 case OMP_CLAUSE_DEPEND_INOUT
:
7988 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
7991 case OMP_CLAUSE_DEPEND_DEPOBJ
:
7994 case OMP_CLAUSE_DEPEND_SOURCE
:
7995 case OMP_CLAUSE_DEPEND_SINK
:
8000 tree t
= OMP_CLAUSE_DECL (c
);
8001 if (first_loc
== UNKNOWN_LOCATION
)
8002 first_loc
= OMP_CLAUSE_LOCATION (c
);
8003 if (TREE_CODE (t
) == TREE_LIST
8005 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8007 if (TREE_PURPOSE (t
) != last_iter
)
8009 tree tcnt
= size_one_node
;
8010 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8012 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
8013 is_gimple_val
, fb_rvalue
) == GS_ERROR
8014 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
8015 is_gimple_val
, fb_rvalue
) == GS_ERROR
8016 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
8017 is_gimple_val
, fb_rvalue
) == GS_ERROR
8018 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
8019 is_gimple_val
, fb_rvalue
)
8022 tree var
= TREE_VEC_ELT (it
, 0);
8023 tree begin
= TREE_VEC_ELT (it
, 1);
8024 tree end
= TREE_VEC_ELT (it
, 2);
8025 tree step
= TREE_VEC_ELT (it
, 3);
8026 tree orig_step
= TREE_VEC_ELT (it
, 4);
8027 tree type
= TREE_TYPE (var
);
8028 tree stype
= TREE_TYPE (step
);
8029 location_t loc
= DECL_SOURCE_LOCATION (var
);
8031 /* Compute count for this iterator as
8033 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8034 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8035 and compute product of those for the entire depend
8037 if (POINTER_TYPE_P (type
))
8038 endmbegin
= fold_build2_loc (loc
, POINTER_DIFF_EXPR
,
8041 endmbegin
= fold_build2_loc (loc
, MINUS_EXPR
, type
,
8043 tree stepm1
= fold_build2_loc (loc
, MINUS_EXPR
, stype
,
8045 build_int_cst (stype
, 1));
8046 tree stepp1
= fold_build2_loc (loc
, PLUS_EXPR
, stype
, step
,
8047 build_int_cst (stype
, 1));
8048 tree pos
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
8049 unshare_expr (endmbegin
),
8051 pos
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
8053 tree neg
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
8055 if (TYPE_UNSIGNED (stype
))
8057 neg
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, neg
);
8058 step
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, step
);
8060 neg
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
8063 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8066 pos
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, pos
,
8067 build_int_cst (stype
, 0));
8068 cond
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
,
8070 neg
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, neg
,
8071 build_int_cst (stype
, 0));
8072 tree osteptype
= TREE_TYPE (orig_step
);
8073 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8075 build_int_cst (osteptype
, 0));
8076 tree cnt
= fold_build3_loc (loc
, COND_EXPR
, stype
,
8078 cnt
= fold_convert_loc (loc
, sizetype
, cnt
);
8079 if (gimplify_expr (&cnt
, pre_p
, NULL
, is_gimple_val
,
8080 fb_rvalue
) == GS_ERROR
)
8082 tcnt
= size_binop_loc (loc
, MULT_EXPR
, tcnt
, cnt
);
8084 if (gimplify_expr (&tcnt
, pre_p
, NULL
, is_gimple_val
,
8085 fb_rvalue
) == GS_ERROR
)
8087 last_iter
= TREE_PURPOSE (t
);
8090 if (counts
[i
] == NULL_TREE
)
8091 counts
[i
] = last_count
;
8093 counts
[i
] = size_binop_loc (OMP_CLAUSE_LOCATION (c
),
8094 PLUS_EXPR
, counts
[i
], last_count
);
8099 for (i
= 0; i
< 4; i
++)
8105 tree total
= size_zero_node
;
8106 for (i
= 0; i
< 4; i
++)
8108 unused
[i
] = counts
[i
] == NULL_TREE
&& n
[i
] == 0;
8109 if (counts
[i
] == NULL_TREE
)
8110 counts
[i
] = size_zero_node
;
8112 counts
[i
] = size_binop (PLUS_EXPR
, counts
[i
], size_int (n
[i
]));
8113 if (gimplify_expr (&counts
[i
], pre_p
, NULL
, is_gimple_val
,
8114 fb_rvalue
) == GS_ERROR
)
8116 total
= size_binop (PLUS_EXPR
, total
, counts
[i
]);
8119 if (gimplify_expr (&total
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
8122 bool is_old
= unused
[1] && unused
[3];
8123 tree totalpx
= size_binop (PLUS_EXPR
, unshare_expr (total
),
8124 size_int (is_old
? 1 : 4));
8125 tree type
= build_array_type (ptr_type_node
, build_index_type (totalpx
));
8126 tree array
= create_tmp_var_raw (type
);
8127 TREE_ADDRESSABLE (array
) = 1;
8128 if (!poly_int_tree_p (totalpx
))
8130 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array
)))
8131 gimplify_type_sizes (TREE_TYPE (array
), pre_p
);
8132 if (gimplify_omp_ctxp
)
8134 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8136 && (ctx
->region_type
== ORT_WORKSHARE
8137 || ctx
->region_type
== ORT_TASKGROUP
8138 || ctx
->region_type
== ORT_SIMD
8139 || ctx
->region_type
== ORT_ACC
))
8140 ctx
= ctx
->outer_context
;
8142 omp_add_variable (ctx
, array
, GOVD_LOCAL
| GOVD_SEEN
);
8144 gimplify_vla_decl (array
, pre_p
);
8147 gimple_add_tmp_var (array
);
8148 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
8153 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8154 build_int_cst (ptr_type_node
, 0));
8155 gimplify_and_add (tem
, pre_p
);
8156 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
8159 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8160 fold_convert (ptr_type_node
, total
));
8161 gimplify_and_add (tem
, pre_p
);
8162 for (i
= 1; i
< (is_old
? 2 : 4); i
++)
8164 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (i
+ !is_old
),
8165 NULL_TREE
, NULL_TREE
);
8166 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, counts
[i
- 1]);
8167 gimplify_and_add (tem
, pre_p
);
8174 for (i
= 0; i
< 4; i
++)
8176 if (i
&& (i
>= j
|| unused
[i
- 1]))
8178 cnts
[i
] = cnts
[i
- 1];
8181 cnts
[i
] = create_tmp_var (sizetype
);
8183 g
= gimple_build_assign (cnts
[i
], size_int (is_old
? 2 : 5));
8188 t
= size_binop (PLUS_EXPR
, counts
[0], size_int (2));
8190 t
= size_binop (PLUS_EXPR
, cnts
[i
- 1], counts
[i
- 1]);
8191 if (gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
8194 g
= gimple_build_assign (cnts
[i
], t
);
8196 gimple_seq_add_stmt (pre_p
, g
);
8199 last_iter
= NULL_TREE
;
8200 tree last_bind
= NULL_TREE
;
8201 tree
*last_body
= NULL
;
8202 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8203 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
8205 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8207 case OMP_CLAUSE_DEPEND_IN
:
8210 case OMP_CLAUSE_DEPEND_OUT
:
8211 case OMP_CLAUSE_DEPEND_INOUT
:
8214 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8217 case OMP_CLAUSE_DEPEND_DEPOBJ
:
8220 case OMP_CLAUSE_DEPEND_SOURCE
:
8221 case OMP_CLAUSE_DEPEND_SINK
:
8226 tree t
= OMP_CLAUSE_DECL (c
);
8227 if (TREE_CODE (t
) == TREE_LIST
8229 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8231 if (TREE_PURPOSE (t
) != last_iter
)
8234 gimplify_and_add (last_bind
, pre_p
);
8235 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
8236 last_bind
= build3 (BIND_EXPR
, void_type_node
,
8237 BLOCK_VARS (block
), NULL
, block
);
8238 TREE_SIDE_EFFECTS (last_bind
) = 1;
8239 SET_EXPR_LOCATION (last_bind
, OMP_CLAUSE_LOCATION (c
));
8240 tree
*p
= &BIND_EXPR_BODY (last_bind
);
8241 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8243 tree var
= TREE_VEC_ELT (it
, 0);
8244 tree begin
= TREE_VEC_ELT (it
, 1);
8245 tree end
= TREE_VEC_ELT (it
, 2);
8246 tree step
= TREE_VEC_ELT (it
, 3);
8247 tree orig_step
= TREE_VEC_ELT (it
, 4);
8248 tree type
= TREE_TYPE (var
);
8249 location_t loc
= DECL_SOURCE_LOCATION (var
);
8257 if (orig_step > 0) {
8258 if (var < end) goto beg_label;
8260 if (var > end) goto beg_label;
8262 for each iterator, with inner iterators added to
8264 tree beg_label
= create_artificial_label (loc
);
8265 tree cond_label
= NULL_TREE
;
8266 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8268 append_to_statement_list_force (tem
, p
);
8269 tem
= build_and_jump (&cond_label
);
8270 append_to_statement_list_force (tem
, p
);
8271 tem
= build1 (LABEL_EXPR
, void_type_node
, beg_label
);
8272 append_to_statement_list (tem
, p
);
8273 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
8274 NULL_TREE
, NULL_TREE
);
8275 TREE_SIDE_EFFECTS (bind
) = 1;
8276 SET_EXPR_LOCATION (bind
, loc
);
8277 append_to_statement_list_force (bind
, p
);
8278 if (POINTER_TYPE_P (type
))
8279 tem
= build2_loc (loc
, POINTER_PLUS_EXPR
, type
,
8280 var
, fold_convert_loc (loc
, sizetype
,
8283 tem
= build2_loc (loc
, PLUS_EXPR
, type
, var
, step
);
8284 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8286 append_to_statement_list_force (tem
, p
);
8287 tem
= build1 (LABEL_EXPR
, void_type_node
, cond_label
);
8288 append_to_statement_list (tem
, p
);
8289 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8293 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8294 cond
, build_and_jump (&beg_label
),
8296 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8299 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8300 cond
, build_and_jump (&beg_label
),
8302 tree osteptype
= TREE_TYPE (orig_step
);
8303 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8305 build_int_cst (osteptype
, 0));
8306 tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8308 append_to_statement_list_force (tem
, p
);
8309 p
= &BIND_EXPR_BODY (bind
);
8313 last_iter
= TREE_PURPOSE (t
);
8314 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
8316 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
),
8318 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
8320 if (error_operand_p (TREE_VALUE (t
)))
8322 TREE_VALUE (t
) = build_fold_addr_expr (TREE_VALUE (t
));
8323 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8324 NULL_TREE
, NULL_TREE
);
8325 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8326 void_type_node
, r
, TREE_VALUE (t
));
8327 append_to_statement_list_force (tem
, last_body
);
8328 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8329 void_type_node
, cnts
[i
],
8330 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)));
8331 append_to_statement_list_force (tem
, last_body
);
8332 TREE_VALUE (t
) = null_pointer_node
;
8338 gimplify_and_add (last_bind
, pre_p
);
8339 last_bind
= NULL_TREE
;
8341 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8343 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8344 NULL
, is_gimple_val
, fb_rvalue
);
8345 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8347 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8349 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8350 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8351 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8353 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8354 NULL_TREE
, NULL_TREE
);
8355 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, OMP_CLAUSE_DECL (c
));
8356 gimplify_and_add (tem
, pre_p
);
8357 g
= gimple_build_assign (cnts
[i
], size_binop (PLUS_EXPR
, cnts
[i
],
8359 gimple_seq_add_stmt (pre_p
, g
);
8363 gimplify_and_add (last_bind
, pre_p
);
8364 tree cond
= boolean_false_node
;
8368 cond
= build2_loc (first_loc
, NE_EXPR
, boolean_type_node
, cnts
[0],
8369 size_binop_loc (first_loc
, PLUS_EXPR
, counts
[0],
8372 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8373 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8375 size_binop_loc (first_loc
, PLUS_EXPR
,
8381 tree prev
= size_int (5);
8382 for (i
= 0; i
< 4; i
++)
8386 prev
= size_binop_loc (first_loc
, PLUS_EXPR
, counts
[i
], prev
);
8387 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8388 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8389 cnts
[i
], unshare_expr (prev
)));
8392 tem
= build3_loc (first_loc
, COND_EXPR
, void_type_node
, cond
,
8393 build_call_expr_loc (first_loc
,
8394 builtin_decl_explicit (BUILT_IN_TRAP
),
8396 gimplify_and_add (tem
, pre_p
);
8397 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
8398 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
8399 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
8400 OMP_CLAUSE_CHAIN (c
) = *list_p
;
8405 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8406 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8407 the struct node to insert the new mapping after (when the struct node is
8408 initially created). PREV_NODE is the first of two or three mappings for a
8409 pointer, and is either:
8410 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8412 - not the node before C. This is true when we have a reference-to-pointer
8413 type (with a mapping for the reference and for the pointer), or for
8414 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8415 If SCP is non-null, the new node is inserted before *SCP.
8416 If SCP is null, the new node is inserted before PREV_NODE.
8418 - PREV_NODE, if SCP is non-null.
8419 - The newly-created ALLOC or RELEASE node, if SCP is null.
8420 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8421 reference to a pointer. */
8424 insert_struct_comp_map (enum tree_code code
, tree c
, tree struct_node
,
8425 tree prev_node
, tree
*scp
)
8427 enum gomp_map_kind mkind
8428 = (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
)
8429 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
8431 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
8432 tree cl
= scp
? prev_node
: c2
;
8433 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8434 OMP_CLAUSE_DECL (c2
) = unshare_expr (OMP_CLAUSE_DECL (c
));
8435 OMP_CLAUSE_CHAIN (c2
) = scp
? *scp
: prev_node
;
8436 if (OMP_CLAUSE_CHAIN (prev_node
) != c
8437 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node
)) == OMP_CLAUSE_MAP
8438 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8439 == GOMP_MAP_TO_PSET
))
8440 OMP_CLAUSE_SIZE (c2
) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node
));
8442 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (ptr_type_node
);
8444 OMP_CLAUSE_CHAIN (struct_node
) = c2
;
8446 /* We might need to create an additional mapping if we have a reference to a
8447 pointer (in C++). Don't do this if we have something other than a
8448 GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET. */
8449 if (OMP_CLAUSE_CHAIN (prev_node
) != c
8450 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node
)) == OMP_CLAUSE_MAP
8451 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8452 == GOMP_MAP_ALWAYS_POINTER
)
8453 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8454 == GOMP_MAP_ATTACH_DETACH
)))
8456 tree c4
= OMP_CLAUSE_CHAIN (prev_node
);
8457 tree c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
8458 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
8459 OMP_CLAUSE_DECL (c3
) = unshare_expr (OMP_CLAUSE_DECL (c4
));
8460 OMP_CLAUSE_SIZE (c3
) = TYPE_SIZE_UNIT (ptr_type_node
);
8461 OMP_CLAUSE_CHAIN (c3
) = prev_node
;
8463 OMP_CLAUSE_CHAIN (c2
) = c3
;
8474 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8475 and set *BITPOSP and *POFFSETP to the bit offset of the access.
8476 If BASE_REF is non-NULL and the containing object is a reference, set
8477 *BASE_REF to that reference before dereferencing the object.
8478 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8479 has array type, else return NULL. */
8482 extract_base_bit_offset (tree base
, tree
*base_ref
, poly_int64
*bitposp
,
8483 poly_offset_int
*poffsetp
)
8486 poly_int64 bitsize
, bitpos
;
8488 int unsignedp
, reversep
, volatilep
= 0;
8489 poly_offset_int poffset
;
8493 *base_ref
= NULL_TREE
;
8495 while (TREE_CODE (base
) == ARRAY_REF
)
8496 base
= TREE_OPERAND (base
, 0);
8498 if (TREE_CODE (base
) == INDIRECT_REF
)
8499 base
= TREE_OPERAND (base
, 0);
8503 if (TREE_CODE (base
) == ARRAY_REF
)
8505 while (TREE_CODE (base
) == ARRAY_REF
)
8506 base
= TREE_OPERAND (base
, 0);
8507 if (TREE_CODE (base
) != COMPONENT_REF
8508 || TREE_CODE (TREE_TYPE (base
)) != ARRAY_TYPE
)
8511 else if (TREE_CODE (base
) == INDIRECT_REF
8512 && TREE_CODE (TREE_OPERAND (base
, 0)) == COMPONENT_REF
8513 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
8515 base
= TREE_OPERAND (base
, 0);
8518 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
, &mode
,
8519 &unsignedp
, &reversep
, &volatilep
);
8521 tree orig_base
= base
;
8523 if ((TREE_CODE (base
) == INDIRECT_REF
8524 || (TREE_CODE (base
) == MEM_REF
8525 && integer_zerop (TREE_OPERAND (base
, 1))))
8526 && DECL_P (TREE_OPERAND (base
, 0))
8527 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0))) == REFERENCE_TYPE
)
8528 base
= TREE_OPERAND (base
, 0);
8530 gcc_assert (offset
== NULL_TREE
|| poly_int_tree_p (offset
));
8533 poffset
= wi::to_poly_offset (offset
);
8537 if (maybe_ne (bitpos
, 0))
8538 poffset
+= bits_to_bytes_round_down (bitpos
);
8541 *poffsetp
= poffset
;
8543 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8544 if (base_ref
&& orig_base
!= base
)
8545 *base_ref
= orig_base
;
8550 /* Returns true if EXPR is or contains (as a sub-component) BASE_PTR. */
8553 is_or_contains_p (tree expr
, tree base_ptr
)
8555 while (expr
!= base_ptr
)
8556 if (TREE_CODE (base_ptr
) == COMPONENT_REF
)
8557 base_ptr
= TREE_OPERAND (base_ptr
, 0);
8560 return expr
== base_ptr
;
8563 /* Implement OpenMP 5.x map ordering rules for target directives. There are
8564 several rules, and with some level of ambiguity, hopefully we can at least
8565 collect the complexity here in one place. */
8568 omp_target_reorder_clauses (tree
*list_p
)
8570 /* Collect refs to alloc/release/delete maps. */
8571 auto_vec
<tree
, 32> ard
;
8573 while (*cp
!= NULL_TREE
)
8574 if (OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
8575 && (OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ALLOC
8576 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_RELEASE
8577 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_DELETE
))
8579 /* Unlink cp and push to ard. */
8581 tree nc
= OMP_CLAUSE_CHAIN (c
);
8585 /* Any associated pointer type maps should also move along. */
8586 while (*cp
!= NULL_TREE
8587 && OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
8588 && (OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
8589 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8590 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ATTACH_DETACH
8591 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_POINTER
8592 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ALWAYS_POINTER
8593 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_TO_PSET
))
8596 nc
= OMP_CLAUSE_CHAIN (c
);
8602 cp
= &OMP_CLAUSE_CHAIN (*cp
);
8604 /* Link alloc/release/delete maps to the end of list. */
8605 for (unsigned int i
= 0; i
< ard
.length (); i
++)
8608 cp
= &OMP_CLAUSE_CHAIN (ard
[i
]);
8612 /* OpenMP 5.0 requires that pointer variables are mapped before
8613 their use as base-pointers. */
8614 auto_vec
<tree
*, 32> atf
;
8615 for (tree
*cp
= list_p
; *cp
; cp
= &OMP_CLAUSE_CHAIN (*cp
))
8616 if (OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
)
8618 /* Collect alloc, to, from, to/from clause tree pointers. */
8619 gomp_map_kind k
= OMP_CLAUSE_MAP_KIND (*cp
);
8620 if (k
== GOMP_MAP_ALLOC
8622 || k
== GOMP_MAP_FROM
8623 || k
== GOMP_MAP_TOFROM
8624 || k
== GOMP_MAP_ALWAYS_TO
8625 || k
== GOMP_MAP_ALWAYS_FROM
8626 || k
== GOMP_MAP_ALWAYS_TOFROM
)
8630 for (unsigned int i
= 0; i
< atf
.length (); i
++)
8634 tree decl
= OMP_CLAUSE_DECL (*cp
);
8635 if (TREE_CODE (decl
) == INDIRECT_REF
|| TREE_CODE (decl
) == MEM_REF
)
8637 tree base_ptr
= TREE_OPERAND (decl
, 0);
8638 STRIP_TYPE_NOPS (base_ptr
);
8639 for (unsigned int j
= i
+ 1; j
< atf
.length (); j
++)
8642 tree decl2
= OMP_CLAUSE_DECL (*cp2
);
8643 if (is_or_contains_p (decl2
, base_ptr
))
8645 /* Move *cp2 to before *cp. */
8647 *cp2
= OMP_CLAUSE_CHAIN (c
);
8648 OMP_CLAUSE_CHAIN (c
) = *cp
;
8657 /* DECL is supposed to have lastprivate semantics in the outer contexts
8658 of combined/composite constructs, starting with OCTX.
8659 Add needed lastprivate, shared or map clause if no data sharing or
8660 mapping clause are present. IMPLICIT_P is true if it is an implicit
8661 clause (IV on simd), in which case the lastprivate will not be
8662 copied to some constructs. */
8665 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx
*octx
,
8666 tree decl
, bool implicit_p
)
8668 struct gimplify_omp_ctx
*orig_octx
= octx
;
8669 for (; octx
; octx
= octx
->outer_context
)
8671 if ((octx
->region_type
== ORT_COMBINED_PARALLEL
8672 || (octx
->region_type
& ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
)
8673 && splay_tree_lookup (octx
->variables
,
8674 (splay_tree_key
) decl
) == NULL
)
8676 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
8679 if ((octx
->region_type
& ORT_TASK
) != 0
8680 && octx
->combined_loop
8681 && splay_tree_lookup (octx
->variables
,
8682 (splay_tree_key
) decl
) == NULL
)
8684 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8688 && octx
->region_type
== ORT_WORKSHARE
8689 && octx
->combined_loop
8690 && splay_tree_lookup (octx
->variables
,
8691 (splay_tree_key
) decl
) == NULL
8692 && octx
->outer_context
8693 && octx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
8694 && splay_tree_lookup (octx
->outer_context
->variables
,
8695 (splay_tree_key
) decl
) == NULL
)
8697 octx
= octx
->outer_context
;
8698 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8701 if ((octx
->region_type
== ORT_WORKSHARE
|| octx
->region_type
== ORT_ACC
)
8702 && octx
->combined_loop
8703 && splay_tree_lookup (octx
->variables
,
8704 (splay_tree_key
) decl
) == NULL
8705 && !omp_check_private (octx
, decl
, false))
8707 omp_add_variable (octx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8710 if (octx
->region_type
== ORT_COMBINED_TARGET
)
8712 splay_tree_node n
= splay_tree_lookup (octx
->variables
,
8713 (splay_tree_key
) decl
);
8716 omp_add_variable (octx
, decl
, GOVD_MAP
| GOVD_SEEN
);
8717 octx
= octx
->outer_context
;
8719 else if (!implicit_p
8720 && (n
->value
& GOVD_FIRSTPRIVATE_IMPLICIT
))
8722 n
->value
&= ~(GOVD_FIRSTPRIVATE
8723 | GOVD_FIRSTPRIVATE_IMPLICIT
8725 omp_add_variable (octx
, decl
, GOVD_MAP
| GOVD_SEEN
);
8726 octx
= octx
->outer_context
;
8731 if (octx
&& (implicit_p
|| octx
!= orig_octx
))
8732 omp_notice_variable (octx
, decl
, true);
8735 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8736 and previous omp contexts. */
8739 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
8740 enum omp_region_type region_type
,
8741 enum tree_code code
)
8743 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
8745 hash_map
<tree
, tree
> *struct_map_to_clause
= NULL
;
8746 hash_set
<tree
> *struct_deref_set
= NULL
;
8747 tree
*prev_list_p
= NULL
, *orig_list_p
= list_p
;
8748 int handled_depend_iterators
= -1;
8751 ctx
= new_omp_context (region_type
);
8753 outer_ctx
= ctx
->outer_context
;
8754 if (code
== OMP_TARGET
)
8756 if (!lang_GNU_Fortran ())
8757 ctx
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
8758 ctx
->defaultmap
[GDMK_SCALAR
] = GOVD_FIRSTPRIVATE
;
8759 ctx
->defaultmap
[GDMK_SCALAR_TARGET
] = (lang_GNU_Fortran ()
8760 ? GOVD_MAP
: GOVD_FIRSTPRIVATE
);
8762 if (!lang_GNU_Fortran ())
8766 case OMP_TARGET_DATA
:
8767 case OMP_TARGET_ENTER_DATA
:
8768 case OMP_TARGET_EXIT_DATA
:
8770 case OACC_HOST_DATA
:
8773 ctx
->target_firstprivatize_array_bases
= true;
8778 if (code
== OMP_TARGET
8779 || code
== OMP_TARGET_DATA
8780 || code
== OMP_TARGET_ENTER_DATA
8781 || code
== OMP_TARGET_EXIT_DATA
)
8782 omp_target_reorder_clauses (list_p
);
8784 while ((c
= *list_p
) != NULL
)
8786 bool remove
= false;
8787 bool notice_outer
= true;
8788 const char *check_non_private
= NULL
;
8792 switch (OMP_CLAUSE_CODE (c
))
8794 case OMP_CLAUSE_PRIVATE
:
8795 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
8796 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
8798 flags
|= GOVD_PRIVATE_OUTER_REF
;
8799 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
8802 notice_outer
= false;
8804 case OMP_CLAUSE_SHARED
:
8805 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
8807 case OMP_CLAUSE_FIRSTPRIVATE
:
8808 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8809 check_non_private
= "firstprivate";
8810 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
8812 gcc_assert (code
== OMP_TARGET
);
8813 flags
|= GOVD_FIRSTPRIVATE_IMPLICIT
;
8816 case OMP_CLAUSE_LASTPRIVATE
:
8817 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
8820 case OMP_DISTRIBUTE
:
8821 error_at (OMP_CLAUSE_LOCATION (c
),
8822 "conditional %<lastprivate%> clause on "
8823 "%qs construct", "distribute");
8824 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8827 error_at (OMP_CLAUSE_LOCATION (c
),
8828 "conditional %<lastprivate%> clause on "
8829 "%qs construct", "taskloop");
8830 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8835 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
8836 if (code
!= OMP_LOOP
)
8837 check_non_private
= "lastprivate";
8838 decl
= OMP_CLAUSE_DECL (c
);
8839 if (error_operand_p (decl
))
8841 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
8842 && !lang_hooks
.decls
.omp_scalar_p (decl
, true))
8844 error_at (OMP_CLAUSE_LOCATION (c
),
8845 "non-scalar variable %qD in conditional "
8846 "%<lastprivate%> clause", decl
);
8847 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8849 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
8850 flags
|= GOVD_LASTPRIVATE_CONDITIONAL
;
8851 omp_lastprivate_for_combined_outer_constructs (outer_ctx
, decl
,
8854 case OMP_CLAUSE_REDUCTION
:
8855 if (OMP_CLAUSE_REDUCTION_TASK (c
))
8857 if (region_type
== ORT_WORKSHARE
)
8860 nowait
= omp_find_clause (*list_p
,
8861 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8863 && (outer_ctx
== NULL
8864 || outer_ctx
->region_type
!= ORT_COMBINED_PARALLEL
))
8866 error_at (OMP_CLAUSE_LOCATION (c
),
8867 "%<task%> reduction modifier on a construct "
8868 "with a %<nowait%> clause");
8869 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
8872 else if ((region_type
& ORT_PARALLEL
) != ORT_PARALLEL
)
8874 error_at (OMP_CLAUSE_LOCATION (c
),
8875 "invalid %<task%> reduction modifier on construct "
8876 "other than %<parallel%>, %qs or %<sections%>",
8877 lang_GNU_Fortran () ? "do" : "for");
8878 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
8881 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
8885 error_at (OMP_CLAUSE_LOCATION (c
),
8886 "%<inscan%> %<reduction%> clause on "
8887 "%qs construct", "sections");
8888 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8891 error_at (OMP_CLAUSE_LOCATION (c
),
8892 "%<inscan%> %<reduction%> clause on "
8893 "%qs construct", "parallel");
8894 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8897 error_at (OMP_CLAUSE_LOCATION (c
),
8898 "%<inscan%> %<reduction%> clause on "
8899 "%qs construct", "teams");
8900 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8903 error_at (OMP_CLAUSE_LOCATION (c
),
8904 "%<inscan%> %<reduction%> clause on "
8905 "%qs construct", "taskloop");
8906 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8912 case OMP_CLAUSE_IN_REDUCTION
:
8913 case OMP_CLAUSE_TASK_REDUCTION
:
8914 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
8915 /* OpenACC permits reductions on private variables. */
8916 if (!(region_type
& ORT_ACC
)
8917 /* taskgroup is actually not a worksharing region. */
8918 && code
!= OMP_TASKGROUP
)
8919 check_non_private
= omp_clause_code_name
[OMP_CLAUSE_CODE (c
)];
8920 decl
= OMP_CLAUSE_DECL (c
);
8921 if (TREE_CODE (decl
) == MEM_REF
)
8923 tree type
= TREE_TYPE (decl
);
8924 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
8925 gimplify_ctxp
->into_ssa
= false;
8926 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
8927 NULL
, is_gimple_val
, fb_rvalue
, false)
8930 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
8934 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
8935 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8938 omp_firstprivatize_variable (ctx
, v
);
8939 omp_notice_variable (ctx
, v
, true);
8941 decl
= TREE_OPERAND (decl
, 0);
8942 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
8944 gimplify_ctxp
->into_ssa
= false;
8945 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
8946 NULL
, is_gimple_val
, fb_rvalue
, false)
8949 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
8953 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
8954 v
= TREE_OPERAND (decl
, 1);
8957 omp_firstprivatize_variable (ctx
, v
);
8958 omp_notice_variable (ctx
, v
, true);
8960 decl
= TREE_OPERAND (decl
, 0);
8962 if (TREE_CODE (decl
) == ADDR_EXPR
8963 || TREE_CODE (decl
) == INDIRECT_REF
)
8964 decl
= TREE_OPERAND (decl
, 0);
8967 case OMP_CLAUSE_LINEAR
:
8968 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
8969 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8976 if (code
== OMP_SIMD
8977 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8979 struct gimplify_omp_ctx
*octx
= outer_ctx
;
8981 && octx
->region_type
== ORT_WORKSHARE
8982 && octx
->combined_loop
8983 && !octx
->distribute
)
8985 if (octx
->outer_context
8986 && (octx
->outer_context
->region_type
8987 == ORT_COMBINED_PARALLEL
))
8988 octx
= octx
->outer_context
->outer_context
;
8990 octx
= octx
->outer_context
;
8993 && octx
->region_type
== ORT_WORKSHARE
8994 && octx
->combined_loop
8995 && octx
->distribute
)
8997 error_at (OMP_CLAUSE_LOCATION (c
),
8998 "%<linear%> clause for variable other than "
8999 "loop iterator specified on construct "
9000 "combined with %<distribute%>");
9005 /* For combined #pragma omp parallel for simd, need to put
9006 lastprivate and perhaps firstprivate too on the
9007 parallel. Similarly for #pragma omp for simd. */
9008 struct gimplify_omp_ctx
*octx
= outer_ctx
;
9009 bool taskloop_seen
= false;
9013 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
9014 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
9016 decl
= OMP_CLAUSE_DECL (c
);
9017 if (error_operand_p (decl
))
9023 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
9024 flags
|= GOVD_FIRSTPRIVATE
;
9025 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
9026 flags
|= GOVD_LASTPRIVATE
;
9028 && octx
->region_type
== ORT_WORKSHARE
9029 && octx
->combined_loop
)
9031 if (octx
->outer_context
9032 && (octx
->outer_context
->region_type
9033 == ORT_COMBINED_PARALLEL
))
9034 octx
= octx
->outer_context
;
9035 else if (omp_check_private (octx
, decl
, false))
9039 && (octx
->region_type
& ORT_TASK
) != 0
9040 && octx
->combined_loop
)
9041 taskloop_seen
= true;
9043 && octx
->region_type
== ORT_COMBINED_PARALLEL
9044 && ((ctx
->region_type
== ORT_WORKSHARE
9045 && octx
== outer_ctx
)
9047 flags
= GOVD_SEEN
| GOVD_SHARED
;
9049 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
9050 == ORT_COMBINED_TEAMS
))
9051 flags
= GOVD_SEEN
| GOVD_SHARED
;
9053 && octx
->region_type
== ORT_COMBINED_TARGET
)
9055 if (flags
& GOVD_LASTPRIVATE
)
9056 flags
= GOVD_SEEN
| GOVD_MAP
;
9061 = splay_tree_lookup (octx
->variables
,
9062 (splay_tree_key
) decl
);
9063 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
9068 omp_add_variable (octx
, decl
, flags
);
9069 if (octx
->outer_context
== NULL
)
9071 octx
= octx
->outer_context
;
9076 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
9077 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
9078 omp_notice_variable (octx
, decl
, true);
9080 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
9081 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
9082 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
9084 notice_outer
= false;
9085 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
9089 case OMP_CLAUSE_MAP
:
9090 decl
= OMP_CLAUSE_DECL (c
);
9091 if (error_operand_p (decl
))
9098 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
9101 case OMP_TARGET_DATA
:
9102 case OMP_TARGET_ENTER_DATA
:
9103 case OMP_TARGET_EXIT_DATA
:
9104 case OACC_ENTER_DATA
:
9105 case OACC_EXIT_DATA
:
9106 case OACC_HOST_DATA
:
9107 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9108 || (OMP_CLAUSE_MAP_KIND (c
)
9109 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9110 /* For target {,enter ,exit }data only the array slice is
9111 mapped, but not the pointer to it. */
9117 /* For Fortran, not only the pointer to the data is mapped but also
9118 the address of the pointer, the array descriptor etc.; for
9119 'exit data' - and in particular for 'delete:' - having an 'alloc:'
9120 does not make sense. Likewise, for 'update' only transferring the
9121 data itself is needed as the rest has been handled in previous
9122 directives. However, for 'exit data', the array descriptor needs
9123 to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
9125 NOTE: Generally, it is not safe to perform "enter data" operations
9126 on arrays where the data *or the descriptor* may go out of scope
9127 before a corresponding "exit data" operation -- and such a
9128 descriptor may be synthesized temporarily, e.g. to pass an
9129 explicit-shape array to a function expecting an assumed-shape
9130 argument. Performing "enter data" inside the called function
9131 would thus be problematic. */
9132 if (code
== OMP_TARGET_EXIT_DATA
9133 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
)
9134 OMP_CLAUSE_SET_MAP_KIND (c
, OMP_CLAUSE_MAP_KIND (*prev_list_p
)
9136 ? GOMP_MAP_DELETE
: GOMP_MAP_RELEASE
);
9137 else if ((code
== OMP_TARGET_EXIT_DATA
|| code
== OMP_TARGET_UPDATE
)
9138 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
9139 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
))
9144 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
9146 struct gimplify_omp_ctx
*octx
;
9147 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
9149 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
9152 = splay_tree_lookup (octx
->variables
,
9153 (splay_tree_key
) decl
);
9155 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
9156 "declared in enclosing %<host_data%> region",
9160 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
9161 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
9162 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9163 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
9164 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9169 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9170 || (OMP_CLAUSE_MAP_KIND (c
)
9171 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9172 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9173 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
9176 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
9178 if ((region_type
& ORT_TARGET
) != 0)
9179 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
9180 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
9186 if (TREE_CODE (d
) == ARRAY_REF
)
9188 while (TREE_CODE (d
) == ARRAY_REF
)
9189 d
= TREE_OPERAND (d
, 0);
9190 if (TREE_CODE (d
) == COMPONENT_REF
9191 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
9194 pd
= &OMP_CLAUSE_DECL (c
);
9196 && TREE_CODE (decl
) == INDIRECT_REF
9197 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
9198 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9201 pd
= &TREE_OPERAND (decl
, 0);
9202 decl
= TREE_OPERAND (decl
, 0);
9204 bool indir_p
= false;
9205 tree orig_decl
= decl
;
9206 tree decl_ref
= NULL_TREE
;
9207 if ((region_type
& (ORT_ACC
| ORT_TARGET
| ORT_TARGET_DATA
)) != 0
9208 && TREE_CODE (*pd
) == COMPONENT_REF
9209 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
9210 && code
!= OACC_UPDATE
)
9212 while (TREE_CODE (decl
) == COMPONENT_REF
)
9214 decl
= TREE_OPERAND (decl
, 0);
9215 if (((TREE_CODE (decl
) == MEM_REF
9216 && integer_zerop (TREE_OPERAND (decl
, 1)))
9217 || INDIRECT_REF_P (decl
))
9218 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9222 decl
= TREE_OPERAND (decl
, 0);
9224 if (TREE_CODE (decl
) == INDIRECT_REF
9225 && DECL_P (TREE_OPERAND (decl
, 0))
9226 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9230 decl
= TREE_OPERAND (decl
, 0);
9234 else if (TREE_CODE (decl
) == COMPONENT_REF
)
9236 while (TREE_CODE (decl
) == COMPONENT_REF
)
9237 decl
= TREE_OPERAND (decl
, 0);
9238 if (TREE_CODE (decl
) == INDIRECT_REF
9239 && DECL_P (TREE_OPERAND (decl
, 0))
9240 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9242 decl
= TREE_OPERAND (decl
, 0);
9244 if (decl
!= orig_decl
&& DECL_P (decl
) && indir_p
)
9247 = ((code
== OACC_EXIT_DATA
|| code
== OMP_TARGET_EXIT_DATA
)
9248 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
9249 /* We have a dereference of a struct member. Make this an
9250 attach/detach operation, and ensure the base pointer is
9251 mapped as a FIRSTPRIVATE_POINTER. */
9252 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
9253 flags
= GOVD_MAP
| GOVD_SEEN
| GOVD_EXPLICIT
;
9254 tree next_clause
= OMP_CLAUSE_CHAIN (c
);
9255 if (k
== GOMP_MAP_ATTACH
9256 && code
!= OACC_ENTER_DATA
9257 && code
!= OMP_TARGET_ENTER_DATA
9259 || (OMP_CLAUSE_CODE (next_clause
) != OMP_CLAUSE_MAP
)
9260 || (OMP_CLAUSE_MAP_KIND (next_clause
)
9261 != GOMP_MAP_POINTER
)
9262 || OMP_CLAUSE_DECL (next_clause
) != decl
)
9263 && (!struct_deref_set
9264 || !struct_deref_set
->contains (decl
)))
9266 if (!struct_deref_set
)
9267 struct_deref_set
= new hash_set
<tree
> ();
9268 /* As well as the attach, we also need a
9269 FIRSTPRIVATE_POINTER clause to properly map the
9270 pointer to the struct base. */
9271 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9273 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ALLOC
);
9274 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2
)
9277 = build_int_cst (build_pointer_type (char_type_node
),
9279 OMP_CLAUSE_DECL (c2
)
9280 = build2 (MEM_REF
, char_type_node
,
9281 decl_ref
? decl_ref
: decl
, charptr_zero
);
9282 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
9283 tree c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9285 OMP_CLAUSE_SET_MAP_KIND (c3
,
9286 GOMP_MAP_FIRSTPRIVATE_POINTER
);
9287 OMP_CLAUSE_DECL (c3
) = decl
;
9288 OMP_CLAUSE_SIZE (c3
) = size_zero_node
;
9289 tree mapgrp
= *prev_list_p
;
9291 OMP_CLAUSE_CHAIN (c3
) = mapgrp
;
9292 OMP_CLAUSE_CHAIN (c2
) = c3
;
9294 struct_deref_set
->add (decl
);
9298 /* An "attach/detach" operation on an update directive should
9299 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
9300 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
9301 depends on the previous mapping. */
9302 if (code
== OACC_UPDATE
9303 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9304 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_ALWAYS_POINTER
);
9306 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
9307 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
9308 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
9309 && code
!= OACC_UPDATE
9310 && code
!= OMP_TARGET_UPDATE
)
9312 if (error_operand_p (decl
))
9318 tree stype
= TREE_TYPE (decl
);
9319 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
9320 stype
= TREE_TYPE (stype
);
9321 if (TYPE_SIZE_UNIT (stype
) == NULL
9322 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
9324 error_at (OMP_CLAUSE_LOCATION (c
),
9325 "mapping field %qE of variable length "
9326 "structure", OMP_CLAUSE_DECL (c
));
9331 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
9332 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9334 /* Error recovery. */
9335 if (prev_list_p
== NULL
)
9340 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
9342 tree ch
= OMP_CLAUSE_CHAIN (*prev_list_p
);
9343 if (ch
== NULL_TREE
|| OMP_CLAUSE_CHAIN (ch
) != c
)
9351 poly_offset_int offset1
;
9356 = extract_base_bit_offset (OMP_CLAUSE_DECL (c
), &base_ref
,
9357 &bitpos1
, &offset1
);
9359 gcc_assert (base
== decl
);
9362 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
9363 bool ptr
= (OMP_CLAUSE_MAP_KIND (c
)
9364 == GOMP_MAP_ALWAYS_POINTER
);
9365 bool attach_detach
= (OMP_CLAUSE_MAP_KIND (c
)
9366 == GOMP_MAP_ATTACH_DETACH
);
9367 bool attach
= OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
9368 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
;
9369 bool has_attachments
= false;
9370 /* For OpenACC, pointers in structs should trigger an
9373 && ((region_type
& (ORT_ACC
| ORT_TARGET
| ORT_TARGET_DATA
))
9374 || code
== OMP_TARGET_ENTER_DATA
9375 || code
== OMP_TARGET_EXIT_DATA
))
9378 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9379 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9380 have detected a case that needs a GOMP_MAP_STRUCT
9383 = ((code
== OACC_EXIT_DATA
|| code
== OMP_TARGET_EXIT_DATA
)
9384 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
9385 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
9386 has_attachments
= true;
9388 if (n
== NULL
|| (n
->value
& GOVD_MAP
) == 0)
9390 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9392 gomp_map_kind k
= attach
? GOMP_MAP_FORCE_PRESENT
9395 OMP_CLAUSE_SET_MAP_KIND (l
, k
);
9397 OMP_CLAUSE_DECL (l
) = unshare_expr (base_ref
);
9399 OMP_CLAUSE_DECL (l
) = decl
;
9403 : DECL_P (OMP_CLAUSE_DECL (l
))
9404 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l
))
9405 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l
))));
9406 if (struct_map_to_clause
== NULL
)
9407 struct_map_to_clause
= new hash_map
<tree
, tree
>;
9408 struct_map_to_clause
->put (decl
, l
);
9409 if (ptr
|| attach_detach
)
9411 insert_struct_comp_map (code
, c
, l
, *prev_list_p
,
9418 OMP_CLAUSE_CHAIN (l
) = c
;
9420 list_p
= &OMP_CLAUSE_CHAIN (l
);
9422 if (base_ref
&& code
== OMP_TARGET
)
9424 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9426 enum gomp_map_kind mkind
9427 = GOMP_MAP_FIRSTPRIVATE_REFERENCE
;
9428 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
9429 OMP_CLAUSE_DECL (c2
) = decl
;
9430 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
9431 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
9432 OMP_CLAUSE_CHAIN (l
) = c2
;
9434 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
9435 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
9439 if (has_attachments
)
9440 flags
|= GOVD_MAP_HAS_ATTACHMENTS
;
9443 else if (struct_map_to_clause
)
9445 tree
*osc
= struct_map_to_clause
->get (decl
);
9446 tree
*sc
= NULL
, *scp
= NULL
;
9447 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
9450 n
->value
|= GOVD_SEEN
;
9451 sc
= &OMP_CLAUSE_CHAIN (*osc
);
9453 && (OMP_CLAUSE_MAP_KIND (*sc
)
9454 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9455 sc
= &OMP_CLAUSE_CHAIN (*sc
);
9456 /* Here "prev_list_p" is the end of the inserted
9457 alloc/release nodes after the struct node, OSC. */
9458 for (; *sc
!= c
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
9459 if ((ptr
|| attach_detach
) && sc
== prev_list_p
)
9461 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9463 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9465 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9470 tree sc_decl
= OMP_CLAUSE_DECL (*sc
);
9471 poly_offset_int offsetn
;
9474 = extract_base_bit_offset (sc_decl
, NULL
,
9475 &bitposn
, &offsetn
);
9480 if ((region_type
& ORT_ACC
) != 0)
9482 /* This duplicate checking code is currently only
9483 enabled for OpenACC. */
9484 tree d1
= OMP_CLAUSE_DECL (*sc
);
9485 tree d2
= OMP_CLAUSE_DECL (c
);
9486 while (TREE_CODE (d1
) == ARRAY_REF
)
9487 d1
= TREE_OPERAND (d1
, 0);
9488 while (TREE_CODE (d2
) == ARRAY_REF
)
9489 d2
= TREE_OPERAND (d2
, 0);
9490 if (TREE_CODE (d1
) == INDIRECT_REF
)
9491 d1
= TREE_OPERAND (d1
, 0);
9492 if (TREE_CODE (d2
) == INDIRECT_REF
)
9493 d2
= TREE_OPERAND (d2
, 0);
9494 while (TREE_CODE (d1
) == COMPONENT_REF
)
9495 if (TREE_CODE (d2
) == COMPONENT_REF
9496 && TREE_OPERAND (d1
, 1)
9497 == TREE_OPERAND (d2
, 1))
9499 d1
= TREE_OPERAND (d1
, 0);
9500 d2
= TREE_OPERAND (d2
, 0);
9506 error_at (OMP_CLAUSE_LOCATION (c
),
9507 "%qE appears more than once in map "
9508 "clauses", OMP_CLAUSE_DECL (c
));
9513 if (maybe_lt (offset1
, offsetn
)
9514 || (known_eq (offset1
, offsetn
)
9515 && maybe_lt (bitpos1
, bitposn
)))
9517 if (ptr
|| attach_detach
)
9526 OMP_CLAUSE_SIZE (*osc
)
9527 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
),
9529 if (ptr
|| attach_detach
)
9531 tree cl
= insert_struct_comp_map (code
, c
, NULL
,
9533 if (sc
== prev_list_p
)
9540 *prev_list_p
= OMP_CLAUSE_CHAIN (c
);
9541 list_p
= prev_list_p
;
9543 OMP_CLAUSE_CHAIN (c
) = *sc
;
9550 *list_p
= OMP_CLAUSE_CHAIN (c
);
9551 OMP_CLAUSE_CHAIN (c
) = *sc
;
9557 else if ((code
== OACC_ENTER_DATA
9558 || code
== OACC_EXIT_DATA
9559 || code
== OACC_DATA
9560 || code
== OACC_PARALLEL
9561 || code
== OACC_KERNELS
9562 || code
== OACC_SERIAL
)
9563 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9565 gomp_map_kind k
= (code
== OACC_EXIT_DATA
9566 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
9567 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
9570 if (code
== OMP_TARGET
&& OMP_CLAUSE_MAP_IN_REDUCTION (c
))
9572 /* Don't gimplify *pd fully at this point, as the base
9573 will need to be adjusted during omp lowering. */
9574 auto_vec
<tree
, 10> expr_stack
;
9576 while (handled_component_p (*p
)
9577 || TREE_CODE (*p
) == INDIRECT_REF
9578 || TREE_CODE (*p
) == ADDR_EXPR
9579 || TREE_CODE (*p
) == MEM_REF
9580 || TREE_CODE (*p
) == NON_LVALUE_EXPR
)
9582 expr_stack
.safe_push (*p
);
9583 p
= &TREE_OPERAND (*p
, 0);
9585 for (int i
= expr_stack
.length () - 1; i
>= 0; i
--)
9587 tree t
= expr_stack
[i
];
9588 if (TREE_CODE (t
) == ARRAY_REF
9589 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
9591 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
9593 tree low
= unshare_expr (array_ref_low_bound (t
));
9594 if (!is_gimple_min_invariant (low
))
9596 TREE_OPERAND (t
, 2) = low
;
9597 if (gimplify_expr (&TREE_OPERAND (t
, 2),
9600 fb_rvalue
) == GS_ERROR
)
9604 else if (gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
9605 NULL
, is_gimple_reg
,
9606 fb_rvalue
) == GS_ERROR
)
9608 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
9610 tree elmt_size
= array_ref_element_size (t
);
9611 if (!is_gimple_min_invariant (elmt_size
))
9613 elmt_size
= unshare_expr (elmt_size
);
9615 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
,
9618 = size_int (TYPE_ALIGN_UNIT (elmt_type
));
9620 = size_binop (EXACT_DIV_EXPR
, elmt_size
,
9622 TREE_OPERAND (t
, 3) = elmt_size
;
9623 if (gimplify_expr (&TREE_OPERAND (t
, 3),
9626 fb_rvalue
) == GS_ERROR
)
9630 else if (gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
9631 NULL
, is_gimple_reg
,
9632 fb_rvalue
) == GS_ERROR
)
9635 else if (TREE_CODE (t
) == COMPONENT_REF
)
9637 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
9639 tree offset
= component_ref_field_offset (t
);
9640 if (!is_gimple_min_invariant (offset
))
9642 offset
= unshare_expr (offset
);
9643 tree field
= TREE_OPERAND (t
, 1);
9645 = size_int (DECL_OFFSET_ALIGN (field
)
9647 offset
= size_binop (EXACT_DIV_EXPR
, offset
,
9649 TREE_OPERAND (t
, 2) = offset
;
9650 if (gimplify_expr (&TREE_OPERAND (t
, 2),
9653 fb_rvalue
) == GS_ERROR
)
9657 else if (gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
9658 NULL
, is_gimple_reg
,
9659 fb_rvalue
) == GS_ERROR
)
9663 for (; expr_stack
.length () > 0; )
9665 tree t
= expr_stack
.pop ();
9667 if (TREE_CODE (t
) == ARRAY_REF
9668 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
9670 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1))
9671 && gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
,
9672 NULL
, is_gimple_val
,
9673 fb_rvalue
) == GS_ERROR
)
9678 else if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
,
9679 fb_lvalue
) == GS_ERROR
)
9686 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_POINTER
9687 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH_DETACH
9688 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
9689 && OMP_CLAUSE_CHAIN (c
)
9690 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
)) == OMP_CLAUSE_MAP
9691 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
9692 == GOMP_MAP_ALWAYS_POINTER
)
9693 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
9694 == GOMP_MAP_ATTACH_DETACH
)
9695 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
9696 == GOMP_MAP_TO_PSET
)))
9697 prev_list_p
= list_p
;
9703 /* DECL_P (decl) == true */
9705 if (struct_map_to_clause
9706 && (sc
= struct_map_to_clause
->get (decl
)) != NULL
9707 && OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_STRUCT
9708 && decl
== OMP_CLAUSE_DECL (*sc
))
9710 /* We have found a map of the whole structure after a
9711 leading GOMP_MAP_STRUCT has been created, so refill the
9712 leading clause into a map of the whole structure
9713 variable, and remove the current one.
9714 TODO: we should be able to remove some maps of the
9715 following structure element maps if they are of
9716 compatible TO/FROM/ALLOC type. */
9717 OMP_CLAUSE_SET_MAP_KIND (*sc
, OMP_CLAUSE_MAP_KIND (c
));
9718 OMP_CLAUSE_SIZE (*sc
) = unshare_expr (OMP_CLAUSE_SIZE (c
));
9723 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
9724 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
9725 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
9726 flags
|= GOVD_MAP_ALWAYS_TO
;
9728 if ((code
== OMP_TARGET
9729 || code
== OMP_TARGET_DATA
9730 || code
== OMP_TARGET_ENTER_DATA
9731 || code
== OMP_TARGET_EXIT_DATA
)
9732 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9734 for (struct gimplify_omp_ctx
*octx
= outer_ctx
; octx
;
9735 octx
= octx
->outer_context
)
9738 = splay_tree_lookup (octx
->variables
,
9739 (splay_tree_key
) OMP_CLAUSE_DECL (c
));
9740 /* If this is contained in an outer OpenMP region as a
9741 firstprivate value, remove the attach/detach. */
9742 if (n
&& (n
->value
& GOVD_FIRSTPRIVATE
))
9744 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
9749 enum gomp_map_kind map_kind
= (code
== OMP_TARGET_EXIT_DATA
9752 OMP_CLAUSE_SET_MAP_KIND (c
, map_kind
);
9757 case OMP_CLAUSE_AFFINITY
:
9758 gimplify_omp_affinity (list_p
, pre_p
);
9761 case OMP_CLAUSE_DEPEND
:
9762 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
9764 tree deps
= OMP_CLAUSE_DECL (c
);
9765 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
9767 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
9768 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
9769 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
9770 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
9771 deps
= TREE_CHAIN (deps
);
9775 else if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
9777 if (handled_depend_iterators
== -1)
9778 handled_depend_iterators
= gimplify_omp_depend (list_p
, pre_p
);
9779 if (handled_depend_iterators
)
9781 if (handled_depend_iterators
== 2)
9785 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
9787 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
9788 NULL
, is_gimple_val
, fb_rvalue
);
9789 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
9791 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
9796 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
9797 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
9798 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9803 if (code
== OMP_TASK
)
9804 ctx
->has_depend
= true;
9808 case OMP_CLAUSE_FROM
:
9809 case OMP_CLAUSE__CACHE_
:
9810 decl
= OMP_CLAUSE_DECL (c
);
9811 if (error_operand_p (decl
))
9816 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
9817 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
9818 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9819 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
9820 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9827 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
9828 NULL
, is_gimple_lvalue
, fb_lvalue
)
9838 case OMP_CLAUSE_USE_DEVICE_PTR
:
9839 case OMP_CLAUSE_USE_DEVICE_ADDR
:
9840 flags
= GOVD_EXPLICIT
;
9843 case OMP_CLAUSE_IS_DEVICE_PTR
:
9844 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
9848 decl
= OMP_CLAUSE_DECL (c
);
9850 if (error_operand_p (decl
))
9855 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
9857 tree t
= omp_member_access_dummy_var (decl
);
9860 tree v
= DECL_VALUE_EXPR (decl
);
9861 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
9863 omp_notice_variable (outer_ctx
, t
, true);
9866 if (code
== OACC_DATA
9867 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9868 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
9869 flags
|= GOVD_MAP_0LEN_ARRAY
;
9870 omp_add_variable (ctx
, decl
, flags
);
9871 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9872 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
9873 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9874 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9876 struct gimplify_omp_ctx
*pctx
9877 = code
== OMP_TARGET
? outer_ctx
: ctx
;
9879 omp_add_variable (pctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
9880 GOVD_LOCAL
| GOVD_SEEN
);
9882 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
9883 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
9885 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
9887 omp_add_variable (pctx
,
9888 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
9889 GOVD_LOCAL
| GOVD_SEEN
);
9890 gimplify_omp_ctxp
= pctx
;
9891 push_gimplify_context ();
9893 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9894 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9896 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
9897 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
9898 pop_gimplify_context
9899 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
9900 push_gimplify_context ();
9901 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
9902 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9903 pop_gimplify_context
9904 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
9905 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
9906 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
9908 gimplify_omp_ctxp
= outer_ctx
;
9910 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
9911 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
9913 gimplify_omp_ctxp
= ctx
;
9914 push_gimplify_context ();
9915 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
9917 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
9919 TREE_SIDE_EFFECTS (bind
) = 1;
9920 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
9921 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
9923 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
9924 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
9925 pop_gimplify_context
9926 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
9927 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
9929 gimplify_omp_ctxp
= outer_ctx
;
9931 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
9932 && OMP_CLAUSE_LINEAR_STMT (c
))
9934 gimplify_omp_ctxp
= ctx
;
9935 push_gimplify_context ();
9936 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
9938 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
9940 TREE_SIDE_EFFECTS (bind
) = 1;
9941 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
9942 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
9944 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
9945 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
9946 pop_gimplify_context
9947 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
9948 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
9950 gimplify_omp_ctxp
= outer_ctx
;
9956 case OMP_CLAUSE_COPYIN
:
9957 case OMP_CLAUSE_COPYPRIVATE
:
9958 decl
= OMP_CLAUSE_DECL (c
);
9959 if (error_operand_p (decl
))
9964 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
9966 && !omp_check_private (ctx
, decl
, true))
9969 if (is_global_var (decl
))
9971 if (DECL_THREAD_LOCAL_P (decl
))
9973 else if (DECL_HAS_VALUE_EXPR_P (decl
))
9975 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
9979 && DECL_THREAD_LOCAL_P (value
))
9984 error_at (OMP_CLAUSE_LOCATION (c
),
9985 "copyprivate variable %qE is not threadprivate"
9986 " or private in outer context", DECL_NAME (decl
));
9989 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9990 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
9991 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
9993 && ((region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
9994 || (region_type
== ORT_WORKSHARE
9995 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9996 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
9997 || code
== OMP_LOOP
)))
9998 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
9999 || (code
== OMP_LOOP
10000 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10001 && ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
10002 == ORT_COMBINED_TEAMS
))))
10005 = splay_tree_lookup (outer_ctx
->variables
,
10006 (splay_tree_key
)decl
);
10007 if (on
== NULL
|| (on
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
10009 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
10010 && TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
10011 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
10012 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
10013 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
10014 == POINTER_TYPE
))))
10015 omp_firstprivatize_variable (outer_ctx
, decl
);
10018 omp_add_variable (outer_ctx
, decl
,
10019 GOVD_SEEN
| GOVD_SHARED
);
10020 if (outer_ctx
->outer_context
)
10021 omp_notice_variable (outer_ctx
->outer_context
, decl
,
10027 omp_notice_variable (outer_ctx
, decl
, true);
10028 if (check_non_private
10029 && region_type
== ORT_WORKSHARE
10030 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
10031 || decl
== OMP_CLAUSE_DECL (c
)
10032 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
10033 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
10035 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
10036 == POINTER_PLUS_EXPR
10037 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
10038 (OMP_CLAUSE_DECL (c
), 0), 0))
10040 && omp_check_private (ctx
, decl
, false))
10042 error ("%s variable %qE is private in outer context",
10043 check_non_private
, DECL_NAME (decl
));
10048 case OMP_CLAUSE_DETACH
:
10049 flags
= GOVD_FIRSTPRIVATE
| GOVD_SEEN
;
10052 case OMP_CLAUSE_IF
:
10053 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
10054 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
10057 for (int i
= 0; i
< 2; i
++)
10058 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
10060 case VOID_CST
: p
[i
] = "cancel"; break;
10061 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
10062 case OMP_SIMD
: p
[i
] = "simd"; break;
10063 case OMP_TASK
: p
[i
] = "task"; break;
10064 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
10065 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
10066 case OMP_TARGET
: p
[i
] = "target"; break;
10067 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
10068 case OMP_TARGET_ENTER_DATA
:
10069 p
[i
] = "target enter data"; break;
10070 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
10071 default: gcc_unreachable ();
10073 error_at (OMP_CLAUSE_LOCATION (c
),
10074 "expected %qs %<if%> clause modifier rather than %qs",
10078 /* Fall through. */
10080 case OMP_CLAUSE_FINAL
:
10081 OMP_CLAUSE_OPERAND (c
, 0)
10082 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
10083 /* Fall through. */
10085 case OMP_CLAUSE_SCHEDULE
:
10086 case OMP_CLAUSE_NUM_THREADS
:
10087 case OMP_CLAUSE_NUM_TEAMS
:
10088 case OMP_CLAUSE_THREAD_LIMIT
:
10089 case OMP_CLAUSE_DIST_SCHEDULE
:
10090 case OMP_CLAUSE_DEVICE
:
10091 case OMP_CLAUSE_PRIORITY
:
10092 case OMP_CLAUSE_GRAINSIZE
:
10093 case OMP_CLAUSE_NUM_TASKS
:
10094 case OMP_CLAUSE_HINT
:
10095 case OMP_CLAUSE_ASYNC
:
10096 case OMP_CLAUSE_WAIT
:
10097 case OMP_CLAUSE_NUM_GANGS
:
10098 case OMP_CLAUSE_NUM_WORKERS
:
10099 case OMP_CLAUSE_VECTOR_LENGTH
:
10100 case OMP_CLAUSE_WORKER
:
10101 case OMP_CLAUSE_VECTOR
:
10102 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
10103 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10107 case OMP_CLAUSE_GANG
:
10108 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
10109 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10111 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
10112 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10116 case OMP_CLAUSE_NOWAIT
:
10120 case OMP_CLAUSE_ORDERED
:
10121 case OMP_CLAUSE_UNTIED
:
10122 case OMP_CLAUSE_COLLAPSE
:
10123 case OMP_CLAUSE_TILE
:
10124 case OMP_CLAUSE_AUTO
:
10125 case OMP_CLAUSE_SEQ
:
10126 case OMP_CLAUSE_INDEPENDENT
:
10127 case OMP_CLAUSE_MERGEABLE
:
10128 case OMP_CLAUSE_PROC_BIND
:
10129 case OMP_CLAUSE_SAFELEN
:
10130 case OMP_CLAUSE_SIMDLEN
:
10131 case OMP_CLAUSE_NOGROUP
:
10132 case OMP_CLAUSE_THREADS
:
10133 case OMP_CLAUSE_SIMD
:
10134 case OMP_CLAUSE_BIND
:
10135 case OMP_CLAUSE_IF_PRESENT
:
10136 case OMP_CLAUSE_FINALIZE
:
10139 case OMP_CLAUSE_ORDER
:
10140 ctx
->order_concurrent
= true;
10143 case OMP_CLAUSE_DEFAULTMAP
:
10144 enum gimplify_defaultmap_kind gdmkmin
, gdmkmax
;
10145 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c
))
10147 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
:
10148 gdmkmin
= GDMK_SCALAR
;
10149 gdmkmax
= GDMK_POINTER
;
10151 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
:
10152 gdmkmin
= GDMK_SCALAR
;
10153 gdmkmax
= GDMK_SCALAR_TARGET
;
10155 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
:
10156 gdmkmin
= gdmkmax
= GDMK_AGGREGATE
;
10158 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
:
10159 gdmkmin
= gdmkmax
= GDMK_ALLOCATABLE
;
10161 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
:
10162 gdmkmin
= gdmkmax
= GDMK_POINTER
;
10165 gcc_unreachable ();
10167 for (int gdmk
= gdmkmin
; gdmk
<= gdmkmax
; gdmk
++)
10168 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c
))
10170 case OMP_CLAUSE_DEFAULTMAP_ALLOC
:
10171 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_ALLOC_ONLY
;
10173 case OMP_CLAUSE_DEFAULTMAP_TO
:
10174 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_TO_ONLY
;
10176 case OMP_CLAUSE_DEFAULTMAP_FROM
:
10177 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FROM_ONLY
;
10179 case OMP_CLAUSE_DEFAULTMAP_TOFROM
:
10180 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
10182 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
:
10183 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
10185 case OMP_CLAUSE_DEFAULTMAP_NONE
:
10186 ctx
->defaultmap
[gdmk
] = 0;
10188 case OMP_CLAUSE_DEFAULTMAP_DEFAULT
:
10192 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
10194 case GDMK_SCALAR_TARGET
:
10195 ctx
->defaultmap
[gdmk
] = (lang_GNU_Fortran ()
10196 ? GOVD_MAP
: GOVD_FIRSTPRIVATE
);
10198 case GDMK_AGGREGATE
:
10199 case GDMK_ALLOCATABLE
:
10200 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
10203 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
10204 if (!lang_GNU_Fortran ())
10205 ctx
->defaultmap
[gdmk
] |= GOVD_MAP_0LEN_ARRAY
;
10208 gcc_unreachable ();
10212 gcc_unreachable ();
10216 case OMP_CLAUSE_ALIGNED
:
10217 decl
= OMP_CLAUSE_DECL (c
);
10218 if (error_operand_p (decl
))
10223 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
10224 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10229 if (!is_global_var (decl
)
10230 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
10231 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
10234 case OMP_CLAUSE_NONTEMPORAL
:
10235 decl
= OMP_CLAUSE_DECL (c
);
10236 if (error_operand_p (decl
))
10241 omp_add_variable (ctx
, decl
, GOVD_NONTEMPORAL
);
10244 case OMP_CLAUSE_ALLOCATE
:
10245 decl
= OMP_CLAUSE_DECL (c
);
10246 if (error_operand_p (decl
))
10251 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
), pre_p
, NULL
,
10252 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
10257 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
10258 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
10261 else if (code
== OMP_TASKLOOP
10262 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
10263 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
10264 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
10265 pre_p
, NULL
, false);
10268 case OMP_CLAUSE_DEFAULT
:
10269 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
10272 case OMP_CLAUSE_INCLUSIVE
:
10273 case OMP_CLAUSE_EXCLUSIVE
:
10274 decl
= OMP_CLAUSE_DECL (c
);
10276 splay_tree_node n
= splay_tree_lookup (outer_ctx
->variables
,
10277 (splay_tree_key
) decl
);
10278 if (n
== NULL
|| (n
->value
& GOVD_REDUCTION
) == 0)
10280 error_at (OMP_CLAUSE_LOCATION (c
),
10281 "%qD specified in %qs clause but not in %<inscan%> "
10282 "%<reduction%> clause on the containing construct",
10283 decl
, omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
10288 n
->value
|= GOVD_REDUCTION_INSCAN
;
10289 if (outer_ctx
->region_type
== ORT_SIMD
10290 && outer_ctx
->outer_context
10291 && outer_ctx
->outer_context
->region_type
== ORT_WORKSHARE
)
10293 n
= splay_tree_lookup (outer_ctx
->outer_context
->variables
,
10294 (splay_tree_key
) decl
);
10295 if (n
&& (n
->value
& GOVD_REDUCTION
) != 0)
10296 n
->value
|= GOVD_REDUCTION_INSCAN
;
10303 gcc_unreachable ();
10306 if (code
== OACC_DATA
10307 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
10308 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
10309 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10312 *list_p
= OMP_CLAUSE_CHAIN (c
);
10314 list_p
= &OMP_CLAUSE_CHAIN (c
);
10317 ctx
->clauses
= *orig_list_p
;
10318 gimplify_omp_ctxp
= ctx
;
10319 if (struct_map_to_clause
)
10320 delete struct_map_to_clause
;
10321 if (struct_deref_set
)
10322 delete struct_deref_set
;
10325 /* Return true if DECL is a candidate for shared to firstprivate
10326 optimization. We only consider non-addressable scalars, not
10327 too big, and not references. */
10330 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
10332 if (TREE_ADDRESSABLE (decl
))
10334 tree type
= TREE_TYPE (decl
);
10335 if (!is_gimple_reg_type (type
)
10336 || TREE_CODE (type
) == REFERENCE_TYPE
10337 || TREE_ADDRESSABLE (type
))
10339 /* Don't optimize too large decls, as each thread/task will have
10341 HOST_WIDE_INT len
= int_size_in_bytes (type
);
10342 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
10344 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
10349 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
10350 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
10351 GOVD_WRITTEN in outer contexts. */
10354 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
10356 for (; ctx
; ctx
= ctx
->outer_context
)
10358 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
10359 (splay_tree_key
) decl
);
10362 else if (n
->value
& GOVD_SHARED
)
10364 n
->value
|= GOVD_WRITTEN
;
10367 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
10372 /* Helper callback for walk_gimple_seq to discover possible stores
10373 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
10374 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
10378 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
10380 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
10382 *walk_subtrees
= 0;
10389 if (handled_component_p (op
))
10390 op
= TREE_OPERAND (op
, 0);
10391 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
10392 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
10393 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
10398 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
10401 omp_mark_stores (gimplify_omp_ctxp
, op
);
10405 /* Helper callback for walk_gimple_seq to discover possible stores
10406 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
10407 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
10411 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
10412 bool *handled_ops_p
,
10413 struct walk_stmt_info
*wi
)
10415 gimple
*stmt
= gsi_stmt (*gsi_p
);
10416 switch (gimple_code (stmt
))
10418 /* Don't recurse on OpenMP constructs for which
10419 gimplify_adjust_omp_clauses already handled the bodies,
10420 except handle gimple_omp_for_pre_body. */
10421 case GIMPLE_OMP_FOR
:
10422 *handled_ops_p
= true;
10423 if (gimple_omp_for_pre_body (stmt
))
10424 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
10425 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
10427 case GIMPLE_OMP_PARALLEL
:
10428 case GIMPLE_OMP_TASK
:
10429 case GIMPLE_OMP_SECTIONS
:
10430 case GIMPLE_OMP_SINGLE
:
10431 case GIMPLE_OMP_TARGET
:
10432 case GIMPLE_OMP_TEAMS
:
10433 case GIMPLE_OMP_CRITICAL
:
10434 *handled_ops_p
= true;
10442 struct gimplify_adjust_omp_clauses_data
10448 /* For all variables that were not actually used within the context,
10449 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
10452 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
10454 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
10456 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
10457 tree decl
= (tree
) n
->key
;
10458 unsigned flags
= n
->value
;
10459 enum omp_clause_code code
;
10461 bool private_debug
;
10463 if (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
10464 && (flags
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0)
10465 flags
= GOVD_SHARED
| GOVD_SEEN
| GOVD_WRITTEN
;
10466 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
10468 if ((flags
& GOVD_SEEN
) == 0)
10470 if ((flags
& GOVD_MAP_HAS_ATTACHMENTS
) != 0)
10472 if (flags
& GOVD_DEBUG_PRIVATE
)
10474 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_SHARED
);
10475 private_debug
= true;
10477 else if (flags
& GOVD_MAP
)
10478 private_debug
= false;
10481 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
10482 !!(flags
& GOVD_SHARED
));
10484 code
= OMP_CLAUSE_PRIVATE
;
10485 else if (flags
& GOVD_MAP
)
10487 code
= OMP_CLAUSE_MAP
;
10488 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
10489 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
10491 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
10495 && DECL_IN_CONSTANT_POOL (decl
)
10496 && !lookup_attribute ("omp declare target",
10497 DECL_ATTRIBUTES (decl
)))
10499 tree id
= get_identifier ("omp declare target");
10500 DECL_ATTRIBUTES (decl
)
10501 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
10502 varpool_node
*node
= varpool_node::get (decl
);
10505 node
->offloadable
= 1;
10506 if (ENABLE_OFFLOADING
)
10507 g
->have_offload
= true;
10511 else if (flags
& GOVD_SHARED
)
10513 if (is_global_var (decl
))
10515 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
10516 while (ctx
!= NULL
)
10519 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10520 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
10521 | GOVD_PRIVATE
| GOVD_REDUCTION
10522 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
10524 ctx
= ctx
->outer_context
;
10529 code
= OMP_CLAUSE_SHARED
;
10530 /* Don't optimize shared into firstprivate for read-only vars
10531 on tasks with depend clause, we shouldn't try to copy them
10532 until the dependencies are satisfied. */
10533 if (gimplify_omp_ctxp
->has_depend
)
10534 flags
|= GOVD_WRITTEN
;
10536 else if (flags
& GOVD_PRIVATE
)
10537 code
= OMP_CLAUSE_PRIVATE
;
10538 else if (flags
& GOVD_FIRSTPRIVATE
)
10540 code
= OMP_CLAUSE_FIRSTPRIVATE
;
10541 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
10542 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
10543 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
10545 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
10546 "%<target%> construct", decl
);
10550 else if (flags
& GOVD_LASTPRIVATE
)
10551 code
= OMP_CLAUSE_LASTPRIVATE
;
10552 else if (flags
& (GOVD_ALIGNED
| GOVD_NONTEMPORAL
))
10554 else if (flags
& GOVD_CONDTEMP
)
10556 code
= OMP_CLAUSE__CONDTEMP_
;
10557 gimple_add_tmp_var (decl
);
10560 gcc_unreachable ();
10562 if (((flags
& GOVD_LASTPRIVATE
)
10563 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
10564 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10565 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10567 tree chain
= *list_p
;
10568 clause
= build_omp_clause (input_location
, code
);
10569 OMP_CLAUSE_DECL (clause
) = decl
;
10570 OMP_CLAUSE_CHAIN (clause
) = chain
;
10572 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
10573 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
10574 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
10575 else if (code
== OMP_CLAUSE_SHARED
10576 && (flags
& GOVD_WRITTEN
) == 0
10577 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10578 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
10579 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
10580 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
10581 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
10583 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
10584 OMP_CLAUSE_DECL (nc
) = decl
;
10585 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
10586 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
10587 OMP_CLAUSE_DECL (clause
)
10588 = build_simple_mem_ref_loc (input_location
, decl
);
10589 OMP_CLAUSE_DECL (clause
)
10590 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
10591 build_int_cst (build_pointer_type (char_type_node
), 0));
10592 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
10593 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10594 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
10595 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
10596 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
10597 OMP_CLAUSE_CHAIN (nc
) = chain
;
10598 OMP_CLAUSE_CHAIN (clause
) = nc
;
10599 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10600 gimplify_omp_ctxp
= ctx
->outer_context
;
10601 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
10602 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
10603 gimplify_omp_ctxp
= ctx
;
10605 else if (code
== OMP_CLAUSE_MAP
)
10608 /* Not all combinations of these GOVD_MAP flags are actually valid. */
10609 switch (flags
& (GOVD_MAP_TO_ONLY
10611 | GOVD_MAP_FORCE_PRESENT
10612 | GOVD_MAP_ALLOC_ONLY
10613 | GOVD_MAP_FROM_ONLY
))
10616 kind
= GOMP_MAP_TOFROM
;
10618 case GOVD_MAP_FORCE
:
10619 kind
= GOMP_MAP_TOFROM
| GOMP_MAP_FLAG_FORCE
;
10621 case GOVD_MAP_TO_ONLY
:
10622 kind
= GOMP_MAP_TO
;
10624 case GOVD_MAP_FROM_ONLY
:
10625 kind
= GOMP_MAP_FROM
;
10627 case GOVD_MAP_ALLOC_ONLY
:
10628 kind
= GOMP_MAP_ALLOC
;
10630 case GOVD_MAP_TO_ONLY
| GOVD_MAP_FORCE
:
10631 kind
= GOMP_MAP_TO
| GOMP_MAP_FLAG_FORCE
;
10633 case GOVD_MAP_FORCE_PRESENT
:
10634 kind
= GOMP_MAP_FORCE_PRESENT
;
10637 gcc_unreachable ();
10639 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
10640 if (DECL_SIZE (decl
)
10641 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
10643 tree decl2
= DECL_VALUE_EXPR (decl
);
10644 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10645 decl2
= TREE_OPERAND (decl2
, 0);
10646 gcc_assert (DECL_P (decl2
));
10647 tree mem
= build_simple_mem_ref (decl2
);
10648 OMP_CLAUSE_DECL (clause
) = mem
;
10649 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10650 if (gimplify_omp_ctxp
->outer_context
)
10652 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
10653 omp_notice_variable (ctx
, decl2
, true);
10654 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
10656 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
10658 OMP_CLAUSE_DECL (nc
) = decl
;
10659 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10660 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
10661 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
10663 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
10664 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
10665 OMP_CLAUSE_CHAIN (clause
) = nc
;
10667 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
10668 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
10670 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
10671 OMP_CLAUSE_SIZE (clause
)
10672 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
10673 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10674 gimplify_omp_ctxp
= ctx
->outer_context
;
10675 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
10676 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
10677 gimplify_omp_ctxp
= ctx
;
10678 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
10680 OMP_CLAUSE_DECL (nc
) = decl
;
10681 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10682 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
10683 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
10684 OMP_CLAUSE_CHAIN (clause
) = nc
;
10687 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
10689 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
10691 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
10692 OMP_CLAUSE_DECL (nc
) = decl
;
10693 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
10694 OMP_CLAUSE_CHAIN (nc
) = chain
;
10695 OMP_CLAUSE_CHAIN (clause
) = nc
;
10696 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10697 gimplify_omp_ctxp
= ctx
->outer_context
;
10698 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
10699 (ctx
->region_type
& ORT_ACC
) != 0);
10700 gimplify_omp_ctxp
= ctx
;
10703 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10704 gimplify_omp_ctxp
= ctx
->outer_context
;
10705 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
,
10706 (ctx
->region_type
& ORT_ACC
) != 0);
10707 if (gimplify_omp_ctxp
)
10708 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
10709 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
10710 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
10711 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
10713 gimplify_omp_ctxp
= ctx
;
10718 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
10719 enum tree_code code
)
10721 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10722 tree
*orig_list_p
= list_p
;
10724 bool has_inscan_reductions
= false;
10728 struct gimplify_omp_ctx
*octx
;
10729 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
10730 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
10734 struct walk_stmt_info wi
;
10735 memset (&wi
, 0, sizeof (wi
));
10736 walk_gimple_seq (body
, omp_find_stores_stmt
,
10737 omp_find_stores_op
, &wi
);
10741 if (ctx
->add_safelen1
)
10743 /* If there are VLAs in the body of simd loop, prevent
10745 gcc_assert (ctx
->region_type
== ORT_SIMD
);
10746 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
10747 OMP_CLAUSE_SAFELEN_EXPR (c
) = integer_one_node
;
10748 OMP_CLAUSE_CHAIN (c
) = *list_p
;
10750 list_p
= &OMP_CLAUSE_CHAIN (c
);
10753 if (ctx
->region_type
== ORT_WORKSHARE
10754 && ctx
->outer_context
10755 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
)
10757 for (c
= ctx
->outer_context
->clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10758 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
10759 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
10761 decl
= OMP_CLAUSE_DECL (c
);
10763 = splay_tree_lookup (ctx
->outer_context
->variables
,
10764 (splay_tree_key
) decl
);
10765 gcc_checking_assert (!splay_tree_lookup (ctx
->variables
,
10766 (splay_tree_key
) decl
));
10767 omp_add_variable (ctx
, decl
, n
->value
);
10768 tree c2
= copy_node (c
);
10769 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
10771 if ((n
->value
& GOVD_FIRSTPRIVATE
) == 0)
10773 c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10774 OMP_CLAUSE_FIRSTPRIVATE
);
10775 OMP_CLAUSE_DECL (c2
) = decl
;
10776 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
10780 while ((c
= *list_p
) != NULL
)
10783 bool remove
= false;
10785 switch (OMP_CLAUSE_CODE (c
))
10787 case OMP_CLAUSE_FIRSTPRIVATE
:
10788 if ((ctx
->region_type
& ORT_TARGET
)
10789 && (ctx
->region_type
& ORT_ACC
) == 0
10790 && TYPE_ATOMIC (strip_array_types
10791 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
10793 error_at (OMP_CLAUSE_LOCATION (c
),
10794 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10795 "%<target%> construct", OMP_CLAUSE_DECL (c
));
10799 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
))
10801 decl
= OMP_CLAUSE_DECL (c
);
10802 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10803 if ((n
->value
& GOVD_MAP
) != 0)
10808 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c
) = 0;
10809 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c
) = 0;
10812 case OMP_CLAUSE_PRIVATE
:
10813 case OMP_CLAUSE_SHARED
:
10814 case OMP_CLAUSE_LINEAR
:
10815 decl
= OMP_CLAUSE_DECL (c
);
10816 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10817 remove
= !(n
->value
& GOVD_SEEN
);
10818 if ((n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0
10819 && code
== OMP_PARALLEL
10820 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10824 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
10825 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
10826 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
10828 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
10829 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
10831 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
10832 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
10834 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
10837 n
->value
|= GOVD_WRITTEN
;
10838 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
10839 && (n
->value
& GOVD_WRITTEN
) == 0
10841 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10842 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
10843 else if (DECL_P (decl
)
10844 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
10845 && (n
->value
& GOVD_WRITTEN
) != 0)
10846 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10847 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
10848 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10849 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10852 n
->value
&= ~GOVD_EXPLICIT
;
10855 case OMP_CLAUSE_LASTPRIVATE
:
10856 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10857 accurately reflect the presence of a FIRSTPRIVATE clause. */
10858 decl
= OMP_CLAUSE_DECL (c
);
10859 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10860 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
10861 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
10862 if (code
== OMP_DISTRIBUTE
10863 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
10866 error_at (OMP_CLAUSE_LOCATION (c
),
10867 "same variable used in %<firstprivate%> and "
10868 "%<lastprivate%> clauses on %<distribute%> "
10872 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
10874 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10875 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10876 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) && code
== OMP_PARALLEL
)
10880 case OMP_CLAUSE_ALIGNED
:
10881 decl
= OMP_CLAUSE_DECL (c
);
10882 if (!is_global_var (decl
))
10884 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10885 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
10886 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
10888 struct gimplify_omp_ctx
*octx
;
10890 && (n
->value
& (GOVD_DATA_SHARE_CLASS
10891 & ~GOVD_FIRSTPRIVATE
)))
10894 for (octx
= ctx
->outer_context
; octx
;
10895 octx
= octx
->outer_context
)
10897 n
= splay_tree_lookup (octx
->variables
,
10898 (splay_tree_key
) decl
);
10901 if (n
->value
& GOVD_LOCAL
)
10903 /* We have to avoid assigning a shared variable
10904 to itself when trying to add
10905 __builtin_assume_aligned. */
10906 if (n
->value
& GOVD_SHARED
)
10914 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
10916 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10917 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
10922 case OMP_CLAUSE_NONTEMPORAL
:
10923 decl
= OMP_CLAUSE_DECL (c
);
10924 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10925 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
10928 case OMP_CLAUSE_MAP
:
10929 if (code
== OMP_TARGET_EXIT_DATA
10930 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
10935 decl
= OMP_CLAUSE_DECL (c
);
10936 /* Data clauses associated with reductions must be
10937 compatible with present_or_copy. Warn and adjust the clause
10938 if that is not the case. */
10939 if (ctx
->region_type
== ORT_ACC_PARALLEL
10940 || ctx
->region_type
== ORT_ACC_SERIAL
)
10942 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
10946 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
10948 if (n
&& (n
->value
& GOVD_REDUCTION
))
10950 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
10952 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
10953 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
10954 && kind
!= GOMP_MAP_FORCE_PRESENT
10955 && kind
!= GOMP_MAP_POINTER
)
10957 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
10958 "incompatible data clause with reduction "
10959 "on %qE; promoting to %<present_or_copy%>",
10961 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
10965 if (!DECL_P (decl
))
10967 if ((ctx
->region_type
& ORT_TARGET
) != 0
10968 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
10970 if (TREE_CODE (decl
) == INDIRECT_REF
10971 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
10972 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
10973 == REFERENCE_TYPE
))
10974 decl
= TREE_OPERAND (decl
, 0);
10975 if (TREE_CODE (decl
) == COMPONENT_REF
)
10977 while (TREE_CODE (decl
) == COMPONENT_REF
)
10978 decl
= TREE_OPERAND (decl
, 0);
10981 n
= splay_tree_lookup (ctx
->variables
,
10982 (splay_tree_key
) decl
);
10983 if (!(n
->value
& GOVD_SEEN
))
10990 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10991 if ((ctx
->region_type
& ORT_TARGET
) != 0
10992 && !(n
->value
& GOVD_SEEN
)
10993 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
10994 && (!is_global_var (decl
)
10995 || !lookup_attribute ("omp declare target link",
10996 DECL_ATTRIBUTES (decl
))))
10999 /* For struct element mapping, if struct is never referenced
11000 in target block and none of the mapping has always modifier,
11001 remove all the struct element mappings, which immediately
11002 follow the GOMP_MAP_STRUCT map clause. */
11003 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
11005 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
11007 OMP_CLAUSE_CHAIN (c
)
11008 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
11011 else if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
11012 && (code
== OMP_TARGET_EXIT_DATA
11013 || code
== OACC_EXIT_DATA
))
11015 else if (DECL_SIZE (decl
)
11016 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
11017 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
11018 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
11019 && (OMP_CLAUSE_MAP_KIND (c
)
11020 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
11022 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
11023 for these, TREE_CODE (DECL_SIZE (decl)) will always be
11025 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
11027 tree decl2
= DECL_VALUE_EXPR (decl
);
11028 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
11029 decl2
= TREE_OPERAND (decl2
, 0);
11030 gcc_assert (DECL_P (decl2
));
11031 tree mem
= build_simple_mem_ref (decl2
);
11032 OMP_CLAUSE_DECL (c
) = mem
;
11033 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
11034 if (ctx
->outer_context
)
11036 omp_notice_variable (ctx
->outer_context
, decl2
, true);
11037 omp_notice_variable (ctx
->outer_context
,
11038 OMP_CLAUSE_SIZE (c
), true);
11040 if (((ctx
->region_type
& ORT_TARGET
) != 0
11041 || !ctx
->target_firstprivatize_array_bases
)
11042 && ((n
->value
& GOVD_SEEN
) == 0
11043 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
11045 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
11047 OMP_CLAUSE_DECL (nc
) = decl
;
11048 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
11049 if (ctx
->target_firstprivatize_array_bases
)
11050 OMP_CLAUSE_SET_MAP_KIND (nc
,
11051 GOMP_MAP_FIRSTPRIVATE_POINTER
);
11053 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
11054 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
11055 OMP_CLAUSE_CHAIN (c
) = nc
;
11061 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
11062 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
11063 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
11064 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
11069 case OMP_CLAUSE_TO
:
11070 case OMP_CLAUSE_FROM
:
11071 case OMP_CLAUSE__CACHE_
:
11072 decl
= OMP_CLAUSE_DECL (c
);
11073 if (!DECL_P (decl
))
11075 if (DECL_SIZE (decl
)
11076 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
11078 tree decl2
= DECL_VALUE_EXPR (decl
);
11079 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
11080 decl2
= TREE_OPERAND (decl2
, 0);
11081 gcc_assert (DECL_P (decl2
));
11082 tree mem
= build_simple_mem_ref (decl2
);
11083 OMP_CLAUSE_DECL (c
) = mem
;
11084 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
11085 if (ctx
->outer_context
)
11087 omp_notice_variable (ctx
->outer_context
, decl2
, true);
11088 omp_notice_variable (ctx
->outer_context
,
11089 OMP_CLAUSE_SIZE (c
), true);
11092 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
11093 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
11096 case OMP_CLAUSE_REDUCTION
:
11097 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
11099 decl
= OMP_CLAUSE_DECL (c
);
11100 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11101 if ((n
->value
& GOVD_REDUCTION_INSCAN
) == 0)
11104 error_at (OMP_CLAUSE_LOCATION (c
),
11105 "%qD specified in %<inscan%> %<reduction%> clause "
11106 "but not in %<scan%> directive clause", decl
);
11109 has_inscan_reductions
= true;
11112 case OMP_CLAUSE_IN_REDUCTION
:
11113 case OMP_CLAUSE_TASK_REDUCTION
:
11114 decl
= OMP_CLAUSE_DECL (c
);
11115 /* OpenACC reductions need a present_or_copy data clause.
11116 Add one if necessary. Emit error when the reduction is private. */
11117 if (ctx
->region_type
== ORT_ACC_PARALLEL
11118 || ctx
->region_type
== ORT_ACC_SERIAL
)
11120 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11121 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
11124 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
11125 "reduction on %qE", DECL_NAME (decl
));
11127 else if ((n
->value
& GOVD_MAP
) == 0)
11129 tree next
= OMP_CLAUSE_CHAIN (c
);
11130 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
11131 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
11132 OMP_CLAUSE_DECL (nc
) = decl
;
11133 OMP_CLAUSE_CHAIN (c
) = nc
;
11134 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
11139 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
11140 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
11142 nc
= OMP_CLAUSE_CHAIN (nc
);
11144 OMP_CLAUSE_CHAIN (nc
) = next
;
11145 n
->value
|= GOVD_MAP
;
11149 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
11150 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
11153 case OMP_CLAUSE_ALLOCATE
:
11154 decl
= OMP_CLAUSE_DECL (c
);
11155 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
11156 if (n
!= NULL
&& !(n
->value
& GOVD_SEEN
))
11158 if ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
| GOVD_LINEAR
))
11160 && (n
->value
& (GOVD_REDUCTION
| GOVD_LASTPRIVATE
)) == 0)
11164 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
11165 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)) != INTEGER_CST
11166 && ((ctx
->region_type
& (ORT_PARALLEL
| ORT_TARGET
)) != 0
11167 || (ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASK
11168 || (ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
))
11170 tree allocator
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
11171 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) allocator
);
11174 enum omp_clause_default_kind default_kind
11175 = ctx
->default_kind
;
11176 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
11177 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
11179 ctx
->default_kind
= default_kind
;
11182 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
11187 case OMP_CLAUSE_COPYIN
:
11188 case OMP_CLAUSE_COPYPRIVATE
:
11189 case OMP_CLAUSE_IF
:
11190 case OMP_CLAUSE_NUM_THREADS
:
11191 case OMP_CLAUSE_NUM_TEAMS
:
11192 case OMP_CLAUSE_THREAD_LIMIT
:
11193 case OMP_CLAUSE_DIST_SCHEDULE
:
11194 case OMP_CLAUSE_DEVICE
:
11195 case OMP_CLAUSE_SCHEDULE
:
11196 case OMP_CLAUSE_NOWAIT
:
11197 case OMP_CLAUSE_ORDERED
:
11198 case OMP_CLAUSE_DEFAULT
:
11199 case OMP_CLAUSE_UNTIED
:
11200 case OMP_CLAUSE_COLLAPSE
:
11201 case OMP_CLAUSE_FINAL
:
11202 case OMP_CLAUSE_MERGEABLE
:
11203 case OMP_CLAUSE_PROC_BIND
:
11204 case OMP_CLAUSE_SAFELEN
:
11205 case OMP_CLAUSE_SIMDLEN
:
11206 case OMP_CLAUSE_DEPEND
:
11207 case OMP_CLAUSE_PRIORITY
:
11208 case OMP_CLAUSE_GRAINSIZE
:
11209 case OMP_CLAUSE_NUM_TASKS
:
11210 case OMP_CLAUSE_NOGROUP
:
11211 case OMP_CLAUSE_THREADS
:
11212 case OMP_CLAUSE_SIMD
:
11213 case OMP_CLAUSE_HINT
:
11214 case OMP_CLAUSE_DEFAULTMAP
:
11215 case OMP_CLAUSE_ORDER
:
11216 case OMP_CLAUSE_BIND
:
11217 case OMP_CLAUSE_DETACH
:
11218 case OMP_CLAUSE_USE_DEVICE_PTR
:
11219 case OMP_CLAUSE_USE_DEVICE_ADDR
:
11220 case OMP_CLAUSE_IS_DEVICE_PTR
:
11221 case OMP_CLAUSE_ASYNC
:
11222 case OMP_CLAUSE_WAIT
:
11223 case OMP_CLAUSE_INDEPENDENT
:
11224 case OMP_CLAUSE_NUM_GANGS
:
11225 case OMP_CLAUSE_NUM_WORKERS
:
11226 case OMP_CLAUSE_VECTOR_LENGTH
:
11227 case OMP_CLAUSE_GANG
:
11228 case OMP_CLAUSE_WORKER
:
11229 case OMP_CLAUSE_VECTOR
:
11230 case OMP_CLAUSE_AUTO
:
11231 case OMP_CLAUSE_SEQ
:
11232 case OMP_CLAUSE_TILE
:
11233 case OMP_CLAUSE_IF_PRESENT
:
11234 case OMP_CLAUSE_FINALIZE
:
11235 case OMP_CLAUSE_INCLUSIVE
:
11236 case OMP_CLAUSE_EXCLUSIVE
:
11240 gcc_unreachable ();
11244 *list_p
= OMP_CLAUSE_CHAIN (c
);
11246 list_p
= &OMP_CLAUSE_CHAIN (c
);
11249 /* Add in any implicit data sharing. */
11250 struct gimplify_adjust_omp_clauses_data data
;
11251 data
.list_p
= list_p
;
11252 data
.pre_p
= pre_p
;
11253 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
11255 if (has_inscan_reductions
)
11256 for (c
= *orig_list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
11257 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11258 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
11260 error_at (OMP_CLAUSE_LOCATION (c
),
11261 "%<inscan%> %<reduction%> clause used together with "
11262 "%<linear%> clause for a variable other than loop "
11267 gimplify_omp_ctxp
= ctx
->outer_context
;
11268 delete_omp_context (ctx
);
11271 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
11272 -1 if unknown yet (simd is involved, won't be known until vectorization)
11273 and 1 if they do. If SCORES is non-NULL, it should point to an array
11274 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
11275 of the CONSTRUCTS (position -1 if it will never match) followed by
11276 number of constructs in the OpenMP context construct trait. If the
11277 score depends on whether it will be in a declare simd clone or not,
11278 the function returns 2 and there will be two sets of the scores, the first
11279 one for the case that it is not in a declare simd clone, the other
11280 that it is in a declare simd clone. */
11283 omp_construct_selector_matches (enum tree_code
*constructs
, int nconstructs
,
11286 int matched
= 0, cnt
= 0;
11287 bool simd_seen
= false;
11288 bool target_seen
= false;
11289 int declare_simd_cnt
= -1;
11290 auto_vec
<enum tree_code
, 16> codes
;
11291 for (struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
; ctx
;)
11293 if (((ctx
->region_type
& ORT_PARALLEL
) && ctx
->code
== OMP_PARALLEL
)
11294 || ((ctx
->region_type
& (ORT_TARGET
| ORT_IMPLICIT_TARGET
| ORT_ACC
))
11295 == ORT_TARGET
&& ctx
->code
== OMP_TARGET
)
11296 || ((ctx
->region_type
& ORT_TEAMS
) && ctx
->code
== OMP_TEAMS
)
11297 || (ctx
->region_type
== ORT_WORKSHARE
&& ctx
->code
== OMP_FOR
)
11298 || (ctx
->region_type
== ORT_SIMD
11299 && ctx
->code
== OMP_SIMD
11300 && !omp_find_clause (ctx
->clauses
, OMP_CLAUSE_BIND
)))
11304 codes
.safe_push (ctx
->code
);
11305 else if (matched
< nconstructs
&& ctx
->code
== constructs
[matched
])
11307 if (ctx
->code
== OMP_SIMD
)
11315 if (ctx
->code
== OMP_TARGET
)
11317 if (scores
== NULL
)
11318 return matched
< nconstructs
? 0 : simd_seen
? -1 : 1;
11319 target_seen
= true;
11323 else if (ctx
->region_type
== ORT_WORKSHARE
11324 && ctx
->code
== OMP_LOOP
11325 && ctx
->outer_context
11326 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
11327 && ctx
->outer_context
->outer_context
11328 && ctx
->outer_context
->outer_context
->code
== OMP_LOOP
11329 && ctx
->outer_context
->outer_context
->distribute
)
11330 ctx
= ctx
->outer_context
->outer_context
;
11331 ctx
= ctx
->outer_context
;
11334 && lookup_attribute ("omp declare simd",
11335 DECL_ATTRIBUTES (current_function_decl
)))
11337 /* Declare simd is a maybe case, it is supposed to be added only to the
11338 omp-simd-clone.c added clones and not to the base function. */
11339 declare_simd_cnt
= cnt
++;
11341 codes
.safe_push (OMP_SIMD
);
11343 && constructs
[0] == OMP_SIMD
)
11345 gcc_assert (matched
== 0);
11347 if (++matched
== nconstructs
)
11351 if (tree attr
= lookup_attribute ("omp declare variant variant",
11352 DECL_ATTRIBUTES (current_function_decl
)))
11354 enum tree_code variant_constructs
[5];
11355 int variant_nconstructs
= 0;
11357 variant_nconstructs
11358 = omp_constructor_traits_to_codes (TREE_VALUE (attr
),
11359 variant_constructs
);
11360 for (int i
= 0; i
< variant_nconstructs
; i
++)
11364 codes
.safe_push (variant_constructs
[i
]);
11365 else if (matched
< nconstructs
11366 && variant_constructs
[i
] == constructs
[matched
])
11368 if (variant_constructs
[i
] == OMP_SIMD
)
11379 && lookup_attribute ("omp declare target block",
11380 DECL_ATTRIBUTES (current_function_decl
)))
11383 codes
.safe_push (OMP_TARGET
);
11384 else if (matched
< nconstructs
&& constructs
[matched
] == OMP_TARGET
)
11389 for (int pass
= 0; pass
< (declare_simd_cnt
== -1 ? 1 : 2); pass
++)
11391 int j
= codes
.length () - 1;
11392 for (int i
= nconstructs
- 1; i
>= 0; i
--)
11395 && (pass
!= 0 || declare_simd_cnt
!= j
)
11396 && constructs
[i
] != codes
[j
])
11398 if (pass
== 0 && declare_simd_cnt
!= -1 && j
> declare_simd_cnt
)
11403 *scores
++ = ((pass
== 0 && declare_simd_cnt
!= -1)
11404 ? codes
.length () - 1 : codes
.length ());
11406 return declare_simd_cnt
== -1 ? 1 : 2;
11408 if (matched
== nconstructs
)
11409 return simd_seen
? -1 : 1;
11413 /* Gimplify OACC_CACHE. */
11416 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
11418 tree expr
= *expr_p
;
11420 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
11422 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
11425 /* TODO: Do something sensible with this information. */
11427 *expr_p
= NULL_TREE
;
11430 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
11431 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
11432 kind. The entry kind will replace the one in CLAUSE, while the exit
11433 kind will be used in a new omp_clause and returned to the caller. */
11436 gimplify_oacc_declare_1 (tree clause
)
11438 HOST_WIDE_INT kind
, new_op
;
11442 kind
= OMP_CLAUSE_MAP_KIND (clause
);
11446 case GOMP_MAP_ALLOC
:
11447 new_op
= GOMP_MAP_RELEASE
;
11451 case GOMP_MAP_FROM
:
11452 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
11453 new_op
= GOMP_MAP_FROM
;
11457 case GOMP_MAP_TOFROM
:
11458 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
11459 new_op
= GOMP_MAP_FROM
;
11463 case GOMP_MAP_DEVICE_RESIDENT
:
11464 case GOMP_MAP_FORCE_DEVICEPTR
:
11465 case GOMP_MAP_FORCE_PRESENT
:
11466 case GOMP_MAP_LINK
:
11467 case GOMP_MAP_POINTER
:
11472 gcc_unreachable ();
11478 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
11479 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
11480 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
11486 /* Gimplify OACC_DECLARE. */
11489 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
11491 tree expr
= *expr_p
;
11493 tree clauses
, t
, decl
;
11495 clauses
= OACC_DECLARE_CLAUSES (expr
);
11497 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
11498 gimplify_adjust_omp_clauses (pre_p
, NULL
, &clauses
, OACC_DECLARE
);
11500 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
11502 decl
= OMP_CLAUSE_DECL (t
);
11504 if (TREE_CODE (decl
) == MEM_REF
)
11505 decl
= TREE_OPERAND (decl
, 0);
11507 if (VAR_P (decl
) && !is_oacc_declared (decl
))
11509 tree attr
= get_identifier ("oacc declare target");
11510 DECL_ATTRIBUTES (decl
) = tree_cons (attr
, NULL_TREE
,
11511 DECL_ATTRIBUTES (decl
));
11515 && !is_global_var (decl
)
11516 && DECL_CONTEXT (decl
) == current_function_decl
)
11518 tree c
= gimplify_oacc_declare_1 (t
);
11521 if (oacc_declare_returns
== NULL
)
11522 oacc_declare_returns
= new hash_map
<tree
, tree
>;
11524 oacc_declare_returns
->put (decl
, c
);
11528 if (gimplify_omp_ctxp
)
11529 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
11532 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
11535 gimplify_seq_add_stmt (pre_p
, stmt
);
11537 *expr_p
= NULL_TREE
;
11540 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
11541 gimplification of the body, as well as scanning the body for used
11542 variables. We need to do this scan now, because variable-sized
11543 decls will be decomposed during gimplification. */
11546 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
11548 tree expr
= *expr_p
;
11550 gimple_seq body
= NULL
;
11552 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
11553 OMP_PARALLEL_COMBINED (expr
)
11554 ? ORT_COMBINED_PARALLEL
11555 : ORT_PARALLEL
, OMP_PARALLEL
);
11557 push_gimplify_context ();
11559 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
11560 if (gimple_code (g
) == GIMPLE_BIND
)
11561 pop_gimplify_context (g
);
11563 pop_gimplify_context (NULL
);
11565 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
11568 g
= gimple_build_omp_parallel (body
,
11569 OMP_PARALLEL_CLAUSES (expr
),
11570 NULL_TREE
, NULL_TREE
);
11571 if (OMP_PARALLEL_COMBINED (expr
))
11572 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
11573 gimplify_seq_add_stmt (pre_p
, g
);
11574 *expr_p
= NULL_TREE
;
11577 /* Gimplify the contents of an OMP_TASK statement. This involves
11578 gimplification of the body, as well as scanning the body for used
11579 variables. We need to do this scan now, because variable-sized
11580 decls will be decomposed during gimplification. */
11583 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
11585 tree expr
= *expr_p
;
11587 gimple_seq body
= NULL
;
11589 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
11590 for (tree c
= OMP_TASK_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11591 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
11592 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET
)
11594 error_at (OMP_CLAUSE_LOCATION (c
),
11595 "%<mutexinoutset%> kind in %<depend%> clause on a "
11596 "%<taskwait%> construct");
11600 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
11601 omp_find_clause (OMP_TASK_CLAUSES (expr
),
11603 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
11605 if (OMP_TASK_BODY (expr
))
11607 push_gimplify_context ();
11609 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
11610 if (gimple_code (g
) == GIMPLE_BIND
)
11611 pop_gimplify_context (g
);
11613 pop_gimplify_context (NULL
);
11616 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
11619 g
= gimple_build_omp_task (body
,
11620 OMP_TASK_CLAUSES (expr
),
11621 NULL_TREE
, NULL_TREE
,
11622 NULL_TREE
, NULL_TREE
, NULL_TREE
);
11623 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
11624 gimple_omp_task_set_taskwait_p (g
, true);
11625 gimplify_seq_add_stmt (pre_p
, g
);
11626 *expr_p
= NULL_TREE
;
11629 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
11630 force it into a temporary initialized in PRE_P and add firstprivate clause
11631 to ORIG_FOR_STMT. */
11634 gimplify_omp_taskloop_expr (tree type
, tree
*tp
, gimple_seq
*pre_p
,
11635 tree orig_for_stmt
)
11637 if (*tp
== NULL
|| is_gimple_constant (*tp
))
11640 *tp
= get_initialized_tmp_var (*tp
, pre_p
, NULL
, false);
11641 /* Reference to pointer conversion is considered useless,
11642 but is significant for firstprivate clause. Force it
11645 && TREE_CODE (type
) == POINTER_TYPE
11646 && TREE_CODE (TREE_TYPE (*tp
)) == REFERENCE_TYPE
)
11648 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
11649 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
, *tp
);
11650 gimplify_and_add (m
, pre_p
);
11654 tree c
= build_omp_clause (input_location
, OMP_CLAUSE_FIRSTPRIVATE
);
11655 OMP_CLAUSE_DECL (c
) = *tp
;
11656 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
11657 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
11660 /* Gimplify the gross structure of an OMP_FOR statement. */
11662 static enum gimplify_status
11663 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
11665 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
11666 enum gimplify_status ret
= GS_ALL_DONE
;
11667 enum gimplify_status tret
;
11669 gimple_seq for_body
, for_pre_body
;
11671 bitmap has_decl_expr
= NULL
;
11672 enum omp_region_type ort
= ORT_WORKSHARE
;
11673 bool openacc
= TREE_CODE (*expr_p
) == OACC_LOOP
;
11675 orig_for_stmt
= for_stmt
= *expr_p
;
11677 bool loop_p
= (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_BIND
)
11679 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
11681 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
11682 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
11683 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
11684 find_combined_omp_for
, data
, NULL
);
11685 if (inner_for_stmt
== NULL_TREE
)
11687 gcc_assert (seen_error ());
11688 *expr_p
= NULL_TREE
;
11691 if (data
[2] && OMP_FOR_PRE_BODY (*data
[2]))
11693 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data
[2]),
11694 &OMP_FOR_PRE_BODY (for_stmt
));
11695 OMP_FOR_PRE_BODY (*data
[2]) = NULL_TREE
;
11697 if (OMP_FOR_PRE_BODY (inner_for_stmt
))
11699 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt
),
11700 &OMP_FOR_PRE_BODY (for_stmt
));
11701 OMP_FOR_PRE_BODY (inner_for_stmt
) = NULL_TREE
;
11706 /* We have some statements or variable declarations in between
11707 the composite construct directives. Move them around the
11710 for (i
= 0; i
< 3; i
++)
11714 if (i
< 2 && data
[i
+ 1] == &OMP_BODY (t
))
11715 data
[i
+ 1] = data
[i
];
11716 *data
[i
] = OMP_BODY (t
);
11717 tree body
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
11718 NULL_TREE
, make_node (BLOCK
));
11719 OMP_BODY (t
) = body
;
11720 append_to_statement_list_force (inner_for_stmt
,
11721 &BIND_EXPR_BODY (body
));
11723 data
[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body
)));
11724 gcc_assert (*data
[3] == inner_for_stmt
);
11729 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
11731 && OMP_FOR_ORIG_DECLS (inner_for_stmt
)
11732 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11734 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11737 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
11738 /* Class iterators aren't allowed on OMP_SIMD, so the only
11739 case we need to solve is distribute parallel for. They are
11740 allowed on the loop construct, but that is already handled
11741 in gimplify_omp_loop. */
11742 gcc_assert (TREE_CODE (inner_for_stmt
) == OMP_FOR
11743 && TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
11745 tree orig_decl
= TREE_PURPOSE (orig
);
11746 tree last
= TREE_VALUE (orig
);
11748 for (pc
= &OMP_FOR_CLAUSES (inner_for_stmt
);
11749 *pc
; pc
= &OMP_CLAUSE_CHAIN (*pc
))
11750 if ((OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
11751 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LASTPRIVATE
)
11752 && OMP_CLAUSE_DECL (*pc
) == orig_decl
)
11754 if (*pc
== NULL_TREE
)
11757 for (spc
= &OMP_PARALLEL_CLAUSES (*data
[1]);
11758 *spc
; spc
= &OMP_CLAUSE_CHAIN (*spc
))
11759 if (OMP_CLAUSE_CODE (*spc
) == OMP_CLAUSE_PRIVATE
11760 && OMP_CLAUSE_DECL (*spc
) == orig_decl
)
11765 *spc
= OMP_CLAUSE_CHAIN (c
);
11766 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
11770 if (*pc
== NULL_TREE
)
11772 else if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
)
11774 /* private clause will appear only on inner_for_stmt.
11775 Change it into firstprivate, and add private clause
11777 tree c
= copy_node (*pc
);
11778 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11779 OMP_FOR_CLAUSES (for_stmt
) = c
;
11780 OMP_CLAUSE_CODE (*pc
) = OMP_CLAUSE_FIRSTPRIVATE
;
11781 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
11785 /* lastprivate clause will appear on both inner_for_stmt
11786 and for_stmt. Add firstprivate clause to
11788 tree c
= build_omp_clause (OMP_CLAUSE_LOCATION (*pc
),
11789 OMP_CLAUSE_FIRSTPRIVATE
);
11790 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (*pc
);
11791 OMP_CLAUSE_CHAIN (c
) = *pc
;
11793 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
11795 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
11796 OMP_CLAUSE_FIRSTPRIVATE
);
11797 OMP_CLAUSE_DECL (c
) = last
;
11798 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11799 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11800 c
= build_omp_clause (UNKNOWN_LOCATION
,
11801 *pc
? OMP_CLAUSE_SHARED
11802 : OMP_CLAUSE_FIRSTPRIVATE
);
11803 OMP_CLAUSE_DECL (c
) = orig_decl
;
11804 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11805 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11807 /* Similarly, take care of C++ range for temporaries, those should
11808 be firstprivate on OMP_PARALLEL if any. */
11810 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
11811 if (OMP_FOR_ORIG_DECLS (inner_for_stmt
)
11812 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11814 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11818 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
11819 tree v
= TREE_CHAIN (orig
);
11820 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
11821 OMP_CLAUSE_FIRSTPRIVATE
);
11822 /* First add firstprivate clause for the __for_end artificial
11824 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 1);
11825 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
11827 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
11828 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11829 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11830 if (TREE_VEC_ELT (v
, 0))
11832 /* And now the same for __for_range artificial decl if it
11834 c
= build_omp_clause (UNKNOWN_LOCATION
,
11835 OMP_CLAUSE_FIRSTPRIVATE
);
11836 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 0);
11837 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
11839 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
11840 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11841 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11846 switch (TREE_CODE (for_stmt
))
11849 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
))
11851 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11852 OMP_CLAUSE_SCHEDULE
))
11853 error_at (EXPR_LOCATION (for_stmt
),
11854 "%qs clause may not appear on non-rectangular %qs",
11855 "schedule", "for");
11856 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
))
11857 error_at (EXPR_LOCATION (for_stmt
),
11858 "%qs clause may not appear on non-rectangular %qs",
11862 case OMP_DISTRIBUTE
:
11863 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
)
11864 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11865 OMP_CLAUSE_DIST_SCHEDULE
))
11866 error_at (EXPR_LOCATION (for_stmt
),
11867 "%qs clause may not appear on non-rectangular %qs",
11868 "dist_schedule", "distribute");
11874 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
11875 ort
= ORT_UNTIED_TASKLOOP
;
11877 ort
= ORT_TASKLOOP
;
11883 gcc_unreachable ();
11886 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11887 clause for the IV. */
11888 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
11890 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
11891 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
11892 decl
= TREE_OPERAND (t
, 0);
11893 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11894 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11895 && OMP_CLAUSE_DECL (c
) == decl
)
11897 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
11902 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
11903 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
11904 loop_p
&& TREE_CODE (for_stmt
) != OMP_SIMD
11905 ? OMP_LOOP
: TREE_CODE (for_stmt
));
11907 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
11908 gimplify_omp_ctxp
->distribute
= true;
11910 /* Handle OMP_FOR_INIT. */
11911 for_pre_body
= NULL
;
11912 if ((ort
== ORT_SIMD
11913 || (inner_for_stmt
&& TREE_CODE (inner_for_stmt
) == OMP_SIMD
))
11914 && OMP_FOR_PRE_BODY (for_stmt
))
11916 has_decl_expr
= BITMAP_ALLOC (NULL
);
11917 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
11918 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
11921 t
= OMP_FOR_PRE_BODY (for_stmt
);
11922 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
11924 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
11926 tree_stmt_iterator si
;
11927 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
11931 if (TREE_CODE (t
) == DECL_EXPR
11932 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
11933 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
11937 if (OMP_FOR_PRE_BODY (for_stmt
))
11939 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
11940 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
11943 struct gimplify_omp_ctx ctx
;
11944 memset (&ctx
, 0, sizeof (ctx
));
11945 ctx
.region_type
= ORT_NONE
;
11946 gimplify_omp_ctxp
= &ctx
;
11947 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
11948 gimplify_omp_ctxp
= NULL
;
11951 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
11953 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
11954 for_stmt
= inner_for_stmt
;
11956 /* For taskloop, need to gimplify the start, end and step before the
11957 taskloop, outside of the taskloop omp context. */
11958 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
11960 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11962 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11963 gimple_seq
*for_pre_p
= (gimple_seq_empty_p (for_pre_body
)
11964 ? pre_p
: &for_pre_body
);
11965 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
11966 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
11968 tree v
= TREE_OPERAND (t
, 1);
11969 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
11970 for_pre_p
, orig_for_stmt
);
11971 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
11972 for_pre_p
, orig_for_stmt
);
11975 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
11978 /* Handle OMP_FOR_COND. */
11979 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
11980 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
11982 tree v
= TREE_OPERAND (t
, 1);
11983 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
11984 for_pre_p
, orig_for_stmt
);
11985 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
11986 for_pre_p
, orig_for_stmt
);
11989 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
11992 /* Handle OMP_FOR_INCR. */
11993 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11994 if (TREE_CODE (t
) == MODIFY_EXPR
)
11996 decl
= TREE_OPERAND (t
, 0);
11997 t
= TREE_OPERAND (t
, 1);
11998 tree
*tp
= &TREE_OPERAND (t
, 1);
11999 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
12000 tp
= &TREE_OPERAND (t
, 0);
12002 gimplify_omp_taskloop_expr (NULL_TREE
, tp
, for_pre_p
,
12007 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
12011 if (orig_for_stmt
!= for_stmt
)
12012 gimplify_omp_ctxp
->combined_loop
= true;
12015 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
12016 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
12017 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
12018 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
12020 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
12021 bool is_doacross
= false;
12022 if (c
&& OMP_CLAUSE_ORDERED_EXPR (c
))
12024 is_doacross
= true;
12025 gimplify_omp_ctxp
->loop_iter_var
.create (TREE_VEC_LENGTH
12026 (OMP_FOR_INIT (for_stmt
))
12029 int collapse
= 1, tile
= 0;
12030 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
12032 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
12033 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
12035 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
12036 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ALLOCATE
);
12037 hash_set
<tree
> *allocate_uids
= NULL
;
12040 allocate_uids
= new hash_set
<tree
>;
12041 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
12042 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
)
12043 allocate_uids
->add (OMP_CLAUSE_DECL (c
));
12045 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12047 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12048 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12049 decl
= TREE_OPERAND (t
, 0);
12050 gcc_assert (DECL_P (decl
));
12051 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
12052 || POINTER_TYPE_P (TREE_TYPE (decl
)));
12055 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
12057 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
12058 if (TREE_CODE (orig_decl
) == TREE_LIST
)
12060 orig_decl
= TREE_PURPOSE (orig_decl
);
12064 gimplify_omp_ctxp
->loop_iter_var
.quick_push (orig_decl
);
12067 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
12068 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
12071 /* Make sure the iteration variable is private. */
12072 tree c
= NULL_TREE
;
12073 tree c2
= NULL_TREE
;
12074 if (orig_for_stmt
!= for_stmt
)
12076 /* Preserve this information until we gimplify the inner simd. */
12078 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
12079 TREE_PRIVATE (t
) = 1;
12081 else if (ort
== ORT_SIMD
)
12083 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
12084 (splay_tree_key
) decl
);
12085 omp_is_private (gimplify_omp_ctxp
, decl
,
12086 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
12088 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
12090 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
12091 if (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
)
12092 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
12093 OMP_CLAUSE_LASTPRIVATE
);
12094 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
12095 OMP_CLAUSE_LASTPRIVATE
))
12096 if (OMP_CLAUSE_DECL (c3
) == decl
)
12098 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
12099 "conditional %<lastprivate%> on loop "
12100 "iterator %qD ignored", decl
);
12101 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
12102 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
12105 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1 && !loop_p
)
12107 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
12108 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
12109 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
12111 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
12112 || TREE_PRIVATE (t
))
12114 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
12115 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
12117 struct gimplify_omp_ctx
*outer
12118 = gimplify_omp_ctxp
->outer_context
;
12119 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
12121 if (outer
->region_type
== ORT_WORKSHARE
12122 && outer
->combined_loop
)
12124 n
= splay_tree_lookup (outer
->variables
,
12125 (splay_tree_key
)decl
);
12126 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
12128 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
12129 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
12133 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
12135 && octx
->region_type
== ORT_COMBINED_PARALLEL
12136 && octx
->outer_context
12137 && (octx
->outer_context
->region_type
12139 && octx
->outer_context
->combined_loop
)
12141 octx
= octx
->outer_context
;
12142 n
= splay_tree_lookup (octx
->variables
,
12143 (splay_tree_key
)decl
);
12144 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
12146 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
12147 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
12154 OMP_CLAUSE_DECL (c
) = decl
;
12155 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
12156 OMP_FOR_CLAUSES (for_stmt
) = c
;
12157 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
12158 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
12159 omp_lastprivate_for_combined_outer_constructs (outer
, decl
,
12166 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
12167 if (TREE_PRIVATE (t
))
12168 lastprivate
= false;
12169 if (loop_p
&& OMP_FOR_ORIG_DECLS (for_stmt
))
12171 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
12172 if (TREE_CODE (elt
) == TREE_LIST
&& TREE_PURPOSE (elt
))
12173 lastprivate
= false;
12176 struct gimplify_omp_ctx
*outer
12177 = gimplify_omp_ctxp
->outer_context
;
12178 if (outer
&& lastprivate
)
12179 omp_lastprivate_for_combined_outer_constructs (outer
, decl
,
12182 c
= build_omp_clause (input_location
,
12183 lastprivate
? OMP_CLAUSE_LASTPRIVATE
12184 : OMP_CLAUSE_PRIVATE
);
12185 OMP_CLAUSE_DECL (c
) = decl
;
12186 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
12187 OMP_FOR_CLAUSES (for_stmt
) = c
;
12188 omp_add_variable (gimplify_omp_ctxp
, decl
,
12189 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
12190 | GOVD_EXPLICIT
| GOVD_SEEN
);
12194 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
12196 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
12197 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
12198 (splay_tree_key
) decl
);
12199 if (n
&& (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
))
12200 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
12201 OMP_CLAUSE_LASTPRIVATE
);
12202 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
12203 OMP_CLAUSE_LASTPRIVATE
))
12204 if (OMP_CLAUSE_DECL (c3
) == decl
)
12206 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
12207 "conditional %<lastprivate%> on loop "
12208 "iterator %qD ignored", decl
);
12209 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
12210 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
12214 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
12216 /* If DECL is not a gimple register, create a temporary variable to act
12217 as an iteration counter. This is valid, since DECL cannot be
12218 modified in the body of the loop. Similarly for any iteration vars
12219 in simd with collapse > 1 where the iterator vars must be
12220 lastprivate. And similarly for vars mentioned in allocate clauses. */
12221 if (orig_for_stmt
!= for_stmt
)
12223 else if (!is_gimple_reg (decl
)
12224 || (ort
== ORT_SIMD
12225 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1)
12226 || (allocate_uids
&& allocate_uids
->contains (decl
)))
12228 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12229 /* Make sure omp_add_variable is not called on it prematurely.
12230 We call it ourselves a few lines later. */
12231 gimplify_omp_ctxp
= NULL
;
12232 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
12233 gimplify_omp_ctxp
= ctx
;
12234 TREE_OPERAND (t
, 0) = var
;
12236 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
12238 if (ort
== ORT_SIMD
12239 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
12241 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
12242 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
12243 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
12244 OMP_CLAUSE_DECL (c2
) = var
;
12245 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
12246 OMP_FOR_CLAUSES (for_stmt
) = c2
;
12247 omp_add_variable (gimplify_omp_ctxp
, var
,
12248 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
12249 if (c
== NULL_TREE
)
12256 omp_add_variable (gimplify_omp_ctxp
, var
,
12257 GOVD_PRIVATE
| GOVD_SEEN
);
12262 gimplify_omp_ctxp
->in_for_exprs
= true;
12263 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
12265 tree lb
= TREE_OPERAND (t
, 1);
12266 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 1), &for_pre_body
, NULL
,
12267 is_gimple_val
, fb_rvalue
, false);
12268 ret
= MIN (ret
, tret
);
12269 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 2), &for_pre_body
, NULL
,
12270 is_gimple_val
, fb_rvalue
, false);
12273 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
12274 is_gimple_val
, fb_rvalue
, false);
12275 gimplify_omp_ctxp
->in_for_exprs
= false;
12276 ret
= MIN (ret
, tret
);
12277 if (ret
== GS_ERROR
)
12280 /* Handle OMP_FOR_COND. */
12281 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
12282 gcc_assert (COMPARISON_CLASS_P (t
));
12283 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
12285 gimplify_omp_ctxp
->in_for_exprs
= true;
12286 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
12288 tree ub
= TREE_OPERAND (t
, 1);
12289 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 1), &for_pre_body
, NULL
,
12290 is_gimple_val
, fb_rvalue
, false);
12291 ret
= MIN (ret
, tret
);
12292 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 2), &for_pre_body
, NULL
,
12293 is_gimple_val
, fb_rvalue
, false);
12296 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
12297 is_gimple_val
, fb_rvalue
, false);
12298 gimplify_omp_ctxp
->in_for_exprs
= false;
12299 ret
= MIN (ret
, tret
);
12301 /* Handle OMP_FOR_INCR. */
12302 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12303 switch (TREE_CODE (t
))
12305 case PREINCREMENT_EXPR
:
12306 case POSTINCREMENT_EXPR
:
12308 tree decl
= TREE_OPERAND (t
, 0);
12309 /* c_omp_for_incr_canonicalize_ptr() should have been
12310 called to massage things appropriately. */
12311 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
12313 if (orig_for_stmt
!= for_stmt
)
12315 t
= build_int_cst (TREE_TYPE (decl
), 1);
12317 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
12318 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
12319 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
12320 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
12324 case PREDECREMENT_EXPR
:
12325 case POSTDECREMENT_EXPR
:
12326 /* c_omp_for_incr_canonicalize_ptr() should have been
12327 called to massage things appropriately. */
12328 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
12329 if (orig_for_stmt
!= for_stmt
)
12331 t
= build_int_cst (TREE_TYPE (decl
), -1);
12333 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
12334 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
12335 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
12336 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
12340 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
12341 TREE_OPERAND (t
, 0) = var
;
12343 t
= TREE_OPERAND (t
, 1);
12344 switch (TREE_CODE (t
))
12347 if (TREE_OPERAND (t
, 1) == decl
)
12349 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
12350 TREE_OPERAND (t
, 0) = var
;
12356 case POINTER_PLUS_EXPR
:
12357 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
12358 TREE_OPERAND (t
, 0) = var
;
12361 gcc_unreachable ();
12364 gimplify_omp_ctxp
->in_for_exprs
= true;
12365 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
12366 is_gimple_val
, fb_rvalue
, false);
12367 ret
= MIN (ret
, tret
);
12370 tree step
= TREE_OPERAND (t
, 1);
12371 tree stept
= TREE_TYPE (decl
);
12372 if (POINTER_TYPE_P (stept
))
12374 step
= fold_convert (stept
, step
);
12375 if (TREE_CODE (t
) == MINUS_EXPR
)
12376 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
12377 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
12378 if (step
!= TREE_OPERAND (t
, 1))
12380 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
12381 &for_pre_body
, NULL
,
12382 is_gimple_val
, fb_rvalue
, false);
12383 ret
= MIN (ret
, tret
);
12386 gimplify_omp_ctxp
->in_for_exprs
= false;
12390 gcc_unreachable ();
12396 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
12399 if ((var
!= decl
|| collapse
> 1 || tile
) && orig_for_stmt
== for_stmt
)
12401 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12402 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12403 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
12404 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
12405 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
12406 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
12407 && OMP_CLAUSE_DECL (c
) == decl
)
12409 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
12413 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12414 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12415 gcc_assert (TREE_OPERAND (t
, 0) == var
);
12416 t
= TREE_OPERAND (t
, 1);
12417 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
12418 || TREE_CODE (t
) == MINUS_EXPR
12419 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
12420 gcc_assert (TREE_OPERAND (t
, 0) == var
);
12421 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
12422 is_doacross
? var
: decl
,
12423 TREE_OPERAND (t
, 1));
12426 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
12427 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
12429 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
12430 push_gimplify_context ();
12431 gimplify_assign (decl
, t
, seq
);
12432 gimple
*bind
= NULL
;
12433 if (gimplify_ctxp
->temps
)
12435 bind
= gimple_build_bind (NULL_TREE
, *seq
, NULL_TREE
);
12437 gimplify_seq_add_stmt (seq
, bind
);
12439 pop_gimplify_context (bind
);
12442 if (OMP_FOR_NON_RECTANGULAR (for_stmt
) && var
!= decl
)
12443 for (int j
= i
+ 1; j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
12445 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
12446 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12447 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
12448 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
12449 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
12450 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
12451 gcc_assert (COMPARISON_CLASS_P (t
));
12452 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
12453 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
12454 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
12458 BITMAP_FREE (has_decl_expr
);
12459 delete allocate_uids
;
12461 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
12462 || (loop_p
&& orig_for_stmt
== for_stmt
))
12464 push_gimplify_context ();
12465 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
12467 OMP_FOR_BODY (orig_for_stmt
)
12468 = build3 (BIND_EXPR
, void_type_node
, NULL
,
12469 OMP_FOR_BODY (orig_for_stmt
), NULL
);
12470 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
12474 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
12477 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
12478 || (loop_p
&& orig_for_stmt
== for_stmt
))
12480 if (gimple_code (g
) == GIMPLE_BIND
)
12481 pop_gimplify_context (g
);
12483 pop_gimplify_context (NULL
);
12486 if (orig_for_stmt
!= for_stmt
)
12487 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12489 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12490 decl
= TREE_OPERAND (t
, 0);
12491 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12492 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
12493 gimplify_omp_ctxp
= ctx
->outer_context
;
12494 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
12495 gimplify_omp_ctxp
= ctx
;
12496 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
12497 TREE_OPERAND (t
, 0) = var
;
12498 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12499 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
12500 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
12501 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
12502 for (int j
= i
+ 1;
12503 j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
12505 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
12506 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12507 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
12508 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
12510 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
12511 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
12513 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
12514 gcc_assert (COMPARISON_CLASS_P (t
));
12515 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
12516 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
12518 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
12519 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
12524 gimplify_adjust_omp_clauses (pre_p
, for_body
,
12525 &OMP_FOR_CLAUSES (orig_for_stmt
),
12526 TREE_CODE (orig_for_stmt
));
12529 switch (TREE_CODE (orig_for_stmt
))
12531 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
12532 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
12533 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
12534 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
12535 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
12537 gcc_unreachable ();
12539 if (loop_p
&& kind
== GF_OMP_FOR_KIND_SIMD
)
12541 gimplify_seq_add_seq (pre_p
, for_pre_body
);
12542 for_pre_body
= NULL
;
12544 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
12545 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
12547 if (orig_for_stmt
!= for_stmt
)
12548 gimple_omp_for_set_combined_p (gfor
, true);
12549 if (gimplify_omp_ctxp
12550 && (gimplify_omp_ctxp
->combined_loop
12551 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
12552 && gimplify_omp_ctxp
->outer_context
12553 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
12555 gimple_omp_for_set_combined_into_p (gfor
, true);
12556 if (gimplify_omp_ctxp
->combined_loop
)
12557 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
12559 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
12562 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12564 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12565 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
12566 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
12567 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
12568 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
12569 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
12570 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12571 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
12574 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
12575 constructs with GIMPLE_OMP_TASK sandwiched in between them.
12576 The outer taskloop stands for computing the number of iterations,
12577 counts for collapsed loops and holding taskloop specific clauses.
12578 The task construct stands for the effect of data sharing on the
12579 explicit task it creates and the inner taskloop stands for expansion
12580 of the static loop inside of the explicit task construct. */
12581 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
12583 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
12584 tree task_clauses
= NULL_TREE
;
12585 tree c
= *gfor_clauses_ptr
;
12586 tree
*gtask_clauses_ptr
= &task_clauses
;
12587 tree outer_for_clauses
= NULL_TREE
;
12588 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
12589 bitmap lastprivate_uids
= NULL
;
12590 if (omp_find_clause (c
, OMP_CLAUSE_ALLOCATE
))
12592 c
= omp_find_clause (c
, OMP_CLAUSE_LASTPRIVATE
);
12595 lastprivate_uids
= BITMAP_ALLOC (NULL
);
12596 for (; c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
12597 OMP_CLAUSE_LASTPRIVATE
))
12598 bitmap_set_bit (lastprivate_uids
,
12599 DECL_UID (OMP_CLAUSE_DECL (c
)));
12601 c
= *gfor_clauses_ptr
;
12603 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
12604 switch (OMP_CLAUSE_CODE (c
))
12606 /* These clauses are allowed on task, move them there. */
12607 case OMP_CLAUSE_SHARED
:
12608 case OMP_CLAUSE_FIRSTPRIVATE
:
12609 case OMP_CLAUSE_DEFAULT
:
12610 case OMP_CLAUSE_IF
:
12611 case OMP_CLAUSE_UNTIED
:
12612 case OMP_CLAUSE_FINAL
:
12613 case OMP_CLAUSE_MERGEABLE
:
12614 case OMP_CLAUSE_PRIORITY
:
12615 case OMP_CLAUSE_REDUCTION
:
12616 case OMP_CLAUSE_IN_REDUCTION
:
12617 *gtask_clauses_ptr
= c
;
12618 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12620 case OMP_CLAUSE_PRIVATE
:
12621 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
12623 /* We want private on outer for and firstprivate
12626 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12627 OMP_CLAUSE_FIRSTPRIVATE
);
12628 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12629 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
12631 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12632 *gforo_clauses_ptr
= c
;
12633 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12637 *gtask_clauses_ptr
= c
;
12638 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12641 /* These clauses go into outer taskloop clauses. */
12642 case OMP_CLAUSE_GRAINSIZE
:
12643 case OMP_CLAUSE_NUM_TASKS
:
12644 case OMP_CLAUSE_NOGROUP
:
12645 *gforo_clauses_ptr
= c
;
12646 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12648 /* Collapse clause we duplicate on both taskloops. */
12649 case OMP_CLAUSE_COLLAPSE
:
12650 *gfor_clauses_ptr
= c
;
12651 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12652 *gforo_clauses_ptr
= copy_node (c
);
12653 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
12655 /* For lastprivate, keep the clause on inner taskloop, and add
12656 a shared clause on task. If the same decl is also firstprivate,
12657 add also firstprivate clause on the inner taskloop. */
12658 case OMP_CLAUSE_LASTPRIVATE
:
12659 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
12661 /* For taskloop C++ lastprivate IVs, we want:
12662 1) private on outer taskloop
12663 2) firstprivate and shared on task
12664 3) lastprivate on inner taskloop */
12666 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12667 OMP_CLAUSE_FIRSTPRIVATE
);
12668 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12669 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
12671 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12672 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
12673 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12674 OMP_CLAUSE_PRIVATE
);
12675 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12676 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
12677 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
12678 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
12680 *gfor_clauses_ptr
= c
;
12681 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12683 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
12684 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12685 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
12686 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
12688 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12690 /* Allocate clause we duplicate on task and inner taskloop
12691 if the decl is lastprivate, otherwise just put on task. */
12692 case OMP_CLAUSE_ALLOCATE
:
12693 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
12694 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
12696 /* Additionally, put firstprivate clause on task
12697 for the allocator if it is not constant. */
12699 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12700 OMP_CLAUSE_FIRSTPRIVATE
);
12701 OMP_CLAUSE_DECL (*gtask_clauses_ptr
)
12702 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
12703 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12705 if (lastprivate_uids
12706 && bitmap_bit_p (lastprivate_uids
,
12707 DECL_UID (OMP_CLAUSE_DECL (c
))))
12709 *gfor_clauses_ptr
= c
;
12710 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12711 *gtask_clauses_ptr
= copy_node (c
);
12712 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12716 *gtask_clauses_ptr
= c
;
12717 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12721 gcc_unreachable ();
12723 *gfor_clauses_ptr
= NULL_TREE
;
12724 *gtask_clauses_ptr
= NULL_TREE
;
12725 *gforo_clauses_ptr
= NULL_TREE
;
12726 BITMAP_FREE (lastprivate_uids
);
12727 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
12728 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
12729 NULL_TREE
, NULL_TREE
, NULL_TREE
);
12730 gimple_omp_task_set_taskloop_p (g
, true);
12731 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
12733 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
12734 gimple_omp_for_collapse (gfor
),
12735 gimple_omp_for_pre_body (gfor
));
12736 gimple_omp_for_set_pre_body (gfor
, NULL
);
12737 gimple_omp_for_set_combined_p (gforo
, true);
12738 gimple_omp_for_set_combined_into_p (gfor
, true);
12739 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
12741 tree type
= TREE_TYPE (gimple_omp_for_index (gfor
, i
));
12742 tree v
= create_tmp_var (type
);
12743 gimple_omp_for_set_index (gforo
, i
, v
);
12744 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
12745 gimple_omp_for_set_initial (gforo
, i
, t
);
12746 gimple_omp_for_set_cond (gforo
, i
,
12747 gimple_omp_for_cond (gfor
, i
));
12748 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
12749 gimple_omp_for_set_final (gforo
, i
, t
);
12750 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
12751 gcc_assert (TREE_OPERAND (t
, 0) == gimple_omp_for_index (gfor
, i
));
12752 TREE_OPERAND (t
, 0) = v
;
12753 gimple_omp_for_set_incr (gforo
, i
, t
);
12754 t
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
12755 OMP_CLAUSE_DECL (t
) = v
;
12756 OMP_CLAUSE_CHAIN (t
) = gimple_omp_for_clauses (gforo
);
12757 gimple_omp_for_set_clauses (gforo
, t
);
12758 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
12760 tree
*p1
= NULL
, *p2
= NULL
;
12761 t
= gimple_omp_for_initial (gforo
, i
);
12762 if (TREE_CODE (t
) == TREE_VEC
)
12763 p1
= &TREE_VEC_ELT (t
, 0);
12764 t
= gimple_omp_for_final (gforo
, i
);
12765 if (TREE_CODE (t
) == TREE_VEC
)
12768 p2
= &TREE_VEC_ELT (t
, 0);
12770 p1
= &TREE_VEC_ELT (t
, 0);
12775 for (j
= 0; j
< i
; j
++)
12776 if (*p1
== gimple_omp_for_index (gfor
, j
))
12778 *p1
= gimple_omp_for_index (gforo
, j
);
12783 gcc_assert (j
< i
);
12787 gimplify_seq_add_stmt (pre_p
, gforo
);
12790 gimplify_seq_add_stmt (pre_p
, gfor
);
12792 if (TREE_CODE (orig_for_stmt
) == OMP_FOR
)
12794 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12795 unsigned lastprivate_conditional
= 0;
12797 && (ctx
->region_type
== ORT_TARGET_DATA
12798 || ctx
->region_type
== ORT_TASKGROUP
))
12799 ctx
= ctx
->outer_context
;
12800 if (ctx
&& (ctx
->region_type
& ORT_PARALLEL
) != 0)
12801 for (tree c
= gimple_omp_for_clauses (gfor
);
12802 c
; c
= OMP_CLAUSE_CHAIN (c
))
12803 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12804 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
12805 ++lastprivate_conditional
;
12806 if (lastprivate_conditional
)
12808 struct omp_for_data fd
;
12809 omp_extract_for_data (gfor
, &fd
, NULL
);
12810 tree type
= build_array_type_nelts (unsigned_type_for (fd
.iter_type
),
12811 lastprivate_conditional
);
12812 tree var
= create_tmp_var_raw (type
);
12813 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
12814 OMP_CLAUSE_DECL (c
) = var
;
12815 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
12816 gimple_omp_for_set_clauses (gfor
, c
);
12817 omp_add_variable (ctx
, var
, GOVD_CONDTEMP
| GOVD_SEEN
);
12820 else if (TREE_CODE (orig_for_stmt
) == OMP_SIMD
)
12822 unsigned lastprivate_conditional
= 0;
12823 for (tree c
= gimple_omp_for_clauses (gfor
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12824 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12825 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
12826 ++lastprivate_conditional
;
12827 if (lastprivate_conditional
)
12829 struct omp_for_data fd
;
12830 omp_extract_for_data (gfor
, &fd
, NULL
);
12831 tree type
= unsigned_type_for (fd
.iter_type
);
12832 while (lastprivate_conditional
--)
12834 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
12835 OMP_CLAUSE__CONDTEMP_
);
12836 OMP_CLAUSE_DECL (c
) = create_tmp_var (type
);
12837 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
12838 gimple_omp_for_set_clauses (gfor
, c
);
12843 if (ret
!= GS_ALL_DONE
)
12845 *expr_p
= NULL_TREE
;
12846 return GS_ALL_DONE
;
12849 /* Helper for gimplify_omp_loop, called through walk_tree. */
12852 replace_reduction_placeholders (tree
*tp
, int *walk_subtrees
, void *data
)
12856 tree
*d
= (tree
*) data
;
12857 if (*tp
== OMP_CLAUSE_REDUCTION_PLACEHOLDER (d
[0]))
12859 *tp
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (d
[1]);
12860 *walk_subtrees
= 0;
12862 else if (*tp
== OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d
[0]))
12864 *tp
= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d
[1]);
12865 *walk_subtrees
= 0;
12871 /* Gimplify the gross structure of an OMP_LOOP statement. */
12873 static enum gimplify_status
12874 gimplify_omp_loop (tree
*expr_p
, gimple_seq
*pre_p
)
12876 tree for_stmt
= *expr_p
;
12877 tree clauses
= OMP_FOR_CLAUSES (for_stmt
);
12878 struct gimplify_omp_ctx
*octx
= gimplify_omp_ctxp
;
12879 enum omp_clause_bind_kind kind
= OMP_CLAUSE_BIND_THREAD
;
12882 /* If order is not present, the behavior is as if order(concurrent)
12884 tree order
= omp_find_clause (clauses
, OMP_CLAUSE_ORDER
);
12885 if (order
== NULL_TREE
)
12887 order
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_ORDER
);
12888 OMP_CLAUSE_CHAIN (order
) = clauses
;
12889 OMP_FOR_CLAUSES (for_stmt
) = clauses
= order
;
12892 tree bind
= omp_find_clause (clauses
, OMP_CLAUSE_BIND
);
12893 if (bind
== NULL_TREE
)
12895 if (!flag_openmp
) /* flag_openmp_simd */
12897 else if (octx
&& (octx
->region_type
& ORT_TEAMS
) != 0)
12898 kind
= OMP_CLAUSE_BIND_TEAMS
;
12899 else if (octx
&& (octx
->region_type
& ORT_PARALLEL
) != 0)
12900 kind
= OMP_CLAUSE_BIND_PARALLEL
;
12903 for (; octx
; octx
= octx
->outer_context
)
12905 if ((octx
->region_type
& ORT_ACC
) != 0
12906 || octx
->region_type
== ORT_NONE
12907 || octx
->region_type
== ORT_IMPLICIT_TARGET
)
12911 if (octx
== NULL
&& !in_omp_construct
)
12912 error_at (EXPR_LOCATION (for_stmt
),
12913 "%<bind%> clause not specified on a %<loop%> "
12914 "construct not nested inside another OpenMP construct");
12916 bind
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_BIND
);
12917 OMP_CLAUSE_CHAIN (bind
) = clauses
;
12918 OMP_CLAUSE_BIND_KIND (bind
) = kind
;
12919 OMP_FOR_CLAUSES (for_stmt
) = bind
;
12922 switch (OMP_CLAUSE_BIND_KIND (bind
))
12924 case OMP_CLAUSE_BIND_THREAD
:
12926 case OMP_CLAUSE_BIND_PARALLEL
:
12927 if (!flag_openmp
) /* flag_openmp_simd */
12929 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12932 for (; octx
; octx
= octx
->outer_context
)
12933 if (octx
->region_type
== ORT_SIMD
12934 && omp_find_clause (octx
->clauses
, OMP_CLAUSE_BIND
) == NULL_TREE
)
12936 error_at (EXPR_LOCATION (for_stmt
),
12937 "%<bind(parallel)%> on a %<loop%> construct nested "
12938 "inside %<simd%> construct");
12939 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12942 kind
= OMP_CLAUSE_BIND_PARALLEL
;
12944 case OMP_CLAUSE_BIND_TEAMS
:
12945 if (!flag_openmp
) /* flag_openmp_simd */
12947 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12951 && octx
->region_type
!= ORT_IMPLICIT_TARGET
12952 && octx
->region_type
!= ORT_NONE
12953 && (octx
->region_type
& ORT_TEAMS
) == 0)
12954 || in_omp_construct
)
12956 error_at (EXPR_LOCATION (for_stmt
),
12957 "%<bind(teams)%> on a %<loop%> region not strictly "
12958 "nested inside of a %<teams%> region");
12959 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12962 kind
= OMP_CLAUSE_BIND_TEAMS
;
12965 gcc_unreachable ();
12968 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
12969 switch (OMP_CLAUSE_CODE (*pc
))
12971 case OMP_CLAUSE_REDUCTION
:
12972 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc
))
12974 error_at (OMP_CLAUSE_LOCATION (*pc
),
12975 "%<inscan%> %<reduction%> clause on "
12976 "%qs construct", "loop");
12977 OMP_CLAUSE_REDUCTION_INSCAN (*pc
) = 0;
12979 if (OMP_CLAUSE_REDUCTION_TASK (*pc
))
12981 error_at (OMP_CLAUSE_LOCATION (*pc
),
12982 "invalid %<task%> reduction modifier on construct "
12983 "other than %<parallel%>, %qs or %<sections%>",
12984 lang_GNU_Fortran () ? "do" : "for");
12985 OMP_CLAUSE_REDUCTION_TASK (*pc
) = 0;
12987 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12989 case OMP_CLAUSE_LASTPRIVATE
:
12990 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12992 tree t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12993 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12994 if (OMP_CLAUSE_DECL (*pc
) == TREE_OPERAND (t
, 0))
12996 if (OMP_FOR_ORIG_DECLS (for_stmt
)
12997 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
12999 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
13002 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
13003 if (OMP_CLAUSE_DECL (*pc
) == TREE_PURPOSE (orig
))
13007 if (i
== TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)))
13009 error_at (OMP_CLAUSE_LOCATION (*pc
),
13010 "%<lastprivate%> clause on a %<loop%> construct refers "
13011 "to a variable %qD which is not the loop iterator",
13012 OMP_CLAUSE_DECL (*pc
));
13013 *pc
= OMP_CLAUSE_CHAIN (*pc
);
13016 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13019 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13023 TREE_SET_CODE (for_stmt
, OMP_SIMD
);
13028 case OMP_CLAUSE_BIND_THREAD
: last
= 0; break;
13029 case OMP_CLAUSE_BIND_PARALLEL
: last
= 1; break;
13030 case OMP_CLAUSE_BIND_TEAMS
: last
= 2; break;
13032 for (int pass
= 1; pass
<= last
; pass
++)
13036 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
13037 append_to_statement_list (*expr_p
, &BIND_EXPR_BODY (bind
));
13038 *expr_p
= make_node (OMP_PARALLEL
);
13039 TREE_TYPE (*expr_p
) = void_type_node
;
13040 OMP_PARALLEL_BODY (*expr_p
) = bind
;
13041 OMP_PARALLEL_COMBINED (*expr_p
) = 1;
13042 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (for_stmt
));
13043 tree
*pc
= &OMP_PARALLEL_CLAUSES (*expr_p
);
13044 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
13045 if (OMP_FOR_ORIG_DECLS (for_stmt
)
13046 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
))
13049 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
13050 if (TREE_PURPOSE (elt
) && TREE_VALUE (elt
))
13052 *pc
= build_omp_clause (UNKNOWN_LOCATION
,
13053 OMP_CLAUSE_FIRSTPRIVATE
);
13054 OMP_CLAUSE_DECL (*pc
) = TREE_VALUE (elt
);
13055 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13059 tree t
= make_node (pass
== 2 ? OMP_DISTRIBUTE
: OMP_FOR
);
13060 tree
*pc
= &OMP_FOR_CLAUSES (t
);
13061 TREE_TYPE (t
) = void_type_node
;
13062 OMP_FOR_BODY (t
) = *expr_p
;
13063 SET_EXPR_LOCATION (t
, EXPR_LOCATION (for_stmt
));
13064 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13065 switch (OMP_CLAUSE_CODE (c
))
13067 case OMP_CLAUSE_BIND
:
13068 case OMP_CLAUSE_ORDER
:
13069 case OMP_CLAUSE_COLLAPSE
:
13070 *pc
= copy_node (c
);
13071 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13073 case OMP_CLAUSE_PRIVATE
:
13074 case OMP_CLAUSE_FIRSTPRIVATE
:
13075 /* Only needed on innermost. */
13077 case OMP_CLAUSE_LASTPRIVATE
:
13078 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
) && pass
!= last
)
13080 *pc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
13081 OMP_CLAUSE_FIRSTPRIVATE
);
13082 OMP_CLAUSE_DECL (*pc
) = OMP_CLAUSE_DECL (c
);
13083 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
13084 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13086 *pc
= copy_node (c
);
13087 OMP_CLAUSE_LASTPRIVATE_STMT (*pc
) = NULL_TREE
;
13088 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
13089 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
13092 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc
) = 1;
13094 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
13095 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc
) = 0;
13097 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13099 case OMP_CLAUSE_REDUCTION
:
13100 *pc
= copy_node (c
);
13101 OMP_CLAUSE_DECL (*pc
) = unshare_expr (OMP_CLAUSE_DECL (c
));
13102 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
13103 OMP_CLAUSE_REDUCTION_INIT (*pc
)
13104 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c
));
13105 OMP_CLAUSE_REDUCTION_MERGE (*pc
)
13106 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c
));
13107 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
))
13109 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
)
13110 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
));
13111 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
13112 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
)
13113 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
));
13115 tree data
[2] = { c
, nc
};
13116 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc
),
13117 replace_reduction_placeholders
,
13119 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc
),
13120 replace_reduction_placeholders
,
13123 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13126 gcc_unreachable ();
13131 return gimplify_omp_for (expr_p
, pre_p
);
13135 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
13136 of OMP_TARGET's body. */
13139 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
13141 *walk_subtrees
= 0;
13142 switch (TREE_CODE (*tp
))
13147 case STATEMENT_LIST
:
13148 *walk_subtrees
= 1;
13156 /* Helper function of optimize_target_teams, determine if the expression
13157 can be computed safely before the target construct on the host. */
13160 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
13166 *walk_subtrees
= 0;
13169 switch (TREE_CODE (*tp
))
13174 *walk_subtrees
= 0;
13175 if (error_operand_p (*tp
)
13176 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
13177 || DECL_HAS_VALUE_EXPR_P (*tp
)
13178 || DECL_THREAD_LOCAL_P (*tp
)
13179 || TREE_SIDE_EFFECTS (*tp
)
13180 || TREE_THIS_VOLATILE (*tp
))
13182 if (is_global_var (*tp
)
13183 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
13184 || lookup_attribute ("omp declare target link",
13185 DECL_ATTRIBUTES (*tp
))))
13188 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
13189 && !is_global_var (*tp
)
13190 && decl_function_context (*tp
) == current_function_decl
)
13192 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
13193 (splay_tree_key
) *tp
);
13196 if (gimplify_omp_ctxp
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
13200 else if (n
->value
& GOVD_LOCAL
)
13202 else if (n
->value
& GOVD_FIRSTPRIVATE
)
13204 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
13205 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
13209 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
13213 if (TARGET_EXPR_INITIAL (*tp
)
13214 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
13216 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
13217 walk_subtrees
, NULL
);
13218 /* Allow some reasonable subset of integral arithmetics. */
13222 case TRUNC_DIV_EXPR
:
13223 case CEIL_DIV_EXPR
:
13224 case FLOOR_DIV_EXPR
:
13225 case ROUND_DIV_EXPR
:
13226 case TRUNC_MOD_EXPR
:
13227 case CEIL_MOD_EXPR
:
13228 case FLOOR_MOD_EXPR
:
13229 case ROUND_MOD_EXPR
:
13231 case EXACT_DIV_EXPR
:
13242 case NON_LVALUE_EXPR
:
13244 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
13247 /* And disallow anything else, except for comparisons. */
13249 if (COMPARISON_CLASS_P (*tp
))
13255 /* Try to determine if the num_teams and/or thread_limit expressions
13256 can have their values determined already before entering the
13258 INTEGER_CSTs trivially are,
13259 integral decls that are firstprivate (explicitly or implicitly)
13260 or explicitly map(always, to:) or map(always, tofrom:) on the target
13261 region too, and expressions involving simple arithmetics on those
13262 too, function calls are not ok, dereferencing something neither etc.
13263 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
13264 EXPR based on what we find:
13265 0 stands for clause not specified at all, use implementation default
13266 -1 stands for value that can't be determined easily before entering
13267 the target construct.
13268 If teams construct is not present at all, use 1 for num_teams
13269 and 0 for thread_limit (only one team is involved, and the thread
13270 limit is implementation defined. */
13273 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
13275 tree body
= OMP_BODY (target
);
13276 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
13277 tree num_teams
= integer_zero_node
;
13278 tree thread_limit
= integer_zero_node
;
13279 location_t num_teams_loc
= EXPR_LOCATION (target
);
13280 location_t thread_limit_loc
= EXPR_LOCATION (target
);
13282 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
13284 if (teams
== NULL_TREE
)
13285 num_teams
= integer_one_node
;
13287 for (c
= OMP_TEAMS_CLAUSES (teams
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13289 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
13292 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
13294 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
13297 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
13301 expr
= OMP_CLAUSE_OPERAND (c
, 0);
13302 if (TREE_CODE (expr
) == INTEGER_CST
)
13307 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
13309 *p
= integer_minus_one_node
;
13313 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
13314 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
13317 gimplify_omp_ctxp
= target_ctx
;
13318 *p
= integer_minus_one_node
;
13321 gimplify_omp_ctxp
= target_ctx
;
13322 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
13323 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
13325 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
13326 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
13327 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
13328 OMP_TARGET_CLAUSES (target
) = c
;
13329 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
13330 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
13331 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
13332 OMP_TARGET_CLAUSES (target
) = c
;
13335 /* Gimplify the gross structure of several OMP constructs. */
13338 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
13340 tree expr
= *expr_p
;
13342 gimple_seq body
= NULL
;
13343 enum omp_region_type ort
;
13345 switch (TREE_CODE (expr
))
13349 ort
= ORT_WORKSHARE
;
13352 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
13355 ort
= ORT_ACC_KERNELS
;
13357 case OACC_PARALLEL
:
13358 ort
= ORT_ACC_PARALLEL
;
13361 ort
= ORT_ACC_SERIAL
;
13364 ort
= ORT_ACC_DATA
;
13366 case OMP_TARGET_DATA
:
13367 ort
= ORT_TARGET_DATA
;
13370 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
13371 if (gimplify_omp_ctxp
== NULL
13372 || gimplify_omp_ctxp
->region_type
== ORT_IMPLICIT_TARGET
)
13373 ort
= (enum omp_region_type
) (ort
| ORT_HOST_TEAMS
);
13375 case OACC_HOST_DATA
:
13376 ort
= ORT_ACC_HOST_DATA
;
13379 gcc_unreachable ();
13382 bool save_in_omp_construct
= in_omp_construct
;
13383 if ((ort
& ORT_ACC
) == 0)
13384 in_omp_construct
= false;
13385 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
13387 if (TREE_CODE (expr
) == OMP_TARGET
)
13388 optimize_target_teams (expr
, pre_p
);
13389 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
13390 || (ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
13392 push_gimplify_context ();
13393 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
13394 if (gimple_code (g
) == GIMPLE_BIND
)
13395 pop_gimplify_context (g
);
13397 pop_gimplify_context (NULL
);
13398 if ((ort
& ORT_TARGET_DATA
) != 0)
13400 enum built_in_function end_ix
;
13401 switch (TREE_CODE (expr
))
13404 case OACC_HOST_DATA
:
13405 end_ix
= BUILT_IN_GOACC_DATA_END
;
13407 case OMP_TARGET_DATA
:
13408 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
13411 gcc_unreachable ();
13413 tree fn
= builtin_decl_explicit (end_ix
);
13414 g
= gimple_build_call (fn
, 0);
13415 gimple_seq cleanup
= NULL
;
13416 gimple_seq_add_stmt (&cleanup
, g
);
13417 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
13419 gimple_seq_add_stmt (&body
, g
);
13423 gimplify_and_add (OMP_BODY (expr
), &body
);
13424 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
13426 in_omp_construct
= save_in_omp_construct
;
13428 switch (TREE_CODE (expr
))
13431 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
13432 OMP_CLAUSES (expr
));
13434 case OACC_HOST_DATA
:
13435 if (omp_find_clause (OMP_CLAUSES (expr
), OMP_CLAUSE_IF_PRESENT
))
13437 for (tree c
= OMP_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13438 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
13439 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
) = 1;
13442 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
13443 OMP_CLAUSES (expr
));
13446 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
13447 OMP_CLAUSES (expr
));
13449 case OACC_PARALLEL
:
13450 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
13451 OMP_CLAUSES (expr
));
13454 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_SERIAL
,
13455 OMP_CLAUSES (expr
));
13458 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
13461 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
13464 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
13465 OMP_CLAUSES (expr
));
13467 case OMP_TARGET_DATA
:
13468 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
13469 to be evaluated before the use_device_{ptr,addr} clauses if they
13470 refer to the same variables. */
13472 tree use_device_clauses
;
13473 tree
*pc
, *uc
= &use_device_clauses
;
13474 for (pc
= &OMP_CLAUSES (expr
); *pc
; )
13475 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_PTR
13476 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
13479 *pc
= OMP_CLAUSE_CHAIN (*pc
);
13480 uc
= &OMP_CLAUSE_CHAIN (*uc
);
13483 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13485 *pc
= use_device_clauses
;
13486 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
13487 OMP_CLAUSES (expr
));
13491 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
13492 if ((ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
13493 gimple_omp_teams_set_host (as_a
<gomp_teams
*> (stmt
), true);
13496 gcc_unreachable ();
13499 gimplify_seq_add_stmt (pre_p
, stmt
);
13500 *expr_p
= NULL_TREE
;
13503 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
13504 target update constructs. */
13507 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
13509 tree expr
= *expr_p
;
13512 enum omp_region_type ort
= ORT_WORKSHARE
;
13514 switch (TREE_CODE (expr
))
13516 case OACC_ENTER_DATA
:
13517 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_DATA
;
13520 case OACC_EXIT_DATA
:
13521 kind
= GF_OMP_TARGET_KIND_OACC_EXIT_DATA
;
13525 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
13528 case OMP_TARGET_UPDATE
:
13529 kind
= GF_OMP_TARGET_KIND_UPDATE
;
13531 case OMP_TARGET_ENTER_DATA
:
13532 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
13534 case OMP_TARGET_EXIT_DATA
:
13535 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
13538 gcc_unreachable ();
13540 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
13541 ort
, TREE_CODE (expr
));
13542 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
13544 if (TREE_CODE (expr
) == OACC_UPDATE
13545 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
13546 OMP_CLAUSE_IF_PRESENT
))
13548 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
13550 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13551 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
13552 switch (OMP_CLAUSE_MAP_KIND (c
))
13554 case GOMP_MAP_FORCE_TO
:
13555 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
13557 case GOMP_MAP_FORCE_FROM
:
13558 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FROM
);
13564 else if (TREE_CODE (expr
) == OACC_EXIT_DATA
13565 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
13566 OMP_CLAUSE_FINALIZE
))
13568 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
13570 bool have_clause
= false;
13571 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13572 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
13573 switch (OMP_CLAUSE_MAP_KIND (c
))
13575 case GOMP_MAP_FROM
:
13576 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_FROM
);
13577 have_clause
= true;
13579 case GOMP_MAP_RELEASE
:
13580 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_DELETE
);
13581 have_clause
= true;
13583 case GOMP_MAP_TO_PSET
:
13584 /* Fortran arrays with descriptors must map that descriptor when
13585 doing standalone "attach" operations (in OpenACC). In that
13586 case GOMP_MAP_TO_PSET appears by itself with no preceding
13587 clause (see trans-openmp.c:gfc_trans_omp_clauses). */
13589 case GOMP_MAP_POINTER
:
13590 /* TODO PR92929: we may see these here, but they'll always follow
13591 one of the clauses above, and will be handled by libgomp as
13592 one group, so no handling required here. */
13593 gcc_assert (have_clause
);
13595 case GOMP_MAP_DETACH
:
13596 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_DETACH
);
13597 have_clause
= false;
13599 case GOMP_MAP_STRUCT
:
13600 have_clause
= false;
13603 gcc_unreachable ();
13606 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
13608 gimplify_seq_add_stmt (pre_p
, stmt
);
13609 *expr_p
= NULL_TREE
;
13612 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
13613 stabilized the lhs of the atomic operation as *ADDR. Return true if
13614 EXPR is this stabilized form. */
13617 goa_lhs_expr_p (tree expr
, tree addr
)
13619 /* Also include casts to other type variants. The C front end is fond
13620 of adding these for e.g. volatile variables. This is like
13621 STRIP_TYPE_NOPS but includes the main variant lookup. */
13622 STRIP_USELESS_TYPE_CONVERSION (expr
);
13624 if (TREE_CODE (expr
) == INDIRECT_REF
)
13626 expr
= TREE_OPERAND (expr
, 0);
13627 while (expr
!= addr
13628 && (CONVERT_EXPR_P (expr
)
13629 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
13630 && TREE_CODE (expr
) == TREE_CODE (addr
)
13631 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
13633 expr
= TREE_OPERAND (expr
, 0);
13634 addr
= TREE_OPERAND (addr
, 0);
13638 return (TREE_CODE (addr
) == ADDR_EXPR
13639 && TREE_CODE (expr
) == ADDR_EXPR
13640 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
13642 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
13647 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
13648 expression does not involve the lhs, evaluate it into a temporary.
13649 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13650 or -1 if an error was encountered. */
13653 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
13656 tree expr
= *expr_p
;
13659 if (goa_lhs_expr_p (expr
, lhs_addr
))
13664 if (is_gimple_val (expr
))
13668 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
13671 case tcc_comparison
:
13672 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
13676 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
13679 case tcc_expression
:
13680 switch (TREE_CODE (expr
))
13682 case TRUTH_ANDIF_EXPR
:
13683 case TRUTH_ORIF_EXPR
:
13684 case TRUTH_AND_EXPR
:
13685 case TRUTH_OR_EXPR
:
13686 case TRUTH_XOR_EXPR
:
13687 case BIT_INSERT_EXPR
:
13688 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
13689 lhs_addr
, lhs_var
);
13691 case TRUTH_NOT_EXPR
:
13692 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
13693 lhs_addr
, lhs_var
);
13695 case COMPOUND_EXPR
:
13696 /* Break out any preevaluations from cp_build_modify_expr. */
13697 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
13698 expr
= TREE_OPERAND (expr
, 1))
13699 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
13701 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
);
13706 case tcc_reference
:
13707 if (TREE_CODE (expr
) == BIT_FIELD_REF
)
13708 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
13709 lhs_addr
, lhs_var
);
13717 enum gimplify_status gs
;
13718 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
13719 if (gs
!= GS_ALL_DONE
)
13726 /* Gimplify an OMP_ATOMIC statement. */
13728 static enum gimplify_status
13729 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
13731 tree addr
= TREE_OPERAND (*expr_p
, 0);
13732 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
13733 ? NULL
: TREE_OPERAND (*expr_p
, 1);
13734 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
13736 gomp_atomic_load
*loadstmt
;
13737 gomp_atomic_store
*storestmt
;
13739 tmp_load
= create_tmp_reg (type
);
13740 if (rhs
&& goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
) < 0)
13743 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
13747 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
,
13748 OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
13749 gimplify_seq_add_stmt (pre_p
, loadstmt
);
13752 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13753 representatives. Use BIT_FIELD_REF on the lhs instead. */
13754 if (TREE_CODE (rhs
) == BIT_INSERT_EXPR
13755 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load
)))
13757 tree bitpos
= TREE_OPERAND (rhs
, 2);
13758 tree op1
= TREE_OPERAND (rhs
, 1);
13760 tree tmp_store
= tmp_load
;
13761 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_CAPTURE_OLD
)
13762 tmp_store
= get_initialized_tmp_var (tmp_load
, pre_p
);
13763 if (INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
13764 bitsize
= bitsize_int (TYPE_PRECISION (TREE_TYPE (op1
)));
13766 bitsize
= TYPE_SIZE (TREE_TYPE (op1
));
13767 gcc_assert (TREE_OPERAND (rhs
, 0) == tmp_load
);
13768 tree t
= build2_loc (EXPR_LOCATION (rhs
),
13769 MODIFY_EXPR
, void_type_node
,
13770 build3_loc (EXPR_LOCATION (rhs
), BIT_FIELD_REF
,
13771 TREE_TYPE (op1
), tmp_store
, bitsize
,
13773 gimplify_and_add (t
, pre_p
);
13776 if (gimplify_expr (&rhs
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
13781 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
13784 = gimple_build_omp_atomic_store (rhs
, OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
13785 gimplify_seq_add_stmt (pre_p
, storestmt
);
13786 switch (TREE_CODE (*expr_p
))
13788 case OMP_ATOMIC_READ
:
13789 case OMP_ATOMIC_CAPTURE_OLD
:
13790 *expr_p
= tmp_load
;
13791 gimple_omp_atomic_set_need_value (loadstmt
);
13793 case OMP_ATOMIC_CAPTURE_NEW
:
13795 gimple_omp_atomic_set_need_value (storestmt
);
13802 return GS_ALL_DONE
;
13805 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13806 body, and adding some EH bits. */
13808 static enum gimplify_status
13809 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
13811 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
13813 gtransaction
*trans_stmt
;
13814 gimple_seq body
= NULL
;
13817 /* Wrap the transaction body in a BIND_EXPR so we have a context
13818 where to put decls for OMP. */
13819 if (TREE_CODE (tbody
) != BIND_EXPR
)
13821 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
13822 TREE_SIDE_EFFECTS (bind
) = 1;
13823 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
13824 TRANSACTION_EXPR_BODY (expr
) = bind
;
13827 push_gimplify_context ();
13828 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
13830 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
13831 pop_gimplify_context (body_stmt
);
13833 trans_stmt
= gimple_build_transaction (body
);
13834 if (TRANSACTION_EXPR_OUTER (expr
))
13835 subcode
= GTMA_IS_OUTER
;
13836 else if (TRANSACTION_EXPR_RELAXED (expr
))
13837 subcode
= GTMA_IS_RELAXED
;
13838 gimple_transaction_set_subcode (trans_stmt
, subcode
);
13840 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
13848 *expr_p
= NULL_TREE
;
13849 return GS_ALL_DONE
;
13852 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13853 is the OMP_BODY of the original EXPR (which has already been
13854 gimplified so it's not present in the EXPR).
13856 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
13859 gimplify_omp_ordered (tree expr
, gimple_seq body
)
13864 tree source_c
= NULL_TREE
;
13865 tree sink_c
= NULL_TREE
;
13867 if (gimplify_omp_ctxp
)
13869 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13870 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
13871 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ()
13872 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
13873 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
))
13875 error_at (OMP_CLAUSE_LOCATION (c
),
13876 "%<ordered%> construct with %<depend%> clause must be "
13877 "closely nested inside a loop with %<ordered%> clause "
13878 "with a parameter");
13881 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
13882 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
13885 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
13886 decls
&& TREE_CODE (decls
) == TREE_LIST
;
13887 decls
= TREE_CHAIN (decls
), ++i
)
13888 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
13890 else if (TREE_VALUE (decls
)
13891 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
13893 error_at (OMP_CLAUSE_LOCATION (c
),
13894 "variable %qE is not an iteration "
13895 "of outermost loop %d, expected %qE",
13896 TREE_VALUE (decls
), i
+ 1,
13897 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
13903 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
13904 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
13906 error_at (OMP_CLAUSE_LOCATION (c
),
13907 "number of variables in %<depend%> clause with "
13908 "%<sink%> modifier does not match number of "
13909 "iteration variables");
13914 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
13915 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
13919 error_at (OMP_CLAUSE_LOCATION (c
),
13920 "more than one %<depend%> clause with %<source%> "
13921 "modifier on an %<ordered%> construct");
13928 if (source_c
&& sink_c
)
13930 error_at (OMP_CLAUSE_LOCATION (source_c
),
13931 "%<depend%> clause with %<source%> modifier specified "
13932 "together with %<depend%> clauses with %<sink%> modifier "
13933 "on the same construct");
13938 return gimple_build_nop ();
13939 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
13942 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13943 expression produces a value to be used as an operand inside a GIMPLE
13944 statement, the value will be stored back in *EXPR_P. This value will
13945 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13946 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13947 emitted in PRE_P and POST_P.
13949 Additionally, this process may overwrite parts of the input
13950 expression during gimplification. Ideally, it should be
13951 possible to do non-destructive gimplification.
13953 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13954 the expression needs to evaluate to a value to be used as
13955 an operand in a GIMPLE statement, this value will be stored in
13956 *EXPR_P on exit. This happens when the caller specifies one
13957 of fb_lvalue or fb_rvalue fallback flags.
13959 PRE_P will contain the sequence of GIMPLE statements corresponding
13960 to the evaluation of EXPR and all the side-effects that must
13961 be executed before the main expression. On exit, the last
13962 statement of PRE_P is the core statement being gimplified. For
13963 instance, when gimplifying 'if (++a)' the last statement in
13964 PRE_P will be 'if (t.1)' where t.1 is the result of
13965 pre-incrementing 'a'.
13967 POST_P will contain the sequence of GIMPLE statements corresponding
13968 to the evaluation of all the side-effects that must be executed
13969 after the main expression. If this is NULL, the post
13970 side-effects are stored at the end of PRE_P.
13972 The reason why the output is split in two is to handle post
13973 side-effects explicitly. In some cases, an expression may have
13974 inner and outer post side-effects which need to be emitted in
13975 an order different from the one given by the recursive
13976 traversal. For instance, for the expression (*p--)++ the post
13977 side-effects of '--' must actually occur *after* the post
13978 side-effects of '++'. However, gimplification will first visit
13979 the inner expression, so if a separate POST sequence was not
13980 used, the resulting sequence would be:
13987 However, the post-decrement operation in line #2 must not be
13988 evaluated until after the store to *p at line #4, so the
13989 correct sequence should be:
13996 So, by specifying a separate post queue, it is possible
13997 to emit the post side-effects in the correct order.
13998 If POST_P is NULL, an internal queue will be used. Before
13999 returning to the caller, the sequence POST_P is appended to
14000 the main output sequence PRE_P.
14002 GIMPLE_TEST_F points to a function that takes a tree T and
14003 returns nonzero if T is in the GIMPLE form requested by the
14004 caller. The GIMPLE predicates are in gimple.c.
14006 FALLBACK tells the function what sort of a temporary we want if
14007 gimplification cannot produce an expression that complies with
14010 fb_none means that no temporary should be generated
14011 fb_rvalue means that an rvalue is OK to generate
14012 fb_lvalue means that an lvalue is OK to generate
14013 fb_either means that either is OK, but an lvalue is preferable.
14014 fb_mayfail means that gimplification may fail (in which case
14015 GS_ERROR will be returned)
14017 The return value is either GS_ERROR or GS_ALL_DONE, since this
14018 function iterates until EXPR is completely gimplified or an error
14021 enum gimplify_status
14022 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
14023 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
14026 gimple_seq internal_pre
= NULL
;
14027 gimple_seq internal_post
= NULL
;
14030 location_t saved_location
;
14031 enum gimplify_status ret
;
14032 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
14035 save_expr
= *expr_p
;
14036 if (save_expr
== NULL_TREE
)
14037 return GS_ALL_DONE
;
14039 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
14040 is_statement
= gimple_test_f
== is_gimple_stmt
;
14042 gcc_assert (pre_p
);
14044 /* Consistency checks. */
14045 if (gimple_test_f
== is_gimple_reg
)
14046 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
14047 else if (gimple_test_f
== is_gimple_val
14048 || gimple_test_f
== is_gimple_call_addr
14049 || gimple_test_f
== is_gimple_condexpr
14050 || gimple_test_f
== is_gimple_condexpr_for_cond
14051 || gimple_test_f
== is_gimple_mem_rhs
14052 || gimple_test_f
== is_gimple_mem_rhs_or_call
14053 || gimple_test_f
== is_gimple_reg_rhs
14054 || gimple_test_f
== is_gimple_reg_rhs_or_call
14055 || gimple_test_f
== is_gimple_asm_val
14056 || gimple_test_f
== is_gimple_mem_ref_addr
)
14057 gcc_assert (fallback
& fb_rvalue
);
14058 else if (gimple_test_f
== is_gimple_min_lval
14059 || gimple_test_f
== is_gimple_lvalue
)
14060 gcc_assert (fallback
& fb_lvalue
);
14061 else if (gimple_test_f
== is_gimple_addressable
)
14062 gcc_assert (fallback
& fb_either
);
14063 else if (gimple_test_f
== is_gimple_stmt
)
14064 gcc_assert (fallback
== fb_none
);
14067 /* We should have recognized the GIMPLE_TEST_F predicate to
14068 know what kind of fallback to use in case a temporary is
14069 needed to hold the value or address of *EXPR_P. */
14070 gcc_unreachable ();
14073 /* We used to check the predicate here and return immediately if it
14074 succeeds. This is wrong; the design is for gimplification to be
14075 idempotent, and for the predicates to only test for valid forms, not
14076 whether they are fully simplified. */
14078 pre_p
= &internal_pre
;
14080 if (post_p
== NULL
)
14081 post_p
= &internal_post
;
14083 /* Remember the last statements added to PRE_P and POST_P. Every
14084 new statement added by the gimplification helpers needs to be
14085 annotated with location information. To centralize the
14086 responsibility, we remember the last statement that had been
14087 added to both queues before gimplifying *EXPR_P. If
14088 gimplification produces new statements in PRE_P and POST_P, those
14089 statements will be annotated with the same location information
14091 pre_last_gsi
= gsi_last (*pre_p
);
14092 post_last_gsi
= gsi_last (*post_p
);
14094 saved_location
= input_location
;
14095 if (save_expr
!= error_mark_node
14096 && EXPR_HAS_LOCATION (*expr_p
))
14097 input_location
= EXPR_LOCATION (*expr_p
);
14099 /* Loop over the specific gimplifiers until the toplevel node
14100 remains the same. */
14103 /* Strip away as many useless type conversions as possible
14104 at the toplevel. */
14105 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
14107 /* Remember the expr. */
14108 save_expr
= *expr_p
;
14110 /* Die, die, die, my darling. */
14111 if (error_operand_p (save_expr
))
14117 /* Do any language-specific gimplification. */
14118 ret
= ((enum gimplify_status
)
14119 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
14122 if (*expr_p
== NULL_TREE
)
14124 if (*expr_p
!= save_expr
)
14127 else if (ret
!= GS_UNHANDLED
)
14130 /* Make sure that all the cases set 'ret' appropriately. */
14131 ret
= GS_UNHANDLED
;
14132 switch (TREE_CODE (*expr_p
))
14134 /* First deal with the special cases. */
14136 case POSTINCREMENT_EXPR
:
14137 case POSTDECREMENT_EXPR
:
14138 case PREINCREMENT_EXPR
:
14139 case PREDECREMENT_EXPR
:
14140 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
14141 fallback
!= fb_none
,
14142 TREE_TYPE (*expr_p
));
14145 case VIEW_CONVERT_EXPR
:
14146 if ((fallback
& fb_rvalue
)
14147 && is_gimple_reg_type (TREE_TYPE (*expr_p
))
14148 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
14150 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14151 post_p
, is_gimple_val
, fb_rvalue
);
14152 recalculate_side_effects (*expr_p
);
14158 case ARRAY_RANGE_REF
:
14159 case REALPART_EXPR
:
14160 case IMAGPART_EXPR
:
14161 case COMPONENT_REF
:
14162 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
14163 fallback
? fallback
: fb_rvalue
);
14167 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
14169 /* C99 code may assign to an array in a structure value of a
14170 conditional expression, and this has undefined behavior
14171 only on execution, so create a temporary if an lvalue is
14173 if (fallback
== fb_lvalue
)
14175 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
14176 mark_addressable (*expr_p
);
14182 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
14184 /* C99 code may assign to an array in a structure returned
14185 from a function, and this has undefined behavior only on
14186 execution, so create a temporary if an lvalue is
14188 if (fallback
== fb_lvalue
)
14190 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
14191 mark_addressable (*expr_p
);
14197 gcc_unreachable ();
14199 case COMPOUND_EXPR
:
14200 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
14203 case COMPOUND_LITERAL_EXPR
:
14204 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
14205 gimple_test_f
, fallback
);
14210 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
14211 fallback
!= fb_none
);
14214 case TRUTH_ANDIF_EXPR
:
14215 case TRUTH_ORIF_EXPR
:
14217 /* Preserve the original type of the expression and the
14218 source location of the outer expression. */
14219 tree org_type
= TREE_TYPE (*expr_p
);
14220 *expr_p
= gimple_boolify (*expr_p
);
14221 *expr_p
= build3_loc (input_location
, COND_EXPR
,
14225 org_type
, boolean_true_node
),
14228 org_type
, boolean_false_node
));
14233 case TRUTH_NOT_EXPR
:
14235 tree type
= TREE_TYPE (*expr_p
);
14236 /* The parsers are careful to generate TRUTH_NOT_EXPR
14237 only with operands that are always zero or one.
14238 We do not fold here but handle the only interesting case
14239 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
14240 *expr_p
= gimple_boolify (*expr_p
);
14241 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
14242 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
14243 TREE_TYPE (*expr_p
),
14244 TREE_OPERAND (*expr_p
, 0));
14246 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
14247 TREE_TYPE (*expr_p
),
14248 TREE_OPERAND (*expr_p
, 0),
14249 build_int_cst (TREE_TYPE (*expr_p
), 1));
14250 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
14251 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
14257 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
14260 case ANNOTATE_EXPR
:
14262 tree cond
= TREE_OPERAND (*expr_p
, 0);
14263 tree kind
= TREE_OPERAND (*expr_p
, 1);
14264 tree data
= TREE_OPERAND (*expr_p
, 2);
14265 tree type
= TREE_TYPE (cond
);
14266 if (!INTEGRAL_TYPE_P (type
))
14272 tree tmp
= create_tmp_var (type
);
14273 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
14275 = gimple_build_call_internal (IFN_ANNOTATE
, 3, cond
, kind
, data
);
14276 gimple_call_set_lhs (call
, tmp
);
14277 gimplify_seq_add_stmt (pre_p
, call
);
14284 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
14288 if (IS_EMPTY_STMT (*expr_p
))
14294 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
14295 || fallback
== fb_none
)
14297 /* Just strip a conversion to void (or in void context) and
14299 *expr_p
= TREE_OPERAND (*expr_p
, 0);
14304 ret
= gimplify_conversion (expr_p
);
14305 if (ret
== GS_ERROR
)
14307 if (*expr_p
!= save_expr
)
14311 case FIX_TRUNC_EXPR
:
14312 /* unary_expr: ... | '(' cast ')' val | ... */
14313 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14314 is_gimple_val
, fb_rvalue
);
14315 recalculate_side_effects (*expr_p
);
14320 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
14321 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
14322 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
14324 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
14325 if (*expr_p
!= save_expr
)
14331 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14332 is_gimple_reg
, fb_rvalue
);
14333 if (ret
== GS_ERROR
)
14336 recalculate_side_effects (*expr_p
);
14337 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
14338 TREE_TYPE (*expr_p
),
14339 TREE_OPERAND (*expr_p
, 0),
14340 build_int_cst (saved_ptr_type
, 0));
14341 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
14342 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
14347 /* We arrive here through the various re-gimplifcation paths. */
14349 /* First try re-folding the whole thing. */
14350 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
14351 TREE_OPERAND (*expr_p
, 0),
14352 TREE_OPERAND (*expr_p
, 1));
14355 REF_REVERSE_STORAGE_ORDER (tmp
)
14356 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
14358 recalculate_side_effects (*expr_p
);
14362 /* Avoid re-gimplifying the address operand if it is already
14363 in suitable form. Re-gimplifying would mark the address
14364 operand addressable. Always gimplify when not in SSA form
14365 as we still may have to gimplify decls with value-exprs. */
14366 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
14367 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
14369 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14370 is_gimple_mem_ref_addr
, fb_rvalue
);
14371 if (ret
== GS_ERROR
)
14374 recalculate_side_effects (*expr_p
);
14378 /* Constants need not be gimplified. */
14385 /* Drop the overflow flag on constants, we do not want
14386 that in the GIMPLE IL. */
14387 if (TREE_OVERFLOW_P (*expr_p
))
14388 *expr_p
= drop_tree_overflow (*expr_p
);
14393 /* If we require an lvalue, such as for ADDR_EXPR, retain the
14394 CONST_DECL node. Otherwise the decl is replaceable by its
14396 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
14397 if (fallback
& fb_lvalue
)
14401 *expr_p
= DECL_INITIAL (*expr_p
);
14407 ret
= gimplify_decl_expr (expr_p
, pre_p
);
14411 ret
= gimplify_bind_expr (expr_p
, pre_p
);
14415 ret
= gimplify_loop_expr (expr_p
, pre_p
);
14419 ret
= gimplify_switch_expr (expr_p
, pre_p
);
14423 ret
= gimplify_exit_expr (expr_p
);
14427 /* If the target is not LABEL, then it is a computed jump
14428 and the target needs to be gimplified. */
14429 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
14431 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
14432 NULL
, is_gimple_val
, fb_rvalue
);
14433 if (ret
== GS_ERROR
)
14436 gimplify_seq_add_stmt (pre_p
,
14437 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
14442 gimplify_seq_add_stmt (pre_p
,
14443 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
14444 PREDICT_EXPR_OUTCOME (*expr_p
)));
14449 ret
= gimplify_label_expr (expr_p
, pre_p
);
14450 label
= LABEL_EXPR_LABEL (*expr_p
);
14451 gcc_assert (decl_function_context (label
) == current_function_decl
);
14453 /* If the label is used in a goto statement, or address of the label
14454 is taken, we need to unpoison all variables that were seen so far.
14455 Doing so would prevent us from reporting a false positives. */
14456 if (asan_poisoned_variables
14457 && asan_used_labels
!= NULL
14458 && asan_used_labels
->contains (label
)
14459 && !gimplify_omp_ctxp
)
14460 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
14463 case CASE_LABEL_EXPR
:
14464 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
14466 if (gimplify_ctxp
->live_switch_vars
)
14467 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
14472 ret
= gimplify_return_expr (*expr_p
, pre_p
);
14476 /* Don't reduce this in place; let gimplify_init_constructor work its
14477 magic. Buf if we're just elaborating this for side effects, just
14478 gimplify any element that has side-effects. */
14479 if (fallback
== fb_none
)
14481 unsigned HOST_WIDE_INT ix
;
14483 tree temp
= NULL_TREE
;
14484 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
14485 if (TREE_SIDE_EFFECTS (val
))
14486 append_to_statement_list (val
, &temp
);
14489 ret
= temp
? GS_OK
: GS_ALL_DONE
;
14491 /* C99 code may assign to an array in a constructed
14492 structure or union, and this has undefined behavior only
14493 on execution, so create a temporary if an lvalue is
14495 else if (fallback
== fb_lvalue
)
14497 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
14498 mark_addressable (*expr_p
);
14505 /* The following are special cases that are not handled by the
14506 original GIMPLE grammar. */
14508 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
14511 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
14514 case BIT_FIELD_REF
:
14515 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14516 post_p
, is_gimple_lvalue
, fb_either
);
14517 recalculate_side_effects (*expr_p
);
14520 case TARGET_MEM_REF
:
14522 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
14524 if (TMR_BASE (*expr_p
))
14525 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
14526 post_p
, is_gimple_mem_ref_addr
, fb_either
);
14527 if (TMR_INDEX (*expr_p
))
14528 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
14529 post_p
, is_gimple_val
, fb_rvalue
);
14530 if (TMR_INDEX2 (*expr_p
))
14531 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
14532 post_p
, is_gimple_val
, fb_rvalue
);
14533 /* TMR_STEP and TMR_OFFSET are always integer constants. */
14534 ret
= MIN (r0
, r1
);
14538 case NON_LVALUE_EXPR
:
14539 /* This should have been stripped above. */
14540 gcc_unreachable ();
14543 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
14546 case TRY_FINALLY_EXPR
:
14547 case TRY_CATCH_EXPR
:
14549 gimple_seq eval
, cleanup
;
14552 /* Calls to destructors are generated automatically in FINALLY/CATCH
14553 block. They should have location as UNKNOWN_LOCATION. However,
14554 gimplify_call_expr will reset these call stmts to input_location
14555 if it finds stmt's location is unknown. To prevent resetting for
14556 destructors, we set the input_location to unknown.
14557 Note that this only affects the destructor calls in FINALLY/CATCH
14558 block, and will automatically reset to its original value by the
14559 end of gimplify_expr. */
14560 input_location
= UNKNOWN_LOCATION
;
14561 eval
= cleanup
= NULL
;
14562 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
14563 if (TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
14564 && TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == EH_ELSE_EXPR
)
14566 gimple_seq n
= NULL
, e
= NULL
;
14567 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
14569 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
14571 if (!gimple_seq_empty_p (n
) && !gimple_seq_empty_p (e
))
14573 geh_else
*stmt
= gimple_build_eh_else (n
, e
);
14574 gimple_seq_add_stmt (&cleanup
, stmt
);
14578 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
14579 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
14580 if (gimple_seq_empty_p (cleanup
))
14582 gimple_seq_add_seq (pre_p
, eval
);
14586 try_
= gimple_build_try (eval
, cleanup
,
14587 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
14588 ? GIMPLE_TRY_FINALLY
14589 : GIMPLE_TRY_CATCH
);
14590 if (EXPR_HAS_LOCATION (save_expr
))
14591 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
14592 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
14593 gimple_set_location (try_
, saved_location
);
14594 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
14595 gimple_try_set_catch_is_cleanup (try_
,
14596 TRY_CATCH_IS_CLEANUP (*expr_p
));
14597 gimplify_seq_add_stmt (pre_p
, try_
);
14602 case CLEANUP_POINT_EXPR
:
14603 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
14607 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
14613 gimple_seq handler
= NULL
;
14614 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
14615 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
14616 gimplify_seq_add_stmt (pre_p
, c
);
14621 case EH_FILTER_EXPR
:
14624 gimple_seq failure
= NULL
;
14626 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
14627 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
14628 copy_warning (ehf
, *expr_p
);
14629 gimplify_seq_add_stmt (pre_p
, ehf
);
14636 enum gimplify_status r0
, r1
;
14637 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
14638 post_p
, is_gimple_val
, fb_rvalue
);
14639 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
14640 post_p
, is_gimple_val
, fb_rvalue
);
14641 TREE_SIDE_EFFECTS (*expr_p
) = 0;
14642 ret
= MIN (r0
, r1
);
14647 /* We get here when taking the address of a label. We mark
14648 the label as "forced"; meaning it can never be removed and
14649 it is a potential target for any computed goto. */
14650 FORCED_LABEL (*expr_p
) = 1;
14654 case STATEMENT_LIST
:
14655 ret
= gimplify_statement_list (expr_p
, pre_p
);
14658 case WITH_SIZE_EXPR
:
14660 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14661 post_p
== &internal_post
? NULL
: post_p
,
14662 gimple_test_f
, fallback
);
14663 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
14664 is_gimple_val
, fb_rvalue
);
14671 ret
= gimplify_var_or_parm_decl (expr_p
);
14675 /* When within an OMP context, notice uses of variables. */
14676 if (gimplify_omp_ctxp
)
14677 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
14681 case DEBUG_EXPR_DECL
:
14682 gcc_unreachable ();
14684 case DEBUG_BEGIN_STMT
:
14685 gimplify_seq_add_stmt (pre_p
,
14686 gimple_build_debug_begin_stmt
14687 (TREE_BLOCK (*expr_p
),
14688 EXPR_LOCATION (*expr_p
)));
14694 /* Allow callbacks into the gimplifier during optimization. */
14699 gimplify_omp_parallel (expr_p
, pre_p
);
14704 gimplify_omp_task (expr_p
, pre_p
);
14710 case OMP_DISTRIBUTE
:
14713 ret
= gimplify_omp_for (expr_p
, pre_p
);
14717 ret
= gimplify_omp_loop (expr_p
, pre_p
);
14721 gimplify_oacc_cache (expr_p
, pre_p
);
14726 gimplify_oacc_declare (expr_p
, pre_p
);
14730 case OACC_HOST_DATA
:
14733 case OACC_PARALLEL
:
14738 case OMP_TARGET_DATA
:
14740 gimplify_omp_workshare (expr_p
, pre_p
);
14744 case OACC_ENTER_DATA
:
14745 case OACC_EXIT_DATA
:
14747 case OMP_TARGET_UPDATE
:
14748 case OMP_TARGET_ENTER_DATA
:
14749 case OMP_TARGET_EXIT_DATA
:
14750 gimplify_omp_target_update (expr_p
, pre_p
);
14760 gimple_seq body
= NULL
;
14762 bool saved_in_omp_construct
= in_omp_construct
;
14764 in_omp_construct
= true;
14765 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
14766 in_omp_construct
= saved_in_omp_construct
;
14767 switch (TREE_CODE (*expr_p
))
14770 g
= gimple_build_omp_section (body
);
14773 g
= gimple_build_omp_master (body
);
14776 g
= gimplify_omp_ordered (*expr_p
, body
);
14779 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
14780 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
14781 gimplify_adjust_omp_clauses (pre_p
, body
,
14782 &OMP_CRITICAL_CLAUSES (*expr_p
),
14784 g
= gimple_build_omp_critical (body
,
14785 OMP_CRITICAL_NAME (*expr_p
),
14786 OMP_CRITICAL_CLAUSES (*expr_p
));
14789 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p
),
14790 pre_p
, ORT_WORKSHARE
, OMP_SCAN
);
14791 gimplify_adjust_omp_clauses (pre_p
, body
,
14792 &OMP_SCAN_CLAUSES (*expr_p
),
14794 g
= gimple_build_omp_scan (body
, OMP_SCAN_CLAUSES (*expr_p
));
14797 gcc_unreachable ();
14799 gimplify_seq_add_stmt (pre_p
, g
);
14804 case OMP_TASKGROUP
:
14806 gimple_seq body
= NULL
;
14808 tree
*pclauses
= &OMP_TASKGROUP_CLAUSES (*expr_p
);
14809 bool saved_in_omp_construct
= in_omp_construct
;
14810 gimplify_scan_omp_clauses (pclauses
, pre_p
, ORT_TASKGROUP
,
14812 gimplify_adjust_omp_clauses (pre_p
, NULL
, pclauses
, OMP_TASKGROUP
);
14814 in_omp_construct
= true;
14815 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
14816 in_omp_construct
= saved_in_omp_construct
;
14817 gimple_seq cleanup
= NULL
;
14818 tree fn
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
14819 gimple
*g
= gimple_build_call (fn
, 0);
14820 gimple_seq_add_stmt (&cleanup
, g
);
14821 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
14823 gimple_seq_add_stmt (&body
, g
);
14824 g
= gimple_build_omp_taskgroup (body
, *pclauses
);
14825 gimplify_seq_add_stmt (pre_p
, g
);
14831 case OMP_ATOMIC_READ
:
14832 case OMP_ATOMIC_CAPTURE_OLD
:
14833 case OMP_ATOMIC_CAPTURE_NEW
:
14834 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
14837 case TRANSACTION_EXPR
:
14838 ret
= gimplify_transaction (expr_p
, pre_p
);
14841 case TRUTH_AND_EXPR
:
14842 case TRUTH_OR_EXPR
:
14843 case TRUTH_XOR_EXPR
:
14845 tree orig_type
= TREE_TYPE (*expr_p
);
14846 tree new_type
, xop0
, xop1
;
14847 *expr_p
= gimple_boolify (*expr_p
);
14848 new_type
= TREE_TYPE (*expr_p
);
14849 if (!useless_type_conversion_p (orig_type
, new_type
))
14851 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
14856 /* Boolified binary truth expressions are semantically equivalent
14857 to bitwise binary expressions. Canonicalize them to the
14858 bitwise variant. */
14859 switch (TREE_CODE (*expr_p
))
14861 case TRUTH_AND_EXPR
:
14862 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
14864 case TRUTH_OR_EXPR
:
14865 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
14867 case TRUTH_XOR_EXPR
:
14868 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
14873 /* Now make sure that operands have compatible type to
14874 expression's new_type. */
14875 xop0
= TREE_OPERAND (*expr_p
, 0);
14876 xop1
= TREE_OPERAND (*expr_p
, 1);
14877 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
14878 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
14881 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
14882 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
14885 /* Continue classified as tcc_binary. */
14889 case VEC_COND_EXPR
:
14892 case VEC_PERM_EXPR
:
14893 /* Classified as tcc_expression. */
14896 case BIT_INSERT_EXPR
:
14897 /* Argument 3 is a constant. */
14900 case POINTER_PLUS_EXPR
:
14902 enum gimplify_status r0
, r1
;
14903 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14904 post_p
, is_gimple_val
, fb_rvalue
);
14905 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14906 post_p
, is_gimple_val
, fb_rvalue
);
14907 recalculate_side_effects (*expr_p
);
14908 ret
= MIN (r0
, r1
);
14913 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
14915 case tcc_comparison
:
14916 /* Handle comparison of objects of non scalar mode aggregates
14917 with a call to memcmp. It would be nice to only have to do
14918 this for variable-sized objects, but then we'd have to allow
14919 the same nest of reference nodes we allow for MODIFY_EXPR and
14920 that's too complex.
14922 Compare scalar mode aggregates as scalar mode values. Using
14923 memcmp for them would be very inefficient at best, and is
14924 plain wrong if bitfields are involved. */
14926 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
14928 /* Vector comparisons need no boolification. */
14929 if (TREE_CODE (type
) == VECTOR_TYPE
)
14931 else if (!AGGREGATE_TYPE_P (type
))
14933 tree org_type
= TREE_TYPE (*expr_p
);
14934 *expr_p
= gimple_boolify (*expr_p
);
14935 if (!useless_type_conversion_p (org_type
,
14936 TREE_TYPE (*expr_p
)))
14938 *expr_p
= fold_convert_loc (input_location
,
14939 org_type
, *expr_p
);
14945 else if (TYPE_MODE (type
) != BLKmode
)
14946 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
14948 ret
= gimplify_variable_sized_compare (expr_p
);
14953 /* If *EXPR_P does not need to be special-cased, handle it
14954 according to its class. */
14956 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14957 post_p
, is_gimple_val
, fb_rvalue
);
14963 enum gimplify_status r0
, r1
;
14965 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14966 post_p
, is_gimple_val
, fb_rvalue
);
14967 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14968 post_p
, is_gimple_val
, fb_rvalue
);
14970 ret
= MIN (r0
, r1
);
14976 enum gimplify_status r0
, r1
, r2
;
14978 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14979 post_p
, is_gimple_val
, fb_rvalue
);
14980 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14981 post_p
, is_gimple_val
, fb_rvalue
);
14982 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
14983 post_p
, is_gimple_val
, fb_rvalue
);
14985 ret
= MIN (MIN (r0
, r1
), r2
);
14989 case tcc_declaration
:
14992 goto dont_recalculate
;
14995 gcc_unreachable ();
14998 recalculate_side_effects (*expr_p
);
15004 gcc_assert (*expr_p
|| ret
!= GS_OK
);
15006 while (ret
== GS_OK
);
15008 /* If we encountered an error_mark somewhere nested inside, either
15009 stub out the statement or propagate the error back out. */
15010 if (ret
== GS_ERROR
)
15017 /* This was only valid as a return value from the langhook, which
15018 we handled. Make sure it doesn't escape from any other context. */
15019 gcc_assert (ret
!= GS_UNHANDLED
);
15021 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
15023 /* We aren't looking for a value, and we don't have a valid
15024 statement. If it doesn't have side-effects, throw it away.
15025 We can also get here with code such as "*&&L;", where L is
15026 a LABEL_DECL that is marked as FORCED_LABEL. */
15027 if (TREE_CODE (*expr_p
) == LABEL_DECL
15028 || !TREE_SIDE_EFFECTS (*expr_p
))
15030 else if (!TREE_THIS_VOLATILE (*expr_p
))
15032 /* This is probably a _REF that contains something nested that
15033 has side effects. Recurse through the operands to find it. */
15034 enum tree_code code
= TREE_CODE (*expr_p
);
15038 case COMPONENT_REF
:
15039 case REALPART_EXPR
:
15040 case IMAGPART_EXPR
:
15041 case VIEW_CONVERT_EXPR
:
15042 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
15043 gimple_test_f
, fallback
);
15047 case ARRAY_RANGE_REF
:
15048 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
15049 gimple_test_f
, fallback
);
15050 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
15051 gimple_test_f
, fallback
);
15055 /* Anything else with side-effects must be converted to
15056 a valid statement before we get here. */
15057 gcc_unreachable ();
15062 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
15063 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
)
15065 /* Historically, the compiler has treated a bare reference
15066 to a non-BLKmode volatile lvalue as forcing a load. */
15067 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
15069 /* Normally, we do not want to create a temporary for a
15070 TREE_ADDRESSABLE type because such a type should not be
15071 copied by bitwise-assignment. However, we make an
15072 exception here, as all we are doing here is ensuring that
15073 we read the bytes that make up the type. We use
15074 create_tmp_var_raw because create_tmp_var will abort when
15075 given a TREE_ADDRESSABLE type. */
15076 tree tmp
= create_tmp_var_raw (type
, "vol");
15077 gimple_add_tmp_var (tmp
);
15078 gimplify_assign (tmp
, *expr_p
, pre_p
);
15082 /* We can't do anything useful with a volatile reference to
15083 an incomplete type, so just throw it away. Likewise for
15084 a BLKmode type, since any implicit inner load should
15085 already have been turned into an explicit one by the
15086 gimplification process. */
15090 /* If we are gimplifying at the statement level, we're done. Tack
15091 everything together and return. */
15092 if (fallback
== fb_none
|| is_statement
)
15094 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
15095 it out for GC to reclaim it. */
15096 *expr_p
= NULL_TREE
;
15098 if (!gimple_seq_empty_p (internal_pre
)
15099 || !gimple_seq_empty_p (internal_post
))
15101 gimplify_seq_add_seq (&internal_pre
, internal_post
);
15102 gimplify_seq_add_seq (pre_p
, internal_pre
);
15105 /* The result of gimplifying *EXPR_P is going to be the last few
15106 statements in *PRE_P and *POST_P. Add location information
15107 to all the statements that were added by the gimplification
15109 if (!gimple_seq_empty_p (*pre_p
))
15110 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
15112 if (!gimple_seq_empty_p (*post_p
))
15113 annotate_all_with_location_after (*post_p
, post_last_gsi
,
15119 #ifdef ENABLE_GIMPLE_CHECKING
15122 enum tree_code code
= TREE_CODE (*expr_p
);
15123 /* These expressions should already be in gimple IR form. */
15124 gcc_assert (code
!= MODIFY_EXPR
15125 && code
!= ASM_EXPR
15126 && code
!= BIND_EXPR
15127 && code
!= CATCH_EXPR
15128 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
15129 && code
!= EH_FILTER_EXPR
15130 && code
!= GOTO_EXPR
15131 && code
!= LABEL_EXPR
15132 && code
!= LOOP_EXPR
15133 && code
!= SWITCH_EXPR
15134 && code
!= TRY_FINALLY_EXPR
15135 && code
!= EH_ELSE_EXPR
15136 && code
!= OACC_PARALLEL
15137 && code
!= OACC_KERNELS
15138 && code
!= OACC_SERIAL
15139 && code
!= OACC_DATA
15140 && code
!= OACC_HOST_DATA
15141 && code
!= OACC_DECLARE
15142 && code
!= OACC_UPDATE
15143 && code
!= OACC_ENTER_DATA
15144 && code
!= OACC_EXIT_DATA
15145 && code
!= OACC_CACHE
15146 && code
!= OMP_CRITICAL
15148 && code
!= OACC_LOOP
15149 && code
!= OMP_MASTER
15150 && code
!= OMP_TASKGROUP
15151 && code
!= OMP_ORDERED
15152 && code
!= OMP_PARALLEL
15153 && code
!= OMP_SCAN
15154 && code
!= OMP_SECTIONS
15155 && code
!= OMP_SECTION
15156 && code
!= OMP_SINGLE
);
15160 /* Otherwise we're gimplifying a subexpression, so the resulting
15161 value is interesting. If it's a valid operand that matches
15162 GIMPLE_TEST_F, we're done. Unless we are handling some
15163 post-effects internally; if that's the case, we need to copy into
15164 a temporary before adding the post-effects to POST_P. */
15165 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
15168 /* Otherwise, we need to create a new temporary for the gimplified
15171 /* We can't return an lvalue if we have an internal postqueue. The
15172 object the lvalue refers to would (probably) be modified by the
15173 postqueue; we need to copy the value out first, which means an
15175 if ((fallback
& fb_lvalue
)
15176 && gimple_seq_empty_p (internal_post
)
15177 && is_gimple_addressable (*expr_p
))
15179 /* An lvalue will do. Take the address of the expression, store it
15180 in a temporary, and replace the expression with an INDIRECT_REF of
15182 tree ref_alias_type
= reference_alias_ptr_type (*expr_p
);
15183 unsigned int ref_align
= get_object_alignment (*expr_p
);
15184 tree ref_type
= TREE_TYPE (*expr_p
);
15185 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
15186 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
15187 if (TYPE_ALIGN (ref_type
) != ref_align
)
15188 ref_type
= build_aligned_type (ref_type
, ref_align
);
15189 *expr_p
= build2 (MEM_REF
, ref_type
,
15190 tmp
, build_zero_cst (ref_alias_type
));
15192 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
15194 /* An rvalue will do. Assign the gimplified expression into a
15195 new temporary TMP and replace the original expression with
15196 TMP. First, make sure that the expression has a type so that
15197 it can be assigned into a temporary. */
15198 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
15199 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
15203 #ifdef ENABLE_GIMPLE_CHECKING
15204 if (!(fallback
& fb_mayfail
))
15206 fprintf (stderr
, "gimplification failed:\n");
15207 print_generic_expr (stderr
, *expr_p
);
15208 debug_tree (*expr_p
);
15209 internal_error ("gimplification failed");
15212 gcc_assert (fallback
& fb_mayfail
);
15214 /* If this is an asm statement, and the user asked for the
15215 impossible, don't die. Fail and let gimplify_asm_expr
15221 /* Make sure the temporary matches our predicate. */
15222 gcc_assert ((*gimple_test_f
) (*expr_p
));
15224 if (!gimple_seq_empty_p (internal_post
))
15226 annotate_all_with_location (internal_post
, input_location
);
15227 gimplify_seq_add_seq (pre_p
, internal_post
);
15231 input_location
= saved_location
;
15235 /* Like gimplify_expr but make sure the gimplified result is not itself
15236 a SSA name (but a decl if it were). Temporaries required by
15237 evaluating *EXPR_P may be still SSA names. */
15239 static enum gimplify_status
15240 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
15241 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
15244 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
15245 gimple_test_f
, fallback
);
15247 && TREE_CODE (*expr_p
) == SSA_NAME
)
15248 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
15252 /* Look through TYPE for variable-sized objects and gimplify each such
15253 size that we find. Add to LIST_P any statements generated. */
15256 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
15258 if (type
== NULL
|| type
== error_mark_node
)
15261 const bool ignored_p
15263 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
15264 && DECL_IGNORED_P (TYPE_NAME (type
));
15267 /* We first do the main variant, then copy into any other variants. */
15268 type
= TYPE_MAIN_VARIANT (type
);
15270 /* Avoid infinite recursion. */
15271 if (TYPE_SIZES_GIMPLIFIED (type
))
15274 TYPE_SIZES_GIMPLIFIED (type
) = 1;
15276 switch (TREE_CODE (type
))
15279 case ENUMERAL_TYPE
:
15282 case FIXED_POINT_TYPE
:
15283 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
15284 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
15286 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
15288 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
15289 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
15294 /* These types may not have declarations, so handle them here. */
15295 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
15296 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
15297 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
15298 with assigned stack slots, for -O1+ -g they should be tracked
15301 && TYPE_DOMAIN (type
)
15302 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
15304 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
15305 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
15306 DECL_IGNORED_P (t
) = 0;
15307 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
15308 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
15309 DECL_IGNORED_P (t
) = 0;
15315 case QUAL_UNION_TYPE
:
15316 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
15317 if (TREE_CODE (field
) == FIELD_DECL
)
15319 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
15320 /* Likewise, ensure variable offsets aren't removed. */
15322 && (t
= DECL_FIELD_OFFSET (field
))
15324 && DECL_ARTIFICIAL (t
))
15325 DECL_IGNORED_P (t
) = 0;
15326 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
15327 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
15328 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
15333 case REFERENCE_TYPE
:
15334 /* We used to recurse on the pointed-to type here, which turned out to
15335 be incorrect because its definition might refer to variables not
15336 yet initialized at this point if a forward declaration is involved.
15338 It was actually useful for anonymous pointed-to types to ensure
15339 that the sizes evaluation dominates every possible later use of the
15340 values. Restricting to such types here would be safe since there
15341 is no possible forward declaration around, but would introduce an
15342 undesirable middle-end semantic to anonymity. We then defer to
15343 front-ends the responsibility of ensuring that the sizes are
15344 evaluated both early and late enough, e.g. by attaching artificial
15345 type declarations to the tree. */
15352 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
15353 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
15355 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
15357 TYPE_SIZE (t
) = TYPE_SIZE (type
);
15358 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
15359 TYPE_SIZES_GIMPLIFIED (t
) = 1;
15363 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
15364 a size or position, has had all of its SAVE_EXPRs evaluated.
15365 We add any required statements to *STMT_P. */
15368 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
15370 tree expr
= *expr_p
;
15372 /* We don't do anything if the value isn't there, is constant, or contains
15373 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
15374 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
15375 will want to replace it with a new variable, but that will cause problems
15376 if this type is from outside the function. It's OK to have that here. */
15377 if (expr
== NULL_TREE
15378 || is_gimple_constant (expr
)
15379 || TREE_CODE (expr
) == VAR_DECL
15380 || CONTAINS_PLACEHOLDER_P (expr
))
15383 *expr_p
= unshare_expr (expr
);
15385 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
15386 if the def vanishes. */
15387 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
15389 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
15390 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
15391 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
15392 if (is_gimple_constant (*expr_p
))
15393 *expr_p
= get_initialized_tmp_var (*expr_p
, stmt_p
, NULL
, false);
15396 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
15397 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
15398 is true, also gimplify the parameters. */
15401 gimplify_body (tree fndecl
, bool do_parms
)
15403 location_t saved_location
= input_location
;
15404 gimple_seq parm_stmts
, parm_cleanup
= NULL
, seq
;
15405 gimple
*outer_stmt
;
15408 timevar_push (TV_TREE_GIMPLIFY
);
15410 init_tree_ssa (cfun
);
15412 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
15414 default_rtl_profile ();
15416 gcc_assert (gimplify_ctxp
== NULL
);
15417 push_gimplify_context (true);
15419 if (flag_openacc
|| flag_openmp
)
15421 gcc_assert (gimplify_omp_ctxp
== NULL
);
15422 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
15423 gimplify_omp_ctxp
= new_omp_context (ORT_IMPLICIT_TARGET
);
15426 /* Unshare most shared trees in the body and in that of any nested functions.
15427 It would seem we don't have to do this for nested functions because
15428 they are supposed to be output and then the outer function gimplified
15429 first, but the g++ front end doesn't always do it that way. */
15430 unshare_body (fndecl
);
15431 unvisit_body (fndecl
);
15433 /* Make sure input_location isn't set to something weird. */
15434 input_location
= DECL_SOURCE_LOCATION (fndecl
);
15436 /* Resolve callee-copies. This has to be done before processing
15437 the body so that DECL_VALUE_EXPR gets processed correctly. */
15438 parm_stmts
= do_parms
? gimplify_parameters (&parm_cleanup
) : NULL
;
15440 /* Gimplify the function's body. */
15442 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
15443 outer_stmt
= gimple_seq_first_nondebug_stmt (seq
);
15446 outer_stmt
= gimple_build_nop ();
15447 gimplify_seq_add_stmt (&seq
, outer_stmt
);
15450 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
15451 not the case, wrap everything in a GIMPLE_BIND to make it so. */
15452 if (gimple_code (outer_stmt
) == GIMPLE_BIND
15453 && (gimple_seq_first_nondebug_stmt (seq
)
15454 == gimple_seq_last_nondebug_stmt (seq
)))
15456 outer_bind
= as_a
<gbind
*> (outer_stmt
);
15457 if (gimple_seq_first_stmt (seq
) != outer_stmt
15458 || gimple_seq_last_stmt (seq
) != outer_stmt
)
15460 /* If there are debug stmts before or after outer_stmt, move them
15461 inside of outer_bind body. */
15462 gimple_stmt_iterator gsi
= gsi_for_stmt (outer_stmt
, &seq
);
15463 gimple_seq second_seq
= NULL
;
15464 if (gimple_seq_first_stmt (seq
) != outer_stmt
15465 && gimple_seq_last_stmt (seq
) != outer_stmt
)
15467 second_seq
= gsi_split_seq_after (gsi
);
15468 gsi_remove (&gsi
, false);
15470 else if (gimple_seq_first_stmt (seq
) != outer_stmt
)
15471 gsi_remove (&gsi
, false);
15474 gsi_remove (&gsi
, false);
15478 gimple_seq_add_seq_without_update (&seq
,
15479 gimple_bind_body (outer_bind
));
15480 gimple_seq_add_seq_without_update (&seq
, second_seq
);
15481 gimple_bind_set_body (outer_bind
, seq
);
15485 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
15487 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
15489 /* If we had callee-copies statements, insert them at the beginning
15490 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
15491 if (!gimple_seq_empty_p (parm_stmts
))
15495 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
15498 gtry
*g
= gimple_build_try (parm_stmts
, parm_cleanup
,
15499 GIMPLE_TRY_FINALLY
);
15501 gimple_seq_add_stmt (&parm_stmts
, g
);
15503 gimple_bind_set_body (outer_bind
, parm_stmts
);
15505 for (parm
= DECL_ARGUMENTS (current_function_decl
);
15506 parm
; parm
= DECL_CHAIN (parm
))
15507 if (DECL_HAS_VALUE_EXPR_P (parm
))
15509 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
15510 DECL_IGNORED_P (parm
) = 0;
15514 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
15515 && gimplify_omp_ctxp
)
15517 delete_omp_context (gimplify_omp_ctxp
);
15518 gimplify_omp_ctxp
= NULL
;
15521 pop_gimplify_context (outer_bind
);
15522 gcc_assert (gimplify_ctxp
== NULL
);
15524 if (flag_checking
&& !seen_error ())
15525 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
15527 timevar_pop (TV_TREE_GIMPLIFY
);
15528 input_location
= saved_location
;
15533 typedef char *char_p
; /* For DEF_VEC_P. */
15535 /* Return whether we should exclude FNDECL from instrumentation. */
15538 flag_instrument_functions_exclude_p (tree fndecl
)
15542 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
15543 if (v
&& v
->length () > 0)
15549 name
= lang_hooks
.decl_printable_name (fndecl
, 1);
15550 FOR_EACH_VEC_ELT (*v
, i
, s
)
15551 if (strstr (name
, s
) != NULL
)
15555 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
15556 if (v
&& v
->length () > 0)
15562 name
= DECL_SOURCE_FILE (fndecl
);
15563 FOR_EACH_VEC_ELT (*v
, i
, s
)
15564 if (strstr (name
, s
) != NULL
)
15571 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
15572 node for the function we want to gimplify.
15574 Return the sequence of GIMPLE statements corresponding to the body
15578 gimplify_function_tree (tree fndecl
)
15583 gcc_assert (!gimple_body (fndecl
));
15585 if (DECL_STRUCT_FUNCTION (fndecl
))
15586 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
15588 push_struct_function (fndecl
);
15590 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
15592 cfun
->curr_properties
|= PROP_gimple_lva
;
15594 if (asan_sanitize_use_after_scope ())
15595 asan_poisoned_variables
= new hash_set
<tree
> ();
15596 bind
= gimplify_body (fndecl
, true);
15597 if (asan_poisoned_variables
)
15599 delete asan_poisoned_variables
;
15600 asan_poisoned_variables
= NULL
;
15603 /* The tree body of the function is no longer needed, replace it
15604 with the new GIMPLE body. */
15606 gimple_seq_add_stmt (&seq
, bind
);
15607 gimple_set_body (fndecl
, seq
);
15609 /* If we're instrumenting function entry/exit, then prepend the call to
15610 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
15611 catch the exit hook. */
15612 /* ??? Add some way to ignore exceptions for this TFE. */
15613 if (flag_instrument_function_entry_exit
15614 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
15615 /* Do not instrument extern inline functions. */
15616 && !(DECL_DECLARED_INLINE_P (fndecl
)
15617 && DECL_EXTERNAL (fndecl
)
15618 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
15619 && !flag_instrument_functions_exclude_p (fndecl
))
15624 gimple_seq cleanup
= NULL
, body
= NULL
;
15625 tree tmp_var
, this_fn_addr
;
15628 /* The instrumentation hooks aren't going to call the instrumented
15629 function and the address they receive is expected to be matchable
15630 against symbol addresses. Make sure we don't create a trampoline,
15631 in case the current function is nested. */
15632 this_fn_addr
= build_fold_addr_expr (current_function_decl
);
15633 TREE_NO_TRAMPOLINE (this_fn_addr
) = 1;
15635 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
15636 call
= gimple_build_call (x
, 1, integer_zero_node
);
15637 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
15638 gimple_call_set_lhs (call
, tmp_var
);
15639 gimplify_seq_add_stmt (&cleanup
, call
);
15640 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT
);
15641 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
15642 gimplify_seq_add_stmt (&cleanup
, call
);
15643 tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
15645 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
15646 call
= gimple_build_call (x
, 1, integer_zero_node
);
15647 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
15648 gimple_call_set_lhs (call
, tmp_var
);
15649 gimplify_seq_add_stmt (&body
, call
);
15650 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER
);
15651 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
15652 gimplify_seq_add_stmt (&body
, call
);
15653 gimplify_seq_add_stmt (&body
, tf
);
15654 new_bind
= gimple_build_bind (NULL
, body
, NULL
);
15656 /* Replace the current function body with the body
15657 wrapped in the try/finally TF. */
15659 gimple_seq_add_stmt (&seq
, new_bind
);
15660 gimple_set_body (fndecl
, seq
);
15664 if (sanitize_flags_p (SANITIZE_THREAD
)
15665 && param_tsan_instrument_func_entry_exit
)
15667 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
15668 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
15669 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
15670 /* Replace the current function body with the body
15671 wrapped in the try/finally TF. */
15673 gimple_seq_add_stmt (&seq
, new_bind
);
15674 gimple_set_body (fndecl
, seq
);
15677 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
15678 cfun
->curr_properties
|= PROP_gimple_any
;
15682 dump_function (TDI_gimple
, fndecl
);
15685 /* Return a dummy expression of type TYPE in order to keep going after an
15689 dummy_object (tree type
)
15691 tree t
= build_int_cst (build_pointer_type (type
), 0);
15692 return build2 (MEM_REF
, type
, t
, t
);
15695 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
15696 builtin function, but a very special sort of operator. */
15698 enum gimplify_status
15699 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
,
15700 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
15702 tree promoted_type
, have_va_type
;
15703 tree valist
= TREE_OPERAND (*expr_p
, 0);
15704 tree type
= TREE_TYPE (*expr_p
);
15705 tree t
, tag
, aptag
;
15706 location_t loc
= EXPR_LOCATION (*expr_p
);
15708 /* Verify that valist is of the proper type. */
15709 have_va_type
= TREE_TYPE (valist
);
15710 if (have_va_type
== error_mark_node
)
15712 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
15713 if (have_va_type
== NULL_TREE
15714 && POINTER_TYPE_P (TREE_TYPE (valist
)))
15715 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
15717 = targetm
.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist
)));
15718 gcc_assert (have_va_type
!= NULL_TREE
);
15720 /* Generate a diagnostic for requesting data of a type that cannot
15721 be passed through `...' due to type promotion at the call site. */
15722 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
15725 static bool gave_help
;
15727 /* Use the expansion point to handle cases such as passing bool (defined
15728 in a system header) through `...'. */
15730 = expansion_point_location_if_in_system_header (loc
);
15732 /* Unfortunately, this is merely undefined, rather than a constraint
15733 violation, so we cannot make this an error. If this call is never
15734 executed, the program is still strictly conforming. */
15735 auto_diagnostic_group d
;
15736 warned
= warning_at (xloc
, 0,
15737 "%qT is promoted to %qT when passed through %<...%>",
15738 type
, promoted_type
);
15739 if (!gave_help
&& warned
)
15742 inform (xloc
, "(so you should pass %qT not %qT to %<va_arg%>)",
15743 promoted_type
, type
);
15746 /* We can, however, treat "undefined" any way we please.
15747 Call abort to encourage the user to fix the program. */
15749 inform (xloc
, "if this code is reached, the program will abort");
15750 /* Before the abort, allow the evaluation of the va_list
15751 expression to exit or longjmp. */
15752 gimplify_and_add (valist
, pre_p
);
15753 t
= build_call_expr_loc (loc
,
15754 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
15755 gimplify_and_add (t
, pre_p
);
15757 /* This is dead code, but go ahead and finish so that the
15758 mode of the result comes out right. */
15759 *expr_p
= dummy_object (type
);
15760 return GS_ALL_DONE
;
15763 tag
= build_int_cst (build_pointer_type (type
), 0);
15764 aptag
= build_int_cst (TREE_TYPE (valist
), 0);
15766 *expr_p
= build_call_expr_internal_loc (loc
, IFN_VA_ARG
, type
, 3,
15767 valist
, tag
, aptag
);
15769 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
15770 needs to be expanded. */
15771 cfun
->curr_properties
&= ~PROP_gimple_lva
;
15776 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15778 DST/SRC are the destination and source respectively. You can pass
15779 ungimplified trees in DST or SRC, in which case they will be
15780 converted to a gimple operand if necessary.
15782 This function returns the newly created GIMPLE_ASSIGN tuple. */
15785 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
15787 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
15788 gimplify_and_add (t
, seq_p
);
15790 return gimple_seq_last_stmt (*seq_p
);
15794 gimplify_hasher::hash (const elt_t
*p
)
15797 return iterative_hash_expr (t
, 0);
15801 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
15805 enum tree_code code
= TREE_CODE (t1
);
15807 if (TREE_CODE (t2
) != code
15808 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
15811 if (!operand_equal_p (t1
, t2
, 0))
15814 /* Only allow them to compare equal if they also hash equal; otherwise
15815 results are nondeterminate, and we fail bootstrap comparison. */
15816 gcc_checking_assert (hash (p1
) == hash (p2
));