1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2019 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
40 #include "fold-const.h"
45 #include "gimple-fold.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
56 #include "omp-general.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "stringpool.h"
68 #include "omp-offload.h"
71 /* Hash set of poisoned variables in a bind expr. */
72 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
74 enum gimplify_omp_var_data
77 GOVD_EXPLICIT
= 0x000002,
78 GOVD_SHARED
= 0x000004,
79 GOVD_PRIVATE
= 0x000008,
80 GOVD_FIRSTPRIVATE
= 0x000010,
81 GOVD_LASTPRIVATE
= 0x000020,
82 GOVD_REDUCTION
= 0x000040,
85 GOVD_DEBUG_PRIVATE
= 0x000200,
86 GOVD_PRIVATE_OUTER_REF
= 0x000400,
87 GOVD_LINEAR
= 0x000800,
88 GOVD_ALIGNED
= 0x001000,
90 /* Flag for GOVD_MAP: don't copy back. */
91 GOVD_MAP_TO_ONLY
= 0x002000,
93 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
94 GOVD_LINEAR_LASTPRIVATE_NO_OUTER
= 0x004000,
96 GOVD_MAP_0LEN_ARRAY
= 0x008000,
98 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
99 GOVD_MAP_ALWAYS_TO
= 0x010000,
101 /* Flag for shared vars that are or might be stored to in the region. */
102 GOVD_WRITTEN
= 0x020000,
104 /* Flag for GOVD_MAP, if it is a forced mapping. */
105 GOVD_MAP_FORCE
= 0x040000,
107 /* Flag for GOVD_MAP: must be present already. */
108 GOVD_MAP_FORCE_PRESENT
= 0x080000,
110 /* Flag for GOVD_MAP: only allocate. */
111 GOVD_MAP_ALLOC_ONLY
= 0x100000,
113 /* Flag for GOVD_MAP: only copy back. */
114 GOVD_MAP_FROM_ONLY
= 0x200000,
116 GOVD_NONTEMPORAL
= 0x400000,
118 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
119 GOVD_LASTPRIVATE_CONDITIONAL
= 0x800000,
121 GOVD_CONDTEMP
= 0x1000000,
123 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
124 GOVD_REDUCTION_INSCAN
= 0x2000000,
126 GOVD_DATA_SHARE_CLASS
= (GOVD_SHARED
| GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
127 | GOVD_LASTPRIVATE
| GOVD_REDUCTION
| GOVD_LINEAR
134 ORT_WORKSHARE
= 0x00,
135 ORT_TASKGROUP
= 0x01,
139 ORT_COMBINED_PARALLEL
= ORT_PARALLEL
| 1,
142 ORT_UNTIED_TASK
= ORT_TASK
| 1,
143 ORT_TASKLOOP
= ORT_TASK
| 2,
144 ORT_UNTIED_TASKLOOP
= ORT_UNTIED_TASK
| 2,
147 ORT_COMBINED_TEAMS
= ORT_TEAMS
| 1,
148 ORT_HOST_TEAMS
= ORT_TEAMS
| 2,
149 ORT_COMBINED_HOST_TEAMS
= ORT_COMBINED_TEAMS
| 2,
152 ORT_TARGET_DATA
= 0x40,
154 /* Data region with offloading. */
156 ORT_COMBINED_TARGET
= ORT_TARGET
| 1,
157 ORT_IMPLICIT_TARGET
= ORT_TARGET
| 2,
159 /* OpenACC variants. */
160 ORT_ACC
= 0x100, /* A generic OpenACC region. */
161 ORT_ACC_DATA
= ORT_ACC
| ORT_TARGET_DATA
, /* Data construct. */
162 ORT_ACC_PARALLEL
= ORT_ACC
| ORT_TARGET
, /* Parallel construct */
163 ORT_ACC_KERNELS
= ORT_ACC
| ORT_TARGET
| 2, /* Kernels construct. */
164 ORT_ACC_HOST_DATA
= ORT_ACC
| ORT_TARGET_DATA
| 2, /* Host data. */
166 /* Dummy OpenMP region, used to disable expansion of
167 DECL_VALUE_EXPRs in taskloop pre body. */
171 /* Gimplify hashtable helper. */
173 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
175 static inline hashval_t
hash (const elt_t
*);
176 static inline bool equal (const elt_t
*, const elt_t
*);
181 struct gimplify_ctx
*prev_context
;
183 vec
<gbind
*> bind_expr_stack
;
185 gimple_seq conditional_cleanups
;
189 vec
<tree
> case_labels
;
190 hash_set
<tree
> *live_switch_vars
;
191 /* The formal temporary table. Should this be persistent? */
192 hash_table
<gimplify_hasher
> *temp_htab
;
195 unsigned into_ssa
: 1;
196 unsigned allow_rhs_cond_expr
: 1;
197 unsigned in_cleanup_point_expr
: 1;
198 unsigned keep_stack
: 1;
199 unsigned save_stack
: 1;
200 unsigned in_switch_expr
: 1;
203 enum gimplify_defaultmap_kind
211 struct gimplify_omp_ctx
213 struct gimplify_omp_ctx
*outer_context
;
214 splay_tree variables
;
215 hash_set
<tree
> *privatized_types
;
217 /* Iteration variables in an OMP_FOR. */
218 vec
<tree
> loop_iter_var
;
220 enum omp_clause_default_kind default_kind
;
221 enum omp_region_type region_type
;
224 bool target_firstprivatize_array_bases
;
226 bool order_concurrent
;
230 static struct gimplify_ctx
*gimplify_ctxp
;
231 static struct gimplify_omp_ctx
*gimplify_omp_ctxp
;
232 static bool in_omp_construct
;
234 /* Forward declaration. */
235 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
236 static hash_map
<tree
, tree
> *oacc_declare_returns
;
237 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
238 bool (*) (tree
), fallback_t
, bool);
240 /* Shorter alias name for the above function for use in gimplify.c
244 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
246 gimple_seq_add_stmt_without_update (seq_p
, gs
);
249 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
250 NULL, a new sequence is allocated. This function is
251 similar to gimple_seq_add_seq, but does not scan the operands.
252 During gimplification, we need to manipulate statement sequences
253 before the def/use vectors have been constructed. */
256 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
258 gimple_stmt_iterator si
;
263 si
= gsi_last (*dst_p
);
264 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
268 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
269 and popping gimplify contexts. */
271 static struct gimplify_ctx
*ctx_pool
= NULL
;
273 /* Return a gimplify context struct from the pool. */
275 static inline struct gimplify_ctx
*
278 struct gimplify_ctx
* c
= ctx_pool
;
281 ctx_pool
= c
->prev_context
;
283 c
= XNEW (struct gimplify_ctx
);
285 memset (c
, '\0', sizeof (*c
));
289 /* Put gimplify context C back into the pool. */
292 ctx_free (struct gimplify_ctx
*c
)
294 c
->prev_context
= ctx_pool
;
298 /* Free allocated ctx stack memory. */
301 free_gimplify_stack (void)
303 struct gimplify_ctx
*c
;
305 while ((c
= ctx_pool
))
307 ctx_pool
= c
->prev_context
;
313 /* Set up a context for the gimplifier. */
316 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
318 struct gimplify_ctx
*c
= ctx_alloc ();
320 c
->prev_context
= gimplify_ctxp
;
322 gimplify_ctxp
->into_ssa
= in_ssa
;
323 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
326 /* Tear down a context for the gimplifier. If BODY is non-null, then
327 put the temporaries into the outer BIND_EXPR. Otherwise, put them
330 BODY is not a sequence, but the first tuple in a sequence. */
333 pop_gimplify_context (gimple
*body
)
335 struct gimplify_ctx
*c
= gimplify_ctxp
;
338 && (!c
->bind_expr_stack
.exists ()
339 || c
->bind_expr_stack
.is_empty ()));
340 c
->bind_expr_stack
.release ();
341 gimplify_ctxp
= c
->prev_context
;
344 declare_vars (c
->temps
, body
, false);
346 record_vars (c
->temps
);
353 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
356 gimple_push_bind_expr (gbind
*bind_stmt
)
358 gimplify_ctxp
->bind_expr_stack
.reserve (8);
359 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
362 /* Pop the first element off the stack of bindings. */
365 gimple_pop_bind_expr (void)
367 gimplify_ctxp
->bind_expr_stack
.pop ();
370 /* Return the first element of the stack of bindings. */
373 gimple_current_bind_expr (void)
375 return gimplify_ctxp
->bind_expr_stack
.last ();
378 /* Return the stack of bindings created during gimplification. */
381 gimple_bind_expr_stack (void)
383 return gimplify_ctxp
->bind_expr_stack
;
386 /* Return true iff there is a COND_EXPR between us and the innermost
387 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
390 gimple_conditional_context (void)
392 return gimplify_ctxp
->conditions
> 0;
395 /* Note that we've entered a COND_EXPR. */
398 gimple_push_condition (void)
400 #ifdef ENABLE_GIMPLE_CHECKING
401 if (gimplify_ctxp
->conditions
== 0)
402 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
404 ++(gimplify_ctxp
->conditions
);
407 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
408 now, add any conditional cleanups we've seen to the prequeue. */
411 gimple_pop_condition (gimple_seq
*pre_p
)
413 int conds
= --(gimplify_ctxp
->conditions
);
415 gcc_assert (conds
>= 0);
418 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
419 gimplify_ctxp
->conditional_cleanups
= NULL
;
423 /* A stable comparison routine for use with splay trees and DECLs. */
426 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
431 return DECL_UID (a
) - DECL_UID (b
);
434 /* Create a new omp construct that deals with variable remapping. */
436 static struct gimplify_omp_ctx
*
437 new_omp_context (enum omp_region_type region_type
)
439 struct gimplify_omp_ctx
*c
;
441 c
= XCNEW (struct gimplify_omp_ctx
);
442 c
->outer_context
= gimplify_omp_ctxp
;
443 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
444 c
->privatized_types
= new hash_set
<tree
>;
445 c
->location
= input_location
;
446 c
->region_type
= region_type
;
447 if ((region_type
& ORT_TASK
) == 0)
448 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
450 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
451 c
->defaultmap
[GDMK_SCALAR
] = GOVD_MAP
;
452 c
->defaultmap
[GDMK_AGGREGATE
] = GOVD_MAP
;
453 c
->defaultmap
[GDMK_ALLOCATABLE
] = GOVD_MAP
;
454 c
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
;
459 /* Destroy an omp construct that deals with variable remapping. */
462 delete_omp_context (struct gimplify_omp_ctx
*c
)
464 splay_tree_delete (c
->variables
);
465 delete c
->privatized_types
;
466 c
->loop_iter_var
.release ();
470 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
471 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
473 /* Both gimplify the statement T and append it to *SEQ_P. This function
474 behaves exactly as gimplify_stmt, but you don't have to pass T as a
478 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
480 gimplify_stmt (&t
, seq_p
);
483 /* Gimplify statement T into sequence *SEQ_P, and return the first
484 tuple in the sequence of generated tuples for this statement.
485 Return NULL if gimplifying T produced no tuples. */
488 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
490 gimple_stmt_iterator last
= gsi_last (*seq_p
);
492 gimplify_and_add (t
, seq_p
);
494 if (!gsi_end_p (last
))
497 return gsi_stmt (last
);
500 return gimple_seq_first_stmt (*seq_p
);
503 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
504 LHS, or for a call argument. */
507 is_gimple_mem_rhs (tree t
)
509 /* If we're dealing with a renamable type, either source or dest must be
510 a renamed variable. */
511 if (is_gimple_reg_type (TREE_TYPE (t
)))
512 return is_gimple_val (t
);
514 return is_gimple_val (t
) || is_gimple_lvalue (t
);
517 /* Return true if T is a CALL_EXPR or an expression that can be
518 assigned to a temporary. Note that this predicate should only be
519 used during gimplification. See the rationale for this in
520 gimplify_modify_expr. */
523 is_gimple_reg_rhs_or_call (tree t
)
525 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
526 || TREE_CODE (t
) == CALL_EXPR
);
529 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
530 this predicate should only be used during gimplification. See the
531 rationale for this in gimplify_modify_expr. */
534 is_gimple_mem_rhs_or_call (tree t
)
536 /* If we're dealing with a renamable type, either source or dest must be
537 a renamed variable. */
538 if (is_gimple_reg_type (TREE_TYPE (t
)))
539 return is_gimple_val (t
);
541 return (is_gimple_val (t
)
542 || is_gimple_lvalue (t
)
543 || TREE_CLOBBER_P (t
)
544 || TREE_CODE (t
) == CALL_EXPR
);
547 /* Create a temporary with a name derived from VAL. Subroutine of
548 lookup_tmp_var; nobody else should call this function. */
551 create_tmp_from_val (tree val
)
553 /* Drop all qualifiers and address-space information from the value type. */
554 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
555 tree var
= create_tmp_var (type
, get_name (val
));
556 if (TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
557 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
)
558 DECL_GIMPLE_REG_P (var
) = 1;
562 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
563 an existing expression temporary. */
566 lookup_tmp_var (tree val
, bool is_formal
)
570 /* If not optimizing, never really reuse a temporary. local-alloc
571 won't allocate any variable that is used in more than one basic
572 block, which means it will go into memory, causing much extra
573 work in reload and final and poorer code generation, outweighing
574 the extra memory allocation here. */
575 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
576 ret
= create_tmp_from_val (val
);
583 if (!gimplify_ctxp
->temp_htab
)
584 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
585 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
588 elt_p
= XNEW (elt_t
);
590 elt_p
->temp
= ret
= create_tmp_from_val (val
);
603 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
606 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
607 bool is_formal
, bool allow_ssa
)
611 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
612 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
613 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
617 && gimplify_ctxp
->into_ssa
618 && is_gimple_reg_type (TREE_TYPE (val
)))
620 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
621 if (! gimple_in_ssa_p (cfun
))
623 const char *name
= get_name (val
);
625 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
629 t
= lookup_tmp_var (val
, is_formal
);
631 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
633 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
635 /* gimplify_modify_expr might want to reduce this further. */
636 gimplify_and_add (mod
, pre_p
);
642 /* Return a formal temporary variable initialized with VAL. PRE_P is as
643 in gimplify_expr. Only use this function if:
645 1) The value of the unfactored expression represented by VAL will not
646 change between the initialization and use of the temporary, and
647 2) The temporary will not be otherwise modified.
649 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
650 and #2 means it is inappropriate for && temps.
652 For other cases, use get_initialized_tmp_var instead. */
655 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
657 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true);
660 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
661 are as in gimplify_expr. */
664 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
667 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
);
670 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
671 generate debug info for them; otherwise don't. */
674 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
681 gbind
*scope
= as_a
<gbind
*> (gs
);
683 temps
= nreverse (last
);
685 block
= gimple_bind_block (scope
);
686 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
687 if (!block
|| !debug_info
)
689 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
690 gimple_bind_set_vars (scope
, temps
);
694 /* We need to attach the nodes both to the BIND_EXPR and to its
695 associated BLOCK for debugging purposes. The key point here
696 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
697 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
698 if (BLOCK_VARS (block
))
699 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
702 gimple_bind_set_vars (scope
,
703 chainon (gimple_bind_vars (scope
), temps
));
704 BLOCK_VARS (block
) = temps
;
710 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
711 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
712 no such upper bound can be obtained. */
715 force_constant_size (tree var
)
717 /* The only attempt we make is by querying the maximum size of objects
718 of the variable's type. */
720 HOST_WIDE_INT max_size
;
722 gcc_assert (VAR_P (var
));
724 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
726 gcc_assert (max_size
>= 0);
729 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
731 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
734 /* Push the temporary variable TMP into the current binding. */
737 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
739 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
741 /* Later processing assumes that the object size is constant, which might
742 not be true at this point. Force the use of a constant upper bound in
744 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
745 force_constant_size (tmp
);
747 DECL_CONTEXT (tmp
) = fn
->decl
;
748 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
750 record_vars_into (tmp
, fn
->decl
);
753 /* Push the temporary variable TMP into the current binding. */
756 gimple_add_tmp_var (tree tmp
)
758 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
760 /* Later processing assumes that the object size is constant, which might
761 not be true at this point. Force the use of a constant upper bound in
763 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
764 force_constant_size (tmp
);
766 DECL_CONTEXT (tmp
) = current_function_decl
;
767 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
771 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
772 gimplify_ctxp
->temps
= tmp
;
774 /* Mark temporaries local within the nearest enclosing parallel. */
775 if (gimplify_omp_ctxp
)
777 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
778 int flag
= GOVD_LOCAL
;
780 && (ctx
->region_type
== ORT_WORKSHARE
781 || ctx
->region_type
== ORT_TASKGROUP
782 || ctx
->region_type
== ORT_SIMD
783 || ctx
->region_type
== ORT_ACC
))
785 if (ctx
->region_type
== ORT_SIMD
786 && TREE_ADDRESSABLE (tmp
)
787 && !TREE_STATIC (tmp
))
789 if (TREE_CODE (DECL_SIZE_UNIT (tmp
)) != INTEGER_CST
)
790 ctx
->add_safelen1
= true;
795 ctx
= ctx
->outer_context
;
798 omp_add_variable (ctx
, tmp
, flag
| GOVD_SEEN
);
807 /* This case is for nested functions. We need to expose the locals
809 body_seq
= gimple_body (current_function_decl
);
810 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
816 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
817 nodes that are referenced more than once in GENERIC functions. This is
818 necessary because gimplification (translation into GIMPLE) is performed
819 by modifying tree nodes in-place, so gimplication of a shared node in a
820 first context could generate an invalid GIMPLE form in a second context.
822 This is achieved with a simple mark/copy/unmark algorithm that walks the
823 GENERIC representation top-down, marks nodes with TREE_VISITED the first
824 time it encounters them, duplicates them if they already have TREE_VISITED
825 set, and finally removes the TREE_VISITED marks it has set.
827 The algorithm works only at the function level, i.e. it generates a GENERIC
828 representation of a function with no nodes shared within the function when
829 passed a GENERIC function (except for nodes that are allowed to be shared).
831 At the global level, it is also necessary to unshare tree nodes that are
832 referenced in more than one function, for the same aforementioned reason.
833 This requires some cooperation from the front-end. There are 2 strategies:
835 1. Manual unsharing. The front-end needs to call unshare_expr on every
836 expression that might end up being shared across functions.
838 2. Deep unsharing. This is an extension of regular unsharing. Instead
839 of calling unshare_expr on expressions that might be shared across
840 functions, the front-end pre-marks them with TREE_VISITED. This will
841 ensure that they are unshared on the first reference within functions
842 when the regular unsharing algorithm runs. The counterpart is that
843 this algorithm must look deeper than for manual unsharing, which is
844 specified by LANG_HOOKS_DEEP_UNSHARING.
846 If there are only few specific cases of node sharing across functions, it is
847 probably easier for a front-end to unshare the expressions manually. On the
848 contrary, if the expressions generated at the global level are as widespread
849 as expressions generated within functions, deep unsharing is very likely the
852 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
853 These nodes model computations that must be done once. If we were to
854 unshare something like SAVE_EXPR(i++), the gimplification process would
855 create wrong code. However, if DATA is non-null, it must hold a pointer
856 set that is used to unshare the subtrees of these nodes. */
859 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
862 enum tree_code code
= TREE_CODE (t
);
864 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
865 copy their subtrees if we can make sure to do it only once. */
866 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
868 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
874 /* Stop at types, decls, constants like copy_tree_r. */
875 else if (TREE_CODE_CLASS (code
) == tcc_type
876 || TREE_CODE_CLASS (code
) == tcc_declaration
877 || TREE_CODE_CLASS (code
) == tcc_constant
)
880 /* Cope with the statement expression extension. */
881 else if (code
== STATEMENT_LIST
)
884 /* Leave the bulk of the work to copy_tree_r itself. */
886 copy_tree_r (tp
, walk_subtrees
, NULL
);
891 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
892 If *TP has been visited already, then *TP is deeply copied by calling
893 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
896 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
899 enum tree_code code
= TREE_CODE (t
);
901 /* Skip types, decls, and constants. But we do want to look at their
902 types and the bounds of types. Mark them as visited so we properly
903 unmark their subtrees on the unmark pass. If we've already seen them,
904 don't look down further. */
905 if (TREE_CODE_CLASS (code
) == tcc_type
906 || TREE_CODE_CLASS (code
) == tcc_declaration
907 || TREE_CODE_CLASS (code
) == tcc_constant
)
909 if (TREE_VISITED (t
))
912 TREE_VISITED (t
) = 1;
915 /* If this node has been visited already, unshare it and don't look
917 else if (TREE_VISITED (t
))
919 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
923 /* Otherwise, mark the node as visited and keep looking. */
925 TREE_VISITED (t
) = 1;
930 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
931 copy_if_shared_r callback unmodified. */
934 copy_if_shared (tree
*tp
, void *data
)
936 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
939 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
940 any nested functions. */
943 unshare_body (tree fndecl
)
945 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
946 /* If the language requires deep unsharing, we need a pointer set to make
947 sure we don't repeatedly unshare subtrees of unshareable nodes. */
948 hash_set
<tree
> *visited
949 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
951 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
952 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
953 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
958 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
959 unshare_body (cgn
->decl
);
962 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
963 Subtrees are walked until the first unvisited node is encountered. */
966 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
970 /* If this node has been visited, unmark it and keep looking. */
971 if (TREE_VISITED (t
))
972 TREE_VISITED (t
) = 0;
974 /* Otherwise, don't look any deeper. */
981 /* Unmark the visited trees rooted at *TP. */
984 unmark_visited (tree
*tp
)
986 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
989 /* Likewise, but mark all trees as not visited. */
992 unvisit_body (tree fndecl
)
994 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
996 unmark_visited (&DECL_SAVED_TREE (fndecl
));
997 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
998 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
1001 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
1002 unvisit_body (cgn
->decl
);
1005 /* Unconditionally make an unshared copy of EXPR. This is used when using
1006 stored expressions which span multiple functions, such as BINFO_VTABLE,
1007 as the normal unsharing process can't tell that they're shared. */
1010 unshare_expr (tree expr
)
1012 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1016 /* Worker for unshare_expr_without_location. */
1019 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
1022 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
1028 /* Similar to unshare_expr but also prune all expression locations
1032 unshare_expr_without_location (tree expr
)
1034 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1036 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
1040 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1041 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1042 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1043 EXPR is the location of the EXPR. */
1046 rexpr_location (tree expr
, location_t or_else
= UNKNOWN_LOCATION
)
1051 if (EXPR_HAS_LOCATION (expr
))
1052 return EXPR_LOCATION (expr
);
1054 if (TREE_CODE (expr
) != STATEMENT_LIST
)
1057 tree_stmt_iterator i
= tsi_start (expr
);
1060 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
1066 if (!found
|| !tsi_one_before_end_p (i
))
1069 return rexpr_location (tsi_stmt (i
), or_else
);
1072 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1073 rexpr_location for the potential recursion. */
1076 rexpr_has_location (tree expr
)
1078 return rexpr_location (expr
) != UNKNOWN_LOCATION
;
1082 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1083 contain statements and have a value. Assign its value to a temporary
1084 and give it void_type_node. Return the temporary, or NULL_TREE if
1085 WRAPPER was already void. */
1088 voidify_wrapper_expr (tree wrapper
, tree temp
)
1090 tree type
= TREE_TYPE (wrapper
);
1091 if (type
&& !VOID_TYPE_P (type
))
1095 /* Set p to point to the body of the wrapper. Loop until we find
1096 something that isn't a wrapper. */
1097 for (p
= &wrapper
; p
&& *p
; )
1099 switch (TREE_CODE (*p
))
1102 TREE_SIDE_EFFECTS (*p
) = 1;
1103 TREE_TYPE (*p
) = void_type_node
;
1104 /* For a BIND_EXPR, the body is operand 1. */
1105 p
= &BIND_EXPR_BODY (*p
);
1108 case CLEANUP_POINT_EXPR
:
1109 case TRY_FINALLY_EXPR
:
1110 case TRY_CATCH_EXPR
:
1111 TREE_SIDE_EFFECTS (*p
) = 1;
1112 TREE_TYPE (*p
) = void_type_node
;
1113 p
= &TREE_OPERAND (*p
, 0);
1116 case STATEMENT_LIST
:
1118 tree_stmt_iterator i
= tsi_last (*p
);
1119 TREE_SIDE_EFFECTS (*p
) = 1;
1120 TREE_TYPE (*p
) = void_type_node
;
1121 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1126 /* Advance to the last statement. Set all container types to
1128 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1130 TREE_SIDE_EFFECTS (*p
) = 1;
1131 TREE_TYPE (*p
) = void_type_node
;
1135 case TRANSACTION_EXPR
:
1136 TREE_SIDE_EFFECTS (*p
) = 1;
1137 TREE_TYPE (*p
) = void_type_node
;
1138 p
= &TRANSACTION_EXPR_BODY (*p
);
1142 /* Assume that any tree upon which voidify_wrapper_expr is
1143 directly called is a wrapper, and that its body is op0. */
1146 TREE_SIDE_EFFECTS (*p
) = 1;
1147 TREE_TYPE (*p
) = void_type_node
;
1148 p
= &TREE_OPERAND (*p
, 0);
1156 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1160 /* The wrapper is on the RHS of an assignment that we're pushing
1162 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1163 || TREE_CODE (temp
) == MODIFY_EXPR
);
1164 TREE_OPERAND (temp
, 1) = *p
;
1169 temp
= create_tmp_var (type
, "retval");
1170 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1179 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1180 a temporary through which they communicate. */
1183 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1187 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1188 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1189 gimple_call_set_lhs (*save
, tmp_var
);
1192 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1196 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1199 build_asan_poison_call_expr (tree decl
)
1201 /* Do not poison variables that have size equal to zero. */
1202 tree unit_size
= DECL_SIZE_UNIT (decl
);
1203 if (zerop (unit_size
))
1206 tree base
= build_fold_addr_expr (decl
);
1208 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1210 build_int_cst (integer_type_node
,
1215 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1216 on POISON flag, shadow memory of a DECL variable. The call will be
1217 put on location identified by IT iterator, where BEFORE flag drives
1218 position where the stmt will be put. */
1221 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1224 tree unit_size
= DECL_SIZE_UNIT (decl
);
1225 tree base
= build_fold_addr_expr (decl
);
1227 /* Do not poison variables that have size equal to zero. */
1228 if (zerop (unit_size
))
1231 /* It's necessary to have all stack variables aligned to ASAN granularity
1233 if (DECL_ALIGN_UNIT (decl
) <= ASAN_SHADOW_GRANULARITY
)
1234 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* ASAN_SHADOW_GRANULARITY
);
1236 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1239 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1240 build_int_cst (integer_type_node
, flags
),
1244 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1246 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1249 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1250 either poisons or unpoisons a DECL. Created statement is appended
1251 to SEQ_P gimple sequence. */
1254 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1256 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1257 bool before
= false;
1262 asan_poison_variable (decl
, poison
, &it
, before
);
1265 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1268 sort_by_decl_uid (const void *a
, const void *b
)
1270 const tree
*t1
= (const tree
*)a
;
1271 const tree
*t2
= (const tree
*)b
;
1273 int uid1
= DECL_UID (*t1
);
1274 int uid2
= DECL_UID (*t2
);
1278 else if (uid1
> uid2
)
1284 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1285 depending on POISON flag. Created statement is appended
1286 to SEQ_P gimple sequence. */
1289 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1291 unsigned c
= variables
->elements ();
1295 auto_vec
<tree
> sorted_variables (c
);
1297 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1298 it
!= variables
->end (); ++it
)
1299 sorted_variables
.safe_push (*it
);
1301 sorted_variables
.qsort (sort_by_decl_uid
);
1305 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1307 asan_poison_variable (var
, poison
, seq_p
);
1309 /* Add use_after_scope_memory attribute for the variable in order
1310 to prevent re-written into SSA. */
1311 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1312 DECL_ATTRIBUTES (var
)))
1313 DECL_ATTRIBUTES (var
)
1314 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1316 DECL_ATTRIBUTES (var
));
1320 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1322 static enum gimplify_status
1323 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1325 tree bind_expr
= *expr_p
;
1326 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1327 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1330 gimple_seq body
, cleanup
;
1332 location_t start_locus
= 0, end_locus
= 0;
1333 tree ret_clauses
= NULL
;
1335 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1337 /* Mark variables seen in this bind expr. */
1338 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1342 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1344 /* Mark variable as local. */
1345 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
))
1347 if (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1348 || splay_tree_lookup (ctx
->variables
,
1349 (splay_tree_key
) t
) == NULL
)
1351 int flag
= GOVD_LOCAL
;
1352 if (ctx
->region_type
== ORT_SIMD
1353 && TREE_ADDRESSABLE (t
)
1354 && !TREE_STATIC (t
))
1356 if (TREE_CODE (DECL_SIZE_UNIT (t
)) != INTEGER_CST
)
1357 ctx
->add_safelen1
= true;
1359 flag
= GOVD_PRIVATE
;
1361 omp_add_variable (ctx
, t
, flag
| GOVD_SEEN
);
1363 /* Static locals inside of target construct or offloaded
1364 routines need to be "omp declare target". */
1365 if (TREE_STATIC (t
))
1366 for (; ctx
; ctx
= ctx
->outer_context
)
1367 if ((ctx
->region_type
& ORT_TARGET
) != 0)
1369 if (!lookup_attribute ("omp declare target",
1370 DECL_ATTRIBUTES (t
)))
1372 tree id
= get_identifier ("omp declare target");
1374 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (t
));
1375 varpool_node
*node
= varpool_node::get (t
);
1378 node
->offloadable
= 1;
1379 if (ENABLE_OFFLOADING
&& !DECL_EXTERNAL (t
))
1381 g
->have_offload
= true;
1383 vec_safe_push (offload_vars
, t
);
1391 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1393 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1394 cfun
->has_local_explicit_reg_vars
= true;
1397 /* Preliminarily mark non-addressed complex variables as eligible
1398 for promotion to gimple registers. We'll transform their uses
1400 if ((TREE_CODE (TREE_TYPE (t
)) == COMPLEX_TYPE
1401 || TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
1402 && !TREE_THIS_VOLATILE (t
)
1403 && (VAR_P (t
) && !DECL_HARD_REGISTER (t
))
1404 && !needs_to_live_in_memory (t
))
1405 DECL_GIMPLE_REG_P (t
) = 1;
1408 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1409 BIND_EXPR_BLOCK (bind_expr
));
1410 gimple_push_bind_expr (bind_stmt
);
1412 gimplify_ctxp
->keep_stack
= false;
1413 gimplify_ctxp
->save_stack
= false;
1415 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1417 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1418 gimple_bind_set_body (bind_stmt
, body
);
1420 /* Source location wise, the cleanup code (stack_restore and clobbers)
1421 belongs to the end of the block, so propagate what we have. The
1422 stack_save operation belongs to the beginning of block, which we can
1423 infer from the bind_expr directly if the block has no explicit
1425 if (BIND_EXPR_BLOCK (bind_expr
))
1427 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1428 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1430 if (start_locus
== 0)
1431 start_locus
= EXPR_LOCATION (bind_expr
);
1436 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1437 the stack space allocated to the VLAs. */
1438 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1440 gcall
*stack_restore
;
1442 /* Save stack on entry and restore it on exit. Add a try_finally
1443 block to achieve this. */
1444 build_stack_save_restore (&stack_save
, &stack_restore
);
1446 gimple_set_location (stack_save
, start_locus
);
1447 gimple_set_location (stack_restore
, end_locus
);
1449 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1452 /* Add clobbers for all variables that go out of scope. */
1453 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1456 && !is_global_var (t
)
1457 && DECL_CONTEXT (t
) == current_function_decl
)
1459 if (!DECL_HARD_REGISTER (t
)
1460 && !TREE_THIS_VOLATILE (t
)
1461 && !DECL_HAS_VALUE_EXPR_P (t
)
1462 /* Only care for variables that have to be in memory. Others
1463 will be rewritten into SSA names, hence moved to the
1465 && !is_gimple_reg (t
)
1466 && flag_stack_reuse
!= SR_NONE
)
1468 tree clobber
= build_clobber (TREE_TYPE (t
));
1469 gimple
*clobber_stmt
;
1470 clobber_stmt
= gimple_build_assign (t
, clobber
);
1471 gimple_set_location (clobber_stmt
, end_locus
);
1472 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1475 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1477 tree
*c
= oacc_declare_returns
->get (t
);
1481 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1485 oacc_declare_returns
->remove (t
);
1487 if (oacc_declare_returns
->is_empty ())
1489 delete oacc_declare_returns
;
1490 oacc_declare_returns
= NULL
;
1496 if (asan_poisoned_variables
!= NULL
1497 && asan_poisoned_variables
->contains (t
))
1499 asan_poisoned_variables
->remove (t
);
1500 asan_poison_variable (t
, true, &cleanup
);
1503 if (gimplify_ctxp
->live_switch_vars
!= NULL
1504 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1505 gimplify_ctxp
->live_switch_vars
->remove (t
);
1511 gimple_stmt_iterator si
= gsi_start (cleanup
);
1513 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1515 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1521 gimple_seq new_body
;
1524 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1525 GIMPLE_TRY_FINALLY
);
1528 gimplify_seq_add_stmt (&new_body
, stack_save
);
1529 gimplify_seq_add_stmt (&new_body
, gs
);
1530 gimple_bind_set_body (bind_stmt
, new_body
);
1533 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1534 if (!gimplify_ctxp
->keep_stack
)
1535 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1536 gimplify_ctxp
->save_stack
= old_save_stack
;
1538 gimple_pop_bind_expr ();
1540 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1548 *expr_p
= NULL_TREE
;
1552 /* Maybe add early return predict statement to PRE_P sequence. */
1555 maybe_add_early_return_predict_stmt (gimple_seq
*pre_p
)
1557 /* If we are not in a conditional context, add PREDICT statement. */
1558 if (gimple_conditional_context ())
1560 gimple
*predict
= gimple_build_predict (PRED_TREE_EARLY_RETURN
,
1562 gimplify_seq_add_stmt (pre_p
, predict
);
1566 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1567 GIMPLE value, it is assigned to a new temporary and the statement is
1568 re-written to return the temporary.
1570 PRE_P points to the sequence where side effects that must happen before
1571 STMT should be stored. */
1573 static enum gimplify_status
1574 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1577 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1578 tree result_decl
, result
;
1580 if (ret_expr
== error_mark_node
)
1584 || TREE_CODE (ret_expr
) == RESULT_DECL
)
1586 maybe_add_early_return_predict_stmt (pre_p
);
1587 greturn
*ret
= gimple_build_return (ret_expr
);
1588 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1589 gimplify_seq_add_stmt (pre_p
, ret
);
1593 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1594 result_decl
= NULL_TREE
;
1597 result_decl
= TREE_OPERAND (ret_expr
, 0);
1599 /* See through a return by reference. */
1600 if (TREE_CODE (result_decl
) == INDIRECT_REF
)
1601 result_decl
= TREE_OPERAND (result_decl
, 0);
1603 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1604 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1605 && TREE_CODE (result_decl
) == RESULT_DECL
);
1608 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1609 Recall that aggregate_value_p is FALSE for any aggregate type that is
1610 returned in registers. If we're returning values in registers, then
1611 we don't want to extend the lifetime of the RESULT_DECL, particularly
1612 across another call. In addition, for those aggregates for which
1613 hard_function_value generates a PARALLEL, we'll die during normal
1614 expansion of structure assignments; there's special code in expand_return
1615 to handle this case that does not exist in expand_expr. */
1618 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1620 if (TREE_CODE (DECL_SIZE (result_decl
)) != INTEGER_CST
)
1622 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1623 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1624 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1625 should be effectively allocated by the caller, i.e. all calls to
1626 this function must be subject to the Return Slot Optimization. */
1627 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1628 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1630 result
= result_decl
;
1632 else if (gimplify_ctxp
->return_temp
)
1633 result
= gimplify_ctxp
->return_temp
;
1636 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1638 /* ??? With complex control flow (usually involving abnormal edges),
1639 we can wind up warning about an uninitialized value for this. Due
1640 to how this variable is constructed and initialized, this is never
1641 true. Give up and never warn. */
1642 TREE_NO_WARNING (result
) = 1;
1644 gimplify_ctxp
->return_temp
= result
;
1647 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1648 Then gimplify the whole thing. */
1649 if (result
!= result_decl
)
1650 TREE_OPERAND (ret_expr
, 0) = result
;
1652 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1654 maybe_add_early_return_predict_stmt (pre_p
);
1655 ret
= gimple_build_return (result
);
1656 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1657 gimplify_seq_add_stmt (pre_p
, ret
);
1662 /* Gimplify a variable-length array DECL. */
1665 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1667 /* This is a variable-sized decl. Simplify its size and mark it
1668 for deferred expansion. */
1669 tree t
, addr
, ptr_type
;
1671 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1672 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1674 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1675 if (DECL_HAS_VALUE_EXPR_P (decl
))
1678 /* All occurrences of this decl in final gimplified code will be
1679 replaced by indirection. Setting DECL_VALUE_EXPR does two
1680 things: First, it lets the rest of the gimplifier know what
1681 replacement to use. Second, it lets the debug info know
1682 where to find the value. */
1683 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1684 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1685 DECL_IGNORED_P (addr
) = 0;
1686 t
= build_fold_indirect_ref (addr
);
1687 TREE_THIS_NOTRAP (t
) = 1;
1688 SET_DECL_VALUE_EXPR (decl
, t
);
1689 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1691 t
= build_alloca_call_expr (DECL_SIZE_UNIT (decl
), DECL_ALIGN (decl
),
1692 max_int_size_in_bytes (TREE_TYPE (decl
)));
1693 /* The call has been built for a variable-sized object. */
1694 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1695 t
= fold_convert (ptr_type
, t
);
1696 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1698 gimplify_and_add (t
, seq_p
);
1701 /* A helper function to be called via walk_tree. Mark all labels under *TP
1702 as being forced. To be called for DECL_INITIAL of static variables. */
1705 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1709 if (TREE_CODE (*tp
) == LABEL_DECL
)
1711 FORCED_LABEL (*tp
) = 1;
1712 cfun
->has_forced_label_in_static
= 1;
1718 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1719 and initialization explicit. */
1721 static enum gimplify_status
1722 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
1724 tree stmt
= *stmt_p
;
1725 tree decl
= DECL_EXPR_DECL (stmt
);
1727 *stmt_p
= NULL_TREE
;
1729 if (TREE_TYPE (decl
) == error_mark_node
)
1732 if ((TREE_CODE (decl
) == TYPE_DECL
1734 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
1736 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
1737 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1738 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
1741 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1742 in case its size expressions contain problematic nodes like CALL_EXPR. */
1743 if (TREE_CODE (decl
) == TYPE_DECL
1744 && DECL_ORIGINAL_TYPE (decl
)
1745 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
1747 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
1748 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
1749 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
1752 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
1754 tree init
= DECL_INITIAL (decl
);
1755 bool is_vla
= false;
1757 if (TREE_CODE (DECL_SIZE_UNIT (decl
)) != INTEGER_CST
1758 || (!TREE_STATIC (decl
)
1759 && flag_stack_check
== GENERIC_STACK_CHECK
1760 && compare_tree_int (DECL_SIZE_UNIT (decl
),
1761 STACK_CHECK_MAX_VAR_SIZE
) > 0))
1763 gimplify_vla_decl (decl
, seq_p
);
1767 if (asan_poisoned_variables
1769 && TREE_ADDRESSABLE (decl
)
1770 && !TREE_STATIC (decl
)
1771 && !DECL_HAS_VALUE_EXPR_P (decl
)
1772 && DECL_ALIGN (decl
) <= MAX_SUPPORTED_STACK_ALIGNMENT
1773 && dbg_cnt (asan_use_after_scope
)
1774 && !gimplify_omp_ctxp
)
1776 asan_poisoned_variables
->add (decl
);
1777 asan_poison_variable (decl
, false, seq_p
);
1778 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
1779 gimplify_ctxp
->live_switch_vars
->add (decl
);
1782 /* Some front ends do not explicitly declare all anonymous
1783 artificial variables. We compensate here by declaring the
1784 variables, though it would be better if the front ends would
1785 explicitly declare them. */
1786 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
1787 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
1788 gimple_add_tmp_var (decl
);
1790 if (init
&& init
!= error_mark_node
)
1792 if (!TREE_STATIC (decl
))
1794 DECL_INITIAL (decl
) = NULL_TREE
;
1795 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
1796 gimplify_and_add (init
, seq_p
);
1800 /* We must still examine initializers for static variables
1801 as they may contain a label address. */
1802 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
1809 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1810 and replacing the LOOP_EXPR with goto, but if the loop contains an
1811 EXIT_EXPR, we need to append a label for it to jump to. */
1813 static enum gimplify_status
1814 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1816 tree saved_label
= gimplify_ctxp
->exit_label
;
1817 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
1819 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
1821 gimplify_ctxp
->exit_label
= NULL_TREE
;
1823 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
1825 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
1827 if (gimplify_ctxp
->exit_label
)
1828 gimplify_seq_add_stmt (pre_p
,
1829 gimple_build_label (gimplify_ctxp
->exit_label
));
1831 gimplify_ctxp
->exit_label
= saved_label
;
1837 /* Gimplify a statement list onto a sequence. These may be created either
1838 by an enlightened front-end, or by shortcut_cond_expr. */
1840 static enum gimplify_status
1841 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
1843 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
1845 tree_stmt_iterator i
= tsi_start (*expr_p
);
1847 while (!tsi_end_p (i
))
1849 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
1862 /* Callback for walk_gimple_seq. */
1865 warn_switch_unreachable_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
1866 struct walk_stmt_info
*wi
)
1868 gimple
*stmt
= gsi_stmt (*gsi_p
);
1870 *handled_ops_p
= true;
1871 switch (gimple_code (stmt
))
1874 /* A compiler-generated cleanup or a user-written try block.
1875 If it's empty, don't dive into it--that would result in
1876 worse location info. */
1877 if (gimple_try_eval (stmt
) == NULL
)
1880 return integer_zero_node
;
1885 case GIMPLE_EH_FILTER
:
1886 case GIMPLE_TRANSACTION
:
1887 /* Walk the sub-statements. */
1888 *handled_ops_p
= false;
1892 /* Ignore these. We may generate them before declarations that
1893 are never executed. If there's something to warn about,
1894 there will be non-debug stmts too, and we'll catch those. */
1898 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
1900 *handled_ops_p
= false;
1905 /* Save the first "real" statement (not a decl/lexical scope/...). */
1907 return integer_zero_node
;
1912 /* Possibly warn about unreachable statements between switch's controlling
1913 expression and the first case. SEQ is the body of a switch expression. */
1916 maybe_warn_switch_unreachable (gimple_seq seq
)
1918 if (!warn_switch_unreachable
1919 /* This warning doesn't play well with Fortran when optimizations
1921 || lang_GNU_Fortran ()
1925 struct walk_stmt_info wi
;
1926 memset (&wi
, 0, sizeof (wi
));
1927 walk_gimple_seq (seq
, warn_switch_unreachable_r
, NULL
, &wi
);
1928 gimple
*stmt
= (gimple
*) wi
.info
;
1930 if (stmt
&& gimple_code (stmt
) != GIMPLE_LABEL
)
1932 if (gimple_code (stmt
) == GIMPLE_GOTO
1933 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
1934 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
1935 /* Don't warn for compiler-generated gotos. These occur
1936 in Duff's devices, for example. */;
1938 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
1939 "statement will never be executed");
1944 /* A label entry that pairs label and a location. */
1951 /* Find LABEL in vector of label entries VEC. */
1953 static struct label_entry
*
1954 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
1957 struct label_entry
*l
;
1959 FOR_EACH_VEC_ELT (*vec
, i
, l
)
1960 if (l
->label
== label
)
1965 /* Return true if LABEL, a LABEL_DECL, represents a case label
1966 in a vector of labels CASES. */
1969 case_label_p (const vec
<tree
> *cases
, tree label
)
1974 FOR_EACH_VEC_ELT (*cases
, i
, l
)
1975 if (CASE_LABEL (l
) == label
)
1980 /* Find the last nondebug statement in a scope STMT. */
1983 last_stmt_in_scope (gimple
*stmt
)
1988 switch (gimple_code (stmt
))
1992 gbind
*bind
= as_a
<gbind
*> (stmt
);
1993 stmt
= gimple_seq_last_nondebug_stmt (gimple_bind_body (bind
));
1994 return last_stmt_in_scope (stmt
);
1999 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
2000 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt
));
2001 gimple
*last_eval
= last_stmt_in_scope (stmt
);
2002 if (gimple_stmt_may_fallthru (last_eval
)
2003 && (last_eval
== NULL
2004 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
2005 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
2007 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt
));
2008 return last_stmt_in_scope (stmt
);
2022 /* Collect interesting labels in LABELS and return the statement preceding
2023 another case label, or a user-defined label. Store a location useful
2024 to give warnings at *PREVLOC (usually the location of the returned
2025 statement or of its surrounding scope). */
2028 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
2029 auto_vec
<struct label_entry
> *labels
,
2030 location_t
*prevloc
)
2032 gimple
*prev
= NULL
;
2034 *prevloc
= UNKNOWN_LOCATION
;
2037 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
)
2039 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2040 which starts on a GIMPLE_SWITCH and ends with a break label.
2041 Handle that as a single statement that can fall through. */
2042 gbind
*bind
= as_a
<gbind
*> (gsi_stmt (*gsi_p
));
2043 gimple
*first
= gimple_seq_first_stmt (gimple_bind_body (bind
));
2044 gimple
*last
= gimple_seq_last_stmt (gimple_bind_body (bind
));
2046 && gimple_code (first
) == GIMPLE_SWITCH
2047 && gimple_code (last
) == GIMPLE_LABEL
)
2049 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2050 if (SWITCH_BREAK_LABEL_P (label
))
2058 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
2059 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
2061 /* Nested scope. Only look at the last statement of
2062 the innermost scope. */
2063 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
2064 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
2068 /* It might be a label without a location. Use the
2069 location of the scope then. */
2070 if (!gimple_has_location (prev
))
2071 *prevloc
= bind_loc
;
2077 /* Ifs are tricky. */
2078 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
2080 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
2081 tree false_lab
= gimple_cond_false_label (cond_stmt
);
2082 location_t if_loc
= gimple_location (cond_stmt
);
2085 if (i > 1) goto <D.2259>; else goto D;
2086 we can't do much with the else-branch. */
2087 if (!DECL_ARTIFICIAL (false_lab
))
2090 /* Go on until the false label, then one step back. */
2091 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
2093 gimple
*stmt
= gsi_stmt (*gsi_p
);
2094 if (gimple_code (stmt
) == GIMPLE_LABEL
2095 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
2099 /* Not found? Oops. */
2100 if (gsi_end_p (*gsi_p
))
2103 struct label_entry l
= { false_lab
, if_loc
};
2104 labels
->safe_push (l
);
2106 /* Go to the last statement of the then branch. */
2109 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2115 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
2116 && !gimple_has_location (gsi_stmt (*gsi_p
)))
2118 /* Look at the statement before, it might be
2119 attribute fallthrough, in which case don't warn. */
2121 bool fallthru_before_dest
2122 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
2124 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
2125 if (!fallthru_before_dest
)
2127 struct label_entry l
= { goto_dest
, if_loc
};
2128 labels
->safe_push (l
);
2131 /* And move back. */
2135 /* Remember the last statement. Skip labels that are of no interest
2137 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2139 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
2140 if (find_label_entry (labels
, label
))
2141 prev
= gsi_stmt (*gsi_p
);
2143 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
2145 else if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_PREDICT
)
2147 else if (!is_gimple_debug (gsi_stmt (*gsi_p
)))
2148 prev
= gsi_stmt (*gsi_p
);
2151 while (!gsi_end_p (*gsi_p
)
2152 /* Stop if we find a case or a user-defined label. */
2153 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
2154 || !gimple_has_location (gsi_stmt (*gsi_p
))));
2156 if (prev
&& gimple_has_location (prev
))
2157 *prevloc
= gimple_location (prev
);
2161 /* Return true if the switch fallthough warning should occur. LABEL is
2162 the label statement that we're falling through to. */
2165 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
2167 gimple_stmt_iterator gsi
= *gsi_p
;
2169 /* Don't warn if the label is marked with a "falls through" comment. */
2170 if (FALLTHROUGH_LABEL_P (label
))
2173 /* Don't warn for non-case labels followed by a statement:
2178 as these are likely intentional. */
2179 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
2182 while (!gsi_end_p (gsi
)
2183 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2184 && (l
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi
))))
2185 && !case_label_p (&gimplify_ctxp
->case_labels
, l
))
2186 gsi_next_nondebug (&gsi
);
2187 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2191 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2192 immediately breaks. */
2195 /* Skip all immediately following labels. */
2196 while (!gsi_end_p (gsi
)
2197 && (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2198 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_PREDICT
))
2199 gsi_next_nondebug (&gsi
);
2201 /* { ... something; default:; } */
2203 /* { ... something; default: break; } or
2204 { ... something; default: goto L; } */
2205 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2206 /* { ... something; default: return; } */
2207 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
2213 /* Callback for walk_gimple_seq. */
2216 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2217 struct walk_stmt_info
*)
2219 gimple
*stmt
= gsi_stmt (*gsi_p
);
2221 *handled_ops_p
= true;
2222 switch (gimple_code (stmt
))
2227 case GIMPLE_EH_FILTER
:
2228 case GIMPLE_TRANSACTION
:
2229 /* Walk the sub-statements. */
2230 *handled_ops_p
= false;
2233 /* Find a sequence of form:
2240 and possibly warn. */
2243 /* Found a label. Skip all immediately following labels. */
2244 while (!gsi_end_p (*gsi_p
)
2245 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2246 gsi_next_nondebug (gsi_p
);
2248 /* There might be no more statements. */
2249 if (gsi_end_p (*gsi_p
))
2250 return integer_zero_node
;
2252 /* Vector of labels that fall through. */
2253 auto_vec
<struct label_entry
> labels
;
2255 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
, &prevloc
);
2257 /* There might be no more statements. */
2258 if (gsi_end_p (*gsi_p
))
2259 return integer_zero_node
;
2261 gimple
*next
= gsi_stmt (*gsi_p
);
2263 /* If what follows is a label, then we may have a fallthrough. */
2264 if (gimple_code (next
) == GIMPLE_LABEL
2265 && gimple_has_location (next
)
2266 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2269 struct label_entry
*l
;
2270 bool warned_p
= false;
2271 auto_diagnostic_group d
;
2272 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
2274 else if (gimple_code (prev
) == GIMPLE_LABEL
2275 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2276 && (l
= find_label_entry (&labels
, label
)))
2277 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2278 "this statement may fall through");
2279 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2280 /* Try to be clever and don't warn when the statement
2281 can't actually fall through. */
2282 && gimple_stmt_may_fallthru (prev
)
2283 && prevloc
!= UNKNOWN_LOCATION
)
2284 warned_p
= warning_at (prevloc
,
2285 OPT_Wimplicit_fallthrough_
,
2286 "this statement may fall through");
2288 inform (gimple_location (next
), "here");
2290 /* Mark this label as processed so as to prevent multiple
2291 warnings in nested switches. */
2292 FALLTHROUGH_LABEL_P (label
) = true;
2294 /* So that next warn_implicit_fallthrough_r will start looking for
2295 a new sequence starting with this label. */
2306 /* Warn when a switch case falls through. */
2309 maybe_warn_implicit_fallthrough (gimple_seq seq
)
2311 if (!warn_implicit_fallthrough
)
2314 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2317 || lang_GNU_OBJC ()))
2320 struct walk_stmt_info wi
;
2321 memset (&wi
, 0, sizeof (wi
));
2322 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
2325 /* Callback for walk_gimple_seq. */
2328 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2329 struct walk_stmt_info
*wi
)
2331 gimple
*stmt
= gsi_stmt (*gsi_p
);
2333 *handled_ops_p
= true;
2334 switch (gimple_code (stmt
))
2339 case GIMPLE_EH_FILTER
:
2340 case GIMPLE_TRANSACTION
:
2341 /* Walk the sub-statements. */
2342 *handled_ops_p
= false;
2345 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2347 gsi_remove (gsi_p
, true);
2348 if (gsi_end_p (*gsi_p
))
2350 *static_cast<location_t
*>(wi
->info
) = gimple_location (stmt
);
2351 return integer_zero_node
;
2355 location_t loc
= gimple_location (stmt
);
2357 gimple_stmt_iterator gsi2
= *gsi_p
;
2358 stmt
= gsi_stmt (gsi2
);
2359 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2361 /* Go on until the artificial label. */
2362 tree goto_dest
= gimple_goto_dest (stmt
);
2363 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2365 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2366 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2371 /* Not found? Stop. */
2372 if (gsi_end_p (gsi2
))
2375 /* Look one past it. */
2379 /* We're looking for a case label or default label here. */
2380 while (!gsi_end_p (gsi2
))
2382 stmt
= gsi_stmt (gsi2
);
2383 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2385 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2386 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2392 else if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2394 else if (!is_gimple_debug (stmt
))
2395 /* Anything else is not expected. */
2400 warning_at (loc
, 0, "attribute %<fallthrough%> not preceding "
2401 "a case label or default label");
2410 /* Expand all FALLTHROUGH () calls in SEQ. */
2413 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2415 struct walk_stmt_info wi
;
2417 memset (&wi
, 0, sizeof (wi
));
2418 wi
.info
= (void *) &loc
;
2419 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2420 if (wi
.callback_result
== integer_zero_node
)
2421 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2422 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2423 warning_at (loc
, 0, "attribute %<fallthrough%> not preceding "
2424 "a case label or default label");
2428 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2431 static enum gimplify_status
2432 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2434 tree switch_expr
= *expr_p
;
2435 gimple_seq switch_body_seq
= NULL
;
2436 enum gimplify_status ret
;
2437 tree index_type
= TREE_TYPE (switch_expr
);
2438 if (index_type
== NULL_TREE
)
2439 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
2441 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2443 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2446 if (SWITCH_BODY (switch_expr
))
2449 vec
<tree
> saved_labels
;
2450 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2451 tree default_case
= NULL_TREE
;
2452 gswitch
*switch_stmt
;
2454 /* Save old labels, get new ones from body, then restore the old
2455 labels. Save all the things from the switch body to append after. */
2456 saved_labels
= gimplify_ctxp
->case_labels
;
2457 gimplify_ctxp
->case_labels
.create (8);
2459 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2460 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2461 tree_code body_type
= TREE_CODE (SWITCH_BODY (switch_expr
));
2462 if (body_type
== BIND_EXPR
|| body_type
== STATEMENT_LIST
)
2463 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2465 gimplify_ctxp
->live_switch_vars
= NULL
;
2467 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2468 gimplify_ctxp
->in_switch_expr
= true;
2470 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2472 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2473 maybe_warn_switch_unreachable (switch_body_seq
);
2474 maybe_warn_implicit_fallthrough (switch_body_seq
);
2475 /* Only do this for the outermost GIMPLE_SWITCH. */
2476 if (!gimplify_ctxp
->in_switch_expr
)
2477 expand_FALLTHROUGH (&switch_body_seq
);
2479 labels
= gimplify_ctxp
->case_labels
;
2480 gimplify_ctxp
->case_labels
= saved_labels
;
2482 if (gimplify_ctxp
->live_switch_vars
)
2484 gcc_assert (gimplify_ctxp
->live_switch_vars
->is_empty ());
2485 delete gimplify_ctxp
->live_switch_vars
;
2487 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2489 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2492 bool add_bind
= false;
2495 glabel
*new_default
;
2498 = build_case_label (NULL_TREE
, NULL_TREE
,
2499 create_artificial_label (UNKNOWN_LOCATION
));
2500 if (old_in_switch_expr
)
2502 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case
)) = 1;
2505 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2506 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2508 else if (old_in_switch_expr
)
2510 gimple
*last
= gimple_seq_last_stmt (switch_body_seq
);
2511 if (last
&& gimple_code (last
) == GIMPLE_LABEL
)
2513 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2514 if (SWITCH_BREAK_LABEL_P (label
))
2519 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2520 default_case
, labels
);
2521 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2522 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2523 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2524 so that we can easily find the start and end of the switch
2528 gimple_seq bind_body
= NULL
;
2529 gimplify_seq_add_stmt (&bind_body
, switch_stmt
);
2530 gimple_seq_add_seq (&bind_body
, switch_body_seq
);
2531 gbind
*bind
= gimple_build_bind (NULL_TREE
, bind_body
, NULL_TREE
);
2532 gimple_set_location (bind
, EXPR_LOCATION (switch_expr
));
2533 gimplify_seq_add_stmt (pre_p
, bind
);
2537 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
2538 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
2548 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2550 static enum gimplify_status
2551 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2553 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
2554 == current_function_decl
);
2556 tree label
= LABEL_EXPR_LABEL (*expr_p
);
2557 glabel
*label_stmt
= gimple_build_label (label
);
2558 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2559 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2561 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2562 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2564 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2565 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2571 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2573 static enum gimplify_status
2574 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2576 struct gimplify_ctx
*ctxp
;
2579 /* Invalid programs can play Duff's Device type games with, for example,
2580 #pragma omp parallel. At least in the C front end, we don't
2581 detect such invalid branches until after gimplification, in the
2582 diagnose_omp_blocks pass. */
2583 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
2584 if (ctxp
->case_labels
.exists ())
2587 tree label
= CASE_LABEL (*expr_p
);
2588 label_stmt
= gimple_build_label (label
);
2589 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2590 ctxp
->case_labels
.safe_push (*expr_p
);
2591 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2593 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2594 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2596 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2597 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2603 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2607 build_and_jump (tree
*label_p
)
2609 if (label_p
== NULL
)
2610 /* If there's nowhere to jump, just fall through. */
2613 if (*label_p
== NULL_TREE
)
2615 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
2619 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
2622 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2623 This also involves building a label to jump to and communicating it to
2624 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2626 static enum gimplify_status
2627 gimplify_exit_expr (tree
*expr_p
)
2629 tree cond
= TREE_OPERAND (*expr_p
, 0);
2632 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
2633 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
2639 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2640 different from its canonical type, wrap the whole thing inside a
2641 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2644 The canonical type of a COMPONENT_REF is the type of the field being
2645 referenced--unless the field is a bit-field which can be read directly
2646 in a smaller mode, in which case the canonical type is the
2647 sign-appropriate type corresponding to that mode. */
2650 canonicalize_component_ref (tree
*expr_p
)
2652 tree expr
= *expr_p
;
2655 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
2657 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
2658 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
2660 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
2662 /* One could argue that all the stuff below is not necessary for
2663 the non-bitfield case and declare it a FE error if type
2664 adjustment would be needed. */
2665 if (TREE_TYPE (expr
) != type
)
2667 #ifdef ENABLE_TYPES_CHECKING
2668 tree old_type
= TREE_TYPE (expr
);
2672 /* We need to preserve qualifiers and propagate them from
2674 type_quals
= TYPE_QUALS (type
)
2675 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
2676 if (TYPE_QUALS (type
) != type_quals
)
2677 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
2679 /* Set the type of the COMPONENT_REF to the underlying type. */
2680 TREE_TYPE (expr
) = type
;
2682 #ifdef ENABLE_TYPES_CHECKING
2683 /* It is now a FE error, if the conversion from the canonical
2684 type to the original expression type is not useless. */
2685 gcc_assert (useless_type_conversion_p (old_type
, type
));
2690 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2691 to foo, embed that change in the ADDR_EXPR by converting
2696 where L is the lower bound. For simplicity, only do this for constant
2698 The constraint is that the type of &array[L] is trivially convertible
2702 canonicalize_addr_expr (tree
*expr_p
)
2704 tree expr
= *expr_p
;
2705 tree addr_expr
= TREE_OPERAND (expr
, 0);
2706 tree datype
, ddatype
, pddatype
;
2708 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2709 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
2710 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
2713 /* The addr_expr type should be a pointer to an array. */
2714 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
2715 if (TREE_CODE (datype
) != ARRAY_TYPE
)
2718 /* The pointer to element type shall be trivially convertible to
2719 the expression pointer type. */
2720 ddatype
= TREE_TYPE (datype
);
2721 pddatype
= build_pointer_type (ddatype
);
2722 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
2726 /* The lower bound and element sizes must be constant. */
2727 if (!TYPE_SIZE_UNIT (ddatype
)
2728 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
2729 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
2730 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
2733 /* All checks succeeded. Build a new node to merge the cast. */
2734 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
2735 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
2736 NULL_TREE
, NULL_TREE
);
2737 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
2739 /* We can have stripped a required restrict qualifier above. */
2740 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
2741 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
2744 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2745 underneath as appropriate. */
2747 static enum gimplify_status
2748 gimplify_conversion (tree
*expr_p
)
2750 location_t loc
= EXPR_LOCATION (*expr_p
);
2751 gcc_assert (CONVERT_EXPR_P (*expr_p
));
2753 /* Then strip away all but the outermost conversion. */
2754 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
2756 /* And remove the outermost conversion if it's useless. */
2757 if (tree_ssa_useless_type_conversion (*expr_p
))
2758 *expr_p
= TREE_OPERAND (*expr_p
, 0);
2760 /* If we still have a conversion at the toplevel,
2761 then canonicalize some constructs. */
2762 if (CONVERT_EXPR_P (*expr_p
))
2764 tree sub
= TREE_OPERAND (*expr_p
, 0);
2766 /* If a NOP conversion is changing the type of a COMPONENT_REF
2767 expression, then canonicalize its type now in order to expose more
2768 redundant conversions. */
2769 if (TREE_CODE (sub
) == COMPONENT_REF
)
2770 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
2772 /* If a NOP conversion is changing a pointer to array of foo
2773 to a pointer to foo, embed that change in the ADDR_EXPR. */
2774 else if (TREE_CODE (sub
) == ADDR_EXPR
)
2775 canonicalize_addr_expr (expr_p
);
2778 /* If we have a conversion to a non-register type force the
2779 use of a VIEW_CONVERT_EXPR instead. */
2780 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
2781 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
2782 TREE_OPERAND (*expr_p
, 0));
2784 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2785 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
2786 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
2791 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2792 DECL_VALUE_EXPR, and it's worth re-examining things. */
2794 static enum gimplify_status
2795 gimplify_var_or_parm_decl (tree
*expr_p
)
2797 tree decl
= *expr_p
;
2799 /* ??? If this is a local variable, and it has not been seen in any
2800 outer BIND_EXPR, then it's probably the result of a duplicate
2801 declaration, for which we've already issued an error. It would
2802 be really nice if the front end wouldn't leak these at all.
2803 Currently the only known culprit is C++ destructors, as seen
2804 in g++.old-deja/g++.jason/binding.C. */
2806 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
2807 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
2808 && decl_function_context (decl
) == current_function_decl
)
2810 gcc_assert (seen_error ());
2814 /* When within an OMP context, notice uses of variables. */
2815 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
2818 /* If the decl is an alias for another expression, substitute it now. */
2819 if (DECL_HAS_VALUE_EXPR_P (decl
))
2821 *expr_p
= unshare_expr (DECL_VALUE_EXPR (decl
));
2828 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2831 recalculate_side_effects (tree t
)
2833 enum tree_code code
= TREE_CODE (t
);
2834 int len
= TREE_OPERAND_LENGTH (t
);
2837 switch (TREE_CODE_CLASS (code
))
2839 case tcc_expression
:
2845 case PREDECREMENT_EXPR
:
2846 case PREINCREMENT_EXPR
:
2847 case POSTDECREMENT_EXPR
:
2848 case POSTINCREMENT_EXPR
:
2849 /* All of these have side-effects, no matter what their
2858 case tcc_comparison
: /* a comparison expression */
2859 case tcc_unary
: /* a unary arithmetic expression */
2860 case tcc_binary
: /* a binary arithmetic expression */
2861 case tcc_reference
: /* a reference */
2862 case tcc_vl_exp
: /* a function call */
2863 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
2864 for (i
= 0; i
< len
; ++i
)
2866 tree op
= TREE_OPERAND (t
, i
);
2867 if (op
&& TREE_SIDE_EFFECTS (op
))
2868 TREE_SIDE_EFFECTS (t
) = 1;
2873 /* No side-effects. */
2881 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2885 : min_lval '[' val ']'
2887 | compound_lval '[' val ']'
2888 | compound_lval '.' ID
2890 This is not part of the original SIMPLE definition, which separates
2891 array and member references, but it seems reasonable to handle them
2892 together. Also, this way we don't run into problems with union
2893 aliasing; gcc requires that for accesses through a union to alias, the
2894 union reference must be explicit, which was not always the case when we
2895 were splitting up array and member refs.
2897 PRE_P points to the sequence where side effects that must happen before
2898 *EXPR_P should be stored.
2900 POST_P points to the sequence where side effects that must happen after
2901 *EXPR_P should be stored. */
2903 static enum gimplify_status
2904 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2905 fallback_t fallback
)
2908 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
2910 location_t loc
= EXPR_LOCATION (*expr_p
);
2911 tree expr
= *expr_p
;
2913 /* Create a stack of the subexpressions so later we can walk them in
2914 order from inner to outer. */
2915 auto_vec
<tree
, 10> expr_stack
;
2917 /* We can handle anything that get_inner_reference can deal with. */
2918 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
2921 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2922 if (TREE_CODE (*p
) == INDIRECT_REF
)
2923 *p
= fold_indirect_ref_loc (loc
, *p
);
2925 if (handled_component_p (*p
))
2927 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2928 additional COMPONENT_REFs. */
2929 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
2930 && gimplify_var_or_parm_decl (p
) == GS_OK
)
2935 expr_stack
.safe_push (*p
);
2938 gcc_assert (expr_stack
.length ());
2940 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2941 walked through and P points to the innermost expression.
2943 Java requires that we elaborated nodes in source order. That
2944 means we must gimplify the inner expression followed by each of
2945 the indices, in order. But we can't gimplify the inner
2946 expression until we deal with any variable bounds, sizes, or
2947 positions in order to deal with PLACEHOLDER_EXPRs.
2949 So we do this in three steps. First we deal with the annotations
2950 for any variables in the components, then we gimplify the base,
2951 then we gimplify any indices, from left to right. */
2952 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
2954 tree t
= expr_stack
[i
];
2956 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2958 /* Gimplify the low bound and element type size and put them into
2959 the ARRAY_REF. If these values are set, they have already been
2961 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2963 tree low
= unshare_expr (array_ref_low_bound (t
));
2964 if (!is_gimple_min_invariant (low
))
2966 TREE_OPERAND (t
, 2) = low
;
2967 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
2968 post_p
, is_gimple_reg
,
2970 ret
= MIN (ret
, tret
);
2975 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
2976 is_gimple_reg
, fb_rvalue
);
2977 ret
= MIN (ret
, tret
);
2980 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
2982 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
2983 tree elmt_size
= unshare_expr (array_ref_element_size (t
));
2984 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
2986 /* Divide the element size by the alignment of the element
2989 = size_binop_loc (loc
, EXACT_DIV_EXPR
, elmt_size
, factor
);
2991 if (!is_gimple_min_invariant (elmt_size
))
2993 TREE_OPERAND (t
, 3) = elmt_size
;
2994 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
2995 post_p
, is_gimple_reg
,
2997 ret
= MIN (ret
, tret
);
3002 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
3003 is_gimple_reg
, fb_rvalue
);
3004 ret
= MIN (ret
, tret
);
3007 else if (TREE_CODE (t
) == COMPONENT_REF
)
3009 /* Set the field offset into T and gimplify it. */
3010 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
3012 tree offset
= unshare_expr (component_ref_field_offset (t
));
3013 tree field
= TREE_OPERAND (t
, 1);
3015 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
3017 /* Divide the offset by its alignment. */
3018 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
, offset
, factor
);
3020 if (!is_gimple_min_invariant (offset
))
3022 TREE_OPERAND (t
, 2) = offset
;
3023 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
3024 post_p
, is_gimple_reg
,
3026 ret
= MIN (ret
, tret
);
3031 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3032 is_gimple_reg
, fb_rvalue
);
3033 ret
= MIN (ret
, tret
);
3038 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3039 so as to match the min_lval predicate. Failure to do so may result
3040 in the creation of large aggregate temporaries. */
3041 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
3042 fallback
| fb_lvalue
);
3043 ret
= MIN (ret
, tret
);
3045 /* And finally, the indices and operands of ARRAY_REF. During this
3046 loop we also remove any useless conversions. */
3047 for (; expr_stack
.length () > 0; )
3049 tree t
= expr_stack
.pop ();
3051 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3053 /* Gimplify the dimension. */
3054 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1)))
3056 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
3057 is_gimple_val
, fb_rvalue
);
3058 ret
= MIN (ret
, tret
);
3062 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
3064 /* The innermost expression P may have originally had
3065 TREE_SIDE_EFFECTS set which would have caused all the outer
3066 expressions in *EXPR_P leading to P to also have had
3067 TREE_SIDE_EFFECTS set. */
3068 recalculate_side_effects (t
);
3071 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3072 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
3074 canonicalize_component_ref (expr_p
);
3077 expr_stack
.release ();
3079 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
3084 /* Gimplify the self modifying expression pointed to by EXPR_P
3087 PRE_P points to the list where side effects that must happen before
3088 *EXPR_P should be stored.
3090 POST_P points to the list where side effects that must happen after
3091 *EXPR_P should be stored.
3093 WANT_VALUE is nonzero iff we want to use the value of this expression
3094 in another expression.
3096 ARITH_TYPE is the type the computation should be performed in. */
3098 enum gimplify_status
3099 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3100 bool want_value
, tree arith_type
)
3102 enum tree_code code
;
3103 tree lhs
, lvalue
, rhs
, t1
;
3104 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
3106 enum tree_code arith_code
;
3107 enum gimplify_status ret
;
3108 location_t loc
= EXPR_LOCATION (*expr_p
);
3110 code
= TREE_CODE (*expr_p
);
3112 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
3113 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
3115 /* Prefix or postfix? */
3116 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
3117 /* Faster to treat as prefix if result is not used. */
3118 postfix
= want_value
;
3122 /* For postfix, make sure the inner expression's post side effects
3123 are executed after side effects from this expression. */
3127 /* Add or subtract? */
3128 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
3129 arith_code
= PLUS_EXPR
;
3131 arith_code
= MINUS_EXPR
;
3133 /* Gimplify the LHS into a GIMPLE lvalue. */
3134 lvalue
= TREE_OPERAND (*expr_p
, 0);
3135 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
3136 if (ret
== GS_ERROR
)
3139 /* Extract the operands to the arithmetic operation. */
3141 rhs
= TREE_OPERAND (*expr_p
, 1);
3143 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3144 that as the result value and in the postqueue operation. */
3147 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
3148 if (ret
== GS_ERROR
)
3151 lhs
= get_initialized_tmp_var (lhs
, pre_p
, NULL
);
3154 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3155 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
3157 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
3158 if (arith_code
== MINUS_EXPR
)
3159 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
3160 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
3163 t1
= fold_convert (TREE_TYPE (*expr_p
),
3164 fold_build2 (arith_code
, arith_type
,
3165 fold_convert (arith_type
, lhs
),
3166 fold_convert (arith_type
, rhs
)));
3170 gimplify_assign (lvalue
, t1
, pre_p
);
3171 gimplify_seq_add_seq (orig_post_p
, post
);
3177 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
3182 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3185 maybe_with_size_expr (tree
*expr_p
)
3187 tree expr
= *expr_p
;
3188 tree type
= TREE_TYPE (expr
);
3191 /* If we've already wrapped this or the type is error_mark_node, we can't do
3193 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
3194 || type
== error_mark_node
)
3197 /* If the size isn't known or is a constant, we have nothing to do. */
3198 size
= TYPE_SIZE_UNIT (type
);
3199 if (!size
|| poly_int_tree_p (size
))
3202 /* Otherwise, make a WITH_SIZE_EXPR. */
3203 size
= unshare_expr (size
);
3204 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
3205 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
3208 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3209 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3210 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3211 gimplified to an SSA name. */
3213 enum gimplify_status
3214 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
3217 bool (*test
) (tree
);
3220 /* In general, we allow lvalues for function arguments to avoid
3221 extra overhead of copying large aggregates out of even larger
3222 aggregates into temporaries only to copy the temporaries to
3223 the argument list. Make optimizers happy by pulling out to
3224 temporaries those types that fit in registers. */
3225 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3226 test
= is_gimple_val
, fb
= fb_rvalue
;
3229 test
= is_gimple_lvalue
, fb
= fb_either
;
3230 /* Also strip a TARGET_EXPR that would force an extra copy. */
3231 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3233 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
3235 && !VOID_TYPE_P (TREE_TYPE (init
)))
3240 /* If this is a variable sized type, we must remember the size. */
3241 maybe_with_size_expr (arg_p
);
3243 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3244 /* Make sure arguments have the same location as the function call
3246 protected_set_expr_location (*arg_p
, call_location
);
3248 /* There is a sequence point before a function call. Side effects in
3249 the argument list must occur before the actual call. So, when
3250 gimplifying arguments, force gimplify_expr to use an internal
3251 post queue which is then appended to the end of PRE_P. */
3252 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
3255 /* Don't fold inside offloading or taskreg regions: it can break code by
3256 adding decl references that weren't in the source. We'll do it during
3257 omplower pass instead. */
3260 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3262 struct gimplify_omp_ctx
*ctx
;
3263 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3264 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3266 else if ((ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
3268 /* Delay folding of builtins until the IL is in consistent state
3269 so the diagnostic machinery can do a better job. */
3270 if (gimple_call_builtin_p (gsi_stmt (*gsi
)))
3272 return fold_stmt (gsi
);
3275 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3276 WANT_VALUE is true if the result of the call is desired. */
3278 static enum gimplify_status
3279 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3281 tree fndecl
, parms
, p
, fnptrtype
;
3282 enum gimplify_status ret
;
3285 bool builtin_va_start_p
= false;
3286 location_t loc
= EXPR_LOCATION (*expr_p
);
3288 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3290 /* For reliable diagnostics during inlining, it is necessary that
3291 every call_expr be annotated with file and line. */
3292 if (! EXPR_HAS_LOCATION (*expr_p
))
3293 SET_EXPR_LOCATION (*expr_p
, input_location
);
3295 /* Gimplify internal functions created in the FEs. */
3296 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3301 nargs
= call_expr_nargs (*expr_p
);
3302 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3303 auto_vec
<tree
> vargs (nargs
);
3305 for (i
= 0; i
< nargs
; i
++)
3307 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3308 EXPR_LOCATION (*expr_p
));
3309 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3312 gcall
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3313 gimple_call_set_nothrow (call
, TREE_NOTHROW (*expr_p
));
3314 gimplify_seq_add_stmt (pre_p
, call
);
3318 /* This may be a call to a builtin function.
3320 Builtin function calls may be transformed into different
3321 (and more efficient) builtin function calls under certain
3322 circumstances. Unfortunately, gimplification can muck things
3323 up enough that the builtin expanders are not aware that certain
3324 transformations are still valid.
3326 So we attempt transformation/gimplification of the call before
3327 we gimplify the CALL_EXPR. At this time we do not manage to
3328 transform all calls in the same manner as the expanders do, but
3329 we do transform most of them. */
3330 fndecl
= get_callee_fndecl (*expr_p
);
3331 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3332 switch (DECL_FUNCTION_CODE (fndecl
))
3334 CASE_BUILT_IN_ALLOCA
:
3335 /* If the call has been built for a variable-sized object, then we
3336 want to restore the stack level when the enclosing BIND_EXPR is
3337 exited to reclaim the allocated space; otherwise, we precisely
3338 need to do the opposite and preserve the latest stack level. */
3339 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3340 gimplify_ctxp
->save_stack
= true;
3342 gimplify_ctxp
->keep_stack
= true;
3345 case BUILT_IN_VA_START
:
3347 builtin_va_start_p
= TRUE
;
3348 if (call_expr_nargs (*expr_p
) < 2)
3350 error ("too few arguments to function %<va_start%>");
3351 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3355 if (fold_builtin_next_arg (*expr_p
, true))
3357 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3363 case BUILT_IN_EH_RETURN
:
3364 cfun
->calls_eh_return
= true;
3370 if (fndecl
&& fndecl_built_in_p (fndecl
))
3372 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3373 if (new_tree
&& new_tree
!= *expr_p
)
3375 /* There was a transformation of this call which computes the
3376 same value, but in a more efficient way. Return and try
3383 /* Remember the original function pointer type. */
3384 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
3386 /* There is a sequence point before the call, so any side effects in
3387 the calling expression must occur before the actual call. Force
3388 gimplify_expr to use an internal post queue. */
3389 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3390 is_gimple_call_addr
, fb_rvalue
);
3392 nargs
= call_expr_nargs (*expr_p
);
3394 /* Get argument types for verification. */
3395 fndecl
= get_callee_fndecl (*expr_p
);
3398 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3400 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3402 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3403 p
= DECL_ARGUMENTS (fndecl
);
3408 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3411 /* If the last argument is __builtin_va_arg_pack () and it is not
3412 passed as a named argument, decrease the number of CALL_EXPR
3413 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3416 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3418 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3419 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3422 && fndecl_built_in_p (last_arg_fndecl
, BUILT_IN_VA_ARG_PACK
))
3424 tree call
= *expr_p
;
3427 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
3428 CALL_EXPR_FN (call
),
3429 nargs
, CALL_EXPR_ARGP (call
));
3431 /* Copy all CALL_EXPR flags, location and block, except
3432 CALL_EXPR_VA_ARG_PACK flag. */
3433 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
3434 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
3435 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
3436 = CALL_EXPR_RETURN_SLOT_OPT (call
);
3437 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
3438 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
3440 /* Set CALL_EXPR_VA_ARG_PACK. */
3441 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
3445 /* If the call returns twice then after building the CFG the call
3446 argument computations will no longer dominate the call because
3447 we add an abnormal incoming edge to the call. So do not use SSA
3449 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
3451 /* Gimplify the function arguments. */
3454 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
3455 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
3456 PUSH_ARGS_REVERSED
? i
-- : i
++)
3458 enum gimplify_status t
;
3460 /* Avoid gimplifying the second argument to va_start, which needs to
3461 be the plain PARM_DECL. */
3462 if ((i
!= 1) || !builtin_va_start_p
)
3464 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3465 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3473 /* Gimplify the static chain. */
3474 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
3476 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
3477 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
3480 enum gimplify_status t
;
3481 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
3482 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3488 /* Verify the function result. */
3489 if (want_value
&& fndecl
3490 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
3492 error_at (loc
, "using result of function returning %<void%>");
3496 /* Try this again in case gimplification exposed something. */
3497 if (ret
!= GS_ERROR
)
3499 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3501 if (new_tree
&& new_tree
!= *expr_p
)
3503 /* There was a transformation of this call which computes the
3504 same value, but in a more efficient way. Return and try
3512 *expr_p
= error_mark_node
;
3516 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3517 decl. This allows us to eliminate redundant or useless
3518 calls to "const" functions. */
3519 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
3521 int flags
= call_expr_flags (*expr_p
);
3522 if (flags
& (ECF_CONST
| ECF_PURE
)
3523 /* An infinite loop is considered a side effect. */
3524 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
3525 TREE_SIDE_EFFECTS (*expr_p
) = 0;
3528 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3529 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3530 form and delegate the creation of a GIMPLE_CALL to
3531 gimplify_modify_expr. This is always possible because when
3532 WANT_VALUE is true, the caller wants the result of this call into
3533 a temporary, which means that we will emit an INIT_EXPR in
3534 internal_get_tmp_var which will then be handled by
3535 gimplify_modify_expr. */
3538 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3539 have to do is replicate it as a GIMPLE_CALL tuple. */
3540 gimple_stmt_iterator gsi
;
3541 call
= gimple_build_call_from_tree (*expr_p
, fnptrtype
);
3542 notice_special_calls (call
);
3543 gimplify_seq_add_stmt (pre_p
, call
);
3544 gsi
= gsi_last (*pre_p
);
3545 maybe_fold_stmt (&gsi
);
3546 *expr_p
= NULL_TREE
;
3549 /* Remember the original function type. */
3550 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
3551 CALL_EXPR_FN (*expr_p
));
3556 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3557 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3559 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3560 condition is true or false, respectively. If null, we should generate
3561 our own to skip over the evaluation of this specific expression.
3563 LOCUS is the source location of the COND_EXPR.
3565 This function is the tree equivalent of do_jump.
3567 shortcut_cond_r should only be called by shortcut_cond_expr. */
3570 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
3573 tree local_label
= NULL_TREE
;
3574 tree t
, expr
= NULL
;
3576 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3577 retain the shortcut semantics. Just insert the gotos here;
3578 shortcut_cond_expr will append the real blocks later. */
3579 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3581 location_t new_locus
;
3583 /* Turn if (a && b) into
3585 if (a); else goto no;
3586 if (b) goto yes; else goto no;
3589 if (false_label_p
== NULL
)
3590 false_label_p
= &local_label
;
3592 /* Keep the original source location on the first 'if'. */
3593 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
3594 append_to_statement_list (t
, &expr
);
3596 /* Set the source location of the && on the second 'if'. */
3597 new_locus
= rexpr_location (pred
, locus
);
3598 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3600 append_to_statement_list (t
, &expr
);
3602 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3604 location_t new_locus
;
3606 /* Turn if (a || b) into
3609 if (b) goto yes; else goto no;
3612 if (true_label_p
== NULL
)
3613 true_label_p
= &local_label
;
3615 /* Keep the original source location on the first 'if'. */
3616 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
3617 append_to_statement_list (t
, &expr
);
3619 /* Set the source location of the || on the second 'if'. */
3620 new_locus
= rexpr_location (pred
, locus
);
3621 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3623 append_to_statement_list (t
, &expr
);
3625 else if (TREE_CODE (pred
) == COND_EXPR
3626 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
3627 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
3629 location_t new_locus
;
3631 /* As long as we're messing with gotos, turn if (a ? b : c) into
3633 if (b) goto yes; else goto no;
3635 if (c) goto yes; else goto no;
3637 Don't do this if one of the arms has void type, which can happen
3638 in C++ when the arm is throw. */
3640 /* Keep the original source location on the first 'if'. Set the source
3641 location of the ? on the second 'if'. */
3642 new_locus
= rexpr_location (pred
, locus
);
3643 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
3644 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
3645 false_label_p
, locus
),
3646 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
3647 false_label_p
, new_locus
));
3651 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
3652 build_and_jump (true_label_p
),
3653 build_and_jump (false_label_p
));
3654 SET_EXPR_LOCATION (expr
, locus
);
3659 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
3660 append_to_statement_list (t
, &expr
);
3666 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3667 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3668 statement, if it is the last one. Otherwise, return NULL. */
3671 find_goto (tree expr
)
3676 if (TREE_CODE (expr
) == GOTO_EXPR
)
3679 if (TREE_CODE (expr
) != STATEMENT_LIST
)
3682 tree_stmt_iterator i
= tsi_start (expr
);
3684 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
3687 if (!tsi_one_before_end_p (i
))
3690 return find_goto (tsi_stmt (i
));
3693 /* Same as find_goto, except that it returns NULL if the destination
3694 is not a LABEL_DECL. */
3697 find_goto_label (tree expr
)
3699 tree dest
= find_goto (expr
);
3700 if (dest
&& TREE_CODE (GOTO_DESTINATION (dest
)) == LABEL_DECL
)
3705 /* Given a conditional expression EXPR with short-circuit boolean
3706 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3707 predicate apart into the equivalent sequence of conditionals. */
3710 shortcut_cond_expr (tree expr
)
3712 tree pred
= TREE_OPERAND (expr
, 0);
3713 tree then_
= TREE_OPERAND (expr
, 1);
3714 tree else_
= TREE_OPERAND (expr
, 2);
3715 tree true_label
, false_label
, end_label
, t
;
3717 tree
*false_label_p
;
3718 bool emit_end
, emit_false
, jump_over_else
;
3719 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3720 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3722 /* First do simple transformations. */
3725 /* If there is no 'else', turn
3728 if (a) if (b) then c. */
3729 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3731 /* Keep the original source location on the first 'if'. */
3732 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3733 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3734 /* Set the source location of the && on the second 'if'. */
3735 if (rexpr_has_location (pred
))
3736 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
3737 then_
= shortcut_cond_expr (expr
);
3738 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3739 pred
= TREE_OPERAND (pred
, 0);
3740 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
3741 SET_EXPR_LOCATION (expr
, locus
);
3747 /* If there is no 'then', turn
3750 if (a); else if (b); else d. */
3751 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3753 /* Keep the original source location on the first 'if'. */
3754 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3755 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3756 /* Set the source location of the || on the second 'if'. */
3757 if (rexpr_has_location (pred
))
3758 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
3759 else_
= shortcut_cond_expr (expr
);
3760 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3761 pred
= TREE_OPERAND (pred
, 0);
3762 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
3763 SET_EXPR_LOCATION (expr
, locus
);
3767 /* If we're done, great. */
3768 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
3769 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
3772 /* Otherwise we need to mess with gotos. Change
3775 if (a); else goto no;
3778 and recursively gimplify the condition. */
3780 true_label
= false_label
= end_label
= NULL_TREE
;
3782 /* If our arms just jump somewhere, hijack those labels so we don't
3783 generate jumps to jumps. */
3785 if (tree then_goto
= find_goto_label (then_
))
3787 true_label
= GOTO_DESTINATION (then_goto
);
3792 if (tree else_goto
= find_goto_label (else_
))
3794 false_label
= GOTO_DESTINATION (else_goto
);
3799 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3801 true_label_p
= &true_label
;
3803 true_label_p
= NULL
;
3805 /* The 'else' branch also needs a label if it contains interesting code. */
3806 if (false_label
|| else_se
)
3807 false_label_p
= &false_label
;
3809 false_label_p
= NULL
;
3811 /* If there was nothing else in our arms, just forward the label(s). */
3812 if (!then_se
&& !else_se
)
3813 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3814 EXPR_LOC_OR_LOC (expr
, input_location
));
3816 /* If our last subexpression already has a terminal label, reuse it. */
3818 t
= expr_last (else_
);
3820 t
= expr_last (then_
);
3823 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
3824 end_label
= LABEL_EXPR_LABEL (t
);
3826 /* If we don't care about jumping to the 'else' branch, jump to the end
3827 if the condition is false. */
3829 false_label_p
= &end_label
;
3831 /* We only want to emit these labels if we aren't hijacking them. */
3832 emit_end
= (end_label
== NULL_TREE
);
3833 emit_false
= (false_label
== NULL_TREE
);
3835 /* We only emit the jump over the else clause if we have to--if the
3836 then clause may fall through. Otherwise we can wind up with a
3837 useless jump and a useless label at the end of gimplified code,
3838 which will cause us to think that this conditional as a whole
3839 falls through even if it doesn't. If we then inline a function
3840 which ends with such a condition, that can cause us to issue an
3841 inappropriate warning about control reaching the end of a
3842 non-void function. */
3843 jump_over_else
= block_may_fallthru (then_
);
3845 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3846 EXPR_LOC_OR_LOC (expr
, input_location
));
3849 append_to_statement_list (pred
, &expr
);
3851 append_to_statement_list (then_
, &expr
);
3856 tree last
= expr_last (expr
);
3857 t
= build_and_jump (&end_label
);
3858 if (rexpr_has_location (last
))
3859 SET_EXPR_LOCATION (t
, rexpr_location (last
));
3860 append_to_statement_list (t
, &expr
);
3864 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
3865 append_to_statement_list (t
, &expr
);
3867 append_to_statement_list (else_
, &expr
);
3869 if (emit_end
&& end_label
)
3871 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
3872 append_to_statement_list (t
, &expr
);
3878 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3881 gimple_boolify (tree expr
)
3883 tree type
= TREE_TYPE (expr
);
3884 location_t loc
= EXPR_LOCATION (expr
);
3886 if (TREE_CODE (expr
) == NE_EXPR
3887 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
3888 && integer_zerop (TREE_OPERAND (expr
, 1)))
3890 tree call
= TREE_OPERAND (expr
, 0);
3891 tree fn
= get_callee_fndecl (call
);
3893 /* For __builtin_expect ((long) (x), y) recurse into x as well
3894 if x is truth_value_p. */
3896 && fndecl_built_in_p (fn
, BUILT_IN_EXPECT
)
3897 && call_expr_nargs (call
) == 2)
3899 tree arg
= CALL_EXPR_ARG (call
, 0);
3902 if (TREE_CODE (arg
) == NOP_EXPR
3903 && TREE_TYPE (arg
) == TREE_TYPE (call
))
3904 arg
= TREE_OPERAND (arg
, 0);
3905 if (truth_value_p (TREE_CODE (arg
)))
3907 arg
= gimple_boolify (arg
);
3908 CALL_EXPR_ARG (call
, 0)
3909 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
3915 switch (TREE_CODE (expr
))
3917 case TRUTH_AND_EXPR
:
3919 case TRUTH_XOR_EXPR
:
3920 case TRUTH_ANDIF_EXPR
:
3921 case TRUTH_ORIF_EXPR
:
3922 /* Also boolify the arguments of truth exprs. */
3923 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
3926 case TRUTH_NOT_EXPR
:
3927 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3929 /* These expressions always produce boolean results. */
3930 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3931 TREE_TYPE (expr
) = boolean_type_node
;
3935 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
3937 case annot_expr_ivdep_kind
:
3938 case annot_expr_unroll_kind
:
3939 case annot_expr_no_vector_kind
:
3940 case annot_expr_vector_kind
:
3941 case annot_expr_parallel_kind
:
3942 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3943 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3944 TREE_TYPE (expr
) = boolean_type_node
;
3951 if (COMPARISON_CLASS_P (expr
))
3953 /* There expressions always prduce boolean results. */
3954 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3955 TREE_TYPE (expr
) = boolean_type_node
;
3958 /* Other expressions that get here must have boolean values, but
3959 might need to be converted to the appropriate mode. */
3960 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
3962 return fold_convert_loc (loc
, boolean_type_node
, expr
);
3966 /* Given a conditional expression *EXPR_P without side effects, gimplify
3967 its operands. New statements are inserted to PRE_P. */
3969 static enum gimplify_status
3970 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
3972 tree expr
= *expr_p
, cond
;
3973 enum gimplify_status ret
, tret
;
3974 enum tree_code code
;
3976 cond
= gimple_boolify (COND_EXPR_COND (expr
));
3978 /* We need to handle && and || specially, as their gimplification
3979 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3980 code
= TREE_CODE (cond
);
3981 if (code
== TRUTH_ANDIF_EXPR
)
3982 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
3983 else if (code
== TRUTH_ORIF_EXPR
)
3984 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
3985 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_condexpr
, fb_rvalue
);
3986 COND_EXPR_COND (*expr_p
) = cond
;
3988 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
3989 is_gimple_val
, fb_rvalue
);
3990 ret
= MIN (ret
, tret
);
3991 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
3992 is_gimple_val
, fb_rvalue
);
3994 return MIN (ret
, tret
);
3997 /* Return true if evaluating EXPR could trap.
3998 EXPR is GENERIC, while tree_could_trap_p can be called
4002 generic_expr_could_trap_p (tree expr
)
4006 if (!expr
|| is_gimple_val (expr
))
4009 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
4012 n
= TREE_OPERAND_LENGTH (expr
);
4013 for (i
= 0; i
< n
; i
++)
4014 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
4020 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4029 The second form is used when *EXPR_P is of type void.
4031 PRE_P points to the list where side effects that must happen before
4032 *EXPR_P should be stored. */
4034 static enum gimplify_status
4035 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
4037 tree expr
= *expr_p
;
4038 tree type
= TREE_TYPE (expr
);
4039 location_t loc
= EXPR_LOCATION (expr
);
4040 tree tmp
, arm1
, arm2
;
4041 enum gimplify_status ret
;
4042 tree label_true
, label_false
, label_cont
;
4043 bool have_then_clause_p
, have_else_clause_p
;
4045 enum tree_code pred_code
;
4046 gimple_seq seq
= NULL
;
4048 /* If this COND_EXPR has a value, copy the values into a temporary within
4050 if (!VOID_TYPE_P (type
))
4052 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
4055 /* If either an rvalue is ok or we do not require an lvalue, create the
4056 temporary. But we cannot do that if the type is addressable. */
4057 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
4058 && !TREE_ADDRESSABLE (type
))
4060 if (gimplify_ctxp
->allow_rhs_cond_expr
4061 /* If either branch has side effects or could trap, it can't be
4062 evaluated unconditionally. */
4063 && !TREE_SIDE_EFFECTS (then_
)
4064 && !generic_expr_could_trap_p (then_
)
4065 && !TREE_SIDE_EFFECTS (else_
)
4066 && !generic_expr_could_trap_p (else_
))
4067 return gimplify_pure_cond_expr (expr_p
, pre_p
);
4069 tmp
= create_tmp_var (type
, "iftmp");
4073 /* Otherwise, only create and copy references to the values. */
4076 type
= build_pointer_type (type
);
4078 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4079 then_
= build_fold_addr_expr_loc (loc
, then_
);
4081 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4082 else_
= build_fold_addr_expr_loc (loc
, else_
);
4085 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
4087 tmp
= create_tmp_var (type
, "iftmp");
4088 result
= build_simple_mem_ref_loc (loc
, tmp
);
4091 /* Build the new then clause, `tmp = then_;'. But don't build the
4092 assignment if the value is void; in C++ it can be if it's a throw. */
4093 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4094 TREE_OPERAND (expr
, 1) = build2 (INIT_EXPR
, type
, tmp
, then_
);
4096 /* Similarly, build the new else clause, `tmp = else_;'. */
4097 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4098 TREE_OPERAND (expr
, 2) = build2 (INIT_EXPR
, type
, tmp
, else_
);
4100 TREE_TYPE (expr
) = void_type_node
;
4101 recalculate_side_effects (expr
);
4103 /* Move the COND_EXPR to the prequeue. */
4104 gimplify_stmt (&expr
, pre_p
);
4110 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4111 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
4112 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
4113 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
4115 /* Make sure the condition has BOOLEAN_TYPE. */
4116 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4118 /* Break apart && and || conditions. */
4119 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
4120 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
4122 expr
= shortcut_cond_expr (expr
);
4124 if (expr
!= *expr_p
)
4128 /* We can't rely on gimplify_expr to re-gimplify the expanded
4129 form properly, as cleanups might cause the target labels to be
4130 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4131 set up a conditional context. */
4132 gimple_push_condition ();
4133 gimplify_stmt (expr_p
, &seq
);
4134 gimple_pop_condition (pre_p
);
4135 gimple_seq_add_seq (pre_p
, seq
);
4141 /* Now do the normal gimplification. */
4143 /* Gimplify condition. */
4144 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
, is_gimple_condexpr
,
4146 if (ret
== GS_ERROR
)
4148 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
4150 gimple_push_condition ();
4152 have_then_clause_p
= have_else_clause_p
= false;
4153 label_true
= find_goto_label (TREE_OPERAND (expr
, 1));
4155 && DECL_CONTEXT (GOTO_DESTINATION (label_true
)) == current_function_decl
4156 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4157 have different locations, otherwise we end up with incorrect
4158 location information on the branches. */
4160 || !EXPR_HAS_LOCATION (expr
)
4161 || !rexpr_has_location (label_true
)
4162 || EXPR_LOCATION (expr
) == rexpr_location (label_true
)))
4164 have_then_clause_p
= true;
4165 label_true
= GOTO_DESTINATION (label_true
);
4168 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
4169 label_false
= find_goto_label (TREE_OPERAND (expr
, 2));
4171 && DECL_CONTEXT (GOTO_DESTINATION (label_false
)) == current_function_decl
4172 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4173 have different locations, otherwise we end up with incorrect
4174 location information on the branches. */
4176 || !EXPR_HAS_LOCATION (expr
)
4177 || !rexpr_has_location (label_false
)
4178 || EXPR_LOCATION (expr
) == rexpr_location (label_false
)))
4180 have_else_clause_p
= true;
4181 label_false
= GOTO_DESTINATION (label_false
);
4184 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
4186 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
4188 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
4190 gimple_set_no_warning (cond_stmt
, TREE_NO_WARNING (COND_EXPR_COND (expr
)));
4191 gimplify_seq_add_stmt (&seq
, cond_stmt
);
4192 gimple_stmt_iterator gsi
= gsi_last (seq
);
4193 maybe_fold_stmt (&gsi
);
4195 label_cont
= NULL_TREE
;
4196 if (!have_then_clause_p
)
4198 /* For if (...) {} else { code; } put label_true after
4200 if (TREE_OPERAND (expr
, 1) == NULL_TREE
4201 && !have_else_clause_p
4202 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
4203 label_cont
= label_true
;
4206 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
4207 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
4208 /* For if (...) { code; } else {} or
4209 if (...) { code; } else goto label; or
4210 if (...) { code; return; } else { ... }
4211 label_cont isn't needed. */
4212 if (!have_else_clause_p
4213 && TREE_OPERAND (expr
, 2) != NULL_TREE
4214 && gimple_seq_may_fallthru (seq
))
4217 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
4219 g
= gimple_build_goto (label_cont
);
4221 /* GIMPLE_COND's are very low level; they have embedded
4222 gotos. This particular embedded goto should not be marked
4223 with the location of the original COND_EXPR, as it would
4224 correspond to the COND_EXPR's condition, not the ELSE or the
4225 THEN arms. To avoid marking it with the wrong location, flag
4226 it as "no location". */
4227 gimple_set_do_not_emit_location (g
);
4229 gimplify_seq_add_stmt (&seq
, g
);
4233 if (!have_else_clause_p
)
4235 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
4236 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
4239 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
4241 gimple_pop_condition (pre_p
);
4242 gimple_seq_add_seq (pre_p
, seq
);
4244 if (ret
== GS_ERROR
)
4246 else if (have_then_clause_p
|| have_else_clause_p
)
4250 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4251 expr
= TREE_OPERAND (expr
, 0);
4252 gimplify_stmt (&expr
, pre_p
);
4259 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4260 to be marked addressable.
4262 We cannot rely on such an expression being directly markable if a temporary
4263 has been created by the gimplification. In this case, we create another
4264 temporary and initialize it with a copy, which will become a store after we
4265 mark it addressable. This can happen if the front-end passed us something
4266 that it could not mark addressable yet, like a Fortran pass-by-reference
4267 parameter (int) floatvar. */
4270 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
4272 while (handled_component_p (*expr_p
))
4273 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4274 if (is_gimple_reg (*expr_p
))
4276 /* Do not allow an SSA name as the temporary. */
4277 tree var
= get_initialized_tmp_var (*expr_p
, seq_p
, NULL
, false);
4278 DECL_GIMPLE_REG_P (var
) = 0;
4283 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4284 a call to __builtin_memcpy. */
4286 static enum gimplify_status
4287 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4290 tree t
, to
, to_ptr
, from
, from_ptr
;
4292 location_t loc
= EXPR_LOCATION (*expr_p
);
4294 to
= TREE_OPERAND (*expr_p
, 0);
4295 from
= TREE_OPERAND (*expr_p
, 1);
4297 /* Mark the RHS addressable. Beware that it may not be possible to do so
4298 directly if a temporary has been created by the gimplification. */
4299 prepare_gimple_addressable (&from
, seq_p
);
4301 mark_addressable (from
);
4302 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4303 gimplify_arg (&from_ptr
, seq_p
, loc
);
4305 mark_addressable (to
);
4306 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4307 gimplify_arg (&to_ptr
, seq_p
, loc
);
4309 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4311 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
4315 /* tmp = memcpy() */
4316 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4317 gimple_call_set_lhs (gs
, t
);
4318 gimplify_seq_add_stmt (seq_p
, gs
);
4320 *expr_p
= build_simple_mem_ref (t
);
4324 gimplify_seq_add_stmt (seq_p
, gs
);
4329 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4330 a call to __builtin_memset. In this case we know that the RHS is
4331 a CONSTRUCTOR with an empty element list. */
4333 static enum gimplify_status
4334 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4337 tree t
, from
, to
, to_ptr
;
4339 location_t loc
= EXPR_LOCATION (*expr_p
);
4341 /* Assert our assumptions, to abort instead of producing wrong code
4342 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4343 not be immediately exposed. */
4344 from
= TREE_OPERAND (*expr_p
, 1);
4345 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4346 from
= TREE_OPERAND (from
, 0);
4348 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4349 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4352 to
= TREE_OPERAND (*expr_p
, 0);
4354 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4355 gimplify_arg (&to_ptr
, seq_p
, loc
);
4356 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4358 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4362 /* tmp = memset() */
4363 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4364 gimple_call_set_lhs (gs
, t
);
4365 gimplify_seq_add_stmt (seq_p
, gs
);
4367 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
4371 gimplify_seq_add_stmt (seq_p
, gs
);
4376 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4377 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4378 assignment. Return non-null if we detect a potential overlap. */
4380 struct gimplify_init_ctor_preeval_data
4382 /* The base decl of the lhs object. May be NULL, in which case we
4383 have to assume the lhs is indirect. */
4386 /* The alias set of the lhs object. */
4387 alias_set_type lhs_alias_set
;
4391 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4393 struct gimplify_init_ctor_preeval_data
*data
4394 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4397 /* If we find the base object, obviously we have overlap. */
4398 if (data
->lhs_base_decl
== t
)
4401 /* If the constructor component is indirect, determine if we have a
4402 potential overlap with the lhs. The only bits of information we
4403 have to go on at this point are addressability and alias sets. */
4404 if ((INDIRECT_REF_P (t
)
4405 || TREE_CODE (t
) == MEM_REF
)
4406 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4407 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
4410 /* If the constructor component is a call, determine if it can hide a
4411 potential overlap with the lhs through an INDIRECT_REF like above.
4412 ??? Ugh - this is completely broken. In fact this whole analysis
4413 doesn't look conservative. */
4414 if (TREE_CODE (t
) == CALL_EXPR
)
4416 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
4418 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
4419 if (POINTER_TYPE_P (TREE_VALUE (type
))
4420 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4421 && alias_sets_conflict_p (data
->lhs_alias_set
,
4423 (TREE_TYPE (TREE_VALUE (type
)))))
4427 if (IS_TYPE_OR_DECL_P (t
))
4432 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4433 force values that overlap with the lhs (as described by *DATA)
4434 into temporaries. */
4437 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4438 struct gimplify_init_ctor_preeval_data
*data
)
4440 enum gimplify_status one
;
4442 /* If the value is constant, then there's nothing to pre-evaluate. */
4443 if (TREE_CONSTANT (*expr_p
))
4445 /* Ensure it does not have side effects, it might contain a reference to
4446 the object we're initializing. */
4447 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
4451 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4452 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
4455 /* Recurse for nested constructors. */
4456 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
4458 unsigned HOST_WIDE_INT ix
;
4459 constructor_elt
*ce
;
4460 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
4462 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
4463 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
4468 /* If this is a variable sized type, we must remember the size. */
4469 maybe_with_size_expr (expr_p
);
4471 /* Gimplify the constructor element to something appropriate for the rhs
4472 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4473 the gimplifier will consider this a store to memory. Doing this
4474 gimplification now means that we won't have to deal with complicated
4475 language-specific trees, nor trees like SAVE_EXPR that can induce
4476 exponential search behavior. */
4477 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
4478 if (one
== GS_ERROR
)
4484 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4485 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4486 always be true for all scalars, since is_gimple_mem_rhs insists on a
4487 temporary variable for them. */
4488 if (DECL_P (*expr_p
))
4491 /* If this is of variable size, we have no choice but to assume it doesn't
4492 overlap since we can't make a temporary for it. */
4493 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
4496 /* Otherwise, we must search for overlap ... */
4497 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
4500 /* ... and if found, force the value into a temporary. */
4501 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
4504 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4505 a RANGE_EXPR in a CONSTRUCTOR for an array.
4509 object[var] = value;
4516 We increment var _after_ the loop exit check because we might otherwise
4517 fail if upper == TYPE_MAX_VALUE (type for upper).
4519 Note that we never have to deal with SAVE_EXPRs here, because this has
4520 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4522 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
4523 gimple_seq
*, bool);
4526 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
4527 tree value
, tree array_elt_type
,
4528 gimple_seq
*pre_p
, bool cleared
)
4530 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
4531 tree var
, var_type
, cref
, tmp
;
4533 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
4534 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
4535 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
4537 /* Create and initialize the index variable. */
4538 var_type
= TREE_TYPE (upper
);
4539 var
= create_tmp_var (var_type
);
4540 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
4542 /* Add the loop entry label. */
4543 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
4545 /* Build the reference. */
4546 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4547 var
, NULL_TREE
, NULL_TREE
);
4549 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4550 the store. Otherwise just assign value to the reference. */
4552 if (TREE_CODE (value
) == CONSTRUCTOR
)
4553 /* NB we might have to call ourself recursively through
4554 gimplify_init_ctor_eval if the value is a constructor. */
4555 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4558 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
4560 /* We exit the loop when the index var is equal to the upper bound. */
4561 gimplify_seq_add_stmt (pre_p
,
4562 gimple_build_cond (EQ_EXPR
, var
, upper
,
4563 loop_exit_label
, fall_thru_label
));
4565 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
4567 /* Otherwise, increment the index var... */
4568 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
4569 fold_convert (var_type
, integer_one_node
));
4570 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
4572 /* ...and jump back to the loop entry. */
4573 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
4575 /* Add the loop exit label. */
4576 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
4579 /* Return true if FDECL is accessing a field that is zero sized. */
4582 zero_sized_field_decl (const_tree fdecl
)
4584 if (TREE_CODE (fdecl
) == FIELD_DECL
&& DECL_SIZE (fdecl
)
4585 && integer_zerop (DECL_SIZE (fdecl
)))
4590 /* Return true if TYPE is zero sized. */
4593 zero_sized_type (const_tree type
)
4595 if (AGGREGATE_TYPE_P (type
) && TYPE_SIZE (type
)
4596 && integer_zerop (TYPE_SIZE (type
)))
4601 /* A subroutine of gimplify_init_constructor. Generate individual
4602 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4603 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4604 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4608 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
4609 gimple_seq
*pre_p
, bool cleared
)
4611 tree array_elt_type
= NULL
;
4612 unsigned HOST_WIDE_INT ix
;
4613 tree purpose
, value
;
4615 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
4616 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
4618 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
4622 /* NULL values are created above for gimplification errors. */
4626 if (cleared
&& initializer_zerop (value
))
4629 /* ??? Here's to hoping the front end fills in all of the indices,
4630 so we don't have to figure out what's missing ourselves. */
4631 gcc_assert (purpose
);
4633 /* Skip zero-sized fields, unless value has side-effects. This can
4634 happen with calls to functions returning a zero-sized type, which
4635 we shouldn't discard. As a number of downstream passes don't
4636 expect sets of zero-sized fields, we rely on the gimplification of
4637 the MODIFY_EXPR we make below to drop the assignment statement. */
4638 if (! TREE_SIDE_EFFECTS (value
) && zero_sized_field_decl (purpose
))
4641 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4643 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4645 tree lower
= TREE_OPERAND (purpose
, 0);
4646 tree upper
= TREE_OPERAND (purpose
, 1);
4648 /* If the lower bound is equal to upper, just treat it as if
4649 upper was the index. */
4650 if (simple_cst_equal (lower
, upper
))
4654 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
4655 array_elt_type
, pre_p
, cleared
);
4662 /* Do not use bitsizetype for ARRAY_REF indices. */
4663 if (TYPE_DOMAIN (TREE_TYPE (object
)))
4665 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
4667 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4668 purpose
, NULL_TREE
, NULL_TREE
);
4672 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
4673 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
4674 unshare_expr (object
), purpose
, NULL_TREE
);
4677 if (TREE_CODE (value
) == CONSTRUCTOR
4678 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
4679 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4683 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
4684 gimplify_and_add (init
, pre_p
);
4690 /* Return the appropriate RHS predicate for this LHS. */
4693 rhs_predicate_for (tree lhs
)
4695 if (is_gimple_reg (lhs
))
4696 return is_gimple_reg_rhs_or_call
;
4698 return is_gimple_mem_rhs_or_call
;
4701 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4702 before the LHS has been gimplified. */
4704 static gimple_predicate
4705 initial_rhs_predicate_for (tree lhs
)
4707 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
4708 return is_gimple_reg_rhs_or_call
;
4710 return is_gimple_mem_rhs_or_call
;
4713 /* Gimplify a C99 compound literal expression. This just means adding
4714 the DECL_EXPR before the current statement and using its anonymous
4717 static enum gimplify_status
4718 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
4719 bool (*gimple_test_f
) (tree
),
4720 fallback_t fallback
)
4722 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
4723 tree decl
= DECL_EXPR_DECL (decl_s
);
4724 tree init
= DECL_INITIAL (decl
);
4725 /* Mark the decl as addressable if the compound literal
4726 expression is addressable now, otherwise it is marked too late
4727 after we gimplify the initialization expression. */
4728 if (TREE_ADDRESSABLE (*expr_p
))
4729 TREE_ADDRESSABLE (decl
) = 1;
4730 /* Otherwise, if we don't need an lvalue and have a literal directly
4731 substitute it. Check if it matches the gimple predicate, as
4732 otherwise we'd generate a new temporary, and we can as well just
4733 use the decl we already have. */
4734 else if (!TREE_ADDRESSABLE (decl
)
4735 && !TREE_THIS_VOLATILE (decl
)
4737 && (fallback
& fb_lvalue
) == 0
4738 && gimple_test_f (init
))
4744 /* Preliminarily mark non-addressed complex variables as eligible
4745 for promotion to gimple registers. We'll transform their uses
4747 if ((TREE_CODE (TREE_TYPE (decl
)) == COMPLEX_TYPE
4748 || TREE_CODE (TREE_TYPE (decl
)) == VECTOR_TYPE
)
4749 && !TREE_THIS_VOLATILE (decl
)
4750 && !needs_to_live_in_memory (decl
))
4751 DECL_GIMPLE_REG_P (decl
) = 1;
4753 /* If the decl is not addressable, then it is being used in some
4754 expression or on the right hand side of a statement, and it can
4755 be put into a readonly data section. */
4756 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
4757 TREE_READONLY (decl
) = 1;
4759 /* This decl isn't mentioned in the enclosing block, so add it to the
4760 list of temps. FIXME it seems a bit of a kludge to say that
4761 anonymous artificial vars aren't pushed, but everything else is. */
4762 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
4763 gimple_add_tmp_var (decl
);
4765 gimplify_and_add (decl_s
, pre_p
);
4770 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4771 return a new CONSTRUCTOR if something changed. */
4774 optimize_compound_literals_in_ctor (tree orig_ctor
)
4776 tree ctor
= orig_ctor
;
4777 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
4778 unsigned int idx
, num
= vec_safe_length (elts
);
4780 for (idx
= 0; idx
< num
; idx
++)
4782 tree value
= (*elts
)[idx
].value
;
4783 tree newval
= value
;
4784 if (TREE_CODE (value
) == CONSTRUCTOR
)
4785 newval
= optimize_compound_literals_in_ctor (value
);
4786 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
4788 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
4789 tree decl
= DECL_EXPR_DECL (decl_s
);
4790 tree init
= DECL_INITIAL (decl
);
4792 if (!TREE_ADDRESSABLE (value
)
4793 && !TREE_ADDRESSABLE (decl
)
4795 && TREE_CODE (init
) == CONSTRUCTOR
)
4796 newval
= optimize_compound_literals_in_ctor (init
);
4798 if (newval
== value
)
4801 if (ctor
== orig_ctor
)
4803 ctor
= copy_node (orig_ctor
);
4804 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
4805 elts
= CONSTRUCTOR_ELTS (ctor
);
4807 (*elts
)[idx
].value
= newval
;
/* NOTE(review): the text of this function has been garbled by whatever
   extracted this file -- each original source line is split across
   several physical lines (with the original line number fused onto the
   first fragment) and many lines (braces, returns, case labels, else
   arms) are missing entirely.  The code below is preserved byte for
   byte in its garbled state; do not edit it in this form.  Restore the
   function from the original gimplify.c before making changes.  */
4812 /* A subroutine of gimplify_modify_expr. Break out elements of a
4813 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4815 Note that we still need to clear any elements that don't have explicit
4816 initializers, so if not all elements are initialized we keep the
4817 original MODIFY_EXPR, we just remove all of the constructor elements.
4819 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4820 GS_ERROR if we would have to create a temporary when gimplifying
4821 this constructor. Otherwise, return GS_OK.
4823 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4825 static enum gimplify_status
4826 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4827 bool want_value
, bool notify_temp_creation
)
4829 tree object
, ctor
, type
;
4830 enum gimplify_status ret
;
4831 vec
<constructor_elt
, va_gc
> *elts
;
4833 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
4835 if (!notify_temp_creation
)
4837 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
4838 is_gimple_lvalue
, fb_lvalue
);
4839 if (ret
== GS_ERROR
)
4843 object
= TREE_OPERAND (*expr_p
, 0);
4844 ctor
= TREE_OPERAND (*expr_p
, 1)
4845 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
4846 type
= TREE_TYPE (ctor
);
4847 elts
= CONSTRUCTOR_ELTS (ctor
);
4850 switch (TREE_CODE (type
))
4854 case QUAL_UNION_TYPE
:
4857 struct gimplify_init_ctor_preeval_data preeval_data
;
4858 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
4859 HOST_WIDE_INT num_unique_nonzero_elements
;
4860 bool cleared
, complete_p
, valid_const_initializer
;
4861 /* Use readonly data for initializers of this or smaller size
4862 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4864 const HOST_WIDE_INT min_unique_size
= 64;
4865 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4866 is smaller than this, use readonly data. */
4867 const int unique_nonzero_ratio
= 8;
4869 /* Aggregate types must lower constructors to initialization of
4870 individual elements. The exception is that a CONSTRUCTOR node
4871 with no elements indicates zero-initialization of the whole. */
4872 if (vec_safe_is_empty (elts
))
4874 if (notify_temp_creation
)
4879 /* Fetch information about the constructor to direct later processing.
4880 We might want to make static versions of it in various cases, and
4881 can only do so if it known to be a valid constant initializer. */
4882 valid_const_initializer
4883 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
4884 &num_unique_nonzero_elements
,
4885 &num_ctor_elements
, &complete_p
);
4887 /* If a const aggregate variable is being initialized, then it
4888 should never be a lose to promote the variable to be static. */
4889 if (valid_const_initializer
4890 && num_nonzero_elements
> 1
4891 && TREE_READONLY (object
)
4893 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
))
4894 /* For ctors that have many repeated nonzero elements
4895 represented through RANGE_EXPRs, prefer initializing
4896 those through runtime loops over copies of large amounts
4897 of data from readonly data section. */
4898 && (num_unique_nonzero_elements
4899 > num_nonzero_elements
/ unique_nonzero_ratio
4900 || ((unsigned HOST_WIDE_INT
) int_size_in_bytes (type
)
4901 <= (unsigned HOST_WIDE_INT
) min_unique_size
)))
4903 if (notify_temp_creation
)
4905 DECL_INITIAL (object
) = ctor
;
4906 TREE_STATIC (object
) = 1;
4907 if (!DECL_NAME (object
))
4908 DECL_NAME (object
) = create_tmp_var_name ("C");
4909 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
4911 /* ??? C++ doesn't automatically append a .<number> to the
4912 assembler name, and even when it does, it looks at FE private
4913 data structures to figure out what that number should be,
4914 which are not set for this variable. I suppose this is
4915 important for local statics for inline functions, which aren't
4916 "local" in the object file sense. So in order to get a unique
4917 TU-local symbol, we must invoke the lhd version now. */
4918 lhd_set_decl_assembler_name (object
);
4920 *expr_p
= NULL_TREE
;
4924 /* If there are "lots" of initialized elements, even discounting
4925 those that are not address constants (and thus *must* be
4926 computed at runtime), then partition the constructor into
4927 constant and non-constant parts. Block copy the constant
4928 parts in, then generate code for the non-constant parts. */
4929 /* TODO. There's code in cp/typeck.c to do this. */
4931 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
4932 /* store_constructor will ignore the clearing of variable-sized
4933 objects. Initializers for such objects must explicitly set
4934 every field that needs to be set. */
4936 else if (!complete_p
)
4937 /* If the constructor isn't complete, clear the whole object
4938 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4940 ??? This ought not to be needed. For any element not present
4941 in the initializer, we should simply set them to zero. Except
4942 we'd need to *find* the elements that are not present, and that
4943 requires trickery to avoid quadratic compile-time behavior in
4944 large cases or excessive memory use in small cases. */
4945 cleared
= !CONSTRUCTOR_NO_CLEARING (ctor
);
4946 else if (num_ctor_elements
- num_nonzero_elements
4947 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
4948 && num_nonzero_elements
< num_ctor_elements
/ 4)
4949 /* If there are "lots" of zeros, it's more efficient to clear
4950 the memory and then set the nonzero elements. */
4955 /* If there are "lots" of initialized elements, and all of them
4956 are valid address constants, then the entire initializer can
4957 be dropped to memory, and then memcpy'd out. Don't do this
4958 for sparse arrays, though, as it's more efficient to follow
4959 the standard CONSTRUCTOR behavior of memset followed by
4960 individual element initialization. Also don't do this for small
4961 all-zero initializers (which aren't big enough to merit
4962 clearing), and don't try to make bitwise copies of
4963 TREE_ADDRESSABLE types. */
4965 if (valid_const_initializer
4966 && !(cleared
|| num_nonzero_elements
== 0)
4967 && !TREE_ADDRESSABLE (type
))
4969 HOST_WIDE_INT size
= int_size_in_bytes (type
);
4972 /* ??? We can still get unbounded array types, at least
4973 from the C++ front end. This seems wrong, but attempt
4974 to work around it for now. */
4977 size
= int_size_in_bytes (TREE_TYPE (object
));
4979 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
4982 /* Find the maximum alignment we can assume for the object. */
4983 /* ??? Make use of DECL_OFFSET_ALIGN. */
4984 if (DECL_P (object
))
4985 align
= DECL_ALIGN (object
);
4987 align
= TYPE_ALIGN (type
);
4989 /* Do a block move either if the size is so small as to make
4990 each individual move a sub-unit move on average, or if it
4991 is so large as to make individual moves inefficient. */
4993 && num_nonzero_elements
> 1
4994 /* For ctors that have many repeated nonzero elements
4995 represented through RANGE_EXPRs, prefer initializing
4996 those through runtime loops over copies of large amounts
4997 of data from readonly data section. */
4998 && (num_unique_nonzero_elements
4999 > num_nonzero_elements
/ unique_nonzero_ratio
5000 || size
<= min_unique_size
)
5001 && (size
< num_nonzero_elements
5002 || !can_move_by_pieces (size
, align
)))
5004 if (notify_temp_creation
)
5007 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
5008 ctor
= tree_output_constant_def (ctor
);
5009 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
5010 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
5011 TREE_OPERAND (*expr_p
, 1) = ctor
;
5013 /* This is no longer an assignment of a CONSTRUCTOR, but
5014 we still may have processing to do on the LHS. So
5015 pretend we didn't do anything here to let that happen. */
5016 return GS_UNHANDLED
;
5020 /* If the target is volatile, we have non-zero elements and more than
5021 one field to assign, initialize the target from a temporary. */
5022 if (TREE_THIS_VOLATILE (object
)
5023 && !TREE_ADDRESSABLE (type
)
5024 && (num_nonzero_elements
> 0 || !cleared
)
5025 && vec_safe_length (elts
) > 1)
5027 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
5028 TREE_OPERAND (*expr_p
, 0) = temp
;
5029 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
5031 build2 (MODIFY_EXPR
, void_type_node
,
5036 if (notify_temp_creation
)
5039 /* If there are nonzero elements and if needed, pre-evaluate to capture
5040 elements overlapping with the lhs into temporaries. We must do this
5041 before clearing to fetch the values before they are zeroed-out. */
5042 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
5044 preeval_data
.lhs_base_decl
= get_base_address (object
);
5045 if (!DECL_P (preeval_data
.lhs_base_decl
))
5046 preeval_data
.lhs_base_decl
= NULL
;
5047 preeval_data
.lhs_alias_set
= get_alias_set (object
);
5049 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
5050 pre_p
, post_p
, &preeval_data
);
5053 bool ctor_has_side_effects_p
5054 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
5058 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5059 Note that we still have to gimplify, in order to handle the
5060 case of variable sized types. Avoid shared tree structures. */
5061 CONSTRUCTOR_ELTS (ctor
) = NULL
;
5062 TREE_SIDE_EFFECTS (ctor
) = 0;
5063 object
= unshare_expr (object
);
5064 gimplify_stmt (expr_p
, pre_p
);
5067 /* If we have not block cleared the object, or if there are nonzero
5068 elements in the constructor, or if the constructor has side effects,
5069 add assignments to the individual scalar fields of the object. */
5071 || num_nonzero_elements
> 0
5072 || ctor_has_side_effects_p
)
5073 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
5075 *expr_p
= NULL_TREE
;
5083 if (notify_temp_creation
)
5086 /* Extract the real and imaginary parts out of the ctor. */
5087 gcc_assert (elts
->length () == 2);
5088 r
= (*elts
)[0].value
;
5089 i
= (*elts
)[1].value
;
5090 if (r
== NULL
|| i
== NULL
)
5092 tree zero
= build_zero_cst (TREE_TYPE (type
));
5099 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5100 represent creation of a complex value. */
5101 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
5103 ctor
= build_complex (type
, r
, i
);
5104 TREE_OPERAND (*expr_p
, 1) = ctor
;
5108 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
5109 TREE_OPERAND (*expr_p
, 1) = ctor
;
5110 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
5113 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
5121 unsigned HOST_WIDE_INT ix
;
5122 constructor_elt
*ce
;
5124 if (notify_temp_creation
)
5127 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5128 if (TREE_CONSTANT (ctor
))
5130 bool constant_p
= true;
5133 /* Even when ctor is constant, it might contain non-*_CST
5134 elements, such as addresses or trapping values like
5135 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5136 in VECTOR_CST nodes. */
5137 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
5138 if (!CONSTANT_CLASS_P (value
))
5146 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
5150 TREE_CONSTANT (ctor
) = 0;
5153 /* Vector types use CONSTRUCTOR all the way through gimple
5154 compilation as a general initializer. */
5155 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
5157 enum gimplify_status tret
;
5158 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
5160 if (tret
== GS_ERROR
)
5162 else if (TREE_STATIC (ctor
)
5163 && !initializer_constant_valid_p (ce
->value
,
5164 TREE_TYPE (ce
->value
)))
5165 TREE_STATIC (ctor
) = 0;
5167 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
5168 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
5173 /* So how did we get a CONSTRUCTOR for a scalar type? */
5177 if (ret
== GS_ERROR
)
5179 /* If we have gimplified both sides of the initializer but have
5180 not emitted an assignment, do so now. */
5183 tree lhs
= TREE_OPERAND (*expr_p
, 0);
5184 tree rhs
= TREE_OPERAND (*expr_p
, 1);
5185 if (want_value
&& object
== lhs
)
5186 lhs
= unshare_expr (lhs
);
5187 gassign
*init
= gimple_build_assign (lhs
, rhs
);
5188 gimplify_seq_add_stmt (pre_p
, init
);
5202 /* Given a pointer value OP0, return a simplified version of an
5203 indirection through OP0, or NULL_TREE if no simplification is
5204 possible. This may only be applied to a rhs of an expression.
5205 Note that the resulting type may be different from the type pointed
5206 to in the sense that it is still compatible from the langhooks
5210 gimple_fold_indirect_ref_rhs (tree t
)
5212 return gimple_fold_indirect_ref (t
);
/* NOTE(review): the text of this function has been garbled by whatever
   extracted this file -- each original source line is split across
   several physical lines (with the original line number fused onto the
   first fragment) and many lines (braces, returns, case labels, else
   arms, the do/while loop frame) are missing entirely.  The code below
   is preserved byte for byte in its garbled state; do not edit it in
   this form.  Restore the function from the original gimplify.c before
   making changes.  */
5215 /* Subroutine of gimplify_modify_expr to do simplifications of
5216 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5217 something changes. */
5219 static enum gimplify_status
5220 gimplify_modify_expr_rhs (tree
*expr_p
, tree
*from_p
, tree
*to_p
,
5221 gimple_seq
*pre_p
, gimple_seq
*post_p
,
5224 enum gimplify_status ret
= GS_UNHANDLED
;
5230 switch (TREE_CODE (*from_p
))
5233 /* If we're assigning from a read-only variable initialized with
5234 a constructor, do the direct assignment from the constructor,
5235 but only if neither source nor target are volatile since this
5236 latter assignment might end up being done on a per-field basis. */
5237 if (DECL_INITIAL (*from_p
)
5238 && TREE_READONLY (*from_p
)
5239 && !TREE_THIS_VOLATILE (*from_p
)
5240 && !TREE_THIS_VOLATILE (*to_p
)
5241 && TREE_CODE (DECL_INITIAL (*from_p
)) == CONSTRUCTOR
)
5243 tree old_from
= *from_p
;
5244 enum gimplify_status subret
;
5246 /* Move the constructor into the RHS. */
5247 *from_p
= unshare_expr (DECL_INITIAL (*from_p
));
5249 /* Let's see if gimplify_init_constructor will need to put
5251 subret
= gimplify_init_constructor (expr_p
, NULL
, NULL
,
5253 if (subret
== GS_ERROR
)
5255 /* If so, revert the change. */
5267 /* If we have code like
5271 where the type of "x" is a (possibly cv-qualified variant
5272 of "A"), treat the entire expression as identical to "x".
5273 This kind of code arises in C++ when an object is bound
5274 to a const reference, and if "x" is a TARGET_EXPR we want
5275 to take advantage of the optimization below. */
5276 bool volatile_p
= TREE_THIS_VOLATILE (*from_p
);
5277 tree t
= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p
, 0));
5280 if (TREE_THIS_VOLATILE (t
) != volatile_p
)
5283 t
= build_simple_mem_ref_loc (EXPR_LOCATION (*from_p
),
5284 build_fold_addr_expr (t
));
5285 if (REFERENCE_CLASS_P (t
))
5286 TREE_THIS_VOLATILE (t
) = volatile_p
;
5297 /* If we are initializing something from a TARGET_EXPR, strip the
5298 TARGET_EXPR and initialize it directly, if possible. This can't
5299 be done if the initializer is void, since that implies that the
5300 temporary is set in some non-trivial way.
5302 ??? What about code that pulls out the temp and uses it
5303 elsewhere? I think that such code never uses the TARGET_EXPR as
5304 an initializer. If I'm wrong, we'll die because the temp won't
5305 have any RTL. In that case, I guess we'll need to replace
5306 references somehow. */
5307 tree init
= TARGET_EXPR_INITIAL (*from_p
);
5310 && (TREE_CODE (*expr_p
) != MODIFY_EXPR
5311 || !TARGET_EXPR_NO_ELIDE (*from_p
))
5312 && !VOID_TYPE_P (TREE_TYPE (init
)))
5322 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5324 gimplify_compound_expr (from_p
, pre_p
, true);
5330 /* If we already made some changes, let the front end have a
5331 crack at this before we break it down. */
5332 if (ret
!= GS_UNHANDLED
)
5334 /* If we're initializing from a CONSTRUCTOR, break this into
5335 individual MODIFY_EXPRs. */
5336 return gimplify_init_constructor (expr_p
, pre_p
, post_p
, want_value
,
5340 /* If we're assigning to a non-register type, push the assignment
5341 down into the branches. This is mandatory for ADDRESSABLE types,
5342 since we cannot generate temporaries for such, but it saves a
5343 copy in other cases as well. */
5344 if (!is_gimple_reg_type (TREE_TYPE (*from_p
)))
5346 /* This code should mirror the code in gimplify_cond_expr. */
5347 enum tree_code code
= TREE_CODE (*expr_p
);
5348 tree cond
= *from_p
;
5349 tree result
= *to_p
;
5351 ret
= gimplify_expr (&result
, pre_p
, post_p
,
5352 is_gimple_lvalue
, fb_lvalue
);
5353 if (ret
!= GS_ERROR
)
5356 /* If we are going to write RESULT more than once, clear
5357 TREE_READONLY flag, otherwise we might incorrectly promote
5358 the variable to static const and initialize it at compile
5359 time in one of the branches. */
5361 && TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
5362 && TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5363 TREE_READONLY (result
) = 0;
5364 if (TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
)
5365 TREE_OPERAND (cond
, 1)
5366 = build2 (code
, void_type_node
, result
,
5367 TREE_OPERAND (cond
, 1));
5368 if (TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5369 TREE_OPERAND (cond
, 2)
5370 = build2 (code
, void_type_node
, unshare_expr (result
),
5371 TREE_OPERAND (cond
, 2));
5373 TREE_TYPE (cond
) = void_type_node
;
5374 recalculate_side_effects (cond
);
5378 gimplify_and_add (cond
, pre_p
);
5379 *expr_p
= unshare_expr (result
);
5388 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5389 return slot so that we don't generate a temporary. */
5390 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p
)
5391 && aggregate_value_p (*from_p
, *from_p
))
5395 if (!(rhs_predicate_for (*to_p
))(*from_p
))
5396 /* If we need a temporary, *to_p isn't accurate. */
5398 /* It's OK to use the return slot directly unless it's an NRV. */
5399 else if (TREE_CODE (*to_p
) == RESULT_DECL
5400 && DECL_NAME (*to_p
) == NULL_TREE
5401 && needs_to_live_in_memory (*to_p
))
5403 else if (is_gimple_reg_type (TREE_TYPE (*to_p
))
5404 || (DECL_P (*to_p
) && DECL_REGISTER (*to_p
)))
5405 /* Don't force regs into memory. */
5407 else if (TREE_CODE (*expr_p
) == INIT_EXPR
)
5408 /* It's OK to use the target directly if it's being
5411 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p
)))
5413 /* Always use the target and thus RSO for variable-sized types.
5414 GIMPLE cannot deal with a variable-sized assignment
5415 embedded in a call statement. */
5417 else if (TREE_CODE (*to_p
) != SSA_NAME
5418 && (!is_gimple_variable (*to_p
)
5419 || needs_to_live_in_memory (*to_p
)))
5420 /* Don't use the original target if it's already addressable;
5421 if its address escapes, and the called function uses the
5422 NRV optimization, a conforming program could see *to_p
5423 change before the called function returns; see c++/19317.
5424 When optimizing, the return_slot pass marks more functions
5425 as safe after we have escape info. */
5432 CALL_EXPR_RETURN_SLOT_OPT (*from_p
) = 1;
5433 mark_addressable (*to_p
);
5438 case WITH_SIZE_EXPR
:
5439 /* Likewise for calls that return an aggregate of non-constant size,
5440 since we would not be able to generate a temporary at all. */
5441 if (TREE_CODE (TREE_OPERAND (*from_p
, 0)) == CALL_EXPR
)
5443 *from_p
= TREE_OPERAND (*from_p
, 0);
5444 /* We don't change ret in this case because the
5445 WITH_SIZE_EXPR might have been added in
5446 gimplify_modify_expr, so returning GS_OK would lead to an
5452 /* If we're initializing from a container, push the initialization
5454 case CLEANUP_POINT_EXPR
:
5456 case STATEMENT_LIST
:
5458 tree wrap
= *from_p
;
5461 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_min_lval
,
5463 if (ret
!= GS_ERROR
)
5466 t
= voidify_wrapper_expr (wrap
, *expr_p
);
5467 gcc_assert (t
== *expr_p
);
5471 gimplify_and_add (wrap
, pre_p
);
5472 *expr_p
= unshare_expr (*to_p
);
5479 case COMPOUND_LITERAL_EXPR
:
5481 tree complit
= TREE_OPERAND (*expr_p
, 1);
5482 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (complit
);
5483 tree decl
= DECL_EXPR_DECL (decl_s
);
5484 tree init
= DECL_INITIAL (decl
);
5486 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5487 into struct T x = { 0, 1, 2 } if the address of the
5488 compound literal has never been taken. */
5489 if (!TREE_ADDRESSABLE (complit
)
5490 && !TREE_ADDRESSABLE (decl
)
5493 *expr_p
= copy_node (*expr_p
);
5494 TREE_OPERAND (*expr_p
, 1) = init
;
5509 /* Return true if T looks like a valid GIMPLE statement. */
5512 is_gimple_stmt (tree t
)
5514 const enum tree_code code
= TREE_CODE (t
);
5519 /* The only valid NOP_EXPR is the empty statement. */
5520 return IS_EMPTY_STMT (t
);
5524 /* These are only valid if they're void. */
5525 return TREE_TYPE (t
) == NULL
|| VOID_TYPE_P (TREE_TYPE (t
));
5531 case CASE_LABEL_EXPR
:
5532 case TRY_CATCH_EXPR
:
5533 case TRY_FINALLY_EXPR
:
5534 case EH_FILTER_EXPR
:
5537 case STATEMENT_LIST
:
5541 case OACC_HOST_DATA
:
5544 case OACC_ENTER_DATA
:
5545 case OACC_EXIT_DATA
:
5550 case OMP_DISTRIBUTE
:
5563 case OMP_TARGET_DATA
:
5564 case OMP_TARGET_UPDATE
:
5565 case OMP_TARGET_ENTER_DATA
:
5566 case OMP_TARGET_EXIT_DATA
:
5569 /* These are always void. */
5575 /* These are valid regardless of their type. */
5584 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5585 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5586 DECL_GIMPLE_REG_P set.
5588 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5589 other, unmodified part of the complex object just before the total store.
5590 As a consequence, if the object is still uninitialized, an undefined value
5591 will be loaded into a register, which may result in a spurious exception
5592 if the register is floating-point and the value happens to be a signaling
5593 NaN for example. Then the fully-fledged complex operations lowering pass
5594 followed by a DCE pass are necessary in order to fix things up. */
5596 static enum gimplify_status
5597 gimplify_modify_expr_complex_part (tree
*expr_p
, gimple_seq
*pre_p
,
5600 enum tree_code code
, ocode
;
5601 tree lhs
, rhs
, new_rhs
, other
, realpart
, imagpart
;
5603 lhs
= TREE_OPERAND (*expr_p
, 0);
5604 rhs
= TREE_OPERAND (*expr_p
, 1);
5605 code
= TREE_CODE (lhs
);
5606 lhs
= TREE_OPERAND (lhs
, 0);
5608 ocode
= code
== REALPART_EXPR
? IMAGPART_EXPR
: REALPART_EXPR
;
5609 other
= build1 (ocode
, TREE_TYPE (rhs
), lhs
);
5610 TREE_NO_WARNING (other
) = 1;
5611 other
= get_formal_tmp_var (other
, pre_p
);
5613 realpart
= code
== REALPART_EXPR
? rhs
: other
;
5614 imagpart
= code
== REALPART_EXPR
? other
: rhs
;
5616 if (TREE_CONSTANT (realpart
) && TREE_CONSTANT (imagpart
))
5617 new_rhs
= build_complex (TREE_TYPE (lhs
), realpart
, imagpart
);
5619 new_rhs
= build2 (COMPLEX_EXPR
, TREE_TYPE (lhs
), realpart
, imagpart
);
5621 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (lhs
, new_rhs
));
5622 *expr_p
= (want_value
) ? rhs
: NULL_TREE
;
5627 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5633 PRE_P points to the list where side effects that must happen before
5634 *EXPR_P should be stored.
5636 POST_P points to the list where side effects that must happen after
5637 *EXPR_P should be stored.
5639 WANT_VALUE is nonzero iff we want to use the value of this expression
5640 in another expression. */
5642 static enum gimplify_status
5643 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5646 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
5647 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
5648 enum gimplify_status ret
= GS_UNHANDLED
;
5650 location_t loc
= EXPR_LOCATION (*expr_p
);
5651 gimple_stmt_iterator gsi
;
5653 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
5654 || TREE_CODE (*expr_p
) == INIT_EXPR
);
5656 /* Trying to simplify a clobber using normal logic doesn't work,
5657 so handle it here. */
5658 if (TREE_CLOBBER_P (*from_p
))
5660 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5661 if (ret
== GS_ERROR
)
5663 gcc_assert (!want_value
);
5664 if (!VAR_P (*to_p
) && TREE_CODE (*to_p
) != MEM_REF
)
5666 tree addr
= get_initialized_tmp_var (build_fold_addr_expr (*to_p
),
5668 *to_p
= build_simple_mem_ref_loc (EXPR_LOCATION (*to_p
), addr
);
5670 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
5675 /* Insert pointer conversions required by the middle-end that are not
5676 required by the frontend. This fixes middle-end type checking for
5677 for example gcc.dg/redecl-6.c. */
5678 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
5680 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
5681 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
5682 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
5685 /* See if any simplifications can be done based on what the RHS is. */
5686 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5688 if (ret
!= GS_UNHANDLED
)
5691 /* For zero sized types only gimplify the left hand side and right hand
5692 side as statements and throw away the assignment. Do this after
5693 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5695 if (zero_sized_type (TREE_TYPE (*from_p
))
5697 /* Don't do this for calls that return addressable types, expand_call
5698 relies on those having a lhs. */
5699 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p
))
5700 && TREE_CODE (*from_p
) == CALL_EXPR
))
5702 gimplify_stmt (from_p
, pre_p
);
5703 gimplify_stmt (to_p
, pre_p
);
5704 *expr_p
= NULL_TREE
;
5708 /* If the value being copied is of variable width, compute the length
5709 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5710 before gimplifying any of the operands so that we can resolve any
5711 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5712 the size of the expression to be copied, not of the destination, so
5713 that is what we must do here. */
5714 maybe_with_size_expr (from_p
);
5716 /* As a special case, we have to temporarily allow for assignments
5717 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5718 a toplevel statement, when gimplifying the GENERIC expression
5719 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5720 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5722 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5723 prevent gimplify_expr from trying to create a new temporary for
5724 foo's LHS, we tell it that it should only gimplify until it
5725 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5726 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5727 and all we need to do here is set 'a' to be its LHS. */
5729 /* Gimplify the RHS first for C++17 and bug 71104. */
5730 gimple_predicate initial_pred
= initial_rhs_predicate_for (*to_p
);
5731 ret
= gimplify_expr (from_p
, pre_p
, post_p
, initial_pred
, fb_rvalue
);
5732 if (ret
== GS_ERROR
)
5735 /* Then gimplify the LHS. */
5736 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5737 twice we have to make sure to gimplify into non-SSA as otherwise
5738 the abnormal edge added later will make those defs not dominate
5740 ??? Technically this applies only to the registers used in the
5741 resulting non-register *TO_P. */
5742 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
5744 && TREE_CODE (*from_p
) == CALL_EXPR
5745 && call_expr_flags (*from_p
) & ECF_RETURNS_TWICE
)
5746 gimplify_ctxp
->into_ssa
= false;
5747 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5748 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
5749 if (ret
== GS_ERROR
)
5752 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5753 guess for the predicate was wrong. */
5754 gimple_predicate final_pred
= rhs_predicate_for (*to_p
);
5755 if (final_pred
!= initial_pred
)
5757 ret
= gimplify_expr (from_p
, pre_p
, post_p
, final_pred
, fb_rvalue
);
5758 if (ret
== GS_ERROR
)
5762 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5763 size as argument to the call. */
5764 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5766 tree call
= TREE_OPERAND (*from_p
, 0);
5767 tree vlasize
= TREE_OPERAND (*from_p
, 1);
5769 if (TREE_CODE (call
) == CALL_EXPR
5770 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
5772 int nargs
= call_expr_nargs (call
);
5773 tree type
= TREE_TYPE (call
);
5774 tree ap
= CALL_EXPR_ARG (call
, 0);
5775 tree tag
= CALL_EXPR_ARG (call
, 1);
5776 tree aptag
= CALL_EXPR_ARG (call
, 2);
5777 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
5781 TREE_OPERAND (*from_p
, 0) = newcall
;
5785 /* Now see if the above changed *from_p to something we handle specially. */
5786 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5788 if (ret
!= GS_UNHANDLED
)
5791 /* If we've got a variable sized assignment between two lvalues (i.e. does
5792 not involve a call), then we can make things a bit more straightforward
5793 by converting the assignment to memcpy or memset. */
5794 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5796 tree from
= TREE_OPERAND (*from_p
, 0);
5797 tree size
= TREE_OPERAND (*from_p
, 1);
5799 if (TREE_CODE (from
) == CONSTRUCTOR
)
5800 return gimplify_modify_expr_to_memset (expr_p
, size
, want_value
, pre_p
);
5802 if (is_gimple_addressable (from
))
5805 return gimplify_modify_expr_to_memcpy (expr_p
, size
, want_value
,
5810 /* Transform partial stores to non-addressable complex variables into
5811 total stores. This allows us to use real instead of virtual operands
5812 for these variables, which improves optimization. */
5813 if ((TREE_CODE (*to_p
) == REALPART_EXPR
5814 || TREE_CODE (*to_p
) == IMAGPART_EXPR
)
5815 && is_gimple_reg (TREE_OPERAND (*to_p
, 0)))
5816 return gimplify_modify_expr_complex_part (expr_p
, pre_p
, want_value
);
5818 /* Try to alleviate the effects of the gimplification creating artificial
5819 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5820 make sure not to create DECL_DEBUG_EXPR links across functions. */
5821 if (!gimplify_ctxp
->into_ssa
5823 && DECL_IGNORED_P (*from_p
)
5825 && !DECL_IGNORED_P (*to_p
)
5826 && decl_function_context (*to_p
) == current_function_decl
5827 && decl_function_context (*from_p
) == current_function_decl
)
5829 if (!DECL_NAME (*from_p
) && DECL_NAME (*to_p
))
5831 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p
)));
5832 DECL_HAS_DEBUG_EXPR_P (*from_p
) = 1;
5833 SET_DECL_DEBUG_EXPR (*from_p
, *to_p
);
5836 if (want_value
&& TREE_THIS_VOLATILE (*to_p
))
5837 *from_p
= get_initialized_tmp_var (*from_p
, pre_p
, post_p
);
5839 if (TREE_CODE (*from_p
) == CALL_EXPR
)
5841 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5842 instead of a GIMPLE_ASSIGN. */
5844 if (CALL_EXPR_FN (*from_p
) == NULL_TREE
)
5846 /* Gimplify internal functions created in the FEs. */
5847 int nargs
= call_expr_nargs (*from_p
), i
;
5848 enum internal_fn ifn
= CALL_EXPR_IFN (*from_p
);
5849 auto_vec
<tree
> vargs (nargs
);
5851 for (i
= 0; i
< nargs
; i
++)
5853 gimplify_arg (&CALL_EXPR_ARG (*from_p
, i
), pre_p
,
5854 EXPR_LOCATION (*from_p
));
5855 vargs
.quick_push (CALL_EXPR_ARG (*from_p
, i
));
5857 call_stmt
= gimple_build_call_internal_vec (ifn
, vargs
);
5858 gimple_call_set_nothrow (call_stmt
, TREE_NOTHROW (*from_p
));
5859 gimple_set_location (call_stmt
, EXPR_LOCATION (*expr_p
));
5863 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*from_p
));
5864 CALL_EXPR_FN (*from_p
) = TREE_OPERAND (CALL_EXPR_FN (*from_p
), 0);
5865 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p
));
5866 tree fndecl
= get_callee_fndecl (*from_p
);
5868 && fndecl_built_in_p (fndecl
, BUILT_IN_EXPECT
)
5869 && call_expr_nargs (*from_p
) == 3)
5870 call_stmt
= gimple_build_call_internal (IFN_BUILTIN_EXPECT
, 3,
5871 CALL_EXPR_ARG (*from_p
, 0),
5872 CALL_EXPR_ARG (*from_p
, 1),
5873 CALL_EXPR_ARG (*from_p
, 2));
5876 call_stmt
= gimple_build_call_from_tree (*from_p
, fnptrtype
);
5879 notice_special_calls (call_stmt
);
5880 if (!gimple_call_noreturn_p (call_stmt
) || !should_remove_lhs_p (*to_p
))
5881 gimple_call_set_lhs (call_stmt
, *to_p
);
5882 else if (TREE_CODE (*to_p
) == SSA_NAME
)
5883 /* The above is somewhat premature, avoid ICEing later for a
5884 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5885 ??? This doesn't make it a default-def. */
5886 SSA_NAME_DEF_STMT (*to_p
) = gimple_build_nop ();
5892 assign
= gimple_build_assign (*to_p
, *from_p
);
5893 gimple_set_location (assign
, EXPR_LOCATION (*expr_p
));
5894 if (COMPARISON_CLASS_P (*from_p
))
5895 gimple_set_no_warning (assign
, TREE_NO_WARNING (*from_p
));
5898 if (gimplify_ctxp
->into_ssa
&& is_gimple_reg (*to_p
))
5900 /* We should have got an SSA name from the start. */
5901 gcc_assert (TREE_CODE (*to_p
) == SSA_NAME
5902 || ! gimple_in_ssa_p (cfun
));
5905 gimplify_seq_add_stmt (pre_p
, assign
);
5906 gsi
= gsi_last (*pre_p
);
5907 maybe_fold_stmt (&gsi
);
5911 *expr_p
= TREE_THIS_VOLATILE (*to_p
) ? *from_p
: unshare_expr (*to_p
);
5920 /* Gimplify a comparison between two variable-sized objects. Do this
5921 with a call to BUILT_IN_MEMCMP. */
5923 static enum gimplify_status
5924 gimplify_variable_sized_compare (tree
*expr_p
)
5926 location_t loc
= EXPR_LOCATION (*expr_p
);
5927 tree op0
= TREE_OPERAND (*expr_p
, 0);
5928 tree op1
= TREE_OPERAND (*expr_p
, 1);
5929 tree t
, arg
, dest
, src
, expr
;
5931 arg
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
5932 arg
= unshare_expr (arg
);
5933 arg
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg
, op0
);
5934 src
= build_fold_addr_expr_loc (loc
, op1
);
5935 dest
= build_fold_addr_expr_loc (loc
, op0
);
5936 t
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
5937 t
= build_call_expr_loc (loc
, t
, 3, dest
, src
, arg
);
5940 = build2 (TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), t
, integer_zero_node
);
5941 SET_EXPR_LOCATION (expr
, loc
);
5947 /* Gimplify a comparison between two aggregate objects of integral scalar
5948 mode as a comparison between the bitwise equivalent scalar values. */
5950 static enum gimplify_status
5951 gimplify_scalar_mode_aggregate_compare (tree
*expr_p
)
5953 location_t loc
= EXPR_LOCATION (*expr_p
);
5954 tree op0
= TREE_OPERAND (*expr_p
, 0);
5955 tree op1
= TREE_OPERAND (*expr_p
, 1);
5957 tree type
= TREE_TYPE (op0
);
5958 tree scalar_type
= lang_hooks
.types
.type_for_mode (TYPE_MODE (type
), 1);
5960 op0
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op0
);
5961 op1
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op1
);
5964 = fold_build2_loc (loc
, TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), op0
, op1
);
5969 /* Gimplify an expression sequence. This function gimplifies each
5970 expression and rewrites the original expression with the last
5971 expression of the sequence in GIMPLE form.
5973 PRE_P points to the list where the side effects for all the
5974 expressions in the sequence will be emitted.
5976 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5978 static enum gimplify_status
5979 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
5985 tree
*sub_p
= &TREE_OPERAND (t
, 0);
5987 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
5988 gimplify_compound_expr (sub_p
, pre_p
, false);
5990 gimplify_stmt (sub_p
, pre_p
);
5992 t
= TREE_OPERAND (t
, 1);
5994 while (TREE_CODE (t
) == COMPOUND_EXPR
);
6001 gimplify_stmt (expr_p
, pre_p
);
6006 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6007 gimplify. After gimplification, EXPR_P will point to a new temporary
6008 that holds the original value of the SAVE_EXPR node.
6010 PRE_P points to the list where side effects that must happen before
6011 *EXPR_P should be stored. */
6013 static enum gimplify_status
6014 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6016 enum gimplify_status ret
= GS_ALL_DONE
;
6019 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
6020 val
= TREE_OPERAND (*expr_p
, 0);
6022 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6023 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
6025 /* The operand may be a void-valued expression. It is
6026 being executed only for its side-effects. */
6027 if (TREE_TYPE (val
) == void_type_node
)
6029 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
6030 is_gimple_stmt
, fb_none
);
6034 /* The temporary may not be an SSA name as later abnormal and EH
6035 control flow may invalidate use/def domination. When in SSA
6036 form then assume there are no such issues and SAVE_EXPRs only
6037 appear via GENERIC foldings. */
6038 val
= get_initialized_tmp_var (val
, pre_p
, post_p
,
6039 gimple_in_ssa_p (cfun
));
6041 TREE_OPERAND (*expr_p
, 0) = val
;
6042 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
6050 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6057 PRE_P points to the list where side effects that must happen before
6058 *EXPR_P should be stored.
6060 POST_P points to the list where side effects that must happen after
6061 *EXPR_P should be stored. */
6063 static enum gimplify_status
6064 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6066 tree expr
= *expr_p
;
6067 tree op0
= TREE_OPERAND (expr
, 0);
6068 enum gimplify_status ret
;
6069 location_t loc
= EXPR_LOCATION (*expr_p
);
6071 switch (TREE_CODE (op0
))
6075 /* Check if we are dealing with an expression of the form '&*ptr'.
6076 While the front end folds away '&*ptr' into 'ptr', these
6077 expressions may be generated internally by the compiler (e.g.,
6078 builtins like __builtin_va_end). */
6079 /* Caution: the silent array decomposition semantics we allow for
6080 ADDR_EXPR means we can't always discard the pair. */
6081 /* Gimplification of the ADDR_EXPR operand may drop
6082 cv-qualification conversions, so make sure we add them if
6085 tree op00
= TREE_OPERAND (op0
, 0);
6086 tree t_expr
= TREE_TYPE (expr
);
6087 tree t_op00
= TREE_TYPE (op00
);
6089 if (!useless_type_conversion_p (t_expr
, t_op00
))
6090 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
6096 case VIEW_CONVERT_EXPR
:
6097 /* Take the address of our operand and then convert it to the type of
6100 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6101 all clear. The impact of this transformation is even less clear. */
6103 /* If the operand is a useless conversion, look through it. Doing so
6104 guarantees that the ADDR_EXPR and its operand will remain of the
6106 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
6107 op0
= TREE_OPERAND (op0
, 0);
6109 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
6110 build_fold_addr_expr_loc (loc
,
6111 TREE_OPERAND (op0
, 0)));
6116 if (integer_zerop (TREE_OPERAND (op0
, 1)))
6117 goto do_indirect_ref
;
6122 /* If we see a call to a declared builtin or see its address
6123 being taken (we can unify those cases here) then we can mark
6124 the builtin for implicit generation by GCC. */
6125 if (TREE_CODE (op0
) == FUNCTION_DECL
6126 && fndecl_built_in_p (op0
, BUILT_IN_NORMAL
)
6127 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
6128 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
6130 /* We use fb_either here because the C frontend sometimes takes
6131 the address of a call that returns a struct; see
6132 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6133 the implied temporary explicit. */
6135 /* Make the operand addressable. */
6136 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
6137 is_gimple_addressable
, fb_either
);
6138 if (ret
== GS_ERROR
)
6141 /* Then mark it. Beware that it may not be possible to do so directly
6142 if a temporary has been created by the gimplification. */
6143 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
6145 op0
= TREE_OPERAND (expr
, 0);
6147 /* For various reasons, the gimplification of the expression
6148 may have made a new INDIRECT_REF. */
6149 if (TREE_CODE (op0
) == INDIRECT_REF
)
6150 goto do_indirect_ref
;
6152 mark_addressable (TREE_OPERAND (expr
, 0));
6154 /* The FEs may end up building ADDR_EXPRs early on a decl with
6155 an incomplete type. Re-build ADDR_EXPRs in canonical form
6157 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
6158 *expr_p
= build_fold_addr_expr (op0
);
6160 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6161 recompute_tree_invariant_for_addr_expr (*expr_p
);
6163 /* If we re-built the ADDR_EXPR add a conversion to the original type
6165 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
6166 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
6174 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6175 value; output operands should be a gimple lvalue. */
6177 static enum gimplify_status
6178 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6182 const char **oconstraints
;
6185 const char *constraint
;
6186 bool allows_mem
, allows_reg
, is_inout
;
6187 enum gimplify_status ret
, tret
;
6189 vec
<tree
, va_gc
> *inputs
;
6190 vec
<tree
, va_gc
> *outputs
;
6191 vec
<tree
, va_gc
> *clobbers
;
6192 vec
<tree
, va_gc
> *labels
;
6196 noutputs
= list_length (ASM_OUTPUTS (expr
));
6197 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
6205 link_next
= NULL_TREE
;
6206 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
6209 size_t constraint_len
;
6211 link_next
= TREE_CHAIN (link
);
6215 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6216 constraint_len
= strlen (constraint
);
6217 if (constraint_len
== 0)
6220 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
6221 &allows_mem
, &allows_reg
, &is_inout
);
6228 /* If we can't make copies, we can only accept memory. */
6229 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link
))))
6235 error ("impossible constraint in %<asm%>");
6236 error ("non-memory output %d must stay in memory", i
);
6241 if (!allows_reg
&& allows_mem
)
6242 mark_addressable (TREE_VALUE (link
));
6244 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6245 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6246 fb_lvalue
| fb_mayfail
);
6247 if (tret
== GS_ERROR
)
6249 error ("invalid lvalue in %<asm%> output %d", i
);
6253 /* If the constraint does not allow memory make sure we gimplify
6254 it to a register if it is not already but its base is. This
6255 happens for complex and vector components. */
6258 tree op
= TREE_VALUE (link
);
6259 if (! is_gimple_val (op
)
6260 && is_gimple_reg_type (TREE_TYPE (op
))
6261 && is_gimple_reg (get_base_address (op
)))
6263 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6267 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6268 tem
, unshare_expr (op
));
6269 gimplify_and_add (ass
, pre_p
);
6271 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6272 gimplify_and_add (ass
, post_p
);
6274 TREE_VALUE (link
) = tem
;
6279 vec_safe_push (outputs
, link
);
6280 TREE_CHAIN (link
) = NULL_TREE
;
6284 /* An input/output operand. To give the optimizers more
6285 flexibility, split it into separate input and output
6288 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6291 /* Turn the in/out constraint into an output constraint. */
6292 char *p
= xstrdup (constraint
);
6294 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
6296 /* And add a matching input constraint. */
6299 sprintf (buf
, "%u", i
);
6301 /* If there are multiple alternatives in the constraint,
6302 handle each of them individually. Those that allow register
6303 will be replaced with operand number, the others will stay
6305 if (strchr (p
, ',') != NULL
)
6307 size_t len
= 0, buflen
= strlen (buf
);
6308 char *beg
, *end
, *str
, *dst
;
6312 end
= strchr (beg
, ',');
6314 end
= strchr (beg
, '\0');
6315 if ((size_t) (end
- beg
) < buflen
)
6318 len
+= end
- beg
+ 1;
6325 str
= (char *) alloca (len
);
6326 for (beg
= p
+ 1, dst
= str
;;)
6329 bool mem_p
, reg_p
, inout_p
;
6331 end
= strchr (beg
, ',');
6336 parse_output_constraint (&tem
, i
, 0, 0,
6337 &mem_p
, ®_p
, &inout_p
);
6342 memcpy (dst
, buf
, buflen
);
6351 memcpy (dst
, beg
, len
);
6360 input
= build_string (dst
- str
, str
);
6363 input
= build_string (strlen (buf
), buf
);
6366 input
= build_string (constraint_len
- 1, constraint
+ 1);
6370 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
6371 unshare_expr (TREE_VALUE (link
)));
6372 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
6376 link_next
= NULL_TREE
;
6377 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
6379 link_next
= TREE_CHAIN (link
);
6380 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6381 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
6382 oconstraints
, &allows_mem
, &allows_reg
);
6384 /* If we can't make copies, we can only accept memory. */
6385 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link
))))
6391 error ("impossible constraint in %<asm%>");
6392 error ("non-memory input %d must stay in memory", i
);
6397 /* If the operand is a memory input, it should be an lvalue. */
6398 if (!allows_reg
&& allows_mem
)
6400 tree inputv
= TREE_VALUE (link
);
6401 STRIP_NOPS (inputv
);
6402 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
6403 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
6404 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
6405 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
6406 || TREE_CODE (inputv
) == MODIFY_EXPR
)
6407 TREE_VALUE (link
) = error_mark_node
;
6408 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6409 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
6410 if (tret
!= GS_ERROR
)
6412 /* Unlike output operands, memory inputs are not guaranteed
6413 to be lvalues by the FE, and while the expressions are
6414 marked addressable there, if it is e.g. a statement
6415 expression, temporaries in it might not end up being
6416 addressable. They might be already used in the IL and thus
6417 it is too late to make them addressable now though. */
6418 tree x
= TREE_VALUE (link
);
6419 while (handled_component_p (x
))
6420 x
= TREE_OPERAND (x
, 0);
6421 if (TREE_CODE (x
) == MEM_REF
6422 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
6423 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
6425 || TREE_CODE (x
) == PARM_DECL
6426 || TREE_CODE (x
) == RESULT_DECL
)
6427 && !TREE_ADDRESSABLE (x
)
6428 && is_gimple_reg (x
))
6430 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
6432 "memory input %d is not directly addressable",
6434 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
6437 mark_addressable (TREE_VALUE (link
));
6438 if (tret
== GS_ERROR
)
6440 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
6441 "memory input %d is not directly addressable", i
);
6447 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6448 is_gimple_asm_val
, fb_rvalue
);
6449 if (tret
== GS_ERROR
)
6453 TREE_CHAIN (link
) = NULL_TREE
;
6454 vec_safe_push (inputs
, link
);
6457 link_next
= NULL_TREE
;
6458 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
6460 link_next
= TREE_CHAIN (link
);
6461 TREE_CHAIN (link
) = NULL_TREE
;
6462 vec_safe_push (clobbers
, link
);
6465 link_next
= NULL_TREE
;
6466 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
6468 link_next
= TREE_CHAIN (link
);
6469 TREE_CHAIN (link
) = NULL_TREE
;
6470 vec_safe_push (labels
, link
);
6473 /* Do not add ASMs with errors to the gimple IL stream. */
6474 if (ret
!= GS_ERROR
)
6476 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
6477 inputs
, outputs
, clobbers
, labels
);
6479 gimple_asm_set_volatile (stmt
, ASM_VOLATILE_P (expr
) || noutputs
== 0);
6480 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
6481 gimple_asm_set_inline (stmt
, ASM_INLINE_P (expr
));
6483 gimplify_seq_add_stmt (pre_p
, stmt
);
6489 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6490 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6491 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6492 return to this function.
6494 FIXME should we complexify the prequeue handling instead? Or use flags
6495 for all the cleanups and let the optimizer tighten them up? The current
6496 code seems pretty fragile; it will break on a cleanup within any
6497 non-conditional nesting. But any such nesting would be broken, anyway;
6498 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6499 and continues out of it. We can do that at the RTL level, though, so
6500 having an optimizer to tighten up try/finally regions would be a Good
6503 static enum gimplify_status
6504 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
6506 gimple_stmt_iterator iter
;
6507 gimple_seq body_sequence
= NULL
;
6509 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
6511 /* We only care about the number of conditions between the innermost
6512 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6513 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6514 int old_conds
= gimplify_ctxp
->conditions
;
6515 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
6516 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
6517 gimplify_ctxp
->conditions
= 0;
6518 gimplify_ctxp
->conditional_cleanups
= NULL
;
6519 gimplify_ctxp
->in_cleanup_point_expr
= true;
6521 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
6523 gimplify_ctxp
->conditions
= old_conds
;
6524 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
6525 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
6527 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
6529 gimple
*wce
= gsi_stmt (iter
);
6531 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
6533 if (gsi_one_before_end_p (iter
))
6535 /* Note that gsi_insert_seq_before and gsi_remove do not
6536 scan operands, unlike some other sequence mutators. */
6537 if (!gimple_wce_cleanup_eh_only (wce
))
6538 gsi_insert_seq_before_without_update (&iter
,
6539 gimple_wce_cleanup (wce
),
6541 gsi_remove (&iter
, true);
6548 enum gimple_try_flags kind
;
6550 if (gimple_wce_cleanup_eh_only (wce
))
6551 kind
= GIMPLE_TRY_CATCH
;
6553 kind
= GIMPLE_TRY_FINALLY
;
6554 seq
= gsi_split_seq_after (iter
);
6556 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
6557 /* Do not use gsi_replace here, as it may scan operands.
6558 We want to do a simple structural modification only. */
6559 gsi_set_stmt (&iter
, gtry
);
6560 iter
= gsi_start (gtry
->eval
);
6567 gimplify_seq_add_seq (pre_p
, body_sequence
);
6580 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6581 is the cleanup action required. EH_ONLY is true if the cleanup should
6582 only be executed if an exception is thrown, not on normal exit.
6583 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6584 only valid for clobbers. */
6587 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
,
6588 bool force_uncond
= false)
6591 gimple_seq cleanup_stmts
= NULL
;
6593 /* Errors can result in improperly nested cleanups. Which results in
6594 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6598 if (gimple_conditional_context ())
6600 /* If we're in a conditional context, this is more complex. We only
6601 want to run the cleanup if we actually ran the initialization that
6602 necessitates it, but we want to run it after the end of the
6603 conditional context. So we wrap the try/finally around the
6604 condition and use a flag to determine whether or not to actually
6605 run the destructor. Thus
6609 becomes (approximately)
6613 if (test) { A::A(temp); flag = 1; val = f(temp); }
6616 if (flag) A::~A(temp);
6622 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6623 wce
= gimple_build_wce (cleanup_stmts
);
6624 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6628 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
6629 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
6630 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
6632 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
6633 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6634 wce
= gimple_build_wce (cleanup_stmts
);
6636 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
6637 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6638 gimplify_seq_add_stmt (pre_p
, ftrue
);
6640 /* Because of this manipulation, and the EH edges that jump
6641 threading cannot redirect, the temporary (VAR) will appear
6642 to be used uninitialized. Don't warn. */
6643 TREE_NO_WARNING (var
) = 1;
6648 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6649 wce
= gimple_build_wce (cleanup_stmts
);
6650 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
6651 gimplify_seq_add_stmt (pre_p
, wce
);
6655 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6657 static enum gimplify_status
6658 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6660 tree targ
= *expr_p
;
6661 tree temp
= TARGET_EXPR_SLOT (targ
);
6662 tree init
= TARGET_EXPR_INITIAL (targ
);
6663 enum gimplify_status ret
;
6665 bool unpoison_empty_seq
= false;
6666 gimple_stmt_iterator unpoison_it
;
6670 tree cleanup
= NULL_TREE
;
6672 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6673 to the temps list. Handle also variable length TARGET_EXPRs. */
6674 if (TREE_CODE (DECL_SIZE (temp
)) != INTEGER_CST
)
6676 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
6677 gimplify_type_sizes (TREE_TYPE (temp
), pre_p
);
6678 gimplify_vla_decl (temp
, pre_p
);
6682 /* Save location where we need to place unpoisoning. It's possible
6683 that a variable will be converted to needs_to_live_in_memory. */
6684 unpoison_it
= gsi_last (*pre_p
);
6685 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
6687 gimple_add_tmp_var (temp
);
6690 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6691 expression is supposed to initialize the slot. */
6692 if (VOID_TYPE_P (TREE_TYPE (init
)))
6693 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6696 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
6698 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6700 ggc_free (init_expr
);
6702 if (ret
== GS_ERROR
)
6704 /* PR c++/28266 Make sure this is expanded only once. */
6705 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6709 gimplify_and_add (init
, pre_p
);
6711 /* If needed, push the cleanup for the temp. */
6712 if (TARGET_EXPR_CLEANUP (targ
))
6714 if (CLEANUP_EH_ONLY (targ
))
6715 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
6716 CLEANUP_EH_ONLY (targ
), pre_p
);
6718 cleanup
= TARGET_EXPR_CLEANUP (targ
);
6721 /* Add a clobber for the temporary going out of scope, like
6722 gimplify_bind_expr. */
6723 if (gimplify_ctxp
->in_cleanup_point_expr
6724 && needs_to_live_in_memory (temp
))
6726 if (flag_stack_reuse
== SR_ALL
)
6728 tree clobber
= build_clobber (TREE_TYPE (temp
));
6729 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
6730 gimple_push_cleanup (temp
, clobber
, false, pre_p
, true);
6732 if (asan_poisoned_variables
6733 && DECL_ALIGN (temp
) <= MAX_SUPPORTED_STACK_ALIGNMENT
6734 && !TREE_STATIC (temp
)
6735 && dbg_cnt (asan_use_after_scope
)
6736 && !gimplify_omp_ctxp
)
6738 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
6741 if (unpoison_empty_seq
)
6742 unpoison_it
= gsi_start (*pre_p
);
6744 asan_poison_variable (temp
, false, &unpoison_it
,
6745 unpoison_empty_seq
);
6746 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
6751 gimple_push_cleanup (temp
, cleanup
, false, pre_p
);
6753 /* Only expand this once. */
6754 TREE_OPERAND (targ
, 3) = init
;
6755 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6758 /* We should have expanded this before. */
6759 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
6765 /* Gimplification of expression trees. */
6767 /* Gimplify an expression which appears at statement context. The
6768 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6769 NULL, a new sequence is allocated.
6771 Return true if we actually added a statement to the queue. */
6774 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
6776 gimple_seq_node last
;
6778 last
= gimple_seq_last (*seq_p
);
6779 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
6780 return last
!= gimple_seq_last (*seq_p
);
6783 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6784 to CTX. If entries already exist, force them to be some flavor of private.
6785 If there is no enclosing parallel, do nothing. */
6788 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
6792 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
6797 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6800 if (n
->value
& GOVD_SHARED
)
6801 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
6802 else if (n
->value
& GOVD_MAP
)
6803 n
->value
|= GOVD_MAP_TO_ONLY
;
6807 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
6809 if (ctx
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
6810 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6812 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
6814 else if (ctx
->region_type
!= ORT_WORKSHARE
6815 && ctx
->region_type
!= ORT_TASKGROUP
6816 && ctx
->region_type
!= ORT_SIMD
6817 && ctx
->region_type
!= ORT_ACC
6818 && !(ctx
->region_type
& ORT_TARGET_DATA
))
6819 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6821 ctx
= ctx
->outer_context
;
6826 /* Similarly for each of the type sizes of TYPE. */
6829 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
6831 if (type
== NULL
|| type
== error_mark_node
)
6833 type
= TYPE_MAIN_VARIANT (type
);
6835 if (ctx
->privatized_types
->add (type
))
6838 switch (TREE_CODE (type
))
6844 case FIXED_POINT_TYPE
:
6845 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
6846 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
6850 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6851 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
6856 case QUAL_UNION_TYPE
:
6859 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
6860 if (TREE_CODE (field
) == FIELD_DECL
)
6862 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
6863 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
6869 case REFERENCE_TYPE
:
6870 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6877 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
6878 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
6879 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
6882 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6885 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
6888 unsigned int nflags
;
6891 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
6894 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6895 there are constructors involved somewhere. Exception is a shared clause,
6896 there is nothing privatized in that case. */
6897 if ((flags
& GOVD_SHARED
) == 0
6898 && (TREE_ADDRESSABLE (TREE_TYPE (decl
))
6899 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
))))
6902 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6903 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
6905 /* We shouldn't be re-adding the decl with the same data
6907 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
6908 nflags
= n
->value
| flags
;
6909 /* The only combination of data sharing classes we should see is
6910 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6911 reduction variables to be used in data sharing clauses. */
6912 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
6913 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
6914 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
6915 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
6920 /* When adding a variable-sized variable, we have to handle all sorts
6921 of additional bits of data: the pointer replacement variable, and
6922 the parameters of the type. */
6923 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
6925 /* Add the pointer replacement variable as PRIVATE if the variable
6926 replacement is private, else FIRSTPRIVATE since we'll need the
6927 address of the original variable either for SHARED, or for the
6928 copy into or out of the context. */
6929 if (!(flags
& GOVD_LOCAL
) && ctx
->region_type
!= ORT_TASKGROUP
)
6931 if (flags
& GOVD_MAP
)
6932 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
6933 else if (flags
& GOVD_PRIVATE
)
6934 nflags
= GOVD_PRIVATE
;
6935 else if (((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
6936 && (flags
& GOVD_FIRSTPRIVATE
))
6937 || (ctx
->region_type
== ORT_TARGET_DATA
6938 && (flags
& GOVD_DATA_SHARE_CLASS
) == 0))
6939 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
6941 nflags
= GOVD_FIRSTPRIVATE
;
6942 nflags
|= flags
& GOVD_SEEN
;
6943 t
= DECL_VALUE_EXPR (decl
);
6944 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
6945 t
= TREE_OPERAND (t
, 0);
6946 gcc_assert (DECL_P (t
));
6947 omp_add_variable (ctx
, t
, nflags
);
6950 /* Add all of the variable and type parameters (which should have
6951 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6952 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
6953 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
6954 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
6956 /* The variable-sized variable itself is never SHARED, only some form
6957 of PRIVATE. The sharing would take place via the pointer variable
6958 which we remapped above. */
6959 if (flags
& GOVD_SHARED
)
6960 flags
= GOVD_SHARED
| GOVD_DEBUG_PRIVATE
6961 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
6963 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6964 alloca statement we generate for the variable, so make sure it
6965 is available. This isn't automatically needed for the SHARED
6966 case, since we won't be allocating local storage then.
6967 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6968 in this case omp_notice_variable will be called later
6969 on when it is gimplified. */
6970 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
6971 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
6972 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
6974 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
6975 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
6977 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
6979 /* Similar to the direct variable sized case above, we'll need the
6980 size of references being privatized. */
6981 if ((flags
& GOVD_SHARED
) == 0)
6983 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
6985 omp_notice_variable (ctx
, t
, true);
6992 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
6994 /* For reductions clauses in OpenACC loop directives, by default create a
6995 copy clause on the enclosing parallel construct for carrying back the
6997 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
6999 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
7002 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
7005 /* Ignore local variables and explicitly declared clauses. */
7006 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
7008 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
7010 /* According to the OpenACC spec, such a reduction variable
7011 should already have a copy map on a kernels construct,
7012 verify that here. */
7013 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
7014 && (n
->value
& GOVD_MAP
));
7016 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7018 /* Remove firstprivate and make it a copy map. */
7019 n
->value
&= ~GOVD_FIRSTPRIVATE
;
7020 n
->value
|= GOVD_MAP
;
7023 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7025 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
7026 GOVD_MAP
| GOVD_SEEN
);
7029 outer_ctx
= outer_ctx
->outer_context
;
7034 /* Notice a threadprivate variable DECL used in OMP context CTX.
7035 This just prints out diagnostics about threadprivate variable uses
7036 in untied tasks. If DECL2 is non-NULL, prevent this warning
7037 on that variable. */
7040 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
7044 struct gimplify_omp_ctx
*octx
;
7046 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
7047 if ((octx
->region_type
& ORT_TARGET
) != 0
7048 || octx
->order_concurrent
)
7050 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
7053 if (octx
->order_concurrent
)
7055 error ("threadprivate variable %qE used in a region with"
7056 " %<order(concurrent)%> clause", DECL_NAME (decl
));
7057 error_at (octx
->location
, "enclosing region");
7061 error ("threadprivate variable %qE used in target region",
7063 error_at (octx
->location
, "enclosing target region");
7065 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
7068 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
7071 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
7073 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7076 error ("threadprivate variable %qE used in untied task",
7078 error_at (ctx
->location
, "enclosing task");
7079 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
7082 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
7086 /* Return true if global var DECL is device resident. */
7089 device_resident_p (tree decl
)
7091 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
7096 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
7098 tree c
= TREE_VALUE (t
);
7099 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
7106 /* Return true if DECL has an ACC DECLARE attribute. */
7109 is_oacc_declared (tree decl
)
7111 tree t
= TREE_CODE (decl
) == MEM_REF
? TREE_OPERAND (decl
, 0) : decl
;
7112 tree declared
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t
));
7113 return declared
!= NULL_TREE
;
7116 /* Determine outer default flags for DECL mentioned in an OMP region
7117 but not declared in an enclosing clause.
7119 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7120 remapped firstprivate instead of shared. To some extent this is
7121 addressed in omp_firstprivatize_type_sizes, but not
7125 omp_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
,
7126 bool in_code
, unsigned flags
)
7128 enum omp_clause_default_kind default_kind
= ctx
->default_kind
;
7129 enum omp_clause_default_kind kind
;
7131 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
7132 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
7133 default_kind
= kind
;
7135 switch (default_kind
)
7137 case OMP_CLAUSE_DEFAULT_NONE
:
7141 if (ctx
->region_type
& ORT_PARALLEL
)
7143 else if ((ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
)
7145 else if (ctx
->region_type
& ORT_TASK
)
7147 else if (ctx
->region_type
& ORT_TEAMS
)
7152 error ("%qE not specified in enclosing %qs",
7153 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rtype
);
7154 error_at (ctx
->location
, "enclosing %qs", rtype
);
7157 case OMP_CLAUSE_DEFAULT_SHARED
:
7158 flags
|= GOVD_SHARED
;
7160 case OMP_CLAUSE_DEFAULT_PRIVATE
:
7161 flags
|= GOVD_PRIVATE
;
7163 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
7164 flags
|= GOVD_FIRSTPRIVATE
;
7166 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
7167 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7168 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
7169 if (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
)
7171 omp_notice_variable (octx
, decl
, in_code
);
7172 for (; octx
; octx
= octx
->outer_context
)
7176 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
7177 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0
7178 && (n2
== NULL
|| (n2
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7180 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
7182 flags
|= GOVD_FIRSTPRIVATE
;
7185 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
7187 flags
|= GOVD_SHARED
;
7193 if (TREE_CODE (decl
) == PARM_DECL
7194 || (!is_global_var (decl
)
7195 && DECL_CONTEXT (decl
) == current_function_decl
))
7196 flags
|= GOVD_FIRSTPRIVATE
;
7198 flags
|= GOVD_SHARED
;
7210 /* Determine outer default flags for DECL mentioned in an OACC region
7211 but not declared in an enclosing clause. */
7214 oacc_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned flags
)
7217 bool on_device
= false;
7218 bool declared
= is_oacc_declared (decl
);
7219 tree type
= TREE_TYPE (decl
);
7221 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7222 type
= TREE_TYPE (type
);
7224 if ((ctx
->region_type
& (ORT_ACC_PARALLEL
| ORT_ACC_KERNELS
)) != 0
7225 && is_global_var (decl
)
7226 && device_resident_p (decl
))
7229 flags
|= GOVD_MAP_TO_ONLY
;
7232 switch (ctx
->region_type
)
7234 case ORT_ACC_KERNELS
:
7237 if (AGGREGATE_TYPE_P (type
))
7239 /* Aggregates default to 'present_or_copy', or 'present'. */
7240 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7243 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7246 /* Scalars default to 'copy'. */
7247 flags
|= GOVD_MAP
| GOVD_MAP_FORCE
;
7251 case ORT_ACC_PARALLEL
:
7254 if (on_device
|| declared
)
7256 else if (AGGREGATE_TYPE_P (type
))
7258 /* Aggregates default to 'present_or_copy', or 'present'. */
7259 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7262 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7265 /* Scalars default to 'firstprivate'. */
7266 flags
|= GOVD_FIRSTPRIVATE
;
7274 if (DECL_ARTIFICIAL (decl
))
7275 ; /* We can get compiler-generated decls, and should not complain
7277 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_NONE
)
7279 error ("%qE not specified in enclosing OpenACC %qs construct",
7280 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rkind
);
7281 inform (ctx
->location
, "enclosing OpenACC %qs construct", rkind
);
7283 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_PRESENT
)
7284 ; /* Handled above. */
7286 gcc_checking_assert (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
);
7291 /* Record the fact that DECL was used within the OMP context CTX.
7292 IN_CODE is true when real code uses DECL, and false when we should
7293 merely emit default(none) errors. Return true if DECL is going to
7294 be remapped and thus DECL shouldn't be gimplified into its
7295 DECL_VALUE_EXPR (if any). */
7298 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
7301 unsigned flags
= in_code
? GOVD_SEEN
: 0;
7302 bool ret
= false, shared
;
7304 if (error_operand_p (decl
))
7307 if (ctx
->region_type
== ORT_NONE
)
7308 return lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7310 if (is_global_var (decl
))
7312 /* Threadprivate variables are predetermined. */
7313 if (DECL_THREAD_LOCAL_P (decl
))
7314 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
7316 if (DECL_HAS_VALUE_EXPR_P (decl
))
7318 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
7320 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
7321 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
7324 if (gimplify_omp_ctxp
->outer_context
== NULL
7326 && oacc_get_fn_attrib (current_function_decl
))
7328 location_t loc
= DECL_SOURCE_LOCATION (decl
);
7330 if (lookup_attribute ("omp declare target link",
7331 DECL_ATTRIBUTES (decl
)))
7334 "%qE with %<link%> clause used in %<routine%> function",
7338 else if (!lookup_attribute ("omp declare target",
7339 DECL_ATTRIBUTES (decl
)))
7342 "%qE requires a %<declare%> directive for use "
7343 "in a %<routine%> function", DECL_NAME (decl
));
7349 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7350 if ((ctx
->region_type
& ORT_TARGET
) != 0)
7352 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, true);
7355 unsigned nflags
= flags
;
7356 if ((ctx
->region_type
& ORT_ACC
) == 0)
7358 bool is_declare_target
= false;
7359 if (is_global_var (decl
)
7360 && varpool_node::get_create (decl
)->offloadable
)
7362 struct gimplify_omp_ctx
*octx
;
7363 for (octx
= ctx
->outer_context
;
7364 octx
; octx
= octx
->outer_context
)
7366 n
= splay_tree_lookup (octx
->variables
,
7367 (splay_tree_key
)decl
);
7369 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
7370 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7373 is_declare_target
= octx
== NULL
;
7375 if (!is_declare_target
)
7378 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
7379 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
7380 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
7382 gdmk
= GDMK_POINTER
;
7383 else if (lang_hooks
.decls
.omp_scalar_p (decl
))
7386 gdmk
= GDMK_AGGREGATE
;
7387 if (ctx
->defaultmap
[gdmk
] == 0)
7389 tree d
= lang_hooks
.decls
.omp_report_decl (decl
);
7390 error ("%qE not specified in enclosing %<target%>",
7392 error_at (ctx
->location
, "enclosing %<target%>");
7394 else if (ctx
->defaultmap
[gdmk
]
7395 & (GOVD_MAP_0LEN_ARRAY
| GOVD_FIRSTPRIVATE
))
7396 nflags
|= ctx
->defaultmap
[gdmk
];
7399 gcc_assert (ctx
->defaultmap
[gdmk
] & GOVD_MAP
);
7400 nflags
|= ctx
->defaultmap
[gdmk
] & ~GOVD_MAP
;
7405 struct gimplify_omp_ctx
*octx
= ctx
->outer_context
;
7406 if ((ctx
->region_type
& ORT_ACC
) && octx
)
7408 /* Look in outer OpenACC contexts, to see if there's a
7409 data attribute for this variable. */
7410 omp_notice_variable (octx
, decl
, in_code
);
7412 for (; octx
; octx
= octx
->outer_context
)
7414 if (!(octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)))
7417 = splay_tree_lookup (octx
->variables
,
7418 (splay_tree_key
) decl
);
7421 if (octx
->region_type
== ORT_ACC_HOST_DATA
)
7422 error ("variable %qE declared in enclosing "
7423 "%<host_data%> region", DECL_NAME (decl
));
7425 if (octx
->region_type
== ORT_ACC_DATA
7426 && (n2
->value
& GOVD_MAP_0LEN_ARRAY
))
7427 nflags
|= GOVD_MAP_0LEN_ARRAY
;
7433 if ((nflags
& ~(GOVD_MAP_TO_ONLY
| GOVD_MAP_FROM_ONLY
7434 | GOVD_MAP_ALLOC_ONLY
)) == flags
)
7436 tree type
= TREE_TYPE (decl
);
7438 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
7439 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7440 type
= TREE_TYPE (type
);
7441 if (!lang_hooks
.types
.omp_mappable_type (type
))
7443 error ("%qD referenced in target region does not have "
7444 "a mappable type", decl
);
7445 nflags
|= GOVD_MAP
| GOVD_EXPLICIT
;
7449 if ((ctx
->region_type
& ORT_ACC
) != 0)
7450 nflags
= oacc_default_clause (ctx
, decl
, flags
);
7456 omp_add_variable (ctx
, decl
, nflags
);
7460 /* If nothing changed, there's nothing left to do. */
7461 if ((n
->value
& flags
) == flags
)
7471 if (ctx
->region_type
== ORT_WORKSHARE
7472 || ctx
->region_type
== ORT_TASKGROUP
7473 || ctx
->region_type
== ORT_SIMD
7474 || ctx
->region_type
== ORT_ACC
7475 || (ctx
->region_type
& ORT_TARGET_DATA
) != 0)
7478 flags
= omp_default_clause (ctx
, decl
, in_code
, flags
);
7480 if ((flags
& GOVD_PRIVATE
)
7481 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
7482 flags
|= GOVD_PRIVATE_OUTER_REF
;
7484 omp_add_variable (ctx
, decl
, flags
);
7486 shared
= (flags
& GOVD_SHARED
) != 0;
7487 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7491 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
7492 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
7493 && DECL_SIZE (decl
))
7495 if (TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7498 tree t
= DECL_VALUE_EXPR (decl
);
7499 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7500 t
= TREE_OPERAND (t
, 0);
7501 gcc_assert (DECL_P (t
));
7502 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7503 n2
->value
|= GOVD_SEEN
;
7505 else if (lang_hooks
.decls
.omp_privatize_by_reference (decl
)
7506 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
7507 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
7511 tree t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7512 gcc_assert (DECL_P (t
));
7513 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7515 omp_notice_variable (ctx
, t
, true);
7519 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
7520 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7522 /* If nothing changed, there's nothing left to do. */
7523 if ((n
->value
& flags
) == flags
)
7529 /* If the variable is private in the current context, then we don't
7530 need to propagate anything to an outer context. */
7531 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
7533 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7534 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7536 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
7537 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7538 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7540 if (ctx
->outer_context
7541 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
7546 /* Verify that DECL is private within CTX. If there's specific information
7547 to the contrary in the innermost scope, generate an error. */
7550 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
7554 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7557 if (n
->value
& GOVD_SHARED
)
7559 if (ctx
== gimplify_omp_ctxp
)
7562 error ("iteration variable %qE is predetermined linear",
7565 error ("iteration variable %qE should be private",
7567 n
->value
= GOVD_PRIVATE
;
7573 else if ((n
->value
& GOVD_EXPLICIT
) != 0
7574 && (ctx
== gimplify_omp_ctxp
7575 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7576 && gimplify_omp_ctxp
->outer_context
== ctx
)))
7578 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
7579 error ("iteration variable %qE should not be firstprivate",
7581 else if ((n
->value
& GOVD_REDUCTION
) != 0)
7582 error ("iteration variable %qE should not be reduction",
7584 else if (simd
!= 1 && (n
->value
& GOVD_LINEAR
) != 0)
7585 error ("iteration variable %qE should not be linear",
7588 return (ctx
== gimplify_omp_ctxp
7589 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7590 && gimplify_omp_ctxp
->outer_context
== ctx
));
7593 if (ctx
->region_type
!= ORT_WORKSHARE
7594 && ctx
->region_type
!= ORT_TASKGROUP
7595 && ctx
->region_type
!= ORT_SIMD
7596 && ctx
->region_type
!= ORT_ACC
)
7598 else if (ctx
->outer_context
)
7599 return omp_is_private (ctx
->outer_context
, decl
, simd
);
7603 /* Return true if DECL is private within a parallel region
7604 that binds to the current construct's context or in parallel
7605 region's REDUCTION clause. */
7608 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
7614 ctx
= ctx
->outer_context
;
7617 if (is_global_var (decl
))
7620 /* References might be private, but might be shared too,
7621 when checking for copyprivate, assume they might be
7622 private, otherwise assume they might be shared. */
7626 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7629 /* Treat C++ privatized non-static data members outside
7630 of the privatization the same. */
7631 if (omp_member_access_dummy_var (decl
))
7637 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
7639 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7640 && (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7645 if ((n
->value
& GOVD_LOCAL
) != 0
7646 && omp_member_access_dummy_var (decl
))
7648 return (n
->value
& GOVD_SHARED
) == 0;
7651 while (ctx
->region_type
== ORT_WORKSHARE
7652 || ctx
->region_type
== ORT_TASKGROUP
7653 || ctx
->region_type
== ORT_SIMD
7654 || ctx
->region_type
== ORT_ACC
);
7658 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7661 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
7665 /* If this node has been visited, unmark it and keep looking. */
7666 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
7669 if (IS_TYPE_OR_DECL_P (t
))
7674 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7675 lower all the depend clauses by populating corresponding depend
7676 array. Returns 0 if there are no such depend clauses, or
7677 2 if all depend clauses should be removed, 1 otherwise. */
7680 gimplify_omp_depend (tree
*list_p
, gimple_seq
*pre_p
)
7684 size_t n
[4] = { 0, 0, 0, 0 };
7686 tree counts
[4] = { NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
};
7687 tree last_iter
= NULL_TREE
, last_count
= NULL_TREE
;
7689 location_t first_loc
= UNKNOWN_LOCATION
;
7691 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7692 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
7694 switch (OMP_CLAUSE_DEPEND_KIND (c
))
7696 case OMP_CLAUSE_DEPEND_IN
:
7699 case OMP_CLAUSE_DEPEND_OUT
:
7700 case OMP_CLAUSE_DEPEND_INOUT
:
7703 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
7706 case OMP_CLAUSE_DEPEND_DEPOBJ
:
7709 case OMP_CLAUSE_DEPEND_SOURCE
:
7710 case OMP_CLAUSE_DEPEND_SINK
:
7715 tree t
= OMP_CLAUSE_DECL (c
);
7716 if (first_loc
== UNKNOWN_LOCATION
)
7717 first_loc
= OMP_CLAUSE_LOCATION (c
);
7718 if (TREE_CODE (t
) == TREE_LIST
7720 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
7722 if (TREE_PURPOSE (t
) != last_iter
)
7724 tree tcnt
= size_one_node
;
7725 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
7727 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
7728 is_gimple_val
, fb_rvalue
) == GS_ERROR
7729 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
7730 is_gimple_val
, fb_rvalue
) == GS_ERROR
7731 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
7732 is_gimple_val
, fb_rvalue
) == GS_ERROR
7733 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
7734 is_gimple_val
, fb_rvalue
)
7737 tree var
= TREE_VEC_ELT (it
, 0);
7738 tree begin
= TREE_VEC_ELT (it
, 1);
7739 tree end
= TREE_VEC_ELT (it
, 2);
7740 tree step
= TREE_VEC_ELT (it
, 3);
7741 tree orig_step
= TREE_VEC_ELT (it
, 4);
7742 tree type
= TREE_TYPE (var
);
7743 tree stype
= TREE_TYPE (step
);
7744 location_t loc
= DECL_SOURCE_LOCATION (var
);
7746 /* Compute count for this iterator as
7748 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7749 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7750 and compute product of those for the entire depend
7752 if (POINTER_TYPE_P (type
))
7753 endmbegin
= fold_build2_loc (loc
, POINTER_DIFF_EXPR
,
7756 endmbegin
= fold_build2_loc (loc
, MINUS_EXPR
, type
,
7758 tree stepm1
= fold_build2_loc (loc
, MINUS_EXPR
, stype
,
7760 build_int_cst (stype
, 1));
7761 tree stepp1
= fold_build2_loc (loc
, PLUS_EXPR
, stype
, step
,
7762 build_int_cst (stype
, 1));
7763 tree pos
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
7764 unshare_expr (endmbegin
),
7766 pos
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
7768 tree neg
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
7770 if (TYPE_UNSIGNED (stype
))
7772 neg
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, neg
);
7773 step
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, step
);
7775 neg
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
7778 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
7781 pos
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, pos
,
7782 build_int_cst (stype
, 0));
7783 cond
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
,
7785 neg
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, neg
,
7786 build_int_cst (stype
, 0));
7787 tree osteptype
= TREE_TYPE (orig_step
);
7788 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
7790 build_int_cst (osteptype
, 0));
7791 tree cnt
= fold_build3_loc (loc
, COND_EXPR
, stype
,
7793 cnt
= fold_convert_loc (loc
, sizetype
, cnt
);
7794 if (gimplify_expr (&cnt
, pre_p
, NULL
, is_gimple_val
,
7795 fb_rvalue
) == GS_ERROR
)
7797 tcnt
= size_binop_loc (loc
, MULT_EXPR
, tcnt
, cnt
);
7799 if (gimplify_expr (&tcnt
, pre_p
, NULL
, is_gimple_val
,
7800 fb_rvalue
) == GS_ERROR
)
7802 last_iter
= TREE_PURPOSE (t
);
7805 if (counts
[i
] == NULL_TREE
)
7806 counts
[i
] = last_count
;
7808 counts
[i
] = size_binop_loc (OMP_CLAUSE_LOCATION (c
),
7809 PLUS_EXPR
, counts
[i
], last_count
);
7814 for (i
= 0; i
< 4; i
++)
7820 tree total
= size_zero_node
;
7821 for (i
= 0; i
< 4; i
++)
7823 unused
[i
] = counts
[i
] == NULL_TREE
&& n
[i
] == 0;
7824 if (counts
[i
] == NULL_TREE
)
7825 counts
[i
] = size_zero_node
;
7827 counts
[i
] = size_binop (PLUS_EXPR
, counts
[i
], size_int (n
[i
]));
7828 if (gimplify_expr (&counts
[i
], pre_p
, NULL
, is_gimple_val
,
7829 fb_rvalue
) == GS_ERROR
)
7831 total
= size_binop (PLUS_EXPR
, total
, counts
[i
]);
7834 if (gimplify_expr (&total
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
7837 bool is_old
= unused
[1] && unused
[3];
7838 tree totalpx
= size_binop (PLUS_EXPR
, unshare_expr (total
),
7839 size_int (is_old
? 1 : 4));
7840 tree type
= build_array_type (ptr_type_node
, build_index_type (totalpx
));
7841 tree array
= create_tmp_var_raw (type
);
7842 TREE_ADDRESSABLE (array
) = 1;
7843 if (TREE_CODE (totalpx
) != INTEGER_CST
)
7845 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array
)))
7846 gimplify_type_sizes (TREE_TYPE (array
), pre_p
);
7847 if (gimplify_omp_ctxp
)
7849 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
7851 && (ctx
->region_type
== ORT_WORKSHARE
7852 || ctx
->region_type
== ORT_TASKGROUP
7853 || ctx
->region_type
== ORT_SIMD
7854 || ctx
->region_type
== ORT_ACC
))
7855 ctx
= ctx
->outer_context
;
7857 omp_add_variable (ctx
, array
, GOVD_LOCAL
| GOVD_SEEN
);
7859 gimplify_vla_decl (array
, pre_p
);
7862 gimple_add_tmp_var (array
);
7863 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
7868 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
7869 build_int_cst (ptr_type_node
, 0));
7870 gimplify_and_add (tem
, pre_p
);
7871 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
7874 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
7875 fold_convert (ptr_type_node
, total
));
7876 gimplify_and_add (tem
, pre_p
);
7877 for (i
= 1; i
< (is_old
? 2 : 4); i
++)
7879 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (i
+ !is_old
),
7880 NULL_TREE
, NULL_TREE
);
7881 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, counts
[i
- 1]);
7882 gimplify_and_add (tem
, pre_p
);
7889 for (i
= 0; i
< 4; i
++)
7891 if (i
&& (i
>= j
|| unused
[i
- 1]))
7893 cnts
[i
] = cnts
[i
- 1];
7896 cnts
[i
] = create_tmp_var (sizetype
);
7898 g
= gimple_build_assign (cnts
[i
], size_int (is_old
? 2 : 5));
7903 t
= size_binop (PLUS_EXPR
, counts
[0], size_int (2));
7905 t
= size_binop (PLUS_EXPR
, cnts
[i
- 1], counts
[i
- 1]);
7906 if (gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
7909 g
= gimple_build_assign (cnts
[i
], t
);
7911 gimple_seq_add_stmt (pre_p
, g
);
7914 last_iter
= NULL_TREE
;
7915 tree last_bind
= NULL_TREE
;
7916 tree
*last_body
= NULL
;
7917 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7918 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
7920 switch (OMP_CLAUSE_DEPEND_KIND (c
))
7922 case OMP_CLAUSE_DEPEND_IN
:
7925 case OMP_CLAUSE_DEPEND_OUT
:
7926 case OMP_CLAUSE_DEPEND_INOUT
:
7929 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
7932 case OMP_CLAUSE_DEPEND_DEPOBJ
:
7935 case OMP_CLAUSE_DEPEND_SOURCE
:
7936 case OMP_CLAUSE_DEPEND_SINK
:
7941 tree t
= OMP_CLAUSE_DECL (c
);
7942 if (TREE_CODE (t
) == TREE_LIST
7944 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
7946 if (TREE_PURPOSE (t
) != last_iter
)
7949 gimplify_and_add (last_bind
, pre_p
);
7950 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
7951 last_bind
= build3 (BIND_EXPR
, void_type_node
,
7952 BLOCK_VARS (block
), NULL
, block
);
7953 TREE_SIDE_EFFECTS (last_bind
) = 1;
7954 SET_EXPR_LOCATION (last_bind
, OMP_CLAUSE_LOCATION (c
));
7955 tree
*p
= &BIND_EXPR_BODY (last_bind
);
7956 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
7958 tree var
= TREE_VEC_ELT (it
, 0);
7959 tree begin
= TREE_VEC_ELT (it
, 1);
7960 tree end
= TREE_VEC_ELT (it
, 2);
7961 tree step
= TREE_VEC_ELT (it
, 3);
7962 tree orig_step
= TREE_VEC_ELT (it
, 4);
7963 tree type
= TREE_TYPE (var
);
7964 location_t loc
= DECL_SOURCE_LOCATION (var
);
7972 if (orig_step > 0) {
7973 if (var < end) goto beg_label;
7975 if (var > end) goto beg_label;
7977 for each iterator, with inner iterators added to
7979 tree beg_label
= create_artificial_label (loc
);
7980 tree cond_label
= NULL_TREE
;
7981 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
7983 append_to_statement_list_force (tem
, p
);
7984 tem
= build_and_jump (&cond_label
);
7985 append_to_statement_list_force (tem
, p
);
7986 tem
= build1 (LABEL_EXPR
, void_type_node
, beg_label
);
7987 append_to_statement_list (tem
, p
);
7988 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
7989 NULL_TREE
, NULL_TREE
);
7990 TREE_SIDE_EFFECTS (bind
) = 1;
7991 SET_EXPR_LOCATION (bind
, loc
);
7992 append_to_statement_list_force (bind
, p
);
7993 if (POINTER_TYPE_P (type
))
7994 tem
= build2_loc (loc
, POINTER_PLUS_EXPR
, type
,
7995 var
, fold_convert_loc (loc
, sizetype
,
7998 tem
= build2_loc (loc
, PLUS_EXPR
, type
, var
, step
);
7999 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8001 append_to_statement_list_force (tem
, p
);
8002 tem
= build1 (LABEL_EXPR
, void_type_node
, cond_label
);
8003 append_to_statement_list (tem
, p
);
8004 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8008 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8009 cond
, build_and_jump (&beg_label
),
8011 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8014 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8015 cond
, build_and_jump (&beg_label
),
8017 tree osteptype
= TREE_TYPE (orig_step
);
8018 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8020 build_int_cst (osteptype
, 0));
8021 tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8023 append_to_statement_list_force (tem
, p
);
8024 p
= &BIND_EXPR_BODY (bind
);
8028 last_iter
= TREE_PURPOSE (t
);
8029 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
8031 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
),
8033 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
8035 if (error_operand_p (TREE_VALUE (t
)))
8037 TREE_VALUE (t
) = build_fold_addr_expr (TREE_VALUE (t
));
8038 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8039 NULL_TREE
, NULL_TREE
);
8040 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8041 void_type_node
, r
, TREE_VALUE (t
));
8042 append_to_statement_list_force (tem
, last_body
);
8043 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8044 void_type_node
, cnts
[i
],
8045 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)));
8046 append_to_statement_list_force (tem
, last_body
);
8047 TREE_VALUE (t
) = null_pointer_node
;
8053 gimplify_and_add (last_bind
, pre_p
);
8054 last_bind
= NULL_TREE
;
8056 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8058 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8059 NULL
, is_gimple_val
, fb_rvalue
);
8060 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8062 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8064 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8065 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8066 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8068 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8069 NULL_TREE
, NULL_TREE
);
8070 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, OMP_CLAUSE_DECL (c
));
8071 gimplify_and_add (tem
, pre_p
);
8072 g
= gimple_build_assign (cnts
[i
], size_binop (PLUS_EXPR
, cnts
[i
],
8074 gimple_seq_add_stmt (pre_p
, g
);
8078 gimplify_and_add (last_bind
, pre_p
);
8079 tree cond
= boolean_false_node
;
8083 cond
= build2_loc (first_loc
, NE_EXPR
, boolean_type_node
, cnts
[0],
8084 size_binop_loc (first_loc
, PLUS_EXPR
, counts
[0],
8087 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8088 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8090 size_binop_loc (first_loc
, PLUS_EXPR
,
8096 tree prev
= size_int (5);
8097 for (i
= 0; i
< 4; i
++)
8101 prev
= size_binop_loc (first_loc
, PLUS_EXPR
, counts
[i
], prev
);
8102 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8103 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8104 cnts
[i
], unshare_expr (prev
)));
8107 tem
= build3_loc (first_loc
, COND_EXPR
, void_type_node
, cond
,
8108 build_call_expr_loc (first_loc
,
8109 builtin_decl_explicit (BUILT_IN_TRAP
),
8111 gimplify_and_add (tem
, pre_p
);
8112 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
8113 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
8114 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
8115 OMP_CLAUSE_CHAIN (c
) = *list_p
;
/* Scan the OMP clauses in *LIST_P, installing mappings into a new
   omp context and into the enclosing (previous) omp contexts.  */
8124 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
8125 enum omp_region_type region_type
,
8126 enum tree_code code
)
8128 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
8130 hash_map
<tree
, tree
> *struct_map_to_clause
= NULL
;
8131 tree
*prev_list_p
= NULL
, *orig_list_p
= list_p
;
8132 int handled_depend_iterators
= -1;
8135 ctx
= new_omp_context (region_type
);
8136 outer_ctx
= ctx
->outer_context
;
8137 if (code
== OMP_TARGET
)
8139 if (!lang_GNU_Fortran ())
8140 ctx
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
8141 ctx
->defaultmap
[GDMK_SCALAR
] = GOVD_FIRSTPRIVATE
;
8143 if (!lang_GNU_Fortran ())
8147 case OMP_TARGET_DATA
:
8148 case OMP_TARGET_ENTER_DATA
:
8149 case OMP_TARGET_EXIT_DATA
:
8151 case OACC_HOST_DATA
:
8154 ctx
->target_firstprivatize_array_bases
= true;
8159 while ((c
= *list_p
) != NULL
)
8161 bool remove
= false;
8162 bool notice_outer
= true;
8163 const char *check_non_private
= NULL
;
8167 switch (OMP_CLAUSE_CODE (c
))
8169 case OMP_CLAUSE_PRIVATE
:
8170 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
8171 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
8173 flags
|= GOVD_PRIVATE_OUTER_REF
;
8174 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
8177 notice_outer
= false;
8179 case OMP_CLAUSE_SHARED
:
8180 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
8182 case OMP_CLAUSE_FIRSTPRIVATE
:
8183 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8184 check_non_private
= "firstprivate";
8186 case OMP_CLAUSE_LASTPRIVATE
:
8187 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
8190 case OMP_DISTRIBUTE
:
8191 error_at (OMP_CLAUSE_LOCATION (c
),
8192 "conditional %<lastprivate%> clause on "
8193 "%qs construct", "distribute");
8194 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8197 error_at (OMP_CLAUSE_LOCATION (c
),
8198 "conditional %<lastprivate%> clause on "
8199 "%qs construct", "taskloop");
8200 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8205 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
8206 if (code
!= OMP_LOOP
)
8207 check_non_private
= "lastprivate";
8208 decl
= OMP_CLAUSE_DECL (c
);
8209 if (error_operand_p (decl
))
8211 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
8212 && !lang_hooks
.decls
.omp_scalar_p (decl
))
8214 error_at (OMP_CLAUSE_LOCATION (c
),
8215 "non-scalar variable %qD in conditional "
8216 "%<lastprivate%> clause", decl
);
8217 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8219 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
8220 flags
|= GOVD_LASTPRIVATE_CONDITIONAL
;
8222 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
8223 || ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
8224 == ORT_COMBINED_TEAMS
))
8225 && splay_tree_lookup (outer_ctx
->variables
,
8226 (splay_tree_key
) decl
) == NULL
)
8228 omp_add_variable (outer_ctx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
8229 if (outer_ctx
->outer_context
)
8230 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8233 && (outer_ctx
->region_type
& ORT_TASK
) != 0
8234 && outer_ctx
->combined_loop
8235 && splay_tree_lookup (outer_ctx
->variables
,
8236 (splay_tree_key
) decl
) == NULL
)
8238 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8239 if (outer_ctx
->outer_context
)
8240 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8243 && (outer_ctx
->region_type
== ORT_WORKSHARE
8244 || outer_ctx
->region_type
== ORT_ACC
)
8245 && outer_ctx
->combined_loop
8246 && splay_tree_lookup (outer_ctx
->variables
,
8247 (splay_tree_key
) decl
) == NULL
8248 && !omp_check_private (outer_ctx
, decl
, false))
8250 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8251 if (outer_ctx
->outer_context
8252 && (outer_ctx
->outer_context
->region_type
8253 == ORT_COMBINED_PARALLEL
)
8254 && splay_tree_lookup (outer_ctx
->outer_context
->variables
,
8255 (splay_tree_key
) decl
) == NULL
)
8257 struct gimplify_omp_ctx
*octx
= outer_ctx
->outer_context
;
8258 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
8259 if (octx
->outer_context
)
8261 octx
= octx
->outer_context
;
8262 if (octx
->region_type
== ORT_WORKSHARE
8263 && octx
->combined_loop
8264 && splay_tree_lookup (octx
->variables
,
8265 (splay_tree_key
) decl
) == NULL
8266 && !omp_check_private (octx
, decl
, false))
8268 omp_add_variable (octx
, decl
,
8269 GOVD_LASTPRIVATE
| GOVD_SEEN
);
8270 octx
= octx
->outer_context
;
8272 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
8273 == ORT_COMBINED_TEAMS
)
8274 && (splay_tree_lookup (octx
->variables
,
8275 (splay_tree_key
) decl
)
8278 omp_add_variable (octx
, decl
,
8279 GOVD_SHARED
| GOVD_SEEN
);
8280 octx
= octx
->outer_context
;
8284 omp_notice_variable (octx
, decl
, true);
8287 else if (outer_ctx
->outer_context
)
8288 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8291 case OMP_CLAUSE_REDUCTION
:
8292 if (OMP_CLAUSE_REDUCTION_TASK (c
))
8294 if (region_type
== ORT_WORKSHARE
)
8297 nowait
= omp_find_clause (*list_p
,
8298 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8300 && (outer_ctx
== NULL
8301 || outer_ctx
->region_type
!= ORT_COMBINED_PARALLEL
))
8303 error_at (OMP_CLAUSE_LOCATION (c
),
8304 "%<task%> reduction modifier on a construct "
8305 "with a %<nowait%> clause");
8306 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
8309 else if ((region_type
& ORT_PARALLEL
) != ORT_PARALLEL
)
8311 error_at (OMP_CLAUSE_LOCATION (c
),
8312 "invalid %<task%> reduction modifier on construct "
8313 "other than %<parallel%>, %<for%> or %<sections%>");
8314 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
8317 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
8321 error_at (OMP_CLAUSE_LOCATION (c
),
8322 "%<inscan%> %<reduction%> clause on "
8323 "%qs construct", "sections");
8324 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8327 error_at (OMP_CLAUSE_LOCATION (c
),
8328 "%<inscan%> %<reduction%> clause on "
8329 "%qs construct", "parallel");
8330 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8333 error_at (OMP_CLAUSE_LOCATION (c
),
8334 "%<inscan%> %<reduction%> clause on "
8335 "%qs construct", "teams");
8336 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8339 error_at (OMP_CLAUSE_LOCATION (c
),
8340 "%<inscan%> %<reduction%> clause on "
8341 "%qs construct", "taskloop");
8342 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8348 case OMP_CLAUSE_IN_REDUCTION
:
8349 case OMP_CLAUSE_TASK_REDUCTION
:
8350 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
8351 /* OpenACC permits reductions on private variables. */
8352 if (!(region_type
& ORT_ACC
)
8353 /* taskgroup is actually not a worksharing region. */
8354 && code
!= OMP_TASKGROUP
)
8355 check_non_private
= omp_clause_code_name
[OMP_CLAUSE_CODE (c
)];
8356 decl
= OMP_CLAUSE_DECL (c
);
8357 if (TREE_CODE (decl
) == MEM_REF
)
8359 tree type
= TREE_TYPE (decl
);
8360 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
8361 NULL
, is_gimple_val
, fb_rvalue
, false)
8367 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8370 omp_firstprivatize_variable (ctx
, v
);
8371 omp_notice_variable (ctx
, v
, true);
8373 decl
= TREE_OPERAND (decl
, 0);
8374 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
8376 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
8377 NULL
, is_gimple_val
, fb_rvalue
, false)
8383 v
= TREE_OPERAND (decl
, 1);
8386 omp_firstprivatize_variable (ctx
, v
);
8387 omp_notice_variable (ctx
, v
, true);
8389 decl
= TREE_OPERAND (decl
, 0);
8391 if (TREE_CODE (decl
) == ADDR_EXPR
8392 || TREE_CODE (decl
) == INDIRECT_REF
)
8393 decl
= TREE_OPERAND (decl
, 0);
8396 case OMP_CLAUSE_LINEAR
:
8397 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
8398 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8405 if (code
== OMP_SIMD
8406 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8408 struct gimplify_omp_ctx
*octx
= outer_ctx
;
8410 && octx
->region_type
== ORT_WORKSHARE
8411 && octx
->combined_loop
8412 && !octx
->distribute
)
8414 if (octx
->outer_context
8415 && (octx
->outer_context
->region_type
8416 == ORT_COMBINED_PARALLEL
))
8417 octx
= octx
->outer_context
->outer_context
;
8419 octx
= octx
->outer_context
;
8422 && octx
->region_type
== ORT_WORKSHARE
8423 && octx
->combined_loop
8424 && octx
->distribute
)
8426 error_at (OMP_CLAUSE_LOCATION (c
),
8427 "%<linear%> clause for variable other than "
8428 "loop iterator specified on construct "
8429 "combined with %<distribute%>");
8434 /* For combined #pragma omp parallel for simd, need to put
8435 lastprivate and perhaps firstprivate too on the
8436 parallel. Similarly for #pragma omp for simd. */
8437 struct gimplify_omp_ctx
*octx
= outer_ctx
;
8441 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8442 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8444 decl
= OMP_CLAUSE_DECL (c
);
8445 if (error_operand_p (decl
))
8451 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8452 flags
|= GOVD_FIRSTPRIVATE
;
8453 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8454 flags
|= GOVD_LASTPRIVATE
;
8456 && octx
->region_type
== ORT_WORKSHARE
8457 && octx
->combined_loop
)
8459 if (octx
->outer_context
8460 && (octx
->outer_context
->region_type
8461 == ORT_COMBINED_PARALLEL
))
8462 octx
= octx
->outer_context
;
8463 else if (omp_check_private (octx
, decl
, false))
8467 && (octx
->region_type
& ORT_TASK
) != 0
8468 && octx
->combined_loop
)
8471 && octx
->region_type
== ORT_COMBINED_PARALLEL
8472 && ctx
->region_type
== ORT_WORKSHARE
8473 && octx
== outer_ctx
)
8474 flags
= GOVD_SEEN
| GOVD_SHARED
;
8476 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
8477 == ORT_COMBINED_TEAMS
))
8478 flags
= GOVD_SEEN
| GOVD_SHARED
;
8480 && octx
->region_type
== ORT_COMBINED_TARGET
)
8482 flags
&= ~GOVD_LASTPRIVATE
;
8483 if (flags
== GOVD_SEEN
)
8489 = splay_tree_lookup (octx
->variables
,
8490 (splay_tree_key
) decl
);
8491 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
8496 omp_add_variable (octx
, decl
, flags
);
8497 if (octx
->outer_context
== NULL
)
8499 octx
= octx
->outer_context
;
8504 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8505 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
8506 omp_notice_variable (octx
, decl
, true);
8508 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
8509 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8510 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8512 notice_outer
= false;
8513 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
8517 case OMP_CLAUSE_MAP
:
8518 decl
= OMP_CLAUSE_DECL (c
);
8519 if (error_operand_p (decl
))
8526 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
8529 case OMP_TARGET_DATA
:
8530 case OMP_TARGET_ENTER_DATA
:
8531 case OMP_TARGET_EXIT_DATA
:
8532 case OACC_ENTER_DATA
:
8533 case OACC_EXIT_DATA
:
8534 case OACC_HOST_DATA
:
8535 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8536 || (OMP_CLAUSE_MAP_KIND (c
)
8537 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
8538 /* For target {,enter ,exit }data only the array slice is
8539 mapped, but not the pointer to it. */
8547 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
8549 struct gimplify_omp_ctx
*octx
;
8550 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
8552 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
8555 = splay_tree_lookup (octx
->variables
,
8556 (splay_tree_key
) decl
);
8558 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
8559 "declared in enclosing %<host_data%> region",
8563 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
8564 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
8565 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8566 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
8567 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8572 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8573 || (OMP_CLAUSE_MAP_KIND (c
)
8574 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
8575 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
8578 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
8580 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
8581 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
8586 if (TREE_CODE (d
) == ARRAY_REF
)
8588 while (TREE_CODE (d
) == ARRAY_REF
)
8589 d
= TREE_OPERAND (d
, 0);
8590 if (TREE_CODE (d
) == COMPONENT_REF
8591 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
8594 pd
= &OMP_CLAUSE_DECL (c
);
8596 && TREE_CODE (decl
) == INDIRECT_REF
8597 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
8598 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
8601 pd
= &TREE_OPERAND (decl
, 0);
8602 decl
= TREE_OPERAND (decl
, 0);
8604 if (TREE_CODE (decl
) == COMPONENT_REF
)
8606 while (TREE_CODE (decl
) == COMPONENT_REF
)
8607 decl
= TREE_OPERAND (decl
, 0);
8608 if (TREE_CODE (decl
) == INDIRECT_REF
8609 && DECL_P (TREE_OPERAND (decl
, 0))
8610 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
8612 decl
= TREE_OPERAND (decl
, 0);
8614 if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
)
8622 if (error_operand_p (decl
))
8628 tree stype
= TREE_TYPE (decl
);
8629 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
8630 stype
= TREE_TYPE (stype
);
8631 if (TYPE_SIZE_UNIT (stype
) == NULL
8632 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
8634 error_at (OMP_CLAUSE_LOCATION (c
),
8635 "mapping field %qE of variable length "
8636 "structure", OMP_CLAUSE_DECL (c
));
8641 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
8643 /* Error recovery. */
8644 if (prev_list_p
== NULL
)
8649 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
8651 tree ch
= OMP_CLAUSE_CHAIN (*prev_list_p
);
8652 if (ch
== NULL_TREE
|| OMP_CLAUSE_CHAIN (ch
) != c
)
8661 poly_int64 bitsize
, bitpos
;
8663 int unsignedp
, reversep
, volatilep
= 0;
8664 tree base
= OMP_CLAUSE_DECL (c
);
8665 while (TREE_CODE (base
) == ARRAY_REF
)
8666 base
= TREE_OPERAND (base
, 0);
8667 if (TREE_CODE (base
) == INDIRECT_REF
)
8668 base
= TREE_OPERAND (base
, 0);
8669 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
8670 &mode
, &unsignedp
, &reversep
,
8672 tree orig_base
= base
;
8673 if ((TREE_CODE (base
) == INDIRECT_REF
8674 || (TREE_CODE (base
) == MEM_REF
8675 && integer_zerop (TREE_OPERAND (base
, 1))))
8676 && DECL_P (TREE_OPERAND (base
, 0))
8677 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
8679 base
= TREE_OPERAND (base
, 0);
8680 gcc_assert (base
== decl
8681 && (offset
== NULL_TREE
8682 || poly_int_tree_p (offset
)));
8685 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
8686 bool ptr
= (OMP_CLAUSE_MAP_KIND (c
)
8687 == GOMP_MAP_ALWAYS_POINTER
);
8688 if (n
== NULL
|| (n
->value
& GOVD_MAP
) == 0)
8690 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8692 OMP_CLAUSE_SET_MAP_KIND (l
, GOMP_MAP_STRUCT
);
8693 if (orig_base
!= base
)
8694 OMP_CLAUSE_DECL (l
) = unshare_expr (orig_base
);
8696 OMP_CLAUSE_DECL (l
) = decl
;
8697 OMP_CLAUSE_SIZE (l
) = size_int (1);
8698 if (struct_map_to_clause
== NULL
)
8699 struct_map_to_clause
= new hash_map
<tree
, tree
>;
8700 struct_map_to_clause
->put (decl
, l
);
8703 enum gomp_map_kind mkind
8704 = code
== OMP_TARGET_EXIT_DATA
8705 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
8706 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8708 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8709 OMP_CLAUSE_DECL (c2
)
8710 = unshare_expr (OMP_CLAUSE_DECL (c
));
8711 OMP_CLAUSE_CHAIN (c2
) = *prev_list_p
;
8712 OMP_CLAUSE_SIZE (c2
)
8713 = TYPE_SIZE_UNIT (ptr_type_node
);
8714 OMP_CLAUSE_CHAIN (l
) = c2
;
8715 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
8717 tree c4
= OMP_CLAUSE_CHAIN (*prev_list_p
);
8719 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8721 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
8722 OMP_CLAUSE_DECL (c3
)
8723 = unshare_expr (OMP_CLAUSE_DECL (c4
));
8724 OMP_CLAUSE_SIZE (c3
)
8725 = TYPE_SIZE_UNIT (ptr_type_node
);
8726 OMP_CLAUSE_CHAIN (c3
) = *prev_list_p
;
8727 OMP_CLAUSE_CHAIN (c2
) = c3
;
8734 OMP_CLAUSE_CHAIN (l
) = c
;
8736 list_p
= &OMP_CLAUSE_CHAIN (l
);
8738 if (orig_base
!= base
&& code
== OMP_TARGET
)
8740 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8742 enum gomp_map_kind mkind
8743 = GOMP_MAP_FIRSTPRIVATE_REFERENCE
;
8744 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8745 OMP_CLAUSE_DECL (c2
) = decl
;
8746 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
8747 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
8748 OMP_CLAUSE_CHAIN (l
) = c2
;
8750 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
8751 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) || ptr
)
8757 tree
*osc
= struct_map_to_clause
->get (decl
);
8758 tree
*sc
= NULL
, *scp
= NULL
;
8759 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) || ptr
)
8760 n
->value
|= GOVD_SEEN
;
8761 poly_offset_int o1
, o2
;
8763 o1
= wi::to_poly_offset (offset
);
8766 if (maybe_ne (bitpos
, 0))
8767 o1
+= bits_to_bytes_round_down (bitpos
);
8768 sc
= &OMP_CLAUSE_CHAIN (*osc
);
8770 && (OMP_CLAUSE_MAP_KIND (*sc
)
8771 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
8772 sc
= &OMP_CLAUSE_CHAIN (*sc
);
8773 for (; *sc
!= c
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
8774 if (ptr
&& sc
== prev_list_p
)
8776 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
8778 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
8780 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
8786 poly_int64 bitsize2
, bitpos2
;
8787 base
= OMP_CLAUSE_DECL (*sc
);
8788 if (TREE_CODE (base
) == ARRAY_REF
)
8790 while (TREE_CODE (base
) == ARRAY_REF
)
8791 base
= TREE_OPERAND (base
, 0);
8792 if (TREE_CODE (base
) != COMPONENT_REF
8793 || (TREE_CODE (TREE_TYPE (base
))
8797 else if (TREE_CODE (base
) == INDIRECT_REF
8798 && (TREE_CODE (TREE_OPERAND (base
, 0))
8800 && (TREE_CODE (TREE_TYPE
8801 (TREE_OPERAND (base
, 0)))
8803 base
= TREE_OPERAND (base
, 0);
8804 base
= get_inner_reference (base
, &bitsize2
,
8807 &reversep
, &volatilep
);
8808 if ((TREE_CODE (base
) == INDIRECT_REF
8809 || (TREE_CODE (base
) == MEM_REF
8810 && integer_zerop (TREE_OPERAND (base
,
8812 && DECL_P (TREE_OPERAND (base
, 0))
8813 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
,
8816 base
= TREE_OPERAND (base
, 0);
8821 gcc_assert (offset
== NULL_TREE
8822 || poly_int_tree_p (offset
));
8823 tree d1
= OMP_CLAUSE_DECL (*sc
);
8824 tree d2
= OMP_CLAUSE_DECL (c
);
8825 while (TREE_CODE (d1
) == ARRAY_REF
)
8826 d1
= TREE_OPERAND (d1
, 0);
8827 while (TREE_CODE (d2
) == ARRAY_REF
)
8828 d2
= TREE_OPERAND (d2
, 0);
8829 if (TREE_CODE (d1
) == INDIRECT_REF
)
8830 d1
= TREE_OPERAND (d1
, 0);
8831 if (TREE_CODE (d2
) == INDIRECT_REF
)
8832 d2
= TREE_OPERAND (d2
, 0);
8833 while (TREE_CODE (d1
) == COMPONENT_REF
)
8834 if (TREE_CODE (d2
) == COMPONENT_REF
8835 && TREE_OPERAND (d1
, 1)
8836 == TREE_OPERAND (d2
, 1))
8838 d1
= TREE_OPERAND (d1
, 0);
8839 d2
= TREE_OPERAND (d2
, 0);
8845 error_at (OMP_CLAUSE_LOCATION (c
),
8846 "%qE appears more than once in map "
8847 "clauses", OMP_CLAUSE_DECL (c
));
8852 o2
= wi::to_poly_offset (offset2
);
8855 o2
+= bits_to_bytes_round_down (bitpos2
);
8856 if (maybe_lt (o1
, o2
)
8857 || (known_eq (o1
, o2
)
8858 && maybe_lt (bitpos
, bitpos2
)))
8868 OMP_CLAUSE_SIZE (*osc
)
8869 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
),
8873 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8875 tree cl
= NULL_TREE
;
8876 enum gomp_map_kind mkind
8877 = code
== OMP_TARGET_EXIT_DATA
8878 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
8879 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8880 OMP_CLAUSE_DECL (c2
)
8881 = unshare_expr (OMP_CLAUSE_DECL (c
));
8882 OMP_CLAUSE_CHAIN (c2
) = scp
? *scp
: *prev_list_p
;
8883 OMP_CLAUSE_SIZE (c2
)
8884 = TYPE_SIZE_UNIT (ptr_type_node
);
8885 cl
= scp
? *prev_list_p
: c2
;
8886 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
8888 tree c4
= OMP_CLAUSE_CHAIN (*prev_list_p
);
8890 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8892 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
8893 OMP_CLAUSE_DECL (c3
)
8894 = unshare_expr (OMP_CLAUSE_DECL (c4
));
8895 OMP_CLAUSE_SIZE (c3
)
8896 = TYPE_SIZE_UNIT (ptr_type_node
);
8897 OMP_CLAUSE_CHAIN (c3
) = *prev_list_p
;
8899 OMP_CLAUSE_CHAIN (c2
) = c3
;
8905 if (sc
== prev_list_p
)
8912 *prev_list_p
= OMP_CLAUSE_CHAIN (c
);
8913 list_p
= prev_list_p
;
8915 OMP_CLAUSE_CHAIN (c
) = *sc
;
8922 *list_p
= OMP_CLAUSE_CHAIN (c
);
8923 OMP_CLAUSE_CHAIN (c
) = *sc
;
8930 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_POINTER
8931 && OMP_CLAUSE_CHAIN (c
)
8932 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
)) == OMP_CLAUSE_MAP
8933 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
8934 == GOMP_MAP_ALWAYS_POINTER
))
8935 prev_list_p
= list_p
;
8938 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
8939 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
8940 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
8941 flags
|= GOVD_MAP_ALWAYS_TO
;
8944 case OMP_CLAUSE_DEPEND
:
8945 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8947 tree deps
= OMP_CLAUSE_DECL (c
);
8948 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
8950 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
8951 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
8952 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
8953 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
8954 deps
= TREE_CHAIN (deps
);
8958 else if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
8960 if (handled_depend_iterators
== -1)
8961 handled_depend_iterators
= gimplify_omp_depend (list_p
, pre_p
);
8962 if (handled_depend_iterators
)
8964 if (handled_depend_iterators
== 2)
8968 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8970 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8971 NULL
, is_gimple_val
, fb_rvalue
);
8972 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8974 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8979 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8980 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8981 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8989 case OMP_CLAUSE_FROM
:
8990 case OMP_CLAUSE__CACHE_
:
8991 decl
= OMP_CLAUSE_DECL (c
);
8992 if (error_operand_p (decl
))
8997 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
8998 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
8999 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9000 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
9001 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9008 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
9009 NULL
, is_gimple_lvalue
, fb_lvalue
)
9019 case OMP_CLAUSE_USE_DEVICE_PTR
:
9020 case OMP_CLAUSE_USE_DEVICE_ADDR
:
9021 flags
= GOVD_EXPLICIT
;
9024 case OMP_CLAUSE_IS_DEVICE_PTR
:
9025 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
9029 decl
= OMP_CLAUSE_DECL (c
);
9031 if (error_operand_p (decl
))
9036 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
9038 tree t
= omp_member_access_dummy_var (decl
);
9041 tree v
= DECL_VALUE_EXPR (decl
);
9042 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
9044 omp_notice_variable (outer_ctx
, t
, true);
9047 if (code
== OACC_DATA
9048 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9049 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
9050 flags
|= GOVD_MAP_0LEN_ARRAY
;
9051 omp_add_variable (ctx
, decl
, flags
);
9052 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9053 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
9054 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9055 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9057 omp_add_variable (ctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
9058 GOVD_LOCAL
| GOVD_SEEN
);
9059 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
9060 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
9062 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
9064 omp_add_variable (ctx
,
9065 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
9066 GOVD_LOCAL
| GOVD_SEEN
);
9067 gimplify_omp_ctxp
= ctx
;
9068 push_gimplify_context ();
9070 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9071 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9073 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
9074 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
9075 pop_gimplify_context
9076 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
9077 push_gimplify_context ();
9078 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
9079 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9080 pop_gimplify_context
9081 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
9082 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
9083 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
9085 gimplify_omp_ctxp
= outer_ctx
;
9087 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
9088 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
9090 gimplify_omp_ctxp
= ctx
;
9091 push_gimplify_context ();
9092 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
9094 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
9096 TREE_SIDE_EFFECTS (bind
) = 1;
9097 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
9098 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
9100 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
9101 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
9102 pop_gimplify_context
9103 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
9104 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
9106 gimplify_omp_ctxp
= outer_ctx
;
9108 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
9109 && OMP_CLAUSE_LINEAR_STMT (c
))
9111 gimplify_omp_ctxp
= ctx
;
9112 push_gimplify_context ();
9113 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
9115 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
9117 TREE_SIDE_EFFECTS (bind
) = 1;
9118 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
9119 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
9121 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
9122 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
9123 pop_gimplify_context
9124 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
9125 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
9127 gimplify_omp_ctxp
= outer_ctx
;
9133 case OMP_CLAUSE_COPYIN
:
9134 case OMP_CLAUSE_COPYPRIVATE
:
9135 decl
= OMP_CLAUSE_DECL (c
);
9136 if (error_operand_p (decl
))
9141 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
9143 && !omp_check_private (ctx
, decl
, true))
9146 if (is_global_var (decl
))
9148 if (DECL_THREAD_LOCAL_P (decl
))
9150 else if (DECL_HAS_VALUE_EXPR_P (decl
))
9152 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
9156 && DECL_THREAD_LOCAL_P (value
))
9161 error_at (OMP_CLAUSE_LOCATION (c
),
9162 "copyprivate variable %qE is not threadprivate"
9163 " or private in outer context", DECL_NAME (decl
));
9166 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9167 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
9168 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
9170 && ((region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
9171 || (region_type
== ORT_WORKSHARE
9172 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9173 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
9174 || code
== OMP_LOOP
)))
9175 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
9176 || (code
== OMP_LOOP
9177 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9178 && ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
9179 == ORT_COMBINED_TEAMS
))))
9182 = splay_tree_lookup (outer_ctx
->variables
,
9183 (splay_tree_key
)decl
);
9184 if (on
== NULL
|| (on
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9186 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9187 && TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
9188 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
9189 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
9190 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
9192 omp_firstprivatize_variable (outer_ctx
, decl
);
9194 omp_add_variable (outer_ctx
, decl
,
9195 GOVD_SEEN
| GOVD_SHARED
);
9196 omp_notice_variable (outer_ctx
, decl
, true);
9200 omp_notice_variable (outer_ctx
, decl
, true);
9201 if (check_non_private
9202 && region_type
== ORT_WORKSHARE
9203 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
9204 || decl
== OMP_CLAUSE_DECL (c
)
9205 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
9206 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
9208 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
9209 == POINTER_PLUS_EXPR
9210 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9211 (OMP_CLAUSE_DECL (c
), 0), 0))
9213 && omp_check_private (ctx
, decl
, false))
9215 error ("%s variable %qE is private in outer context",
9216 check_non_private
, DECL_NAME (decl
));
9222 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
9223 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
9226 for (int i
= 0; i
< 2; i
++)
9227 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
9229 case VOID_CST
: p
[i
] = "cancel"; break;
9230 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
9231 case OMP_SIMD
: p
[i
] = "simd"; break;
9232 case OMP_TASK
: p
[i
] = "task"; break;
9233 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
9234 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
9235 case OMP_TARGET
: p
[i
] = "target"; break;
9236 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
9237 case OMP_TARGET_ENTER_DATA
:
9238 p
[i
] = "target enter data"; break;
9239 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
9240 default: gcc_unreachable ();
9242 error_at (OMP_CLAUSE_LOCATION (c
),
9243 "expected %qs %<if%> clause modifier rather than %qs",
9249 case OMP_CLAUSE_FINAL
:
9250 OMP_CLAUSE_OPERAND (c
, 0)
9251 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
9254 case OMP_CLAUSE_SCHEDULE
:
9255 case OMP_CLAUSE_NUM_THREADS
:
9256 case OMP_CLAUSE_NUM_TEAMS
:
9257 case OMP_CLAUSE_THREAD_LIMIT
:
9258 case OMP_CLAUSE_DIST_SCHEDULE
:
9259 case OMP_CLAUSE_DEVICE
:
9260 case OMP_CLAUSE_PRIORITY
:
9261 case OMP_CLAUSE_GRAINSIZE
:
9262 case OMP_CLAUSE_NUM_TASKS
:
9263 case OMP_CLAUSE_HINT
:
9264 case OMP_CLAUSE_ASYNC
:
9265 case OMP_CLAUSE_WAIT
:
9266 case OMP_CLAUSE_NUM_GANGS
:
9267 case OMP_CLAUSE_NUM_WORKERS
:
9268 case OMP_CLAUSE_VECTOR_LENGTH
:
9269 case OMP_CLAUSE_WORKER
:
9270 case OMP_CLAUSE_VECTOR
:
9271 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
9272 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9276 case OMP_CLAUSE_GANG
:
9277 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
9278 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9280 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
9281 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9285 case OMP_CLAUSE_NOWAIT
:
9289 case OMP_CLAUSE_ORDERED
:
9290 case OMP_CLAUSE_UNTIED
:
9291 case OMP_CLAUSE_COLLAPSE
:
9292 case OMP_CLAUSE_TILE
:
9293 case OMP_CLAUSE_AUTO
:
9294 case OMP_CLAUSE_SEQ
:
9295 case OMP_CLAUSE_INDEPENDENT
:
9296 case OMP_CLAUSE_MERGEABLE
:
9297 case OMP_CLAUSE_PROC_BIND
:
9298 case OMP_CLAUSE_SAFELEN
:
9299 case OMP_CLAUSE_SIMDLEN
:
9300 case OMP_CLAUSE_NOGROUP
:
9301 case OMP_CLAUSE_THREADS
:
9302 case OMP_CLAUSE_SIMD
:
9303 case OMP_CLAUSE_BIND
:
9304 case OMP_CLAUSE_IF_PRESENT
:
9305 case OMP_CLAUSE_FINALIZE
:
9308 case OMP_CLAUSE_ORDER
:
9309 ctx
->order_concurrent
= true;
9312 case OMP_CLAUSE_DEFAULTMAP
:
9313 enum gimplify_defaultmap_kind gdmkmin
, gdmkmax
;
9314 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c
))
9316 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
:
9317 gdmkmin
= GDMK_SCALAR
;
9318 gdmkmax
= GDMK_POINTER
;
9320 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
:
9321 gdmkmin
= gdmkmax
= GDMK_SCALAR
;
9323 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
:
9324 gdmkmin
= gdmkmax
= GDMK_AGGREGATE
;
9326 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
:
9327 gdmkmin
= gdmkmax
= GDMK_ALLOCATABLE
;
9329 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
:
9330 gdmkmin
= gdmkmax
= GDMK_POINTER
;
9335 for (int gdmk
= gdmkmin
; gdmk
<= gdmkmax
; gdmk
++)
9336 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c
))
9338 case OMP_CLAUSE_DEFAULTMAP_ALLOC
:
9339 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_ALLOC_ONLY
;
9341 case OMP_CLAUSE_DEFAULTMAP_TO
:
9342 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_TO_ONLY
;
9344 case OMP_CLAUSE_DEFAULTMAP_FROM
:
9345 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FROM_ONLY
;
9347 case OMP_CLAUSE_DEFAULTMAP_TOFROM
:
9348 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
9350 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
:
9351 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
9353 case OMP_CLAUSE_DEFAULTMAP_NONE
:
9354 ctx
->defaultmap
[gdmk
] = 0;
9356 case OMP_CLAUSE_DEFAULTMAP_DEFAULT
:
9360 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
9362 case GDMK_AGGREGATE
:
9363 case GDMK_ALLOCATABLE
:
9364 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
9367 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
9378 case OMP_CLAUSE_ALIGNED
:
9379 decl
= OMP_CLAUSE_DECL (c
);
9380 if (error_operand_p (decl
))
9385 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
9386 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9391 if (!is_global_var (decl
)
9392 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
9393 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
9396 case OMP_CLAUSE_NONTEMPORAL
:
9397 decl
= OMP_CLAUSE_DECL (c
);
9398 if (error_operand_p (decl
))
9403 omp_add_variable (ctx
, decl
, GOVD_NONTEMPORAL
);
9406 case OMP_CLAUSE_DEFAULT
:
9407 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
9410 case OMP_CLAUSE_INCLUSIVE
:
9411 case OMP_CLAUSE_EXCLUSIVE
:
9412 decl
= OMP_CLAUSE_DECL (c
);
9414 splay_tree_node n
= splay_tree_lookup (outer_ctx
->variables
,
9415 (splay_tree_key
) decl
);
9416 if (n
== NULL
|| (n
->value
& GOVD_REDUCTION
) == 0)
9418 error_at (OMP_CLAUSE_LOCATION (c
),
9419 "%qD specified in %qs clause but not in %<inscan%> "
9420 "%<reduction%> clause on the containing construct",
9421 decl
, omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
9426 n
->value
|= GOVD_REDUCTION_INSCAN
;
9427 if (outer_ctx
->region_type
== ORT_SIMD
9428 && outer_ctx
->outer_context
9429 && outer_ctx
->outer_context
->region_type
== ORT_WORKSHARE
)
9431 n
= splay_tree_lookup (outer_ctx
->outer_context
->variables
,
9432 (splay_tree_key
) decl
);
9433 if (n
&& (n
->value
& GOVD_REDUCTION
) != 0)
9434 n
->value
|= GOVD_REDUCTION_INSCAN
;
9444 if (code
== OACC_DATA
9445 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9446 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9447 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9450 *list_p
= OMP_CLAUSE_CHAIN (c
);
9452 list_p
= &OMP_CLAUSE_CHAIN (c
);
9455 ctx
->clauses
= *orig_list_p
;
9456 gimplify_omp_ctxp
= ctx
;
9457 if (struct_map_to_clause
)
9458 delete struct_map_to_clause
;
9461 /* Return true if DECL is a candidate for shared to firstprivate
9462 optimization. We only consider non-addressable scalars, not
9463 too big, and not references. */
9466 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
9468 if (TREE_ADDRESSABLE (decl
))
9470 tree type
= TREE_TYPE (decl
);
9471 if (!is_gimple_reg_type (type
)
9472 || TREE_CODE (type
) == REFERENCE_TYPE
9473 || TREE_ADDRESSABLE (type
))
9475 /* Don't optimize too large decls, as each thread/task will have
9477 HOST_WIDE_INT len
= int_size_in_bytes (type
);
9478 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
9480 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
9485 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9486 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
9487 GOVD_WRITTEN in outer contexts. */
9490 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
9492 for (; ctx
; ctx
= ctx
->outer_context
)
9494 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
9495 (splay_tree_key
) decl
);
9498 else if (n
->value
& GOVD_SHARED
)
9500 n
->value
|= GOVD_WRITTEN
;
9503 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
9508 /* Helper callback for walk_gimple_seq to discover possible stores
9509 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9510 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9514 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
9516 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
9525 if (handled_component_p (op
))
9526 op
= TREE_OPERAND (op
, 0);
9527 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
9528 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
9529 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
9534 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
9537 omp_mark_stores (gimplify_omp_ctxp
, op
);
9541 /* Helper callback for walk_gimple_seq to discover possible stores
9542 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9543 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9547 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
9548 bool *handled_ops_p
,
9549 struct walk_stmt_info
*wi
)
9551 gimple
*stmt
= gsi_stmt (*gsi_p
);
9552 switch (gimple_code (stmt
))
9554 /* Don't recurse on OpenMP constructs for which
9555 gimplify_adjust_omp_clauses already handled the bodies,
9556 except handle gimple_omp_for_pre_body. */
9557 case GIMPLE_OMP_FOR
:
9558 *handled_ops_p
= true;
9559 if (gimple_omp_for_pre_body (stmt
))
9560 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
9561 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
9563 case GIMPLE_OMP_PARALLEL
:
9564 case GIMPLE_OMP_TASK
:
9565 case GIMPLE_OMP_SECTIONS
:
9566 case GIMPLE_OMP_SINGLE
:
9567 case GIMPLE_OMP_TARGET
:
9568 case GIMPLE_OMP_TEAMS
:
9569 case GIMPLE_OMP_CRITICAL
:
9570 *handled_ops_p
= true;
9578 struct gimplify_adjust_omp_clauses_data
9584 /* For all variables that were not actually used within the context,
9585 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
9588 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
9590 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
9592 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
9593 tree decl
= (tree
) n
->key
;
9594 unsigned flags
= n
->value
;
9595 enum omp_clause_code code
;
9599 if (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
9600 && (flags
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0)
9601 flags
= GOVD_SHARED
| GOVD_SEEN
| GOVD_WRITTEN
;
9602 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
9604 if ((flags
& GOVD_SEEN
) == 0)
9606 if (flags
& GOVD_DEBUG_PRIVATE
)
9608 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_SHARED
);
9609 private_debug
= true;
9611 else if (flags
& GOVD_MAP
)
9612 private_debug
= false;
9615 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
9616 !!(flags
& GOVD_SHARED
));
9618 code
= OMP_CLAUSE_PRIVATE
;
9619 else if (flags
& GOVD_MAP
)
9621 code
= OMP_CLAUSE_MAP
;
9622 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
9623 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
9625 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
9629 else if (flags
& GOVD_SHARED
)
9631 if (is_global_var (decl
))
9633 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
9637 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9638 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
9639 | GOVD_PRIVATE
| GOVD_REDUCTION
9640 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
9642 ctx
= ctx
->outer_context
;
9647 code
= OMP_CLAUSE_SHARED
;
9649 else if (flags
& GOVD_PRIVATE
)
9650 code
= OMP_CLAUSE_PRIVATE
;
9651 else if (flags
& GOVD_FIRSTPRIVATE
)
9653 code
= OMP_CLAUSE_FIRSTPRIVATE
;
9654 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
9655 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
9656 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
9658 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9659 "%<target%> construct", decl
);
9663 else if (flags
& GOVD_LASTPRIVATE
)
9664 code
= OMP_CLAUSE_LASTPRIVATE
;
9665 else if (flags
& (GOVD_ALIGNED
| GOVD_NONTEMPORAL
))
9667 else if (flags
& GOVD_CONDTEMP
)
9669 code
= OMP_CLAUSE__CONDTEMP_
;
9670 gimple_add_tmp_var (decl
);
9675 if (((flags
& GOVD_LASTPRIVATE
)
9676 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
9677 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9678 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
9680 tree chain
= *list_p
;
9681 clause
= build_omp_clause (input_location
, code
);
9682 OMP_CLAUSE_DECL (clause
) = decl
;
9683 OMP_CLAUSE_CHAIN (clause
) = chain
;
9685 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
9686 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
9687 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
9688 else if (code
== OMP_CLAUSE_SHARED
9689 && (flags
& GOVD_WRITTEN
) == 0
9690 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9691 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
9692 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
9693 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
9694 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
9696 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
9697 OMP_CLAUSE_DECL (nc
) = decl
;
9698 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
9699 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
9700 OMP_CLAUSE_DECL (clause
)
9701 = build_simple_mem_ref_loc (input_location
, decl
);
9702 OMP_CLAUSE_DECL (clause
)
9703 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
9704 build_int_cst (build_pointer_type (char_type_node
), 0));
9705 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
9706 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
9707 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
9708 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
9709 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
9710 OMP_CLAUSE_CHAIN (nc
) = chain
;
9711 OMP_CLAUSE_CHAIN (clause
) = nc
;
9712 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9713 gimplify_omp_ctxp
= ctx
->outer_context
;
9714 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
9715 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
9716 gimplify_omp_ctxp
= ctx
;
9718 else if (code
== OMP_CLAUSE_MAP
)
9721 /* Not all combinations of these GOVD_MAP flags are actually valid. */
9722 switch (flags
& (GOVD_MAP_TO_ONLY
9724 | GOVD_MAP_FORCE_PRESENT
9725 | GOVD_MAP_ALLOC_ONLY
9726 | GOVD_MAP_FROM_ONLY
))
9729 kind
= GOMP_MAP_TOFROM
;
9731 case GOVD_MAP_FORCE
:
9732 kind
= GOMP_MAP_TOFROM
| GOMP_MAP_FLAG_FORCE
;
9734 case GOVD_MAP_TO_ONLY
:
9737 case GOVD_MAP_FROM_ONLY
:
9738 kind
= GOMP_MAP_FROM
;
9740 case GOVD_MAP_ALLOC_ONLY
:
9741 kind
= GOMP_MAP_ALLOC
;
9743 case GOVD_MAP_TO_ONLY
| GOVD_MAP_FORCE
:
9744 kind
= GOMP_MAP_TO
| GOMP_MAP_FLAG_FORCE
;
9746 case GOVD_MAP_FORCE_PRESENT
:
9747 kind
= GOMP_MAP_FORCE_PRESENT
;
9752 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
9753 if (DECL_SIZE (decl
)
9754 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
9756 tree decl2
= DECL_VALUE_EXPR (decl
);
9757 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
9758 decl2
= TREE_OPERAND (decl2
, 0);
9759 gcc_assert (DECL_P (decl2
));
9760 tree mem
= build_simple_mem_ref (decl2
);
9761 OMP_CLAUSE_DECL (clause
) = mem
;
9762 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9763 if (gimplify_omp_ctxp
->outer_context
)
9765 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
9766 omp_notice_variable (ctx
, decl2
, true);
9767 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
9769 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
9771 OMP_CLAUSE_DECL (nc
) = decl
;
9772 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
9773 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
9774 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
9776 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
9777 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
9778 OMP_CLAUSE_CHAIN (clause
) = nc
;
9780 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
9781 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
9783 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
9784 OMP_CLAUSE_SIZE (clause
)
9785 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
9786 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9787 gimplify_omp_ctxp
= ctx
->outer_context
;
9788 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
9789 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
9790 gimplify_omp_ctxp
= ctx
;
9791 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
9793 OMP_CLAUSE_DECL (nc
) = decl
;
9794 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
9795 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
9796 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
9797 OMP_CLAUSE_CHAIN (clause
) = nc
;
9800 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
9802 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
9804 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
9805 OMP_CLAUSE_DECL (nc
) = decl
;
9806 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
9807 OMP_CLAUSE_CHAIN (nc
) = chain
;
9808 OMP_CLAUSE_CHAIN (clause
) = nc
;
9809 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9810 gimplify_omp_ctxp
= ctx
->outer_context
;
9811 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
9812 gimplify_omp_ctxp
= ctx
;
9815 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9816 gimplify_omp_ctxp
= ctx
->outer_context
;
9817 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
);
9818 if (gimplify_omp_ctxp
)
9819 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
9820 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
9821 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
9822 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
9824 gimplify_omp_ctxp
= ctx
;
9829 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
9830 enum tree_code code
)
9832 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9833 tree
*orig_list_p
= list_p
;
9835 bool has_inscan_reductions
= false;
9839 struct gimplify_omp_ctx
*octx
;
9840 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
9841 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
9845 struct walk_stmt_info wi
;
9846 memset (&wi
, 0, sizeof (wi
));
9847 walk_gimple_seq (body
, omp_find_stores_stmt
,
9848 omp_find_stores_op
, &wi
);
9852 if (ctx
->add_safelen1
)
9854 /* If there are VLAs in the body of simd loop, prevent
9856 gcc_assert (ctx
->region_type
== ORT_SIMD
);
9857 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
9858 OMP_CLAUSE_SAFELEN_EXPR (c
) = integer_one_node
;
9859 OMP_CLAUSE_CHAIN (c
) = *list_p
;
9861 list_p
= &OMP_CLAUSE_CHAIN (c
);
9864 if (ctx
->region_type
== ORT_WORKSHARE
9865 && ctx
->outer_context
9866 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
)
9868 for (c
= ctx
->outer_context
->clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
9869 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
9870 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
9872 decl
= OMP_CLAUSE_DECL (c
);
9874 = splay_tree_lookup (ctx
->outer_context
->variables
,
9875 (splay_tree_key
) decl
);
9876 gcc_checking_assert (!splay_tree_lookup (ctx
->variables
,
9877 (splay_tree_key
) decl
));
9878 omp_add_variable (ctx
, decl
, n
->value
);
9879 tree c2
= copy_node (c
);
9880 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
9882 if ((n
->value
& GOVD_FIRSTPRIVATE
) == 0)
9884 c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9885 OMP_CLAUSE_FIRSTPRIVATE
);
9886 OMP_CLAUSE_DECL (c2
) = decl
;
9887 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
9891 while ((c
= *list_p
) != NULL
)
9894 bool remove
= false;
9896 switch (OMP_CLAUSE_CODE (c
))
9898 case OMP_CLAUSE_FIRSTPRIVATE
:
9899 if ((ctx
->region_type
& ORT_TARGET
)
9900 && (ctx
->region_type
& ORT_ACC
) == 0
9901 && TYPE_ATOMIC (strip_array_types
9902 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
9904 error_at (OMP_CLAUSE_LOCATION (c
),
9905 "%<_Atomic%> %qD in %<firstprivate%> clause on "
9906 "%<target%> construct", OMP_CLAUSE_DECL (c
));
9911 case OMP_CLAUSE_PRIVATE
:
9912 case OMP_CLAUSE_SHARED
:
9913 case OMP_CLAUSE_LINEAR
:
9914 decl
= OMP_CLAUSE_DECL (c
);
9915 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9916 remove
= !(n
->value
& GOVD_SEEN
);
9917 if ((n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0
9918 && code
== OMP_PARALLEL
9919 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9923 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
9924 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
9925 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
9927 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
9928 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
9930 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
9931 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
9933 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
9934 && (n
->value
& GOVD_WRITTEN
) == 0
9936 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9937 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
9938 else if (DECL_P (decl
)
9939 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
9940 && (n
->value
& GOVD_WRITTEN
) != 0)
9941 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
9942 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
9943 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9944 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
9948 case OMP_CLAUSE_LASTPRIVATE
:
9949 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
9950 accurately reflect the presence of a FIRSTPRIVATE clause. */
9951 decl
= OMP_CLAUSE_DECL (c
);
9952 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9953 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
9954 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
9955 if (code
== OMP_DISTRIBUTE
9956 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
9959 error_at (OMP_CLAUSE_LOCATION (c
),
9960 "same variable used in %<firstprivate%> and "
9961 "%<lastprivate%> clauses on %<distribute%> "
9965 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
9967 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9968 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
9969 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) && code
== OMP_PARALLEL
)
9973 case OMP_CLAUSE_ALIGNED
:
9974 decl
= OMP_CLAUSE_DECL (c
);
9975 if (!is_global_var (decl
))
9977 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9978 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
9979 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
9981 struct gimplify_omp_ctx
*octx
;
9983 && (n
->value
& (GOVD_DATA_SHARE_CLASS
9984 & ~GOVD_FIRSTPRIVATE
)))
9987 for (octx
= ctx
->outer_context
; octx
;
9988 octx
= octx
->outer_context
)
9990 n
= splay_tree_lookup (octx
->variables
,
9991 (splay_tree_key
) decl
);
9994 if (n
->value
& GOVD_LOCAL
)
9996 /* We have to avoid assigning a shared variable
9997 to itself when trying to add
9998 __builtin_assume_aligned. */
9999 if (n
->value
& GOVD_SHARED
)
10007 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
10009 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10010 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
10015 case OMP_CLAUSE_NONTEMPORAL
:
10016 decl
= OMP_CLAUSE_DECL (c
);
10017 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10018 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
10021 case OMP_CLAUSE_MAP
:
10022 if (code
== OMP_TARGET_EXIT_DATA
10023 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
10028 decl
= OMP_CLAUSE_DECL (c
);
10029 /* Data clauses associated with acc parallel reductions must be
10030 compatible with present_or_copy. Warn and adjust the clause
10031 if that is not the case. */
10032 if (ctx
->region_type
== ORT_ACC_PARALLEL
)
10034 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
10038 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
10040 if (n
&& (n
->value
& GOVD_REDUCTION
))
10042 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
10044 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
10045 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
10046 && kind
!= GOMP_MAP_FORCE_PRESENT
10047 && kind
!= GOMP_MAP_POINTER
)
10049 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
10050 "incompatible data clause with reduction "
10051 "on %qE; promoting to %<present_or_copy%>",
10053 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
10057 if (!DECL_P (decl
))
10059 if ((ctx
->region_type
& ORT_TARGET
) != 0
10060 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
10062 if (TREE_CODE (decl
) == INDIRECT_REF
10063 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
10064 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
10065 == REFERENCE_TYPE
))
10066 decl
= TREE_OPERAND (decl
, 0);
10067 if (TREE_CODE (decl
) == COMPONENT_REF
)
10069 while (TREE_CODE (decl
) == COMPONENT_REF
)
10070 decl
= TREE_OPERAND (decl
, 0);
10073 n
= splay_tree_lookup (ctx
->variables
,
10074 (splay_tree_key
) decl
);
10075 if (!(n
->value
& GOVD_SEEN
))
10082 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10083 if ((ctx
->region_type
& ORT_TARGET
) != 0
10084 && !(n
->value
& GOVD_SEEN
)
10085 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
10086 && (!is_global_var (decl
)
10087 || !lookup_attribute ("omp declare target link",
10088 DECL_ATTRIBUTES (decl
))))
10091 /* For struct element mapping, if struct is never referenced
10092 in target block and none of the mapping has always modifier,
10093 remove all the struct element mappings, which immediately
10094 follow the GOMP_MAP_STRUCT map clause. */
10095 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
10097 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
10099 OMP_CLAUSE_CHAIN (c
)
10100 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
10103 else if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
10104 && code
== OMP_TARGET_EXIT_DATA
)
10106 else if (DECL_SIZE (decl
)
10107 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
10108 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
10109 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
10110 && (OMP_CLAUSE_MAP_KIND (c
)
10111 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10113 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10114 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10116 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
10118 tree decl2
= DECL_VALUE_EXPR (decl
);
10119 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10120 decl2
= TREE_OPERAND (decl2
, 0);
10121 gcc_assert (DECL_P (decl2
));
10122 tree mem
= build_simple_mem_ref (decl2
);
10123 OMP_CLAUSE_DECL (c
) = mem
;
10124 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10125 if (ctx
->outer_context
)
10127 omp_notice_variable (ctx
->outer_context
, decl2
, true);
10128 omp_notice_variable (ctx
->outer_context
,
10129 OMP_CLAUSE_SIZE (c
), true);
10131 if (((ctx
->region_type
& ORT_TARGET
) != 0
10132 || !ctx
->target_firstprivatize_array_bases
)
10133 && ((n
->value
& GOVD_SEEN
) == 0
10134 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
10136 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10138 OMP_CLAUSE_DECL (nc
) = decl
;
10139 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10140 if (ctx
->target_firstprivatize_array_bases
)
10141 OMP_CLAUSE_SET_MAP_KIND (nc
,
10142 GOMP_MAP_FIRSTPRIVATE_POINTER
);
10144 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
10145 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
10146 OMP_CLAUSE_CHAIN (c
) = nc
;
10152 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
10153 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
10154 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
10155 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
10160 case OMP_CLAUSE_TO
:
10161 case OMP_CLAUSE_FROM
:
10162 case OMP_CLAUSE__CACHE_
:
10163 decl
= OMP_CLAUSE_DECL (c
);
10164 if (!DECL_P (decl
))
10166 if (DECL_SIZE (decl
)
10167 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
10169 tree decl2
= DECL_VALUE_EXPR (decl
);
10170 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10171 decl2
= TREE_OPERAND (decl2
, 0);
10172 gcc_assert (DECL_P (decl2
));
10173 tree mem
= build_simple_mem_ref (decl2
);
10174 OMP_CLAUSE_DECL (c
) = mem
;
10175 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10176 if (ctx
->outer_context
)
10178 omp_notice_variable (ctx
->outer_context
, decl2
, true);
10179 omp_notice_variable (ctx
->outer_context
,
10180 OMP_CLAUSE_SIZE (c
), true);
10183 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
10184 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
10187 case OMP_CLAUSE_REDUCTION
:
10188 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
10190 decl
= OMP_CLAUSE_DECL (c
);
10191 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10192 if ((n
->value
& GOVD_REDUCTION_INSCAN
) == 0)
10195 error_at (OMP_CLAUSE_LOCATION (c
),
10196 "%qD specified in %<inscan%> %<reduction%> clause "
10197 "but not in %<scan%> directive clause", decl
);
10200 has_inscan_reductions
= true;
10203 case OMP_CLAUSE_IN_REDUCTION
:
10204 case OMP_CLAUSE_TASK_REDUCTION
:
10205 decl
= OMP_CLAUSE_DECL (c
);
10206 /* OpenACC reductions need a present_or_copy data clause.
10207 Add one if necessary. Emit error when the reduction is private. */
10208 if (ctx
->region_type
== ORT_ACC_PARALLEL
)
10210 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10211 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
10214 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
10215 "reduction on %qE", DECL_NAME (decl
));
10217 else if ((n
->value
& GOVD_MAP
) == 0)
10219 tree next
= OMP_CLAUSE_CHAIN (c
);
10220 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
10221 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
10222 OMP_CLAUSE_DECL (nc
) = decl
;
10223 OMP_CLAUSE_CHAIN (c
) = nc
;
10224 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
10227 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
10228 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
10230 nc
= OMP_CLAUSE_CHAIN (nc
);
10232 OMP_CLAUSE_CHAIN (nc
) = next
;
10233 n
->value
|= GOVD_MAP
;
10237 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10238 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10240 case OMP_CLAUSE_COPYIN
:
10241 case OMP_CLAUSE_COPYPRIVATE
:
10242 case OMP_CLAUSE_IF
:
10243 case OMP_CLAUSE_NUM_THREADS
:
10244 case OMP_CLAUSE_NUM_TEAMS
:
10245 case OMP_CLAUSE_THREAD_LIMIT
:
10246 case OMP_CLAUSE_DIST_SCHEDULE
:
10247 case OMP_CLAUSE_DEVICE
:
10248 case OMP_CLAUSE_SCHEDULE
:
10249 case OMP_CLAUSE_NOWAIT
:
10250 case OMP_CLAUSE_ORDERED
:
10251 case OMP_CLAUSE_DEFAULT
:
10252 case OMP_CLAUSE_UNTIED
:
10253 case OMP_CLAUSE_COLLAPSE
:
10254 case OMP_CLAUSE_FINAL
:
10255 case OMP_CLAUSE_MERGEABLE
:
10256 case OMP_CLAUSE_PROC_BIND
:
10257 case OMP_CLAUSE_SAFELEN
:
10258 case OMP_CLAUSE_SIMDLEN
:
10259 case OMP_CLAUSE_DEPEND
:
10260 case OMP_CLAUSE_PRIORITY
:
10261 case OMP_CLAUSE_GRAINSIZE
:
10262 case OMP_CLAUSE_NUM_TASKS
:
10263 case OMP_CLAUSE_NOGROUP
:
10264 case OMP_CLAUSE_THREADS
:
10265 case OMP_CLAUSE_SIMD
:
10266 case OMP_CLAUSE_HINT
:
10267 case OMP_CLAUSE_DEFAULTMAP
:
10268 case OMP_CLAUSE_ORDER
:
10269 case OMP_CLAUSE_BIND
:
10270 case OMP_CLAUSE_USE_DEVICE_PTR
:
10271 case OMP_CLAUSE_USE_DEVICE_ADDR
:
10272 case OMP_CLAUSE_IS_DEVICE_PTR
:
10273 case OMP_CLAUSE_ASYNC
:
10274 case OMP_CLAUSE_WAIT
:
10275 case OMP_CLAUSE_INDEPENDENT
:
10276 case OMP_CLAUSE_NUM_GANGS
:
10277 case OMP_CLAUSE_NUM_WORKERS
:
10278 case OMP_CLAUSE_VECTOR_LENGTH
:
10279 case OMP_CLAUSE_GANG
:
10280 case OMP_CLAUSE_WORKER
:
10281 case OMP_CLAUSE_VECTOR
:
10282 case OMP_CLAUSE_AUTO
:
10283 case OMP_CLAUSE_SEQ
:
10284 case OMP_CLAUSE_TILE
:
10285 case OMP_CLAUSE_IF_PRESENT
:
10286 case OMP_CLAUSE_FINALIZE
:
10287 case OMP_CLAUSE_INCLUSIVE
:
10288 case OMP_CLAUSE_EXCLUSIVE
:
10292 gcc_unreachable ();
10296 *list_p
= OMP_CLAUSE_CHAIN (c
);
10298 list_p
= &OMP_CLAUSE_CHAIN (c
);
10301 /* Add in any implicit data sharing. */
10302 struct gimplify_adjust_omp_clauses_data data
;
10303 data
.list_p
= list_p
;
10304 data
.pre_p
= pre_p
;
10305 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
10307 if (has_inscan_reductions
)
10308 for (c
= *orig_list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10309 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10310 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10312 error_at (OMP_CLAUSE_LOCATION (c
),
10313 "%<inscan%> %<reduction%> clause used together with "
10314 "%<linear%> clause for a variable other than loop "
10319 gimplify_omp_ctxp
= ctx
->outer_context
;
10320 delete_omp_context (ctx
);
10323 /* Gimplify OACC_CACHE. */
10326 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
10328 tree expr
= *expr_p
;
10330 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
10332 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
10335 /* TODO: Do something sensible with this information. */
10337 *expr_p
= NULL_TREE
;
10340 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
10341 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
10342 kind. The entry kind will replace the one in CLAUSE, while the exit
10343 kind will be used in a new omp_clause and returned to the caller. */
10346 gimplify_oacc_declare_1 (tree clause
)
10348 HOST_WIDE_INT kind
, new_op
;
10352 kind
= OMP_CLAUSE_MAP_KIND (clause
);
10356 case GOMP_MAP_ALLOC
:
10357 new_op
= GOMP_MAP_RELEASE
;
10361 case GOMP_MAP_FROM
:
10362 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
10363 new_op
= GOMP_MAP_FROM
;
10367 case GOMP_MAP_TOFROM
:
10368 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
10369 new_op
= GOMP_MAP_FROM
;
10373 case GOMP_MAP_DEVICE_RESIDENT
:
10374 case GOMP_MAP_FORCE_DEVICEPTR
:
10375 case GOMP_MAP_FORCE_PRESENT
:
10376 case GOMP_MAP_LINK
:
10377 case GOMP_MAP_POINTER
:
10382 gcc_unreachable ();
10388 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
10389 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
10390 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
10396 /* Gimplify OACC_DECLARE. */
10399 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
10401 tree expr
= *expr_p
;
10403 tree clauses
, t
, decl
;
10405 clauses
= OACC_DECLARE_CLAUSES (expr
);
10407 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
10408 gimplify_adjust_omp_clauses (pre_p
, NULL
, &clauses
, OACC_DECLARE
);
10410 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
10412 decl
= OMP_CLAUSE_DECL (t
);
10414 if (TREE_CODE (decl
) == MEM_REF
)
10415 decl
= TREE_OPERAND (decl
, 0);
10417 if (VAR_P (decl
) && !is_oacc_declared (decl
))
10419 tree attr
= get_identifier ("oacc declare target");
10420 DECL_ATTRIBUTES (decl
) = tree_cons (attr
, NULL_TREE
,
10421 DECL_ATTRIBUTES (decl
));
10425 && !is_global_var (decl
)
10426 && DECL_CONTEXT (decl
) == current_function_decl
)
10428 tree c
= gimplify_oacc_declare_1 (t
);
10431 if (oacc_declare_returns
== NULL
)
10432 oacc_declare_returns
= new hash_map
<tree
, tree
>;
10434 oacc_declare_returns
->put (decl
, c
);
10438 if (gimplify_omp_ctxp
)
10439 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
10442 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
10445 gimplify_seq_add_stmt (pre_p
, stmt
);
10447 *expr_p
= NULL_TREE
;
10450 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10451 gimplification of the body, as well as scanning the body for used
10452 variables. We need to do this scan now, because variable-sized
10453 decls will be decomposed during gimplification. */
10456 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
10458 tree expr
= *expr_p
;
10460 gimple_seq body
= NULL
;
10462 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
10463 OMP_PARALLEL_COMBINED (expr
)
10464 ? ORT_COMBINED_PARALLEL
10465 : ORT_PARALLEL
, OMP_PARALLEL
);
10467 push_gimplify_context ();
10469 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
10470 if (gimple_code (g
) == GIMPLE_BIND
)
10471 pop_gimplify_context (g
);
10473 pop_gimplify_context (NULL
);
10475 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
10478 g
= gimple_build_omp_parallel (body
,
10479 OMP_PARALLEL_CLAUSES (expr
),
10480 NULL_TREE
, NULL_TREE
);
10481 if (OMP_PARALLEL_COMBINED (expr
))
10482 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
10483 gimplify_seq_add_stmt (pre_p
, g
);
10484 *expr_p
= NULL_TREE
;
10487 /* Gimplify the contents of an OMP_TASK statement. This involves
10488 gimplification of the body, as well as scanning the body for used
10489 variables. We need to do this scan now, because variable-sized
10490 decls will be decomposed during gimplification. */
10493 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
10495 tree expr
= *expr_p
;
10497 gimple_seq body
= NULL
;
10499 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
10500 for (tree c
= OMP_TASK_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10501 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
10502 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET
)
10504 error_at (OMP_CLAUSE_LOCATION (c
),
10505 "%<mutexinoutset%> kind in %<depend%> clause on a "
10506 "%<taskwait%> construct");
10510 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
10511 omp_find_clause (OMP_TASK_CLAUSES (expr
),
10513 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
10515 if (OMP_TASK_BODY (expr
))
10517 push_gimplify_context ();
10519 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
10520 if (gimple_code (g
) == GIMPLE_BIND
)
10521 pop_gimplify_context (g
);
10523 pop_gimplify_context (NULL
);
10526 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
10529 g
= gimple_build_omp_task (body
,
10530 OMP_TASK_CLAUSES (expr
),
10531 NULL_TREE
, NULL_TREE
,
10532 NULL_TREE
, NULL_TREE
, NULL_TREE
);
10533 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
10534 gimple_omp_task_set_taskwait_p (g
, true);
10535 gimplify_seq_add_stmt (pre_p
, g
);
10536 *expr_p
= NULL_TREE
;
10539 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
10540 with non-NULL OMP_FOR_INIT. Also, fill in pdata array,
10541 pdata[0] non-NULL if there is anything non-trivial in between, pdata[1]
10542 is address of OMP_PARALLEL in between if any, pdata[2] is address of
10543 OMP_FOR in between if any and pdata[3] is address of the inner
10544 OMP_FOR/OMP_SIMD. */
10547 find_combined_omp_for (tree
*tp
, int *walk_subtrees
, void *data
)
10549 tree
**pdata
= (tree
**) data
;
10550 *walk_subtrees
= 0;
10551 switch (TREE_CODE (*tp
))
10554 if (OMP_FOR_INIT (*tp
) != NULL_TREE
)
10560 *walk_subtrees
= 1;
10563 if (OMP_FOR_INIT (*tp
) != NULL_TREE
)
10570 if (BIND_EXPR_VARS (*tp
)
10571 || (BIND_EXPR_BLOCK (*tp
)
10572 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp
))))
10574 *walk_subtrees
= 1;
10576 case STATEMENT_LIST
:
10577 if (!tsi_one_before_end_p (tsi_start (*tp
)))
10579 *walk_subtrees
= 1;
10581 case TRY_FINALLY_EXPR
:
10583 *walk_subtrees
= 1;
10587 *walk_subtrees
= 1;
10595 /* Gimplify the gross structure of an OMP_FOR statement. */
10597 static enum gimplify_status
10598 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
10600 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
10601 enum gimplify_status ret
= GS_ALL_DONE
;
10602 enum gimplify_status tret
;
10604 gimple_seq for_body
, for_pre_body
;
10606 bitmap has_decl_expr
= NULL
;
10607 enum omp_region_type ort
= ORT_WORKSHARE
;
10609 orig_for_stmt
= for_stmt
= *expr_p
;
10611 bool loop_p
= (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_BIND
)
10613 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
10615 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
10616 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
10617 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
10618 find_combined_omp_for
, data
, NULL
);
10619 if (inner_for_stmt
== NULL_TREE
)
10621 gcc_assert (seen_error ());
10622 *expr_p
= NULL_TREE
;
10625 if (data
[2] && OMP_FOR_PRE_BODY (*data
[2]))
10627 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data
[2]),
10628 &OMP_FOR_PRE_BODY (for_stmt
));
10629 OMP_FOR_PRE_BODY (*data
[2]) = NULL_TREE
;
10631 if (OMP_FOR_PRE_BODY (inner_for_stmt
))
10633 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt
),
10634 &OMP_FOR_PRE_BODY (for_stmt
));
10635 OMP_FOR_PRE_BODY (inner_for_stmt
) = NULL_TREE
;
10640 /* We have some statements or variable declarations in between
10641 the composite construct directives. Move them around the
10644 for (i
= 0; i
< 3; i
++)
10648 if (i
< 2 && data
[i
+ 1] == &OMP_BODY (t
))
10649 data
[i
+ 1] = data
[i
];
10650 *data
[i
] = OMP_BODY (t
);
10651 tree body
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
10652 NULL_TREE
, make_node (BLOCK
));
10653 OMP_BODY (t
) = body
;
10654 append_to_statement_list_force (inner_for_stmt
,
10655 &BIND_EXPR_BODY (body
));
10657 data
[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body
)));
10658 gcc_assert (*data
[3] == inner_for_stmt
);
10663 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
10665 && OMP_FOR_ORIG_DECLS (inner_for_stmt
)
10666 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
10668 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
10671 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
10672 /* Class iterators aren't allowed on OMP_SIMD, so the only
10673 case we need to solve is distribute parallel for. They are
10674 allowed on the loop construct, but that is already handled
10675 in gimplify_omp_loop. */
10676 gcc_assert (TREE_CODE (inner_for_stmt
) == OMP_FOR
10677 && TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
10679 tree orig_decl
= TREE_PURPOSE (orig
);
10680 tree last
= TREE_VALUE (orig
);
10682 for (pc
= &OMP_FOR_CLAUSES (inner_for_stmt
);
10683 *pc
; pc
= &OMP_CLAUSE_CHAIN (*pc
))
10684 if ((OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
10685 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LASTPRIVATE
)
10686 && OMP_CLAUSE_DECL (*pc
) == orig_decl
)
10688 if (*pc
== NULL_TREE
)
10691 for (spc
= &OMP_PARALLEL_CLAUSES (*data
[1]);
10692 *spc
; spc
= &OMP_CLAUSE_CHAIN (*spc
))
10693 if (OMP_CLAUSE_CODE (*spc
) == OMP_CLAUSE_PRIVATE
10694 && OMP_CLAUSE_DECL (*spc
) == orig_decl
)
10699 *spc
= OMP_CLAUSE_CHAIN (c
);
10700 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
10704 if (*pc
== NULL_TREE
)
10706 else if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
)
10708 /* private clause will appear only on inner_for_stmt.
10709 Change it into firstprivate, and add private clause
10711 tree c
= copy_node (*pc
);
10712 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
10713 OMP_FOR_CLAUSES (for_stmt
) = c
;
10714 OMP_CLAUSE_CODE (*pc
) = OMP_CLAUSE_FIRSTPRIVATE
;
10715 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
);
10719 /* lastprivate clause will appear on both inner_for_stmt
10720 and for_stmt. Add firstprivate clause to
10722 tree c
= build_omp_clause (OMP_CLAUSE_LOCATION (*pc
),
10723 OMP_CLAUSE_FIRSTPRIVATE
);
10724 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (*pc
);
10725 OMP_CLAUSE_CHAIN (c
) = *pc
;
10727 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
);
10729 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
10730 OMP_CLAUSE_FIRSTPRIVATE
);
10731 OMP_CLAUSE_DECL (c
) = last
;
10732 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
10733 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
10734 c
= build_omp_clause (UNKNOWN_LOCATION
,
10735 *pc
? OMP_CLAUSE_SHARED
10736 : OMP_CLAUSE_FIRSTPRIVATE
);
10737 OMP_CLAUSE_DECL (c
) = orig_decl
;
10738 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
10739 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
10741 /* Similarly, take care of C++ range for temporaries, those should
10742 be firstprivate on OMP_PARALLEL if any. */
10744 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
10745 if (OMP_FOR_ORIG_DECLS (inner_for_stmt
)
10746 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
10748 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
10752 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
10753 tree v
= TREE_CHAIN (orig
);
10754 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
10755 OMP_CLAUSE_FIRSTPRIVATE
);
10756 /* First add firstprivate clause for the __for_end artificial
10758 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 1);
10759 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
10761 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
10762 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
10763 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
10764 if (TREE_VEC_ELT (v
, 0))
10766 /* And now the same for __for_range artificial decl if it
10768 c
= build_omp_clause (UNKNOWN_LOCATION
,
10769 OMP_CLAUSE_FIRSTPRIVATE
);
10770 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 0);
10771 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
10773 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
10774 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
10775 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
10780 switch (TREE_CODE (for_stmt
))
10783 case OMP_DISTRIBUTE
:
10789 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
10790 ort
= ORT_UNTIED_TASKLOOP
;
10792 ort
= ORT_TASKLOOP
;
10798 gcc_unreachable ();
10801 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
10802 clause for the IV. */
10803 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
10805 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
10806 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
10807 decl
= TREE_OPERAND (t
, 0);
10808 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10809 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10810 && OMP_CLAUSE_DECL (c
) == decl
)
10812 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
10817 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
10818 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
10819 loop_p
&& TREE_CODE (for_stmt
) != OMP_SIMD
10820 ? OMP_LOOP
: TREE_CODE (for_stmt
));
10822 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
10823 gimplify_omp_ctxp
->distribute
= true;
10825 /* Handle OMP_FOR_INIT. */
10826 for_pre_body
= NULL
;
10827 if ((ort
== ORT_SIMD
10828 || (inner_for_stmt
&& TREE_CODE (inner_for_stmt
) == OMP_SIMD
))
10829 && OMP_FOR_PRE_BODY (for_stmt
))
10831 has_decl_expr
= BITMAP_ALLOC (NULL
);
10832 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
10833 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
10836 t
= OMP_FOR_PRE_BODY (for_stmt
);
10837 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
10839 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
10841 tree_stmt_iterator si
;
10842 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
10846 if (TREE_CODE (t
) == DECL_EXPR
10847 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
10848 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
10852 if (OMP_FOR_PRE_BODY (for_stmt
))
10854 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
10855 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
10858 struct gimplify_omp_ctx ctx
;
10859 memset (&ctx
, 0, sizeof (ctx
));
10860 ctx
.region_type
= ORT_NONE
;
10861 gimplify_omp_ctxp
= &ctx
;
10862 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
10863 gimplify_omp_ctxp
= NULL
;
10866 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
10868 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
10869 for_stmt
= inner_for_stmt
;
10871 /* For taskloop, need to gimplify the start, end and step before the
10872 taskloop, outside of the taskloop omp context. */
10873 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10875 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
10877 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
10878 if (!is_gimple_constant (TREE_OPERAND (t
, 1)))
10880 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
10881 TREE_OPERAND (t
, 1)
10882 = get_initialized_tmp_var (TREE_OPERAND (t
, 1),
10883 gimple_seq_empty_p (for_pre_body
)
10884 ? pre_p
: &for_pre_body
, NULL
,
10886 /* Reference to pointer conversion is considered useless,
10887 but is significant for firstprivate clause. Force it
10889 if (TREE_CODE (type
) == POINTER_TYPE
10890 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t
, 1)))
10891 == REFERENCE_TYPE
))
10893 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
10894 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
,
10895 TREE_OPERAND (t
, 1));
10896 gimplify_and_add (m
, gimple_seq_empty_p (for_pre_body
)
10897 ? pre_p
: &for_pre_body
);
10898 TREE_OPERAND (t
, 1) = v
;
10900 tree c
= build_omp_clause (input_location
,
10901 OMP_CLAUSE_FIRSTPRIVATE
);
10902 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (t
, 1);
10903 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
10904 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
10907 /* Handle OMP_FOR_COND. */
10908 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
10909 if (!is_gimple_constant (TREE_OPERAND (t
, 1)))
10911 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
10912 TREE_OPERAND (t
, 1)
10913 = get_initialized_tmp_var (TREE_OPERAND (t
, 1),
10914 gimple_seq_empty_p (for_pre_body
)
10915 ? pre_p
: &for_pre_body
, NULL
,
10917 /* Reference to pointer conversion is considered useless,
10918 but is significant for firstprivate clause. Force it
10920 if (TREE_CODE (type
) == POINTER_TYPE
10921 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t
, 1)))
10922 == REFERENCE_TYPE
))
10924 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
10925 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
,
10926 TREE_OPERAND (t
, 1));
10927 gimplify_and_add (m
, gimple_seq_empty_p (for_pre_body
)
10928 ? pre_p
: &for_pre_body
);
10929 TREE_OPERAND (t
, 1) = v
;
10931 tree c
= build_omp_clause (input_location
,
10932 OMP_CLAUSE_FIRSTPRIVATE
);
10933 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (t
, 1);
10934 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
10935 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
10938 /* Handle OMP_FOR_INCR. */
10939 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
10940 if (TREE_CODE (t
) == MODIFY_EXPR
)
10942 decl
= TREE_OPERAND (t
, 0);
10943 t
= TREE_OPERAND (t
, 1);
10944 tree
*tp
= &TREE_OPERAND (t
, 1);
10945 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
10946 tp
= &TREE_OPERAND (t
, 0);
10948 if (!is_gimple_constant (*tp
))
10950 gimple_seq
*seq
= gimple_seq_empty_p (for_pre_body
)
10951 ? pre_p
: &for_pre_body
;
10952 *tp
= get_initialized_tmp_var (*tp
, seq
, NULL
, false);
10953 tree c
= build_omp_clause (input_location
,
10954 OMP_CLAUSE_FIRSTPRIVATE
);
10955 OMP_CLAUSE_DECL (c
) = *tp
;
10956 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
10957 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
10962 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
10966 if (orig_for_stmt
!= for_stmt
)
10967 gimplify_omp_ctxp
->combined_loop
= true;
10970 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
10971 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
10972 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
10973 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
10975 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
10976 bool is_doacross
= false;
10977 if (c
&& OMP_CLAUSE_ORDERED_EXPR (c
))
10979 is_doacross
= true;
10980 gimplify_omp_ctxp
->loop_iter_var
.create (TREE_VEC_LENGTH
10981 (OMP_FOR_INIT (for_stmt
))
10984 int collapse
= 1, tile
= 0;
10985 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
10987 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
10988 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
10990 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
10991 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
10993 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
10994 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
10995 decl
= TREE_OPERAND (t
, 0);
10996 gcc_assert (DECL_P (decl
));
10997 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
10998 || POINTER_TYPE_P (TREE_TYPE (decl
)));
11001 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
11003 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
11004 if (TREE_CODE (orig_decl
) == TREE_LIST
)
11006 orig_decl
= TREE_PURPOSE (orig_decl
);
11010 gimplify_omp_ctxp
->loop_iter_var
.quick_push (orig_decl
);
11013 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
11014 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
11017 /* Make sure the iteration variable is private. */
11018 tree c
= NULL_TREE
;
11019 tree c2
= NULL_TREE
;
11020 if (orig_for_stmt
!= for_stmt
)
11022 /* Preserve this information until we gimplify the inner simd. */
11024 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
11025 TREE_PRIVATE (t
) = 1;
11027 else if (ort
== ORT_SIMD
)
11029 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
11030 (splay_tree_key
) decl
);
11031 omp_is_private (gimplify_omp_ctxp
, decl
,
11032 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
11034 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
11036 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
11037 if (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
)
11038 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11039 OMP_CLAUSE_LASTPRIVATE
);
11040 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
11041 OMP_CLAUSE_LASTPRIVATE
))
11042 if (OMP_CLAUSE_DECL (c3
) == decl
)
11044 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
11045 "conditional %<lastprivate%> on loop "
11046 "iterator %qD ignored", decl
);
11047 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
11048 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
11051 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1 && !loop_p
)
11053 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
11054 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
11055 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
11057 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
11058 || TREE_PRIVATE (t
))
11060 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11061 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11063 struct gimplify_omp_ctx
*outer
11064 = gimplify_omp_ctxp
->outer_context
;
11065 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11067 if (outer
->region_type
== ORT_WORKSHARE
11068 && outer
->combined_loop
)
11070 n
= splay_tree_lookup (outer
->variables
,
11071 (splay_tree_key
)decl
);
11072 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11074 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11075 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11079 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
11081 && octx
->region_type
== ORT_COMBINED_PARALLEL
11082 && octx
->outer_context
11083 && (octx
->outer_context
->region_type
11085 && octx
->outer_context
->combined_loop
)
11087 octx
= octx
->outer_context
;
11088 n
= splay_tree_lookup (octx
->variables
,
11089 (splay_tree_key
)decl
);
11090 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11092 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11093 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11100 OMP_CLAUSE_DECL (c
) = decl
;
11101 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11102 OMP_FOR_CLAUSES (for_stmt
) = c
;
11103 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
11104 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11106 if (outer
->region_type
== ORT_WORKSHARE
11107 && outer
->combined_loop
)
11109 if (outer
->outer_context
11110 && (outer
->outer_context
->region_type
11111 == ORT_COMBINED_PARALLEL
))
11112 outer
= outer
->outer_context
;
11113 else if (omp_check_private (outer
, decl
, false))
11116 else if (((outer
->region_type
& ORT_TASKLOOP
)
11118 && outer
->combined_loop
11119 && !omp_check_private (gimplify_omp_ctxp
,
11122 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
11124 omp_notice_variable (outer
, decl
, true);
11129 n
= splay_tree_lookup (outer
->variables
,
11130 (splay_tree_key
)decl
);
11131 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11133 omp_add_variable (outer
, decl
,
11134 GOVD_LASTPRIVATE
| GOVD_SEEN
);
11135 if (outer
->region_type
== ORT_COMBINED_PARALLEL
11136 && outer
->outer_context
11137 && (outer
->outer_context
->region_type
11139 && outer
->outer_context
->combined_loop
)
11141 outer
= outer
->outer_context
;
11142 n
= splay_tree_lookup (outer
->variables
,
11143 (splay_tree_key
)decl
);
11144 if (omp_check_private (outer
, decl
, false))
11147 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
11149 omp_add_variable (outer
, decl
,
11155 if (outer
&& outer
->outer_context
11156 && ((outer
->outer_context
->region_type
11157 & ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
11158 || (((outer
->region_type
& ORT_TASKLOOP
)
11160 && (outer
->outer_context
->region_type
11161 == ORT_COMBINED_PARALLEL
))))
11163 outer
= outer
->outer_context
;
11164 n
= splay_tree_lookup (outer
->variables
,
11165 (splay_tree_key
)decl
);
11167 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11168 omp_add_variable (outer
, decl
,
11169 GOVD_SHARED
| GOVD_SEEN
);
11173 if (outer
&& outer
->outer_context
)
11174 omp_notice_variable (outer
->outer_context
, decl
,
11184 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
11185 if (TREE_PRIVATE (t
))
11186 lastprivate
= false;
11187 if (loop_p
&& OMP_FOR_ORIG_DECLS (for_stmt
))
11189 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
11190 if (TREE_CODE (elt
) == TREE_LIST
&& TREE_PURPOSE (elt
))
11191 lastprivate
= false;
11194 struct gimplify_omp_ctx
*outer
11195 = gimplify_omp_ctxp
->outer_context
;
11196 if (outer
&& lastprivate
)
11198 if (outer
->region_type
== ORT_WORKSHARE
11199 && outer
->combined_loop
)
11201 n
= splay_tree_lookup (outer
->variables
,
11202 (splay_tree_key
)decl
);
11203 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11205 lastprivate
= false;
11208 else if (outer
->outer_context
11209 && (outer
->outer_context
->region_type
11210 == ORT_COMBINED_PARALLEL
))
11211 outer
= outer
->outer_context
;
11212 else if (omp_check_private (outer
, decl
, false))
11215 else if (((outer
->region_type
& ORT_TASKLOOP
)
11217 && outer
->combined_loop
11218 && !omp_check_private (gimplify_omp_ctxp
,
11221 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
11223 omp_notice_variable (outer
, decl
, true);
11228 n
= splay_tree_lookup (outer
->variables
,
11229 (splay_tree_key
)decl
);
11230 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11232 omp_add_variable (outer
, decl
,
11233 GOVD_LASTPRIVATE
| GOVD_SEEN
);
11234 if (outer
->region_type
== ORT_COMBINED_PARALLEL
11235 && outer
->outer_context
11236 && (outer
->outer_context
->region_type
11238 && outer
->outer_context
->combined_loop
)
11240 outer
= outer
->outer_context
;
11241 n
= splay_tree_lookup (outer
->variables
,
11242 (splay_tree_key
)decl
);
11243 if (omp_check_private (outer
, decl
, false))
11246 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
11248 omp_add_variable (outer
, decl
,
11254 if (outer
&& outer
->outer_context
11255 && ((outer
->outer_context
->region_type
11256 & ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
11257 || (((outer
->region_type
& ORT_TASKLOOP
)
11259 && (outer
->outer_context
->region_type
11260 == ORT_COMBINED_PARALLEL
))))
11262 outer
= outer
->outer_context
;
11263 n
= splay_tree_lookup (outer
->variables
,
11264 (splay_tree_key
)decl
);
11266 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11267 omp_add_variable (outer
, decl
,
11268 GOVD_SHARED
| GOVD_SEEN
);
11272 if (outer
&& outer
->outer_context
)
11273 omp_notice_variable (outer
->outer_context
, decl
,
11279 c
= build_omp_clause (input_location
,
11280 lastprivate
? OMP_CLAUSE_LASTPRIVATE
11281 : OMP_CLAUSE_PRIVATE
);
11282 OMP_CLAUSE_DECL (c
) = decl
;
11283 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11284 OMP_FOR_CLAUSES (for_stmt
) = c
;
11285 omp_add_variable (gimplify_omp_ctxp
, decl
,
11286 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
11287 | GOVD_EXPLICIT
| GOVD_SEEN
);
11291 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
11293 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
11294 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
11295 (splay_tree_key
) decl
);
11296 if (n
&& (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
))
11297 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11298 OMP_CLAUSE_LASTPRIVATE
);
11299 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
11300 OMP_CLAUSE_LASTPRIVATE
))
11301 if (OMP_CLAUSE_DECL (c3
) == decl
)
11303 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
11304 "conditional %<lastprivate%> on loop "
11305 "iterator %qD ignored", decl
);
11306 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
11307 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
11311 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
11313 /* If DECL is not a gimple register, create a temporary variable to act
11314 as an iteration counter. This is valid, since DECL cannot be
11315 modified in the body of the loop. Similarly for any iteration vars
11316 in simd with collapse > 1 where the iterator vars must be
11318 if (orig_for_stmt
!= for_stmt
)
11320 else if (!is_gimple_reg (decl
)
11321 || (ort
== ORT_SIMD
11322 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1))
11324 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11325 /* Make sure omp_add_variable is not called on it prematurely.
11326 We call it ourselves a few lines later. */
11327 gimplify_omp_ctxp
= NULL
;
11328 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
11329 gimplify_omp_ctxp
= ctx
;
11330 TREE_OPERAND (t
, 0) = var
;
11332 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
11334 if (ort
== ORT_SIMD
11335 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
11337 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
11338 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
11339 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
11340 OMP_CLAUSE_DECL (c2
) = var
;
11341 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
11342 OMP_FOR_CLAUSES (for_stmt
) = c2
;
11343 omp_add_variable (gimplify_omp_ctxp
, var
,
11344 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
11345 if (c
== NULL_TREE
)
11352 omp_add_variable (gimplify_omp_ctxp
, var
,
11353 GOVD_PRIVATE
| GOVD_SEEN
);
11358 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
11359 is_gimple_val
, fb_rvalue
, false);
11360 ret
= MIN (ret
, tret
);
11361 if (ret
== GS_ERROR
)
11364 /* Handle OMP_FOR_COND. */
11365 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
11366 gcc_assert (COMPARISON_CLASS_P (t
));
11367 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
11369 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
11370 is_gimple_val
, fb_rvalue
, false);
11371 ret
= MIN (ret
, tret
);
11373 /* Handle OMP_FOR_INCR. */
11374 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11375 switch (TREE_CODE (t
))
11377 case PREINCREMENT_EXPR
:
11378 case POSTINCREMENT_EXPR
:
11380 tree decl
= TREE_OPERAND (t
, 0);
11381 /* c_omp_for_incr_canonicalize_ptr() should have been
11382 called to massage things appropriately. */
11383 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
11385 if (orig_for_stmt
!= for_stmt
)
11387 t
= build_int_cst (TREE_TYPE (decl
), 1);
11389 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
11390 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
11391 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
11392 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
11396 case PREDECREMENT_EXPR
:
11397 case POSTDECREMENT_EXPR
:
11398 /* c_omp_for_incr_canonicalize_ptr() should have been
11399 called to massage things appropriately. */
11400 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
11401 if (orig_for_stmt
!= for_stmt
)
11403 t
= build_int_cst (TREE_TYPE (decl
), -1);
11405 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
11406 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
11407 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
11408 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
11412 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
11413 TREE_OPERAND (t
, 0) = var
;
11415 t
= TREE_OPERAND (t
, 1);
11416 switch (TREE_CODE (t
))
11419 if (TREE_OPERAND (t
, 1) == decl
)
11421 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
11422 TREE_OPERAND (t
, 0) = var
;
11428 case POINTER_PLUS_EXPR
:
11429 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
11430 TREE_OPERAND (t
, 0) = var
;
11433 gcc_unreachable ();
11436 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
11437 is_gimple_val
, fb_rvalue
, false);
11438 ret
= MIN (ret
, tret
);
11441 tree step
= TREE_OPERAND (t
, 1);
11442 tree stept
= TREE_TYPE (decl
);
11443 if (POINTER_TYPE_P (stept
))
11445 step
= fold_convert (stept
, step
);
11446 if (TREE_CODE (t
) == MINUS_EXPR
)
11447 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
11448 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
11449 if (step
!= TREE_OPERAND (t
, 1))
11451 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
11452 &for_pre_body
, NULL
,
11453 is_gimple_val
, fb_rvalue
, false);
11454 ret
= MIN (ret
, tret
);
11460 gcc_unreachable ();
11466 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
11469 if ((var
!= decl
|| collapse
> 1 || tile
) && orig_for_stmt
== for_stmt
)
11471 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11472 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
11473 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
11474 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11475 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
11476 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
11477 && OMP_CLAUSE_DECL (c
) == decl
)
11479 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
11483 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11484 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
11485 gcc_assert (TREE_OPERAND (t
, 0) == var
);
11486 t
= TREE_OPERAND (t
, 1);
11487 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
11488 || TREE_CODE (t
) == MINUS_EXPR
11489 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
11490 gcc_assert (TREE_OPERAND (t
, 0) == var
);
11491 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
11492 is_doacross
? var
: decl
,
11493 TREE_OPERAND (t
, 1));
11496 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
11497 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
11499 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
11500 push_gimplify_context ();
11501 gimplify_assign (decl
, t
, seq
);
11502 gimple
*bind
= NULL
;
11503 if (gimplify_ctxp
->temps
)
11505 bind
= gimple_build_bind (NULL_TREE
, *seq
, NULL_TREE
);
11507 gimplify_seq_add_stmt (seq
, bind
);
11509 pop_gimplify_context (bind
);
11514 BITMAP_FREE (has_decl_expr
);
11516 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
11517 || (loop_p
&& orig_for_stmt
== for_stmt
))
11519 push_gimplify_context ();
11520 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
11522 OMP_FOR_BODY (orig_for_stmt
)
11523 = build3 (BIND_EXPR
, void_type_node
, NULL
,
11524 OMP_FOR_BODY (orig_for_stmt
), NULL
);
11525 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
11529 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
11532 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
11533 || (loop_p
&& orig_for_stmt
== for_stmt
))
11535 if (gimple_code (g
) == GIMPLE_BIND
)
11536 pop_gimplify_context (g
);
11538 pop_gimplify_context (NULL
);
11541 if (orig_for_stmt
!= for_stmt
)
11542 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11544 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11545 decl
= TREE_OPERAND (t
, 0);
11546 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11547 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
11548 gimplify_omp_ctxp
= ctx
->outer_context
;
11549 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
11550 gimplify_omp_ctxp
= ctx
;
11551 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
11552 TREE_OPERAND (t
, 0) = var
;
11553 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11554 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
11555 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
11558 gimplify_adjust_omp_clauses (pre_p
, for_body
,
11559 &OMP_FOR_CLAUSES (orig_for_stmt
),
11560 TREE_CODE (orig_for_stmt
));
11563 switch (TREE_CODE (orig_for_stmt
))
11565 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
11566 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
11567 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
11568 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
11569 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
11571 gcc_unreachable ();
11573 if (loop_p
&& kind
== GF_OMP_FOR_KIND_SIMD
)
11575 gimplify_seq_add_seq (pre_p
, for_pre_body
);
11576 for_pre_body
= NULL
;
11578 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
11579 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
11581 if (orig_for_stmt
!= for_stmt
)
11582 gimple_omp_for_set_combined_p (gfor
, true);
11583 if (gimplify_omp_ctxp
11584 && (gimplify_omp_ctxp
->combined_loop
11585 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
11586 && gimplify_omp_ctxp
->outer_context
11587 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
11589 gimple_omp_for_set_combined_into_p (gfor
, true);
11590 if (gimplify_omp_ctxp
->combined_loop
)
11591 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
11593 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
11596 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11598 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11599 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
11600 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
11601 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
11602 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
11603 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
11604 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11605 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
11608 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
11609 constructs with GIMPLE_OMP_TASK sandwiched in between them.
11610 The outer taskloop stands for computing the number of iterations,
11611 counts for collapsed loops and holding taskloop specific clauses.
11612 The task construct stands for the effect of data sharing on the
11613 explicit task it creates and the inner taskloop stands for expansion
11614 of the static loop inside of the explicit task construct. */
11615 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
11617 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
11618 tree task_clauses
= NULL_TREE
;
11619 tree c
= *gfor_clauses_ptr
;
11620 tree
*gtask_clauses_ptr
= &task_clauses
;
11621 tree outer_for_clauses
= NULL_TREE
;
11622 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
11623 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
11624 switch (OMP_CLAUSE_CODE (c
))
11626 /* These clauses are allowed on task, move them there. */
11627 case OMP_CLAUSE_SHARED
:
11628 case OMP_CLAUSE_FIRSTPRIVATE
:
11629 case OMP_CLAUSE_DEFAULT
:
11630 case OMP_CLAUSE_IF
:
11631 case OMP_CLAUSE_UNTIED
:
11632 case OMP_CLAUSE_FINAL
:
11633 case OMP_CLAUSE_MERGEABLE
:
11634 case OMP_CLAUSE_PRIORITY
:
11635 case OMP_CLAUSE_REDUCTION
:
11636 case OMP_CLAUSE_IN_REDUCTION
:
11637 *gtask_clauses_ptr
= c
;
11638 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
11640 case OMP_CLAUSE_PRIVATE
:
11641 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
11643 /* We want private on outer for and firstprivate
11646 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
11647 OMP_CLAUSE_FIRSTPRIVATE
);
11648 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
11649 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
);
11650 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
11651 *gforo_clauses_ptr
= c
;
11652 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
11656 *gtask_clauses_ptr
= c
;
11657 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
11660 /* These clauses go into outer taskloop clauses. */
11661 case OMP_CLAUSE_GRAINSIZE
:
11662 case OMP_CLAUSE_NUM_TASKS
:
11663 case OMP_CLAUSE_NOGROUP
:
11664 *gforo_clauses_ptr
= c
;
11665 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
11667 /* Taskloop clause we duplicate on both taskloops. */
11668 case OMP_CLAUSE_COLLAPSE
:
11669 *gfor_clauses_ptr
= c
;
11670 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
11671 *gforo_clauses_ptr
= copy_node (c
);
11672 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
11674 /* For lastprivate, keep the clause on inner taskloop, and add
11675 a shared clause on task. If the same decl is also firstprivate,
11676 add also firstprivate clause on the inner taskloop. */
11677 case OMP_CLAUSE_LASTPRIVATE
:
11678 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
11680 /* For taskloop C++ lastprivate IVs, we want:
11681 1) private on outer taskloop
11682 2) firstprivate and shared on task
11683 3) lastprivate on inner taskloop */
11685 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
11686 OMP_CLAUSE_FIRSTPRIVATE
);
11687 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
11688 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
);
11689 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
11690 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
11691 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
11692 OMP_CLAUSE_PRIVATE
);
11693 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
11694 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
11695 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
11696 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
11698 *gfor_clauses_ptr
= c
;
11699 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
11701 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
11702 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
11703 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
11704 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
11706 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
11709 gcc_unreachable ();
11711 *gfor_clauses_ptr
= NULL_TREE
;
11712 *gtask_clauses_ptr
= NULL_TREE
;
11713 *gforo_clauses_ptr
= NULL_TREE
;
11714 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
11715 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
11716 NULL_TREE
, NULL_TREE
, NULL_TREE
);
11717 gimple_omp_task_set_taskloop_p (g
, true);
11718 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
11720 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
11721 gimple_omp_for_collapse (gfor
),
11722 gimple_omp_for_pre_body (gfor
));
11723 gimple_omp_for_set_pre_body (gfor
, NULL
);
11724 gimple_omp_for_set_combined_p (gforo
, true);
11725 gimple_omp_for_set_combined_into_p (gfor
, true);
11726 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
11728 tree type
= TREE_TYPE (gimple_omp_for_index (gfor
, i
));
11729 tree v
= create_tmp_var (type
);
11730 gimple_omp_for_set_index (gforo
, i
, v
);
11731 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
11732 gimple_omp_for_set_initial (gforo
, i
, t
);
11733 gimple_omp_for_set_cond (gforo
, i
,
11734 gimple_omp_for_cond (gfor
, i
));
11735 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
11736 gimple_omp_for_set_final (gforo
, i
, t
);
11737 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
11738 gcc_assert (TREE_OPERAND (t
, 0) == gimple_omp_for_index (gfor
, i
));
11739 TREE_OPERAND (t
, 0) = v
;
11740 gimple_omp_for_set_incr (gforo
, i
, t
);
11741 t
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
11742 OMP_CLAUSE_DECL (t
) = v
;
11743 OMP_CLAUSE_CHAIN (t
) = gimple_omp_for_clauses (gforo
);
11744 gimple_omp_for_set_clauses (gforo
, t
);
11746 gimplify_seq_add_stmt (pre_p
, gforo
);
11749 gimplify_seq_add_stmt (pre_p
, gfor
);
11751 if (TREE_CODE (orig_for_stmt
) == OMP_FOR
)
11753 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11754 unsigned lastprivate_conditional
= 0;
11756 && (ctx
->region_type
== ORT_TARGET_DATA
11757 || ctx
->region_type
== ORT_TASKGROUP
))
11758 ctx
= ctx
->outer_context
;
11759 if (ctx
&& (ctx
->region_type
& ORT_PARALLEL
) != 0)
11760 for (tree c
= gimple_omp_for_clauses (gfor
);
11761 c
; c
= OMP_CLAUSE_CHAIN (c
))
11762 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
11763 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
11764 ++lastprivate_conditional
;
11765 if (lastprivate_conditional
)
11767 struct omp_for_data fd
;
11768 omp_extract_for_data (gfor
, &fd
, NULL
);
11769 tree type
= build_array_type_nelts (unsigned_type_for (fd
.iter_type
),
11770 lastprivate_conditional
);
11771 tree var
= create_tmp_var_raw (type
);
11772 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
11773 OMP_CLAUSE_DECL (c
) = var
;
11774 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
11775 gimple_omp_for_set_clauses (gfor
, c
);
11776 omp_add_variable (ctx
, var
, GOVD_CONDTEMP
| GOVD_SEEN
);
11779 else if (TREE_CODE (orig_for_stmt
) == OMP_SIMD
)
11781 unsigned lastprivate_conditional
= 0;
11782 for (tree c
= gimple_omp_for_clauses (gfor
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11783 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
11784 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
11785 ++lastprivate_conditional
;
11786 if (lastprivate_conditional
)
11788 struct omp_for_data fd
;
11789 omp_extract_for_data (gfor
, &fd
, NULL
);
11790 tree type
= unsigned_type_for (fd
.iter_type
);
11791 while (lastprivate_conditional
--)
11793 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
11794 OMP_CLAUSE__CONDTEMP_
);
11795 OMP_CLAUSE_DECL (c
) = create_tmp_var (type
);
11796 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
11797 gimple_omp_for_set_clauses (gfor
, c
);
11802 if (ret
!= GS_ALL_DONE
)
11804 *expr_p
= NULL_TREE
;
11805 return GS_ALL_DONE
;
11808 /* Helper for gimplify_omp_loop, called through walk_tree. */
11811 replace_reduction_placeholders (tree
*tp
, int *walk_subtrees
, void *data
)
11815 tree
*d
= (tree
*) data
;
11816 if (*tp
== OMP_CLAUSE_REDUCTION_PLACEHOLDER (d
[0]))
11818 *tp
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (d
[1]);
11819 *walk_subtrees
= 0;
11821 else if (*tp
== OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d
[0]))
11823 *tp
= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d
[1]);
11824 *walk_subtrees
= 0;
11830 /* Gimplify the gross structure of an OMP_LOOP statement. */
11832 static enum gimplify_status
11833 gimplify_omp_loop (tree
*expr_p
, gimple_seq
*pre_p
)
11835 tree for_stmt
= *expr_p
;
11836 tree clauses
= OMP_FOR_CLAUSES (for_stmt
);
11837 struct gimplify_omp_ctx
*octx
= gimplify_omp_ctxp
;
11838 enum omp_clause_bind_kind kind
= OMP_CLAUSE_BIND_THREAD
;
11841 /* If order is not present, the behavior is as if order(concurrent)
11843 tree order
= omp_find_clause (clauses
, OMP_CLAUSE_ORDER
);
11844 if (order
== NULL_TREE
)
11846 order
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_ORDER
);
11847 OMP_CLAUSE_CHAIN (order
) = clauses
;
11848 OMP_FOR_CLAUSES (for_stmt
) = clauses
= order
;
11851 tree bind
= omp_find_clause (clauses
, OMP_CLAUSE_BIND
);
11852 if (bind
== NULL_TREE
)
11854 if (!flag_openmp
) /* flag_openmp_simd */
11856 else if (octx
&& (octx
->region_type
& ORT_TEAMS
) != 0)
11857 kind
= OMP_CLAUSE_BIND_TEAMS
;
11858 else if (octx
&& (octx
->region_type
& ORT_PARALLEL
) != 0)
11859 kind
= OMP_CLAUSE_BIND_PARALLEL
;
11862 for (; octx
; octx
= octx
->outer_context
)
11864 if ((octx
->region_type
& ORT_ACC
) != 0
11865 || octx
->region_type
== ORT_NONE
11866 || octx
->region_type
== ORT_IMPLICIT_TARGET
)
11870 if (octx
== NULL
&& !in_omp_construct
)
11871 error_at (EXPR_LOCATION (for_stmt
),
11872 "%<bind%> clause not specified on a %<loop%> "
11873 "construct not nested inside another OpenMP construct");
11875 bind
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_BIND
);
11876 OMP_CLAUSE_CHAIN (bind
) = clauses
;
11877 OMP_CLAUSE_BIND_KIND (bind
) = kind
;
11878 OMP_FOR_CLAUSES (for_stmt
) = bind
;
11881 switch (OMP_CLAUSE_BIND_KIND (bind
))
11883 case OMP_CLAUSE_BIND_THREAD
:
11885 case OMP_CLAUSE_BIND_PARALLEL
:
11886 if (!flag_openmp
) /* flag_openmp_simd */
11888 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
11891 for (; octx
; octx
= octx
->outer_context
)
11892 if (octx
->region_type
== ORT_SIMD
11893 && omp_find_clause (octx
->clauses
, OMP_CLAUSE_BIND
) == NULL_TREE
)
11895 error_at (EXPR_LOCATION (for_stmt
),
11896 "%<bind(parallel)%> on a %<loop%> construct nested "
11897 "inside %<simd%> construct");
11898 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
11901 kind
= OMP_CLAUSE_BIND_PARALLEL
;
11903 case OMP_CLAUSE_BIND_TEAMS
:
11904 if (!flag_openmp
) /* flag_openmp_simd */
11906 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
11910 && octx
->region_type
!= ORT_IMPLICIT_TARGET
11911 && octx
->region_type
!= ORT_NONE
11912 && (octx
->region_type
& ORT_TEAMS
) == 0)
11913 || in_omp_construct
)
11915 error_at (EXPR_LOCATION (for_stmt
),
11916 "%<bind(teams)%> on a %<loop%> region not strictly "
11917 "nested inside of a %<teams%> region");
11918 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
11921 kind
= OMP_CLAUSE_BIND_TEAMS
;
11924 gcc_unreachable ();
11927 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
11928 switch (OMP_CLAUSE_CODE (*pc
))
11930 case OMP_CLAUSE_REDUCTION
:
11931 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc
))
11933 error_at (OMP_CLAUSE_LOCATION (*pc
),
11934 "%<inscan%> %<reduction%> clause on "
11935 "%qs construct", "loop");
11936 OMP_CLAUSE_REDUCTION_INSCAN (*pc
) = 0;
11938 if (OMP_CLAUSE_REDUCTION_TASK (*pc
))
11940 error_at (OMP_CLAUSE_LOCATION (*pc
),
11941 "invalid %<task%> reduction modifier on construct "
11942 "other than %<parallel%>, %<for%> or %<sections%>");
11943 OMP_CLAUSE_REDUCTION_TASK (*pc
) = 0;
11945 pc
= &OMP_CLAUSE_CHAIN (*pc
);
11947 case OMP_CLAUSE_LASTPRIVATE
:
11948 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11950 tree t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11951 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
11952 if (OMP_CLAUSE_DECL (*pc
) == TREE_OPERAND (t
, 0))
11954 if (OMP_FOR_ORIG_DECLS (for_stmt
)
11955 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
11957 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
11960 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
11961 if (OMP_CLAUSE_DECL (*pc
) == TREE_PURPOSE (orig
))
11965 if (i
== TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)))
11967 error_at (OMP_CLAUSE_LOCATION (*pc
),
11968 "%<lastprivate%> clause on a %<loop%> construct refers "
11969 "to a variable %qD which is not the loop iterator",
11970 OMP_CLAUSE_DECL (*pc
));
11971 *pc
= OMP_CLAUSE_CHAIN (*pc
);
11974 pc
= &OMP_CLAUSE_CHAIN (*pc
);
11977 pc
= &OMP_CLAUSE_CHAIN (*pc
);
11981 TREE_SET_CODE (for_stmt
, OMP_SIMD
);
11986 case OMP_CLAUSE_BIND_THREAD
: last
= 0; break;
11987 case OMP_CLAUSE_BIND_PARALLEL
: last
= 1; break;
11988 case OMP_CLAUSE_BIND_TEAMS
: last
= 2; break;
11990 for (int pass
= 1; pass
<= last
; pass
++)
11994 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
11995 append_to_statement_list (*expr_p
, &BIND_EXPR_BODY (bind
));
11996 *expr_p
= make_node (OMP_PARALLEL
);
11997 TREE_TYPE (*expr_p
) = void_type_node
;
11998 OMP_PARALLEL_BODY (*expr_p
) = bind
;
11999 OMP_PARALLEL_COMBINED (*expr_p
) = 1;
12000 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (for_stmt
));
12001 tree
*pc
= &OMP_PARALLEL_CLAUSES (*expr_p
);
12002 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12003 if (OMP_FOR_ORIG_DECLS (for_stmt
)
12004 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
))
12007 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
12008 if (TREE_PURPOSE (elt
) && TREE_VALUE (elt
))
12010 *pc
= build_omp_clause (UNKNOWN_LOCATION
,
12011 OMP_CLAUSE_FIRSTPRIVATE
);
12012 OMP_CLAUSE_DECL (*pc
) = TREE_VALUE (elt
);
12013 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12017 tree t
= make_node (pass
== 2 ? OMP_DISTRIBUTE
: OMP_FOR
);
12018 tree
*pc
= &OMP_FOR_CLAUSES (t
);
12019 TREE_TYPE (t
) = void_type_node
;
12020 OMP_FOR_BODY (t
) = *expr_p
;
12021 SET_EXPR_LOCATION (t
, EXPR_LOCATION (for_stmt
));
12022 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12023 switch (OMP_CLAUSE_CODE (c
))
12025 case OMP_CLAUSE_BIND
:
12026 case OMP_CLAUSE_ORDER
:
12027 case OMP_CLAUSE_COLLAPSE
:
12028 *pc
= copy_node (c
);
12029 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12031 case OMP_CLAUSE_PRIVATE
:
12032 case OMP_CLAUSE_FIRSTPRIVATE
:
12033 /* Only needed on innermost. */
12035 case OMP_CLAUSE_LASTPRIVATE
:
12036 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
) && pass
!= last
)
12038 *pc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12039 OMP_CLAUSE_FIRSTPRIVATE
);
12040 OMP_CLAUSE_DECL (*pc
) = OMP_CLAUSE_DECL (c
);
12041 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
);
12042 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12044 *pc
= copy_node (c
);
12045 OMP_CLAUSE_LASTPRIVATE_STMT (*pc
) = NULL_TREE
;
12046 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
12047 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
12050 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc
) = 1;
12052 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
);
12053 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc
) = 0;
12055 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12057 case OMP_CLAUSE_REDUCTION
:
12058 *pc
= copy_node (c
);
12059 OMP_CLAUSE_DECL (*pc
) = unshare_expr (OMP_CLAUSE_DECL (c
));
12060 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
12061 OMP_CLAUSE_REDUCTION_INIT (*pc
)
12062 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c
));
12063 OMP_CLAUSE_REDUCTION_MERGE (*pc
)
12064 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c
));
12065 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
))
12067 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
)
12068 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
));
12069 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
12070 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
)
12071 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
));
12073 tree data
[2] = { c
, nc
};
12074 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc
),
12075 replace_reduction_placeholders
,
12077 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc
),
12078 replace_reduction_placeholders
,
12081 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12084 gcc_unreachable ();
12089 return gimplify_omp_for (expr_p
, pre_p
);
12093 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
12094 of OMP_TARGET's body. */
12097 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
12099 *walk_subtrees
= 0;
12100 switch (TREE_CODE (*tp
))
12105 case STATEMENT_LIST
:
12106 *walk_subtrees
= 1;
12114 /* Helper function of optimize_target_teams, determine if the expression
12115 can be computed safely before the target construct on the host. */
12118 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
12124 *walk_subtrees
= 0;
12127 switch (TREE_CODE (*tp
))
12132 *walk_subtrees
= 0;
12133 if (error_operand_p (*tp
)
12134 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
12135 || DECL_HAS_VALUE_EXPR_P (*tp
)
12136 || DECL_THREAD_LOCAL_P (*tp
)
12137 || TREE_SIDE_EFFECTS (*tp
)
12138 || TREE_THIS_VOLATILE (*tp
))
12140 if (is_global_var (*tp
)
12141 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
12142 || lookup_attribute ("omp declare target link",
12143 DECL_ATTRIBUTES (*tp
))))
12146 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
12147 && !is_global_var (*tp
)
12148 && decl_function_context (*tp
) == current_function_decl
)
12150 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
12151 (splay_tree_key
) *tp
);
12154 if (gimplify_omp_ctxp
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
12158 else if (n
->value
& GOVD_LOCAL
)
12160 else if (n
->value
& GOVD_FIRSTPRIVATE
)
12162 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
12163 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
12167 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
12171 if (TARGET_EXPR_INITIAL (*tp
)
12172 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
12174 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
12175 walk_subtrees
, NULL
);
12176 /* Allow some reasonable subset of integral arithmetics. */
12180 case TRUNC_DIV_EXPR
:
12181 case CEIL_DIV_EXPR
:
12182 case FLOOR_DIV_EXPR
:
12183 case ROUND_DIV_EXPR
:
12184 case TRUNC_MOD_EXPR
:
12185 case CEIL_MOD_EXPR
:
12186 case FLOOR_MOD_EXPR
:
12187 case ROUND_MOD_EXPR
:
12189 case EXACT_DIV_EXPR
:
12200 case NON_LVALUE_EXPR
:
12202 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
12205 /* And disallow anything else, except for comparisons. */
12207 if (COMPARISON_CLASS_P (*tp
))
12213 /* Try to determine if the num_teams and/or thread_limit expressions
12214 can have their values determined already before entering the
12216 INTEGER_CSTs trivially are,
12217 integral decls that are firstprivate (explicitly or implicitly)
12218 or explicitly map(always, to:) or map(always, tofrom:) on the target
12219 region too, and expressions involving simple arithmetics on those
12220 too, function calls are not ok, dereferencing something neither etc.
12221 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
12222 EXPR based on what we find:
12223 0 stands for clause not specified at all, use implementation default
12224 -1 stands for value that can't be determined easily before entering
12225 the target construct.
12226 If teams construct is not present at all, use 1 for num_teams
12227 and 0 for thread_limit (only one team is involved, and the thread
12228 limit is implementation defined. */
12231 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
12233 tree body
= OMP_BODY (target
);
12234 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
12235 tree num_teams
= integer_zero_node
;
12236 tree thread_limit
= integer_zero_node
;
12237 location_t num_teams_loc
= EXPR_LOCATION (target
);
12238 location_t thread_limit_loc
= EXPR_LOCATION (target
);
12240 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
12242 if (teams
== NULL_TREE
)
12243 num_teams
= integer_one_node
;
12245 for (c
= OMP_TEAMS_CLAUSES (teams
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12247 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
12250 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
12252 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
12255 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
12259 expr
= OMP_CLAUSE_OPERAND (c
, 0);
12260 if (TREE_CODE (expr
) == INTEGER_CST
)
12265 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
12267 *p
= integer_minus_one_node
;
12271 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
12272 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
12275 gimplify_omp_ctxp
= target_ctx
;
12276 *p
= integer_minus_one_node
;
12279 gimplify_omp_ctxp
= target_ctx
;
12280 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
12281 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
12283 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
12284 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
12285 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
12286 OMP_TARGET_CLAUSES (target
) = c
;
12287 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
12288 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
12289 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
12290 OMP_TARGET_CLAUSES (target
) = c
;
12293 /* Gimplify the gross structure of several OMP constructs. */
12296 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
12298 tree expr
= *expr_p
;
12300 gimple_seq body
= NULL
;
12301 enum omp_region_type ort
;
12303 switch (TREE_CODE (expr
))
12307 ort
= ORT_WORKSHARE
;
12310 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
12313 ort
= ORT_ACC_KERNELS
;
12315 case OACC_PARALLEL
:
12316 ort
= ORT_ACC_PARALLEL
;
12319 ort
= ORT_ACC_DATA
;
12321 case OMP_TARGET_DATA
:
12322 ort
= ORT_TARGET_DATA
;
12325 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
12326 if (gimplify_omp_ctxp
== NULL
12327 || gimplify_omp_ctxp
->region_type
== ORT_IMPLICIT_TARGET
)
12328 ort
= (enum omp_region_type
) (ort
| ORT_HOST_TEAMS
);
12330 case OACC_HOST_DATA
:
12331 ort
= ORT_ACC_HOST_DATA
;
12334 gcc_unreachable ();
12337 bool save_in_omp_construct
= in_omp_construct
;
12338 if ((ort
& ORT_ACC
) == 0)
12339 in_omp_construct
= false;
12340 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
12342 if (TREE_CODE (expr
) == OMP_TARGET
)
12343 optimize_target_teams (expr
, pre_p
);
12344 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
12345 || (ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
12347 push_gimplify_context ();
12348 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
12349 if (gimple_code (g
) == GIMPLE_BIND
)
12350 pop_gimplify_context (g
);
12352 pop_gimplify_context (NULL
);
12353 if ((ort
& ORT_TARGET_DATA
) != 0)
12355 enum built_in_function end_ix
;
12356 switch (TREE_CODE (expr
))
12359 case OACC_HOST_DATA
:
12360 end_ix
= BUILT_IN_GOACC_DATA_END
;
12362 case OMP_TARGET_DATA
:
12363 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
12366 gcc_unreachable ();
12368 tree fn
= builtin_decl_explicit (end_ix
);
12369 g
= gimple_build_call (fn
, 0);
12370 gimple_seq cleanup
= NULL
;
12371 gimple_seq_add_stmt (&cleanup
, g
);
12372 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
12374 gimple_seq_add_stmt (&body
, g
);
12378 gimplify_and_add (OMP_BODY (expr
), &body
);
12379 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
12381 in_omp_construct
= save_in_omp_construct
;
12383 switch (TREE_CODE (expr
))
12386 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
12387 OMP_CLAUSES (expr
));
12390 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
12391 OMP_CLAUSES (expr
));
12393 case OACC_HOST_DATA
:
12394 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
12395 OMP_CLAUSES (expr
));
12397 case OACC_PARALLEL
:
12398 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
12399 OMP_CLAUSES (expr
));
12402 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
12405 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
12408 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
12409 OMP_CLAUSES (expr
));
12411 case OMP_TARGET_DATA
:
12412 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
12413 to be evaluated before the use_device_{ptr,addr} clauses if they
12414 refer to the same variables. */
12416 tree use_device_clauses
;
12417 tree
*pc
, *uc
= &use_device_clauses
;
12418 for (pc
= &OMP_CLAUSES (expr
); *pc
; )
12419 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_PTR
12420 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
12423 *pc
= OMP_CLAUSE_CHAIN (*pc
);
12424 uc
= &OMP_CLAUSE_CHAIN (*uc
);
12427 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12429 *pc
= use_device_clauses
;
12430 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
12431 OMP_CLAUSES (expr
));
12435 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
12436 if ((ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
12437 gimple_omp_teams_set_host (as_a
<gomp_teams
*> (stmt
), true);
12440 gcc_unreachable ();
12443 gimplify_seq_add_stmt (pre_p
, stmt
);
12444 *expr_p
= NULL_TREE
;
12447 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
12448 target update constructs. */
12451 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
12453 tree expr
= *expr_p
;
12456 enum omp_region_type ort
= ORT_WORKSHARE
;
12458 switch (TREE_CODE (expr
))
12460 case OACC_ENTER_DATA
:
12461 case OACC_EXIT_DATA
:
12462 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
;
12466 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
12469 case OMP_TARGET_UPDATE
:
12470 kind
= GF_OMP_TARGET_KIND_UPDATE
;
12472 case OMP_TARGET_ENTER_DATA
:
12473 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
12475 case OMP_TARGET_EXIT_DATA
:
12476 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
12479 gcc_unreachable ();
12481 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
12482 ort
, TREE_CODE (expr
));
12483 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
12485 if (TREE_CODE (expr
) == OACC_UPDATE
12486 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
12487 OMP_CLAUSE_IF_PRESENT
))
12489 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
12491 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12492 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
12493 switch (OMP_CLAUSE_MAP_KIND (c
))
12495 case GOMP_MAP_FORCE_TO
:
12496 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
12498 case GOMP_MAP_FORCE_FROM
:
12499 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FROM
);
12505 else if (TREE_CODE (expr
) == OACC_EXIT_DATA
12506 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
12507 OMP_CLAUSE_FINALIZE
))
12509 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote that "finalize"
12510 semantics apply to all mappings of this OpenACC directive. */
12511 bool finalize_marked
= false;
12512 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12513 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
12514 switch (OMP_CLAUSE_MAP_KIND (c
))
12516 case GOMP_MAP_FROM
:
12517 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_FROM
);
12518 finalize_marked
= true;
12520 case GOMP_MAP_RELEASE
:
12521 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_DELETE
);
12522 finalize_marked
= true;
12525 /* Check consistency: libgomp relies on the very first data
12526 mapping clause being marked, so make sure we did that before
12527 any other mapping clauses. */
12528 gcc_assert (finalize_marked
);
12532 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
12534 gimplify_seq_add_stmt (pre_p
, stmt
);
12535 *expr_p
= NULL_TREE
;
12538 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
12539 stabilized the lhs of the atomic operation as *ADDR. Return true if
12540 EXPR is this stabilized form. */
12543 goa_lhs_expr_p (tree expr
, tree addr
)
12545 /* Also include casts to other type variants. The C front end is fond
12546 of adding these for e.g. volatile variables. This is like
12547 STRIP_TYPE_NOPS but includes the main variant lookup. */
12548 STRIP_USELESS_TYPE_CONVERSION (expr
);
12550 if (TREE_CODE (expr
) == INDIRECT_REF
)
12552 expr
= TREE_OPERAND (expr
, 0);
12553 while (expr
!= addr
12554 && (CONVERT_EXPR_P (expr
)
12555 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
12556 && TREE_CODE (expr
) == TREE_CODE (addr
)
12557 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
12559 expr
= TREE_OPERAND (expr
, 0);
12560 addr
= TREE_OPERAND (addr
, 0);
12564 return (TREE_CODE (addr
) == ADDR_EXPR
12565 && TREE_CODE (expr
) == ADDR_EXPR
12566 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
12568 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
12573 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
12574 expression does not involve the lhs, evaluate it into a temporary.
12575 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
12576 or -1 if an error was encountered. */
12579 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
12582 tree expr
= *expr_p
;
12585 if (goa_lhs_expr_p (expr
, lhs_addr
))
12590 if (is_gimple_val (expr
))
12594 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
12597 case tcc_comparison
:
12598 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
12602 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
12605 case tcc_expression
:
12606 switch (TREE_CODE (expr
))
12608 case TRUTH_ANDIF_EXPR
:
12609 case TRUTH_ORIF_EXPR
:
12610 case TRUTH_AND_EXPR
:
12611 case TRUTH_OR_EXPR
:
12612 case TRUTH_XOR_EXPR
:
12613 case BIT_INSERT_EXPR
:
12614 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
12615 lhs_addr
, lhs_var
);
12617 case TRUTH_NOT_EXPR
:
12618 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
12619 lhs_addr
, lhs_var
);
12621 case COMPOUND_EXPR
:
12622 /* Break out any preevaluations from cp_build_modify_expr. */
12623 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
12624 expr
= TREE_OPERAND (expr
, 1))
12625 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
12627 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
);
12632 case tcc_reference
:
12633 if (TREE_CODE (expr
) == BIT_FIELD_REF
)
12634 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
12635 lhs_addr
, lhs_var
);
12643 enum gimplify_status gs
;
12644 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
12645 if (gs
!= GS_ALL_DONE
)
12652 /* Gimplify an OMP_ATOMIC statement. */
12654 static enum gimplify_status
12655 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
12657 tree addr
= TREE_OPERAND (*expr_p
, 0);
12658 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
12659 ? NULL
: TREE_OPERAND (*expr_p
, 1);
12660 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
12662 gomp_atomic_load
*loadstmt
;
12663 gomp_atomic_store
*storestmt
;
12665 tmp_load
= create_tmp_reg (type
);
12666 if (rhs
&& goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
) < 0)
12669 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
12673 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
,
12674 OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
12675 gimplify_seq_add_stmt (pre_p
, loadstmt
);
12678 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
12679 representatives. Use BIT_FIELD_REF on the lhs instead. */
12680 if (TREE_CODE (rhs
) == BIT_INSERT_EXPR
12681 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load
)))
12683 tree bitpos
= TREE_OPERAND (rhs
, 2);
12684 tree op1
= TREE_OPERAND (rhs
, 1);
12686 tree tmp_store
= tmp_load
;
12687 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_CAPTURE_OLD
)
12688 tmp_store
= get_initialized_tmp_var (tmp_load
, pre_p
, NULL
);
12689 if (INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
12690 bitsize
= bitsize_int (TYPE_PRECISION (TREE_TYPE (op1
)));
12692 bitsize
= TYPE_SIZE (TREE_TYPE (op1
));
12693 gcc_assert (TREE_OPERAND (rhs
, 0) == tmp_load
);
12694 tree t
= build2_loc (EXPR_LOCATION (rhs
),
12695 MODIFY_EXPR
, void_type_node
,
12696 build3_loc (EXPR_LOCATION (rhs
), BIT_FIELD_REF
,
12697 TREE_TYPE (op1
), tmp_store
, bitsize
,
12699 gimplify_and_add (t
, pre_p
);
12702 if (gimplify_expr (&rhs
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
12707 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
12710 = gimple_build_omp_atomic_store (rhs
, OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
12711 gimplify_seq_add_stmt (pre_p
, storestmt
);
12712 switch (TREE_CODE (*expr_p
))
12714 case OMP_ATOMIC_READ
:
12715 case OMP_ATOMIC_CAPTURE_OLD
:
12716 *expr_p
= tmp_load
;
12717 gimple_omp_atomic_set_need_value (loadstmt
);
12719 case OMP_ATOMIC_CAPTURE_NEW
:
12721 gimple_omp_atomic_set_need_value (storestmt
);
12728 return GS_ALL_DONE
;
12731 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
12732 body, and adding some EH bits. */
12734 static enum gimplify_status
12735 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
12737 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
12739 gtransaction
*trans_stmt
;
12740 gimple_seq body
= NULL
;
12743 /* Wrap the transaction body in a BIND_EXPR so we have a context
12744 where to put decls for OMP. */
12745 if (TREE_CODE (tbody
) != BIND_EXPR
)
12747 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
12748 TREE_SIDE_EFFECTS (bind
) = 1;
12749 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
12750 TRANSACTION_EXPR_BODY (expr
) = bind
;
12753 push_gimplify_context ();
12754 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
12756 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
12757 pop_gimplify_context (body_stmt
);
12759 trans_stmt
= gimple_build_transaction (body
);
12760 if (TRANSACTION_EXPR_OUTER (expr
))
12761 subcode
= GTMA_IS_OUTER
;
12762 else if (TRANSACTION_EXPR_RELAXED (expr
))
12763 subcode
= GTMA_IS_RELAXED
;
12764 gimple_transaction_set_subcode (trans_stmt
, subcode
);
12766 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
12774 *expr_p
= NULL_TREE
;
12775 return GS_ALL_DONE
;
12778 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
12779 is the OMP_BODY of the original EXPR (which has already been
12780 gimplified so it's not present in the EXPR).
12782 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
12785 gimplify_omp_ordered (tree expr
, gimple_seq body
)
12790 tree source_c
= NULL_TREE
;
12791 tree sink_c
= NULL_TREE
;
12793 if (gimplify_omp_ctxp
)
12795 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12796 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
12797 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ()
12798 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
12799 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
))
12801 error_at (OMP_CLAUSE_LOCATION (c
),
12802 "%<ordered%> construct with %<depend%> clause must be "
12803 "closely nested inside a loop with %<ordered%> clause "
12804 "with a parameter");
12807 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
12808 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
12811 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
12812 decls
&& TREE_CODE (decls
) == TREE_LIST
;
12813 decls
= TREE_CHAIN (decls
), ++i
)
12814 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
12816 else if (TREE_VALUE (decls
)
12817 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
12819 error_at (OMP_CLAUSE_LOCATION (c
),
12820 "variable %qE is not an iteration "
12821 "of outermost loop %d, expected %qE",
12822 TREE_VALUE (decls
), i
+ 1,
12823 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
12829 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
12830 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
12832 error_at (OMP_CLAUSE_LOCATION (c
),
12833 "number of variables in %<depend%> clause with "
12834 "%<sink%> modifier does not match number of "
12835 "iteration variables");
12840 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
12841 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
12845 error_at (OMP_CLAUSE_LOCATION (c
),
12846 "more than one %<depend%> clause with %<source%> "
12847 "modifier on an %<ordered%> construct");
12854 if (source_c
&& sink_c
)
12856 error_at (OMP_CLAUSE_LOCATION (source_c
),
12857 "%<depend%> clause with %<source%> modifier specified "
12858 "together with %<depend%> clauses with %<sink%> modifier "
12859 "on the same construct");
12864 return gimple_build_nop ();
12865 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
12868 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
12869 expression produces a value to be used as an operand inside a GIMPLE
12870 statement, the value will be stored back in *EXPR_P. This value will
12871 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
12872 an SSA_NAME. The corresponding sequence of GIMPLE statements is
12873 emitted in PRE_P and POST_P.
12875 Additionally, this process may overwrite parts of the input
12876 expression during gimplification. Ideally, it should be
12877 possible to do non-destructive gimplification.
12879 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
12880 the expression needs to evaluate to a value to be used as
12881 an operand in a GIMPLE statement, this value will be stored in
12882 *EXPR_P on exit. This happens when the caller specifies one
12883 of fb_lvalue or fb_rvalue fallback flags.
12885 PRE_P will contain the sequence of GIMPLE statements corresponding
12886 to the evaluation of EXPR and all the side-effects that must
12887 be executed before the main expression. On exit, the last
12888 statement of PRE_P is the core statement being gimplified. For
12889 instance, when gimplifying 'if (++a)' the last statement in
12890 PRE_P will be 'if (t.1)' where t.1 is the result of
12891 pre-incrementing 'a'.
12893 POST_P will contain the sequence of GIMPLE statements corresponding
12894 to the evaluation of all the side-effects that must be executed
12895 after the main expression. If this is NULL, the post
12896 side-effects are stored at the end of PRE_P.
12898 The reason why the output is split in two is to handle post
12899 side-effects explicitly. In some cases, an expression may have
12900 inner and outer post side-effects which need to be emitted in
12901 an order different from the one given by the recursive
12902 traversal. For instance, for the expression (*p--)++ the post
12903 side-effects of '--' must actually occur *after* the post
12904 side-effects of '++'. However, gimplification will first visit
12905 the inner expression, so if a separate POST sequence was not
12906 used, the resulting sequence would be:
12913 However, the post-decrement operation in line #2 must not be
12914 evaluated until after the store to *p at line #4, so the
12915 correct sequence should be:
12922 So, by specifying a separate post queue, it is possible
12923 to emit the post side-effects in the correct order.
12924 If POST_P is NULL, an internal queue will be used. Before
12925 returning to the caller, the sequence POST_P is appended to
12926 the main output sequence PRE_P.
12928 GIMPLE_TEST_F points to a function that takes a tree T and
12929 returns nonzero if T is in the GIMPLE form requested by the
12930 caller. The GIMPLE predicates are in gimple.c.
12932 FALLBACK tells the function what sort of a temporary we want if
12933 gimplification cannot produce an expression that complies with
12936 fb_none means that no temporary should be generated
12937 fb_rvalue means that an rvalue is OK to generate
12938 fb_lvalue means that an lvalue is OK to generate
12939 fb_either means that either is OK, but an lvalue is preferable.
12940 fb_mayfail means that gimplification may fail (in which case
12941 GS_ERROR will be returned)
12943 The return value is either GS_ERROR or GS_ALL_DONE, since this
12944 function iterates until EXPR is completely gimplified or an error
12947 enum gimplify_status
12948 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
12949 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
12952 gimple_seq internal_pre
= NULL
;
12953 gimple_seq internal_post
= NULL
;
12956 location_t saved_location
;
12957 enum gimplify_status ret
;
12958 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
12961 save_expr
= *expr_p
;
12962 if (save_expr
== NULL_TREE
)
12963 return GS_ALL_DONE
;
12965 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
12966 is_statement
= gimple_test_f
== is_gimple_stmt
;
12968 gcc_assert (pre_p
);
12970 /* Consistency checks. */
12971 if (gimple_test_f
== is_gimple_reg
)
12972 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
12973 else if (gimple_test_f
== is_gimple_val
12974 || gimple_test_f
== is_gimple_call_addr
12975 || gimple_test_f
== is_gimple_condexpr
12976 || gimple_test_f
== is_gimple_mem_rhs
12977 || gimple_test_f
== is_gimple_mem_rhs_or_call
12978 || gimple_test_f
== is_gimple_reg_rhs
12979 || gimple_test_f
== is_gimple_reg_rhs_or_call
12980 || gimple_test_f
== is_gimple_asm_val
12981 || gimple_test_f
== is_gimple_mem_ref_addr
)
12982 gcc_assert (fallback
& fb_rvalue
);
12983 else if (gimple_test_f
== is_gimple_min_lval
12984 || gimple_test_f
== is_gimple_lvalue
)
12985 gcc_assert (fallback
& fb_lvalue
);
12986 else if (gimple_test_f
== is_gimple_addressable
)
12987 gcc_assert (fallback
& fb_either
);
12988 else if (gimple_test_f
== is_gimple_stmt
)
12989 gcc_assert (fallback
== fb_none
);
12992 /* We should have recognized the GIMPLE_TEST_F predicate to
12993 know what kind of fallback to use in case a temporary is
12994 needed to hold the value or address of *EXPR_P. */
12995 gcc_unreachable ();
12998 /* We used to check the predicate here and return immediately if it
12999 succeeds. This is wrong; the design is for gimplification to be
13000 idempotent, and for the predicates to only test for valid forms, not
13001 whether they are fully simplified. */
13003 pre_p
= &internal_pre
;
13005 if (post_p
== NULL
)
13006 post_p
= &internal_post
;
13008 /* Remember the last statements added to PRE_P and POST_P. Every
13009 new statement added by the gimplification helpers needs to be
13010 annotated with location information. To centralize the
13011 responsibility, we remember the last statement that had been
13012 added to both queues before gimplifying *EXPR_P. If
13013 gimplification produces new statements in PRE_P and POST_P, those
13014 statements will be annotated with the same location information
13016 pre_last_gsi
= gsi_last (*pre_p
);
13017 post_last_gsi
= gsi_last (*post_p
);
13019 saved_location
= input_location
;
13020 if (save_expr
!= error_mark_node
13021 && EXPR_HAS_LOCATION (*expr_p
))
13022 input_location
= EXPR_LOCATION (*expr_p
);
13024 /* Loop over the specific gimplifiers until the toplevel node
13025 remains the same. */
13028 /* Strip away as many useless type conversions as possible
13029 at the toplevel. */
13030 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
13032 /* Remember the expr. */
13033 save_expr
= *expr_p
;
13035 /* Die, die, die, my darling. */
13036 if (error_operand_p (save_expr
))
13042 /* Do any language-specific gimplification. */
13043 ret
= ((enum gimplify_status
)
13044 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
13047 if (*expr_p
== NULL_TREE
)
13049 if (*expr_p
!= save_expr
)
13052 else if (ret
!= GS_UNHANDLED
)
13055 /* Make sure that all the cases set 'ret' appropriately. */
13056 ret
= GS_UNHANDLED
;
13057 switch (TREE_CODE (*expr_p
))
13059 /* First deal with the special cases. */
13061 case POSTINCREMENT_EXPR
:
13062 case POSTDECREMENT_EXPR
:
13063 case PREINCREMENT_EXPR
:
13064 case PREDECREMENT_EXPR
:
13065 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
13066 fallback
!= fb_none
,
13067 TREE_TYPE (*expr_p
));
13070 case VIEW_CONVERT_EXPR
:
13071 if ((fallback
& fb_rvalue
)
13072 && is_gimple_reg_type (TREE_TYPE (*expr_p
))
13073 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
13075 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13076 post_p
, is_gimple_val
, fb_rvalue
);
13077 recalculate_side_effects (*expr_p
);
13083 case ARRAY_RANGE_REF
:
13084 case REALPART_EXPR
:
13085 case IMAGPART_EXPR
:
13086 case COMPONENT_REF
:
13087 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
13088 fallback
? fallback
: fb_rvalue
);
13092 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
13094 /* C99 code may assign to an array in a structure value of a
13095 conditional expression, and this has undefined behavior
13096 only on execution, so create a temporary if an lvalue is
13098 if (fallback
== fb_lvalue
)
13100 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
13101 mark_addressable (*expr_p
);
13107 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
13109 /* C99 code may assign to an array in a structure returned
13110 from a function, and this has undefined behavior only on
13111 execution, so create a temporary if an lvalue is
13113 if (fallback
== fb_lvalue
)
13115 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
13116 mark_addressable (*expr_p
);
13122 gcc_unreachable ();
13124 case COMPOUND_EXPR
:
13125 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
13128 case COMPOUND_LITERAL_EXPR
:
13129 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
13130 gimple_test_f
, fallback
);
13135 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
13136 fallback
!= fb_none
);
13139 case TRUTH_ANDIF_EXPR
:
13140 case TRUTH_ORIF_EXPR
:
13142 /* Preserve the original type of the expression and the
13143 source location of the outer expression. */
13144 tree org_type
= TREE_TYPE (*expr_p
);
13145 *expr_p
= gimple_boolify (*expr_p
);
13146 *expr_p
= build3_loc (input_location
, COND_EXPR
,
13150 org_type
, boolean_true_node
),
13153 org_type
, boolean_false_node
));
13158 case TRUTH_NOT_EXPR
:
13160 tree type
= TREE_TYPE (*expr_p
);
13161 /* The parsers are careful to generate TRUTH_NOT_EXPR
13162 only with operands that are always zero or one.
13163 We do not fold here but handle the only interesting case
13164 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
13165 *expr_p
= gimple_boolify (*expr_p
);
13166 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
13167 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
13168 TREE_TYPE (*expr_p
),
13169 TREE_OPERAND (*expr_p
, 0));
13171 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
13172 TREE_TYPE (*expr_p
),
13173 TREE_OPERAND (*expr_p
, 0),
13174 build_int_cst (TREE_TYPE (*expr_p
), 1));
13175 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
13176 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
13182 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
13185 case ANNOTATE_EXPR
:
13187 tree cond
= TREE_OPERAND (*expr_p
, 0);
13188 tree kind
= TREE_OPERAND (*expr_p
, 1);
13189 tree data
= TREE_OPERAND (*expr_p
, 2);
13190 tree type
= TREE_TYPE (cond
);
13191 if (!INTEGRAL_TYPE_P (type
))
13197 tree tmp
= create_tmp_var (type
);
13198 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
13200 = gimple_build_call_internal (IFN_ANNOTATE
, 3, cond
, kind
, data
);
13201 gimple_call_set_lhs (call
, tmp
);
13202 gimplify_seq_add_stmt (pre_p
, call
);
13209 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
13213 if (IS_EMPTY_STMT (*expr_p
))
13219 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
13220 || fallback
== fb_none
)
13222 /* Just strip a conversion to void (or in void context) and
13224 *expr_p
= TREE_OPERAND (*expr_p
, 0);
13229 ret
= gimplify_conversion (expr_p
);
13230 if (ret
== GS_ERROR
)
13232 if (*expr_p
!= save_expr
)
13236 case FIX_TRUNC_EXPR
:
13237 /* unary_expr: ... | '(' cast ')' val | ... */
13238 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
13239 is_gimple_val
, fb_rvalue
);
13240 recalculate_side_effects (*expr_p
);
13245 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
13246 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
13247 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
13249 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
13250 if (*expr_p
!= save_expr
)
13256 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
13257 is_gimple_reg
, fb_rvalue
);
13258 if (ret
== GS_ERROR
)
13261 recalculate_side_effects (*expr_p
);
13262 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
13263 TREE_TYPE (*expr_p
),
13264 TREE_OPERAND (*expr_p
, 0),
13265 build_int_cst (saved_ptr_type
, 0));
13266 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
13267 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
13272 /* We arrive here through the various re-gimplifcation paths. */
13274 /* First try re-folding the whole thing. */
13275 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
13276 TREE_OPERAND (*expr_p
, 0),
13277 TREE_OPERAND (*expr_p
, 1));
13280 REF_REVERSE_STORAGE_ORDER (tmp
)
13281 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
13283 recalculate_side_effects (*expr_p
);
13287 /* Avoid re-gimplifying the address operand if it is already
13288 in suitable form. Re-gimplifying would mark the address
13289 operand addressable. Always gimplify when not in SSA form
13290 as we still may have to gimplify decls with value-exprs. */
13291 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
13292 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
13294 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
13295 is_gimple_mem_ref_addr
, fb_rvalue
);
13296 if (ret
== GS_ERROR
)
13299 recalculate_side_effects (*expr_p
);
13303 /* Constants need not be gimplified. */
13310 /* Drop the overflow flag on constants, we do not want
13311 that in the GIMPLE IL. */
13312 if (TREE_OVERFLOW_P (*expr_p
))
13313 *expr_p
= drop_tree_overflow (*expr_p
);
13318 /* If we require an lvalue, such as for ADDR_EXPR, retain the
13319 CONST_DECL node. Otherwise the decl is replaceable by its
13321 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
13322 if (fallback
& fb_lvalue
)
13326 *expr_p
= DECL_INITIAL (*expr_p
);
13332 ret
= gimplify_decl_expr (expr_p
, pre_p
);
13336 ret
= gimplify_bind_expr (expr_p
, pre_p
);
13340 ret
= gimplify_loop_expr (expr_p
, pre_p
);
13344 ret
= gimplify_switch_expr (expr_p
, pre_p
);
13348 ret
= gimplify_exit_expr (expr_p
);
13352 /* If the target is not LABEL, then it is a computed jump
13353 and the target needs to be gimplified. */
13354 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
13356 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
13357 NULL
, is_gimple_val
, fb_rvalue
);
13358 if (ret
== GS_ERROR
)
13361 gimplify_seq_add_stmt (pre_p
,
13362 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
13367 gimplify_seq_add_stmt (pre_p
,
13368 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
13369 PREDICT_EXPR_OUTCOME (*expr_p
)));
13374 ret
= gimplify_label_expr (expr_p
, pre_p
);
13375 label
= LABEL_EXPR_LABEL (*expr_p
);
13376 gcc_assert (decl_function_context (label
) == current_function_decl
);
13378 /* If the label is used in a goto statement, or address of the label
13379 is taken, we need to unpoison all variables that were seen so far.
13380 Doing so would prevent us from reporting a false positives. */
13381 if (asan_poisoned_variables
13382 && asan_used_labels
!= NULL
13383 && asan_used_labels
->contains (label
))
13384 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
13387 case CASE_LABEL_EXPR
:
13388 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
13390 if (gimplify_ctxp
->live_switch_vars
)
13391 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
13396 ret
= gimplify_return_expr (*expr_p
, pre_p
);
13400 /* Don't reduce this in place; let gimplify_init_constructor work its
13401 magic. Buf if we're just elaborating this for side effects, just
13402 gimplify any element that has side-effects. */
13403 if (fallback
== fb_none
)
13405 unsigned HOST_WIDE_INT ix
;
13407 tree temp
= NULL_TREE
;
13408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
13409 if (TREE_SIDE_EFFECTS (val
))
13410 append_to_statement_list (val
, &temp
);
13413 ret
= temp
? GS_OK
: GS_ALL_DONE
;
13415 /* C99 code may assign to an array in a constructed
13416 structure or union, and this has undefined behavior only
13417 on execution, so create a temporary if an lvalue is
13419 else if (fallback
== fb_lvalue
)
13421 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
13422 mark_addressable (*expr_p
);
13429 /* The following are special cases that are not handled by the
13430 original GIMPLE grammar. */
13432 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
13435 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
13438 case BIT_FIELD_REF
:
13439 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13440 post_p
, is_gimple_lvalue
, fb_either
);
13441 recalculate_side_effects (*expr_p
);
13444 case TARGET_MEM_REF
:
13446 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
13448 if (TMR_BASE (*expr_p
))
13449 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
13450 post_p
, is_gimple_mem_ref_addr
, fb_either
);
13451 if (TMR_INDEX (*expr_p
))
13452 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
13453 post_p
, is_gimple_val
, fb_rvalue
);
13454 if (TMR_INDEX2 (*expr_p
))
13455 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
13456 post_p
, is_gimple_val
, fb_rvalue
);
13457 /* TMR_STEP and TMR_OFFSET are always integer constants. */
13458 ret
= MIN (r0
, r1
);
13462 case NON_LVALUE_EXPR
:
13463 /* This should have been stripped above. */
13464 gcc_unreachable ();
13467 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
13470 case TRY_FINALLY_EXPR
:
13471 case TRY_CATCH_EXPR
:
13473 gimple_seq eval
, cleanup
;
13476 /* Calls to destructors are generated automatically in FINALLY/CATCH
13477 block. They should have location as UNKNOWN_LOCATION. However,
13478 gimplify_call_expr will reset these call stmts to input_location
13479 if it finds stmt's location is unknown. To prevent resetting for
13480 destructors, we set the input_location to unknown.
13481 Note that this only affects the destructor calls in FINALLY/CATCH
13482 block, and will automatically reset to its original value by the
13483 end of gimplify_expr. */
13484 input_location
= UNKNOWN_LOCATION
;
13485 eval
= cleanup
= NULL
;
13486 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
13487 if (TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
13488 && TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == EH_ELSE_EXPR
)
13490 gimple_seq n
= NULL
, e
= NULL
;
13491 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
13493 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
13495 if (!gimple_seq_empty_p (n
) && !gimple_seq_empty_p (e
))
13497 geh_else
*stmt
= gimple_build_eh_else (n
, e
);
13498 gimple_seq_add_stmt (&cleanup
, stmt
);
13502 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
13503 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
13504 if (gimple_seq_empty_p (cleanup
))
13506 gimple_seq_add_seq (pre_p
, eval
);
13510 try_
= gimple_build_try (eval
, cleanup
,
13511 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
13512 ? GIMPLE_TRY_FINALLY
13513 : GIMPLE_TRY_CATCH
);
13514 if (EXPR_HAS_LOCATION (save_expr
))
13515 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
13516 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
13517 gimple_set_location (try_
, saved_location
);
13518 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
13519 gimple_try_set_catch_is_cleanup (try_
,
13520 TRY_CATCH_IS_CLEANUP (*expr_p
));
13521 gimplify_seq_add_stmt (pre_p
, try_
);
13526 case CLEANUP_POINT_EXPR
:
13527 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
13531 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
13537 gimple_seq handler
= NULL
;
13538 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
13539 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
13540 gimplify_seq_add_stmt (pre_p
, c
);
13545 case EH_FILTER_EXPR
:
13548 gimple_seq failure
= NULL
;
13550 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
13551 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
13552 gimple_set_no_warning (ehf
, TREE_NO_WARNING (*expr_p
));
13553 gimplify_seq_add_stmt (pre_p
, ehf
);
13560 enum gimplify_status r0
, r1
;
13561 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
13562 post_p
, is_gimple_val
, fb_rvalue
);
13563 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
13564 post_p
, is_gimple_val
, fb_rvalue
);
13565 TREE_SIDE_EFFECTS (*expr_p
) = 0;
13566 ret
= MIN (r0
, r1
);
13571 /* We get here when taking the address of a label. We mark
13572 the label as "forced"; meaning it can never be removed and
13573 it is a potential target for any computed goto. */
13574 FORCED_LABEL (*expr_p
) = 1;
13578 case STATEMENT_LIST
:
13579 ret
= gimplify_statement_list (expr_p
, pre_p
);
13582 case WITH_SIZE_EXPR
:
13584 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13585 post_p
== &internal_post
? NULL
: post_p
,
13586 gimple_test_f
, fallback
);
13587 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
13588 is_gimple_val
, fb_rvalue
);
13595 ret
= gimplify_var_or_parm_decl (expr_p
);
13599 /* When within an OMP context, notice uses of variables. */
13600 if (gimplify_omp_ctxp
)
13601 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
13605 case DEBUG_EXPR_DECL
:
13606 gcc_unreachable ();
13608 case DEBUG_BEGIN_STMT
:
13609 gimplify_seq_add_stmt (pre_p
,
13610 gimple_build_debug_begin_stmt
13611 (TREE_BLOCK (*expr_p
),
13612 EXPR_LOCATION (*expr_p
)));
13618 /* Allow callbacks into the gimplifier during optimization. */
13623 gimplify_omp_parallel (expr_p
, pre_p
);
13628 gimplify_omp_task (expr_p
, pre_p
);
13634 case OMP_DISTRIBUTE
:
13637 ret
= gimplify_omp_for (expr_p
, pre_p
);
13641 ret
= gimplify_omp_loop (expr_p
, pre_p
);
13645 gimplify_oacc_cache (expr_p
, pre_p
);
13650 gimplify_oacc_declare (expr_p
, pre_p
);
13654 case OACC_HOST_DATA
:
13657 case OACC_PARALLEL
:
13661 case OMP_TARGET_DATA
:
13663 gimplify_omp_workshare (expr_p
, pre_p
);
13667 case OACC_ENTER_DATA
:
13668 case OACC_EXIT_DATA
:
13670 case OMP_TARGET_UPDATE
:
13671 case OMP_TARGET_ENTER_DATA
:
13672 case OMP_TARGET_EXIT_DATA
:
13673 gimplify_omp_target_update (expr_p
, pre_p
);
13683 gimple_seq body
= NULL
;
13685 bool saved_in_omp_construct
= in_omp_construct
;
13687 in_omp_construct
= true;
13688 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
13689 in_omp_construct
= saved_in_omp_construct
;
13690 switch (TREE_CODE (*expr_p
))
13693 g
= gimple_build_omp_section (body
);
13696 g
= gimple_build_omp_master (body
);
13699 g
= gimplify_omp_ordered (*expr_p
, body
);
13702 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
13703 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
13704 gimplify_adjust_omp_clauses (pre_p
, body
,
13705 &OMP_CRITICAL_CLAUSES (*expr_p
),
13707 g
= gimple_build_omp_critical (body
,
13708 OMP_CRITICAL_NAME (*expr_p
),
13709 OMP_CRITICAL_CLAUSES (*expr_p
));
13712 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p
),
13713 pre_p
, ORT_WORKSHARE
, OMP_SCAN
);
13714 gimplify_adjust_omp_clauses (pre_p
, body
,
13715 &OMP_SCAN_CLAUSES (*expr_p
),
13717 g
= gimple_build_omp_scan (body
, OMP_SCAN_CLAUSES (*expr_p
));
13720 gcc_unreachable ();
13722 gimplify_seq_add_stmt (pre_p
, g
);
13727 case OMP_TASKGROUP
:
13729 gimple_seq body
= NULL
;
13731 tree
*pclauses
= &OMP_TASKGROUP_CLAUSES (*expr_p
);
13732 bool saved_in_omp_construct
= in_omp_construct
;
13733 gimplify_scan_omp_clauses (pclauses
, pre_p
, ORT_TASKGROUP
,
13735 gimplify_adjust_omp_clauses (pre_p
, NULL
, pclauses
, OMP_TASKGROUP
);
13737 in_omp_construct
= true;
13738 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
13739 in_omp_construct
= saved_in_omp_construct
;
13740 gimple_seq cleanup
= NULL
;
13741 tree fn
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
13742 gimple
*g
= gimple_build_call (fn
, 0);
13743 gimple_seq_add_stmt (&cleanup
, g
);
13744 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
13746 gimple_seq_add_stmt (&body
, g
);
13747 g
= gimple_build_omp_taskgroup (body
, *pclauses
);
13748 gimplify_seq_add_stmt (pre_p
, g
);
13754 case OMP_ATOMIC_READ
:
13755 case OMP_ATOMIC_CAPTURE_OLD
:
13756 case OMP_ATOMIC_CAPTURE_NEW
:
13757 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
13760 case TRANSACTION_EXPR
:
13761 ret
= gimplify_transaction (expr_p
, pre_p
);
13764 case TRUTH_AND_EXPR
:
13765 case TRUTH_OR_EXPR
:
13766 case TRUTH_XOR_EXPR
:
13768 tree orig_type
= TREE_TYPE (*expr_p
);
13769 tree new_type
, xop0
, xop1
;
13770 *expr_p
= gimple_boolify (*expr_p
);
13771 new_type
= TREE_TYPE (*expr_p
);
13772 if (!useless_type_conversion_p (orig_type
, new_type
))
13774 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
13779 /* Boolified binary truth expressions are semantically equivalent
13780 to bitwise binary expressions. Canonicalize them to the
13781 bitwise variant. */
13782 switch (TREE_CODE (*expr_p
))
13784 case TRUTH_AND_EXPR
:
13785 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
13787 case TRUTH_OR_EXPR
:
13788 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
13790 case TRUTH_XOR_EXPR
:
13791 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
13796 /* Now make sure that operands have compatible type to
13797 expression's new_type. */
13798 xop0
= TREE_OPERAND (*expr_p
, 0);
13799 xop1
= TREE_OPERAND (*expr_p
, 1);
13800 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
13801 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
13804 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
13805 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
13808 /* Continue classified as tcc_binary. */
13812 case VEC_COND_EXPR
:
13814 enum gimplify_status r0
, r1
, r2
;
13816 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13817 post_p
, is_gimple_condexpr
, fb_rvalue
);
13818 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
13819 post_p
, is_gimple_val
, fb_rvalue
);
13820 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
13821 post_p
, is_gimple_val
, fb_rvalue
);
13823 ret
= MIN (MIN (r0
, r1
), r2
);
13824 recalculate_side_effects (*expr_p
);
13828 case VEC_PERM_EXPR
:
13829 /* Classified as tcc_expression. */
13832 case BIT_INSERT_EXPR
:
13833 /* Argument 3 is a constant. */
13836 case POINTER_PLUS_EXPR
:
13838 enum gimplify_status r0
, r1
;
13839 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13840 post_p
, is_gimple_val
, fb_rvalue
);
13841 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
13842 post_p
, is_gimple_val
, fb_rvalue
);
13843 recalculate_side_effects (*expr_p
);
13844 ret
= MIN (r0
, r1
);
13849 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
13851 case tcc_comparison
:
13852 /* Handle comparison of objects of non scalar mode aggregates
13853 with a call to memcmp. It would be nice to only have to do
13854 this for variable-sized objects, but then we'd have to allow
13855 the same nest of reference nodes we allow for MODIFY_EXPR and
13856 that's too complex.
13858 Compare scalar mode aggregates as scalar mode values. Using
13859 memcmp for them would be very inefficient at best, and is
13860 plain wrong if bitfields are involved. */
13862 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
13864 /* Vector comparisons need no boolification. */
13865 if (TREE_CODE (type
) == VECTOR_TYPE
)
13867 else if (!AGGREGATE_TYPE_P (type
))
13869 tree org_type
= TREE_TYPE (*expr_p
);
13870 *expr_p
= gimple_boolify (*expr_p
);
13871 if (!useless_type_conversion_p (org_type
,
13872 TREE_TYPE (*expr_p
)))
13874 *expr_p
= fold_convert_loc (input_location
,
13875 org_type
, *expr_p
);
13881 else if (TYPE_MODE (type
) != BLKmode
)
13882 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
13884 ret
= gimplify_variable_sized_compare (expr_p
);
13889 /* If *EXPR_P does not need to be special-cased, handle it
13890 according to its class. */
13892 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13893 post_p
, is_gimple_val
, fb_rvalue
);
13899 enum gimplify_status r0
, r1
;
13901 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13902 post_p
, is_gimple_val
, fb_rvalue
);
13903 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
13904 post_p
, is_gimple_val
, fb_rvalue
);
13906 ret
= MIN (r0
, r1
);
13912 enum gimplify_status r0
, r1
, r2
;
13914 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13915 post_p
, is_gimple_val
, fb_rvalue
);
13916 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
13917 post_p
, is_gimple_val
, fb_rvalue
);
13918 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
13919 post_p
, is_gimple_val
, fb_rvalue
);
13921 ret
= MIN (MIN (r0
, r1
), r2
);
13925 case tcc_declaration
:
13928 goto dont_recalculate
;
13931 gcc_unreachable ();
13934 recalculate_side_effects (*expr_p
);
13940 gcc_assert (*expr_p
|| ret
!= GS_OK
);
13942 while (ret
== GS_OK
);
13944 /* If we encountered an error_mark somewhere nested inside, either
13945 stub out the statement or propagate the error back out. */
13946 if (ret
== GS_ERROR
)
13953 /* This was only valid as a return value from the langhook, which
13954 we handled. Make sure it doesn't escape from any other context. */
13955 gcc_assert (ret
!= GS_UNHANDLED
);
13957 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
13959 /* We aren't looking for a value, and we don't have a valid
13960 statement. If it doesn't have side-effects, throw it away.
13961 We can also get here with code such as "*&&L;", where L is
13962 a LABEL_DECL that is marked as FORCED_LABEL. */
13963 if (TREE_CODE (*expr_p
) == LABEL_DECL
13964 || !TREE_SIDE_EFFECTS (*expr_p
))
13966 else if (!TREE_THIS_VOLATILE (*expr_p
))
13968 /* This is probably a _REF that contains something nested that
13969 has side effects. Recurse through the operands to find it. */
13970 enum tree_code code
= TREE_CODE (*expr_p
);
13974 case COMPONENT_REF
:
13975 case REALPART_EXPR
:
13976 case IMAGPART_EXPR
:
13977 case VIEW_CONVERT_EXPR
:
13978 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
13979 gimple_test_f
, fallback
);
13983 case ARRAY_RANGE_REF
:
13984 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
13985 gimple_test_f
, fallback
);
13986 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
13987 gimple_test_f
, fallback
);
13991 /* Anything else with side-effects must be converted to
13992 a valid statement before we get here. */
13993 gcc_unreachable ();
13998 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
13999 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
)
14001 /* Historically, the compiler has treated a bare reference
14002 to a non-BLKmode volatile lvalue as forcing a load. */
14003 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
14005 /* Normally, we do not want to create a temporary for a
14006 TREE_ADDRESSABLE type because such a type should not be
14007 copied by bitwise-assignment. However, we make an
14008 exception here, as all we are doing here is ensuring that
14009 we read the bytes that make up the type. We use
14010 create_tmp_var_raw because create_tmp_var will abort when
14011 given a TREE_ADDRESSABLE type. */
14012 tree tmp
= create_tmp_var_raw (type
, "vol");
14013 gimple_add_tmp_var (tmp
);
14014 gimplify_assign (tmp
, *expr_p
, pre_p
);
14018 /* We can't do anything useful with a volatile reference to
14019 an incomplete type, so just throw it away. Likewise for
14020 a BLKmode type, since any implicit inner load should
14021 already have been turned into an explicit one by the
14022 gimplification process. */
14026 /* If we are gimplifying at the statement level, we're done. Tack
14027 everything together and return. */
14028 if (fallback
== fb_none
|| is_statement
)
14030 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14031 it out for GC to reclaim it. */
14032 *expr_p
= NULL_TREE
;
14034 if (!gimple_seq_empty_p (internal_pre
)
14035 || !gimple_seq_empty_p (internal_post
))
14037 gimplify_seq_add_seq (&internal_pre
, internal_post
);
14038 gimplify_seq_add_seq (pre_p
, internal_pre
);
14041 /* The result of gimplifying *EXPR_P is going to be the last few
14042 statements in *PRE_P and *POST_P. Add location information
14043 to all the statements that were added by the gimplification
14045 if (!gimple_seq_empty_p (*pre_p
))
14046 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
14048 if (!gimple_seq_empty_p (*post_p
))
14049 annotate_all_with_location_after (*post_p
, post_last_gsi
,
14055 #ifdef ENABLE_GIMPLE_CHECKING
14058 enum tree_code code
= TREE_CODE (*expr_p
);
14059 /* These expressions should already be in gimple IR form. */
14060 gcc_assert (code
!= MODIFY_EXPR
14061 && code
!= ASM_EXPR
14062 && code
!= BIND_EXPR
14063 && code
!= CATCH_EXPR
14064 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
14065 && code
!= EH_FILTER_EXPR
14066 && code
!= GOTO_EXPR
14067 && code
!= LABEL_EXPR
14068 && code
!= LOOP_EXPR
14069 && code
!= SWITCH_EXPR
14070 && code
!= TRY_FINALLY_EXPR
14071 && code
!= EH_ELSE_EXPR
14072 && code
!= OACC_PARALLEL
14073 && code
!= OACC_KERNELS
14074 && code
!= OACC_DATA
14075 && code
!= OACC_HOST_DATA
14076 && code
!= OACC_DECLARE
14077 && code
!= OACC_UPDATE
14078 && code
!= OACC_ENTER_DATA
14079 && code
!= OACC_EXIT_DATA
14080 && code
!= OACC_CACHE
14081 && code
!= OMP_CRITICAL
14083 && code
!= OACC_LOOP
14084 && code
!= OMP_MASTER
14085 && code
!= OMP_TASKGROUP
14086 && code
!= OMP_ORDERED
14087 && code
!= OMP_PARALLEL
14088 && code
!= OMP_SCAN
14089 && code
!= OMP_SECTIONS
14090 && code
!= OMP_SECTION
14091 && code
!= OMP_SINGLE
);
14095 /* Otherwise we're gimplifying a subexpression, so the resulting
14096 value is interesting. If it's a valid operand that matches
14097 GIMPLE_TEST_F, we're done. Unless we are handling some
14098 post-effects internally; if that's the case, we need to copy into
14099 a temporary before adding the post-effects to POST_P. */
14100 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
14103 /* Otherwise, we need to create a new temporary for the gimplified
14106 /* We can't return an lvalue if we have an internal postqueue. The
14107 object the lvalue refers to would (probably) be modified by the
14108 postqueue; we need to copy the value out first, which means an
14110 if ((fallback
& fb_lvalue
)
14111 && gimple_seq_empty_p (internal_post
)
14112 && is_gimple_addressable (*expr_p
))
14114 /* An lvalue will do. Take the address of the expression, store it
14115 in a temporary, and replace the expression with an INDIRECT_REF of
14117 tree ref_alias_type
= reference_alias_ptr_type (*expr_p
);
14118 unsigned int ref_align
= get_object_alignment (*expr_p
);
14119 tree ref_type
= TREE_TYPE (*expr_p
);
14120 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
14121 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
14122 if (TYPE_ALIGN (ref_type
) != ref_align
)
14123 ref_type
= build_aligned_type (ref_type
, ref_align
);
14124 *expr_p
= build2 (MEM_REF
, ref_type
,
14125 tmp
, build_zero_cst (ref_alias_type
));
14127 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
14129 /* An rvalue will do. Assign the gimplified expression into a
14130 new temporary TMP and replace the original expression with
14131 TMP. First, make sure that the expression has a type so that
14132 it can be assigned into a temporary. */
14133 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
14134 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
14138 #ifdef ENABLE_GIMPLE_CHECKING
14139 if (!(fallback
& fb_mayfail
))
14141 fprintf (stderr
, "gimplification failed:\n");
14142 print_generic_expr (stderr
, *expr_p
);
14143 debug_tree (*expr_p
);
14144 internal_error ("gimplification failed");
14147 gcc_assert (fallback
& fb_mayfail
);
14149 /* If this is an asm statement, and the user asked for the
14150 impossible, don't die. Fail and let gimplify_asm_expr
14156 /* Make sure the temporary matches our predicate. */
14157 gcc_assert ((*gimple_test_f
) (*expr_p
));
14159 if (!gimple_seq_empty_p (internal_post
))
14161 annotate_all_with_location (internal_post
, input_location
);
14162 gimplify_seq_add_seq (pre_p
, internal_post
);
14166 input_location
= saved_location
;
14170 /* Like gimplify_expr but make sure the gimplified result is not itself
14171 a SSA name (but a decl if it were). Temporaries required by
14172 evaluating *EXPR_P may be still SSA names. */
14174 static enum gimplify_status
14175 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
14176 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
14179 bool was_ssa_name_p
= TREE_CODE (*expr_p
) == SSA_NAME
;
14180 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
14181 gimple_test_f
, fallback
);
14183 && TREE_CODE (*expr_p
) == SSA_NAME
)
14185 tree name
= *expr_p
;
14186 if (was_ssa_name_p
)
14187 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
14190 /* Avoid the extra copy if possible. */
14191 *expr_p
= create_tmp_reg (TREE_TYPE (name
));
14192 gimple_set_lhs (SSA_NAME_DEF_STMT (name
), *expr_p
);
14193 release_ssa_name (name
);
14199 /* Look through TYPE for variable-sized objects and gimplify each such
14200 size that we find. Add to LIST_P any statements generated. */
14203 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
14207 if (type
== NULL
|| type
== error_mark_node
)
14210 /* We first do the main variant, then copy into any other variants. */
14211 type
= TYPE_MAIN_VARIANT (type
);
14213 /* Avoid infinite recursion. */
14214 if (TYPE_SIZES_GIMPLIFIED (type
))
14217 TYPE_SIZES_GIMPLIFIED (type
) = 1;
14219 switch (TREE_CODE (type
))
14222 case ENUMERAL_TYPE
:
14225 case FIXED_POINT_TYPE
:
14226 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
14227 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
14229 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
14231 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
14232 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
14237 /* These types may not have declarations, so handle them here. */
14238 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
14239 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
14240 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
14241 with assigned stack slots, for -O1+ -g they should be tracked
14243 if (!(TYPE_NAME (type
)
14244 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
14245 && DECL_IGNORED_P (TYPE_NAME (type
)))
14246 && TYPE_DOMAIN (type
)
14247 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
14249 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
14250 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
14251 DECL_IGNORED_P (t
) = 0;
14252 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
14253 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
14254 DECL_IGNORED_P (t
) = 0;
14260 case QUAL_UNION_TYPE
:
14261 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
14262 if (TREE_CODE (field
) == FIELD_DECL
)
14264 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
14265 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
14266 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
14267 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
14272 case REFERENCE_TYPE
:
14273 /* We used to recurse on the pointed-to type here, which turned out to
14274 be incorrect because its definition might refer to variables not
14275 yet initialized at this point if a forward declaration is involved.
14277 It was actually useful for anonymous pointed-to types to ensure
14278 that the sizes evaluation dominates every possible later use of the
14279 values. Restricting to such types here would be safe since there
14280 is no possible forward declaration around, but would introduce an
14281 undesirable middle-end semantic to anonymity. We then defer to
14282 front-ends the responsibility of ensuring that the sizes are
14283 evaluated both early and late enough, e.g. by attaching artificial
14284 type declarations to the tree. */
14291 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
14292 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
14294 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
14296 TYPE_SIZE (t
) = TYPE_SIZE (type
);
14297 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
14298 TYPE_SIZES_GIMPLIFIED (t
) = 1;
14302 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
14303 a size or position, has had all of its SAVE_EXPRs evaluated.
14304 We add any required statements to *STMT_P. */
14307 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
14309 tree expr
= *expr_p
;
14311 /* We don't do anything if the value isn't there, is constant, or contains
14312 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
14313 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
14314 will want to replace it with a new variable, but that will cause problems
14315 if this type is from outside the function. It's OK to have that here. */
14316 if (expr
== NULL_TREE
14317 || is_gimple_constant (expr
)
14318 || TREE_CODE (expr
) == VAR_DECL
14319 || CONTAINS_PLACEHOLDER_P (expr
))
14322 *expr_p
= unshare_expr (expr
);
14324 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
14325 if the def vanishes. */
14326 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
14328 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
14329 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
14330 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
14331 if (is_gimple_constant (*expr_p
))
14332 *expr_p
= get_initialized_tmp_var (*expr_p
, stmt_p
, NULL
, false);
14335 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
14336 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
14337 is true, also gimplify the parameters. */
14340 gimplify_body (tree fndecl
, bool do_parms
)
14342 location_t saved_location
= input_location
;
14343 gimple_seq parm_stmts
, parm_cleanup
= NULL
, seq
;
14344 gimple
*outer_stmt
;
14347 timevar_push (TV_TREE_GIMPLIFY
);
14349 init_tree_ssa (cfun
);
14351 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
14353 default_rtl_profile ();
14355 gcc_assert (gimplify_ctxp
== NULL
);
14356 push_gimplify_context (true);
14358 if (flag_openacc
|| flag_openmp
)
14360 gcc_assert (gimplify_omp_ctxp
== NULL
);
14361 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
14362 gimplify_omp_ctxp
= new_omp_context (ORT_IMPLICIT_TARGET
);
14365 /* Unshare most shared trees in the body and in that of any nested functions.
14366 It would seem we don't have to do this for nested functions because
14367 they are supposed to be output and then the outer function gimplified
14368 first, but the g++ front end doesn't always do it that way. */
14369 unshare_body (fndecl
);
14370 unvisit_body (fndecl
);
14372 /* Make sure input_location isn't set to something weird. */
14373 input_location
= DECL_SOURCE_LOCATION (fndecl
);
14375 /* Resolve callee-copies. This has to be done before processing
14376 the body so that DECL_VALUE_EXPR gets processed correctly. */
14377 parm_stmts
= do_parms
? gimplify_parameters (&parm_cleanup
) : NULL
;
14379 /* Gimplify the function's body. */
14381 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
14382 outer_stmt
= gimple_seq_first_stmt (seq
);
14385 outer_stmt
= gimple_build_nop ();
14386 gimplify_seq_add_stmt (&seq
, outer_stmt
);
14389 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
14390 not the case, wrap everything in a GIMPLE_BIND to make it so. */
14391 if (gimple_code (outer_stmt
) == GIMPLE_BIND
14392 && gimple_seq_first (seq
) == gimple_seq_last (seq
))
14393 outer_bind
= as_a
<gbind
*> (outer_stmt
);
14395 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
14397 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
14399 /* If we had callee-copies statements, insert them at the beginning
14400 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
14401 if (!gimple_seq_empty_p (parm_stmts
))
14405 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
14408 gtry
*g
= gimple_build_try (parm_stmts
, parm_cleanup
,
14409 GIMPLE_TRY_FINALLY
);
14411 gimple_seq_add_stmt (&parm_stmts
, g
);
14413 gimple_bind_set_body (outer_bind
, parm_stmts
);
14415 for (parm
= DECL_ARGUMENTS (current_function_decl
);
14416 parm
; parm
= DECL_CHAIN (parm
))
14417 if (DECL_HAS_VALUE_EXPR_P (parm
))
14419 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
14420 DECL_IGNORED_P (parm
) = 0;
14424 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
14425 && gimplify_omp_ctxp
)
14427 delete_omp_context (gimplify_omp_ctxp
);
14428 gimplify_omp_ctxp
= NULL
;
14431 pop_gimplify_context (outer_bind
);
14432 gcc_assert (gimplify_ctxp
== NULL
);
14434 if (flag_checking
&& !seen_error ())
14435 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
14437 timevar_pop (TV_TREE_GIMPLIFY
);
14438 input_location
= saved_location
;
14443 typedef char *char_p
; /* For DEF_VEC_P. */
14445 /* Return whether we should exclude FNDECL from instrumentation. */
14448 flag_instrument_functions_exclude_p (tree fndecl
)
14452 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
14453 if (v
&& v
->length () > 0)
14459 name
= lang_hooks
.decl_printable_name (fndecl
, 1);
14460 FOR_EACH_VEC_ELT (*v
, i
, s
)
14461 if (strstr (name
, s
) != NULL
)
14465 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
14466 if (v
&& v
->length () > 0)
14472 name
= DECL_SOURCE_FILE (fndecl
);
14473 FOR_EACH_VEC_ELT (*v
, i
, s
)
14474 if (strstr (name
, s
) != NULL
)
14481 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
14482 node for the function we want to gimplify.
14484 Return the sequence of GIMPLE statements corresponding to the body
14488 gimplify_function_tree (tree fndecl
)
14494 gcc_assert (!gimple_body (fndecl
));
14496 if (DECL_STRUCT_FUNCTION (fndecl
))
14497 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
14499 push_struct_function (fndecl
);
14501 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
14503 cfun
->curr_properties
|= PROP_gimple_lva
;
14505 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
14507 /* Preliminarily mark non-addressed complex variables as eligible
14508 for promotion to gimple registers. We'll transform their uses
14509 as we find them. */
14510 if ((TREE_CODE (TREE_TYPE (parm
)) == COMPLEX_TYPE
14511 || TREE_CODE (TREE_TYPE (parm
)) == VECTOR_TYPE
)
14512 && !TREE_THIS_VOLATILE (parm
)
14513 && !needs_to_live_in_memory (parm
))
14514 DECL_GIMPLE_REG_P (parm
) = 1;
14517 ret
= DECL_RESULT (fndecl
);
14518 if ((TREE_CODE (TREE_TYPE (ret
)) == COMPLEX_TYPE
14519 || TREE_CODE (TREE_TYPE (ret
)) == VECTOR_TYPE
)
14520 && !needs_to_live_in_memory (ret
))
14521 DECL_GIMPLE_REG_P (ret
) = 1;
14523 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS
))
14524 asan_poisoned_variables
= new hash_set
<tree
> ();
14525 bind
= gimplify_body (fndecl
, true);
14526 if (asan_poisoned_variables
)
14528 delete asan_poisoned_variables
;
14529 asan_poisoned_variables
= NULL
;
14532 /* The tree body of the function is no longer needed, replace it
14533 with the new GIMPLE body. */
14535 gimple_seq_add_stmt (&seq
, bind
);
14536 gimple_set_body (fndecl
, seq
);
14538 /* If we're instrumenting function entry/exit, then prepend the call to
14539 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
14540 catch the exit hook. */
14541 /* ??? Add some way to ignore exceptions for this TFE. */
14542 if (flag_instrument_function_entry_exit
14543 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
14544 /* Do not instrument extern inline functions. */
14545 && !(DECL_DECLARED_INLINE_P (fndecl
)
14546 && DECL_EXTERNAL (fndecl
)
14547 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
14548 && !flag_instrument_functions_exclude_p (fndecl
))
14553 gimple_seq cleanup
= NULL
, body
= NULL
;
14554 tree tmp_var
, this_fn_addr
;
14557 /* The instrumentation hooks aren't going to call the instrumented
14558 function and the address they receive is expected to be matchable
14559 against symbol addresses. Make sure we don't create a trampoline,
14560 in case the current function is nested. */
14561 this_fn_addr
= build_fold_addr_expr (current_function_decl
);
14562 TREE_NO_TRAMPOLINE (this_fn_addr
) = 1;
14564 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
14565 call
= gimple_build_call (x
, 1, integer_zero_node
);
14566 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
14567 gimple_call_set_lhs (call
, tmp_var
);
14568 gimplify_seq_add_stmt (&cleanup
, call
);
14569 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT
);
14570 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
14571 gimplify_seq_add_stmt (&cleanup
, call
);
14572 tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
14574 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
14575 call
= gimple_build_call (x
, 1, integer_zero_node
);
14576 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
14577 gimple_call_set_lhs (call
, tmp_var
);
14578 gimplify_seq_add_stmt (&body
, call
);
14579 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER
);
14580 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
14581 gimplify_seq_add_stmt (&body
, call
);
14582 gimplify_seq_add_stmt (&body
, tf
);
14583 new_bind
= gimple_build_bind (NULL
, body
, NULL
);
14585 /* Replace the current function body with the body
14586 wrapped in the try/finally TF. */
14588 gimple_seq_add_stmt (&seq
, new_bind
);
14589 gimple_set_body (fndecl
, seq
);
14593 if (sanitize_flags_p (SANITIZE_THREAD
))
14595 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
14596 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
14597 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
14598 /* Replace the current function body with the body
14599 wrapped in the try/finally TF. */
14601 gimple_seq_add_stmt (&seq
, new_bind
);
14602 gimple_set_body (fndecl
, seq
);
14605 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
14606 cfun
->curr_properties
|= PROP_gimple_any
;
14610 dump_function (TDI_gimple
, fndecl
);
14613 /* Return a dummy expression of type TYPE in order to keep going after an
14617 dummy_object (tree type
)
14619 tree t
= build_int_cst (build_pointer_type (type
), 0);
14620 return build2 (MEM_REF
, type
, t
, t
);
14623 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
14624 builtin function, but a very special sort of operator. */
14626 enum gimplify_status
14627 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
,
14628 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
14630 tree promoted_type
, have_va_type
;
14631 tree valist
= TREE_OPERAND (*expr_p
, 0);
14632 tree type
= TREE_TYPE (*expr_p
);
14633 tree t
, tag
, aptag
;
14634 location_t loc
= EXPR_LOCATION (*expr_p
);
14636 /* Verify that valist is of the proper type. */
14637 have_va_type
= TREE_TYPE (valist
);
14638 if (have_va_type
== error_mark_node
)
14640 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
14641 if (have_va_type
== NULL_TREE
14642 && POINTER_TYPE_P (TREE_TYPE (valist
)))
14643 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
14645 = targetm
.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist
)));
14646 gcc_assert (have_va_type
!= NULL_TREE
);
14648 /* Generate a diagnostic for requesting data of a type that cannot
14649 be passed through `...' due to type promotion at the call site. */
14650 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
14653 static bool gave_help
;
14655 /* Use the expansion point to handle cases such as passing bool (defined
14656 in a system header) through `...'. */
14658 = expansion_point_location_if_in_system_header (loc
);
14660 /* Unfortunately, this is merely undefined, rather than a constraint
14661 violation, so we cannot make this an error. If this call is never
14662 executed, the program is still strictly conforming. */
14663 auto_diagnostic_group d
;
14664 warned
= warning_at (xloc
, 0,
14665 "%qT is promoted to %qT when passed through %<...%>",
14666 type
, promoted_type
);
14667 if (!gave_help
&& warned
)
14670 inform (xloc
, "(so you should pass %qT not %qT to %<va_arg%>)",
14671 promoted_type
, type
);
14674 /* We can, however, treat "undefined" any way we please.
14675 Call abort to encourage the user to fix the program. */
14677 inform (xloc
, "if this code is reached, the program will abort");
14678 /* Before the abort, allow the evaluation of the va_list
14679 expression to exit or longjmp. */
14680 gimplify_and_add (valist
, pre_p
);
14681 t
= build_call_expr_loc (loc
,
14682 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
14683 gimplify_and_add (t
, pre_p
);
14685 /* This is dead code, but go ahead and finish so that the
14686 mode of the result comes out right. */
14687 *expr_p
= dummy_object (type
);
14688 return GS_ALL_DONE
;
14691 tag
= build_int_cst (build_pointer_type (type
), 0);
14692 aptag
= build_int_cst (TREE_TYPE (valist
), 0);
14694 *expr_p
= build_call_expr_internal_loc (loc
, IFN_VA_ARG
, type
, 3,
14695 valist
, tag
, aptag
);
14697 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
14698 needs to be expanded. */
14699 cfun
->curr_properties
&= ~PROP_gimple_lva
;
14704 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
14706 DST/SRC are the destination and source respectively. You can pass
14707 ungimplified trees in DST or SRC, in which case they will be
14708 converted to a gimple operand if necessary.
14710 This function returns the newly created GIMPLE_ASSIGN tuple. */
14713 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
14715 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
14716 gimplify_and_add (t
, seq_p
);
14718 return gimple_seq_last_stmt (*seq_p
);
14722 gimplify_hasher::hash (const elt_t
*p
)
14725 return iterative_hash_expr (t
, 0);
14729 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
14733 enum tree_code code
= TREE_CODE (t1
);
14735 if (TREE_CODE (t2
) != code
14736 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
14739 if (!operand_equal_p (t1
, t2
, 0))
14742 /* Only allow them to compare equal if they also hash equal; otherwise
14743 results are nondeterminate, and we fail bootstrap comparison. */
14744 gcc_checking_assert (hash (p1
) == hash (p2
));