1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2020 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
40 #include "fold-const.h"
45 #include "gimple-fold.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
56 #include "omp-general.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "stringpool.h"
68 #include "omp-offload.h"
71 /* Hash set of poisoned variables in a bind expr. */
72 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
74 enum gimplify_omp_var_data
77 GOVD_EXPLICIT
= 0x000002,
78 GOVD_SHARED
= 0x000004,
79 GOVD_PRIVATE
= 0x000008,
80 GOVD_FIRSTPRIVATE
= 0x000010,
81 GOVD_LASTPRIVATE
= 0x000020,
82 GOVD_REDUCTION
= 0x000040,
85 GOVD_DEBUG_PRIVATE
= 0x000200,
86 GOVD_PRIVATE_OUTER_REF
= 0x000400,
87 GOVD_LINEAR
= 0x000800,
88 GOVD_ALIGNED
= 0x001000,
90 /* Flag for GOVD_MAP: don't copy back. */
91 GOVD_MAP_TO_ONLY
= 0x002000,
93 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
94 GOVD_LINEAR_LASTPRIVATE_NO_OUTER
= 0x004000,
96 GOVD_MAP_0LEN_ARRAY
= 0x008000,
98 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
99 GOVD_MAP_ALWAYS_TO
= 0x010000,
101 /* Flag for shared vars that are or might be stored to in the region. */
102 GOVD_WRITTEN
= 0x020000,
104 /* Flag for GOVD_MAP, if it is a forced mapping. */
105 GOVD_MAP_FORCE
= 0x040000,
107 /* Flag for GOVD_MAP: must be present already. */
108 GOVD_MAP_FORCE_PRESENT
= 0x080000,
110 /* Flag for GOVD_MAP: only allocate. */
111 GOVD_MAP_ALLOC_ONLY
= 0x100000,
113 /* Flag for GOVD_MAP: only copy back. */
114 GOVD_MAP_FROM_ONLY
= 0x200000,
116 GOVD_NONTEMPORAL
= 0x400000,
118 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
119 GOVD_LASTPRIVATE_CONDITIONAL
= 0x800000,
121 GOVD_CONDTEMP
= 0x1000000,
123 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
124 GOVD_REDUCTION_INSCAN
= 0x2000000,
126 /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
128 GOVD_MAP_HAS_ATTACHMENTS
= 8388608,
130 GOVD_DATA_SHARE_CLASS
= (GOVD_SHARED
| GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
131 | GOVD_LASTPRIVATE
| GOVD_REDUCTION
| GOVD_LINEAR
138 ORT_WORKSHARE
= 0x00,
139 ORT_TASKGROUP
= 0x01,
143 ORT_COMBINED_PARALLEL
= ORT_PARALLEL
| 1,
146 ORT_UNTIED_TASK
= ORT_TASK
| 1,
147 ORT_TASKLOOP
= ORT_TASK
| 2,
148 ORT_UNTIED_TASKLOOP
= ORT_UNTIED_TASK
| 2,
151 ORT_COMBINED_TEAMS
= ORT_TEAMS
| 1,
152 ORT_HOST_TEAMS
= ORT_TEAMS
| 2,
153 ORT_COMBINED_HOST_TEAMS
= ORT_COMBINED_TEAMS
| 2,
156 ORT_TARGET_DATA
= 0x40,
158 /* Data region with offloading. */
160 ORT_COMBINED_TARGET
= ORT_TARGET
| 1,
161 ORT_IMPLICIT_TARGET
= ORT_TARGET
| 2,
163 /* OpenACC variants. */
164 ORT_ACC
= 0x100, /* A generic OpenACC region. */
165 ORT_ACC_DATA
= ORT_ACC
| ORT_TARGET_DATA
, /* Data construct. */
166 ORT_ACC_PARALLEL
= ORT_ACC
| ORT_TARGET
, /* Parallel construct */
167 ORT_ACC_KERNELS
= ORT_ACC
| ORT_TARGET
| 2, /* Kernels construct. */
168 ORT_ACC_SERIAL
= ORT_ACC
| ORT_TARGET
| 4, /* Serial construct. */
169 ORT_ACC_HOST_DATA
= ORT_ACC
| ORT_TARGET_DATA
| 2, /* Host data. */
171 /* Dummy OpenMP region, used to disable expansion of
172 DECL_VALUE_EXPRs in taskloop pre body. */
176 /* Gimplify hashtable helper. */
178 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
180 static inline hashval_t
hash (const elt_t
*);
181 static inline bool equal (const elt_t
*, const elt_t
*);
186 struct gimplify_ctx
*prev_context
;
188 vec
<gbind
*> bind_expr_stack
;
190 gimple_seq conditional_cleanups
;
194 vec
<tree
> case_labels
;
195 hash_set
<tree
> *live_switch_vars
;
196 /* The formal temporary table. Should this be persistent? */
197 hash_table
<gimplify_hasher
> *temp_htab
;
200 unsigned into_ssa
: 1;
201 unsigned allow_rhs_cond_expr
: 1;
202 unsigned in_cleanup_point_expr
: 1;
203 unsigned keep_stack
: 1;
204 unsigned save_stack
: 1;
205 unsigned in_switch_expr
: 1;
208 enum gimplify_defaultmap_kind
216 struct gimplify_omp_ctx
218 struct gimplify_omp_ctx
*outer_context
;
219 splay_tree variables
;
220 hash_set
<tree
> *privatized_types
;
222 /* Iteration variables in an OMP_FOR. */
223 vec
<tree
> loop_iter_var
;
225 enum omp_clause_default_kind default_kind
;
226 enum omp_region_type region_type
;
230 bool target_firstprivatize_array_bases
;
232 bool order_concurrent
;
236 static struct gimplify_ctx
*gimplify_ctxp
;
237 static struct gimplify_omp_ctx
*gimplify_omp_ctxp
;
238 static bool in_omp_construct
;
240 /* Forward declaration. */
241 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
242 static hash_map
<tree
, tree
> *oacc_declare_returns
;
243 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
244 bool (*) (tree
), fallback_t
, bool);
246 /* Shorter alias name for the above function for use in gimplify.c
250 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
252 gimple_seq_add_stmt_without_update (seq_p
, gs
);
255 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
256 NULL, a new sequence is allocated. This function is
257 similar to gimple_seq_add_seq, but does not scan the operands.
258 During gimplification, we need to manipulate statement sequences
259 before the def/use vectors have been constructed. */
262 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
264 gimple_stmt_iterator si
;
269 si
= gsi_last (*dst_p
);
270 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
274 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
275 and popping gimplify contexts. */
277 static struct gimplify_ctx
*ctx_pool
= NULL
;
279 /* Return a gimplify context struct from the pool. */
281 static inline struct gimplify_ctx
*
284 struct gimplify_ctx
* c
= ctx_pool
;
287 ctx_pool
= c
->prev_context
;
289 c
= XNEW (struct gimplify_ctx
);
291 memset (c
, '\0', sizeof (*c
));
295 /* Put gimplify context C back into the pool. */
298 ctx_free (struct gimplify_ctx
*c
)
300 c
->prev_context
= ctx_pool
;
304 /* Free allocated ctx stack memory. */
307 free_gimplify_stack (void)
309 struct gimplify_ctx
*c
;
311 while ((c
= ctx_pool
))
313 ctx_pool
= c
->prev_context
;
319 /* Set up a context for the gimplifier. */
322 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
324 struct gimplify_ctx
*c
= ctx_alloc ();
326 c
->prev_context
= gimplify_ctxp
;
328 gimplify_ctxp
->into_ssa
= in_ssa
;
329 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
332 /* Tear down a context for the gimplifier. If BODY is non-null, then
333 put the temporaries into the outer BIND_EXPR. Otherwise, put them
336 BODY is not a sequence, but the first tuple in a sequence. */
339 pop_gimplify_context (gimple
*body
)
341 struct gimplify_ctx
*c
= gimplify_ctxp
;
344 && (!c
->bind_expr_stack
.exists ()
345 || c
->bind_expr_stack
.is_empty ()));
346 c
->bind_expr_stack
.release ();
347 gimplify_ctxp
= c
->prev_context
;
350 declare_vars (c
->temps
, body
, false);
352 record_vars (c
->temps
);
359 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
362 gimple_push_bind_expr (gbind
*bind_stmt
)
364 gimplify_ctxp
->bind_expr_stack
.reserve (8);
365 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
368 /* Pop the first element off the stack of bindings. */
371 gimple_pop_bind_expr (void)
373 gimplify_ctxp
->bind_expr_stack
.pop ();
376 /* Return the first element of the stack of bindings. */
379 gimple_current_bind_expr (void)
381 return gimplify_ctxp
->bind_expr_stack
.last ();
384 /* Return the stack of bindings created during gimplification. */
387 gimple_bind_expr_stack (void)
389 return gimplify_ctxp
->bind_expr_stack
;
392 /* Return true iff there is a COND_EXPR between us and the innermost
393 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
396 gimple_conditional_context (void)
398 return gimplify_ctxp
->conditions
> 0;
401 /* Note that we've entered a COND_EXPR. */
404 gimple_push_condition (void)
406 #ifdef ENABLE_GIMPLE_CHECKING
407 if (gimplify_ctxp
->conditions
== 0)
408 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
410 ++(gimplify_ctxp
->conditions
);
413 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
414 now, add any conditional cleanups we've seen to the prequeue. */
417 gimple_pop_condition (gimple_seq
*pre_p
)
419 int conds
= --(gimplify_ctxp
->conditions
);
421 gcc_assert (conds
>= 0);
424 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
425 gimplify_ctxp
->conditional_cleanups
= NULL
;
429 /* A stable comparison routine for use with splay trees and DECLs. */
432 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
437 return DECL_UID (a
) - DECL_UID (b
);
440 /* Create a new omp construct that deals with variable remapping. */
442 static struct gimplify_omp_ctx
*
443 new_omp_context (enum omp_region_type region_type
)
445 struct gimplify_omp_ctx
*c
;
447 c
= XCNEW (struct gimplify_omp_ctx
);
448 c
->outer_context
= gimplify_omp_ctxp
;
449 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
450 c
->privatized_types
= new hash_set
<tree
>;
451 c
->location
= input_location
;
452 c
->region_type
= region_type
;
453 if ((region_type
& ORT_TASK
) == 0)
454 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
456 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
457 c
->defaultmap
[GDMK_SCALAR
] = GOVD_MAP
;
458 c
->defaultmap
[GDMK_AGGREGATE
] = GOVD_MAP
;
459 c
->defaultmap
[GDMK_ALLOCATABLE
] = GOVD_MAP
;
460 c
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
;
465 /* Destroy an omp construct that deals with variable remapping. */
468 delete_omp_context (struct gimplify_omp_ctx
*c
)
470 splay_tree_delete (c
->variables
);
471 delete c
->privatized_types
;
472 c
->loop_iter_var
.release ();
476 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
477 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
479 /* Both gimplify the statement T and append it to *SEQ_P. This function
480 behaves exactly as gimplify_stmt, but you don't have to pass T as a
484 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
486 gimplify_stmt (&t
, seq_p
);
489 /* Gimplify statement T into sequence *SEQ_P, and return the first
490 tuple in the sequence of generated tuples for this statement.
491 Return NULL if gimplifying T produced no tuples. */
494 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
496 gimple_stmt_iterator last
= gsi_last (*seq_p
);
498 gimplify_and_add (t
, seq_p
);
500 if (!gsi_end_p (last
))
503 return gsi_stmt (last
);
506 return gimple_seq_first_stmt (*seq_p
);
509 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
510 LHS, or for a call argument. */
513 is_gimple_mem_rhs (tree t
)
515 /* If we're dealing with a renamable type, either source or dest must be
516 a renamed variable. */
517 if (is_gimple_reg_type (TREE_TYPE (t
)))
518 return is_gimple_val (t
);
520 return is_gimple_val (t
) || is_gimple_lvalue (t
);
523 /* Return true if T is a CALL_EXPR or an expression that can be
524 assigned to a temporary. Note that this predicate should only be
525 used during gimplification. See the rationale for this in
526 gimplify_modify_expr. */
529 is_gimple_reg_rhs_or_call (tree t
)
531 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
532 || TREE_CODE (t
) == CALL_EXPR
);
535 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
536 this predicate should only be used during gimplification. See the
537 rationale for this in gimplify_modify_expr. */
540 is_gimple_mem_rhs_or_call (tree t
)
542 /* If we're dealing with a renamable type, either source or dest must be
543 a renamed variable. */
544 if (is_gimple_reg_type (TREE_TYPE (t
)))
545 return is_gimple_val (t
);
547 return (is_gimple_val (t
)
548 || is_gimple_lvalue (t
)
549 || TREE_CLOBBER_P (t
)
550 || TREE_CODE (t
) == CALL_EXPR
);
553 /* Create a temporary with a name derived from VAL. Subroutine of
554 lookup_tmp_var; nobody else should call this function. */
557 create_tmp_from_val (tree val
)
559 /* Drop all qualifiers and address-space information from the value type. */
560 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
561 tree var
= create_tmp_var (type
, get_name (val
));
565 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
566 an existing expression temporary. */
569 lookup_tmp_var (tree val
, bool is_formal
)
573 /* If not optimizing, never really reuse a temporary. local-alloc
574 won't allocate any variable that is used in more than one basic
575 block, which means it will go into memory, causing much extra
576 work in reload and final and poorer code generation, outweighing
577 the extra memory allocation here. */
578 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
579 ret
= create_tmp_from_val (val
);
586 if (!gimplify_ctxp
->temp_htab
)
587 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
588 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
591 elt_p
= XNEW (elt_t
);
593 elt_p
->temp
= ret
= create_tmp_from_val (val
);
606 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
609 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
610 bool is_formal
, bool allow_ssa
)
614 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
615 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
616 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
620 && gimplify_ctxp
->into_ssa
621 && is_gimple_reg_type (TREE_TYPE (val
)))
623 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
624 if (! gimple_in_ssa_p (cfun
))
626 const char *name
= get_name (val
);
628 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
632 t
= lookup_tmp_var (val
, is_formal
);
634 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
636 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
638 /* gimplify_modify_expr might want to reduce this further. */
639 gimplify_and_add (mod
, pre_p
);
645 /* Return a formal temporary variable initialized with VAL. PRE_P is as
646 in gimplify_expr. Only use this function if:
648 1) The value of the unfactored expression represented by VAL will not
649 change between the initialization and use of the temporary, and
650 2) The temporary will not be otherwise modified.
652 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
653 and #2 means it is inappropriate for && temps.
655 For other cases, use get_initialized_tmp_var instead. */
658 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
660 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true);
663 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
664 are as in gimplify_expr. */
667 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
,
668 gimple_seq
*post_p
/* = NULL */,
669 bool allow_ssa
/* = true */)
671 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
);
674 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
675 generate debug info for them; otherwise don't. */
678 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
685 gbind
*scope
= as_a
<gbind
*> (gs
);
687 temps
= nreverse (last
);
689 block
= gimple_bind_block (scope
);
690 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
691 if (!block
|| !debug_info
)
693 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
694 gimple_bind_set_vars (scope
, temps
);
698 /* We need to attach the nodes both to the BIND_EXPR and to its
699 associated BLOCK for debugging purposes. The key point here
700 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
701 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
702 if (BLOCK_VARS (block
))
703 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
706 gimple_bind_set_vars (scope
,
707 chainon (gimple_bind_vars (scope
), temps
));
708 BLOCK_VARS (block
) = temps
;
714 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
715 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
716 no such upper bound can be obtained. */
719 force_constant_size (tree var
)
721 /* The only attempt we make is by querying the maximum size of objects
722 of the variable's type. */
724 HOST_WIDE_INT max_size
;
726 gcc_assert (VAR_P (var
));
728 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
730 gcc_assert (max_size
>= 0);
733 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
735 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
738 /* Push the temporary variable TMP into the current binding. */
741 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
743 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
745 /* Later processing assumes that the object size is constant, which might
746 not be true at this point. Force the use of a constant upper bound in
748 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
749 force_constant_size (tmp
);
751 DECL_CONTEXT (tmp
) = fn
->decl
;
752 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
754 record_vars_into (tmp
, fn
->decl
);
757 /* Push the temporary variable TMP into the current binding. */
760 gimple_add_tmp_var (tree tmp
)
762 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
764 /* Later processing assumes that the object size is constant, which might
765 not be true at this point. Force the use of a constant upper bound in
767 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
768 force_constant_size (tmp
);
770 DECL_CONTEXT (tmp
) = current_function_decl
;
771 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
775 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
776 gimplify_ctxp
->temps
= tmp
;
778 /* Mark temporaries local within the nearest enclosing parallel. */
779 if (gimplify_omp_ctxp
)
781 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
782 int flag
= GOVD_LOCAL
;
784 && (ctx
->region_type
== ORT_WORKSHARE
785 || ctx
->region_type
== ORT_TASKGROUP
786 || ctx
->region_type
== ORT_SIMD
787 || ctx
->region_type
== ORT_ACC
))
789 if (ctx
->region_type
== ORT_SIMD
790 && TREE_ADDRESSABLE (tmp
)
791 && !TREE_STATIC (tmp
))
793 if (TREE_CODE (DECL_SIZE_UNIT (tmp
)) != INTEGER_CST
)
794 ctx
->add_safelen1
= true;
799 ctx
= ctx
->outer_context
;
802 omp_add_variable (ctx
, tmp
, flag
| GOVD_SEEN
);
811 /* This case is for nested functions. We need to expose the locals
813 body_seq
= gimple_body (current_function_decl
);
814 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
820 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
821 nodes that are referenced more than once in GENERIC functions. This is
822 necessary because gimplification (translation into GIMPLE) is performed
823 by modifying tree nodes in-place, so gimplication of a shared node in a
824 first context could generate an invalid GIMPLE form in a second context.
826 This is achieved with a simple mark/copy/unmark algorithm that walks the
827 GENERIC representation top-down, marks nodes with TREE_VISITED the first
828 time it encounters them, duplicates them if they already have TREE_VISITED
829 set, and finally removes the TREE_VISITED marks it has set.
831 The algorithm works only at the function level, i.e. it generates a GENERIC
832 representation of a function with no nodes shared within the function when
833 passed a GENERIC function (except for nodes that are allowed to be shared).
835 At the global level, it is also necessary to unshare tree nodes that are
836 referenced in more than one function, for the same aforementioned reason.
837 This requires some cooperation from the front-end. There are 2 strategies:
839 1. Manual unsharing. The front-end needs to call unshare_expr on every
840 expression that might end up being shared across functions.
842 2. Deep unsharing. This is an extension of regular unsharing. Instead
843 of calling unshare_expr on expressions that might be shared across
844 functions, the front-end pre-marks them with TREE_VISITED. This will
845 ensure that they are unshared on the first reference within functions
846 when the regular unsharing algorithm runs. The counterpart is that
847 this algorithm must look deeper than for manual unsharing, which is
848 specified by LANG_HOOKS_DEEP_UNSHARING.
850 If there are only few specific cases of node sharing across functions, it is
851 probably easier for a front-end to unshare the expressions manually. On the
852 contrary, if the expressions generated at the global level are as widespread
853 as expressions generated within functions, deep unsharing is very likely the
856 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
857 These nodes model computations that must be done once. If we were to
858 unshare something like SAVE_EXPR(i++), the gimplification process would
859 create wrong code. However, if DATA is non-null, it must hold a pointer
860 set that is used to unshare the subtrees of these nodes. */
863 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
866 enum tree_code code
= TREE_CODE (t
);
868 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
869 copy their subtrees if we can make sure to do it only once. */
870 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
872 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
878 /* Stop at types, decls, constants like copy_tree_r. */
879 else if (TREE_CODE_CLASS (code
) == tcc_type
880 || TREE_CODE_CLASS (code
) == tcc_declaration
881 || TREE_CODE_CLASS (code
) == tcc_constant
)
884 /* Cope with the statement expression extension. */
885 else if (code
== STATEMENT_LIST
)
888 /* Leave the bulk of the work to copy_tree_r itself. */
890 copy_tree_r (tp
, walk_subtrees
, NULL
);
895 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
896 If *TP has been visited already, then *TP is deeply copied by calling
897 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
900 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
903 enum tree_code code
= TREE_CODE (t
);
905 /* Skip types, decls, and constants. But we do want to look at their
906 types and the bounds of types. Mark them as visited so we properly
907 unmark their subtrees on the unmark pass. If we've already seen them,
908 don't look down further. */
909 if (TREE_CODE_CLASS (code
) == tcc_type
910 || TREE_CODE_CLASS (code
) == tcc_declaration
911 || TREE_CODE_CLASS (code
) == tcc_constant
)
913 if (TREE_VISITED (t
))
916 TREE_VISITED (t
) = 1;
919 /* If this node has been visited already, unshare it and don't look
921 else if (TREE_VISITED (t
))
923 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
927 /* Otherwise, mark the node as visited and keep looking. */
929 TREE_VISITED (t
) = 1;
934 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
935 copy_if_shared_r callback unmodified. */
938 copy_if_shared (tree
*tp
, void *data
)
940 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
943 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
944 any nested functions. */
947 unshare_body (tree fndecl
)
949 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
950 /* If the language requires deep unsharing, we need a pointer set to make
951 sure we don't repeatedly unshare subtrees of unshareable nodes. */
952 hash_set
<tree
> *visited
953 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
955 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
956 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
957 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
962 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
963 unshare_body (cgn
->decl
);
966 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
967 Subtrees are walked until the first unvisited node is encountered. */
970 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
974 /* If this node has been visited, unmark it and keep looking. */
975 if (TREE_VISITED (t
))
976 TREE_VISITED (t
) = 0;
978 /* Otherwise, don't look any deeper. */
985 /* Unmark the visited trees rooted at *TP. */
988 unmark_visited (tree
*tp
)
990 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
993 /* Likewise, but mark all trees as not visited. */
996 unvisit_body (tree fndecl
)
998 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
1000 unmark_visited (&DECL_SAVED_TREE (fndecl
));
1001 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
1002 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
1005 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
1006 unvisit_body (cgn
->decl
);
1009 /* Unconditionally make an unshared copy of EXPR. This is used when using
1010 stored expressions which span multiple functions, such as BINFO_VTABLE,
1011 as the normal unsharing process can't tell that they're shared. */
1014 unshare_expr (tree expr
)
1016 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1020 /* Worker for unshare_expr_without_location. */
1023 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
1026 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
1032 /* Similar to unshare_expr but also prune all expression locations
1036 unshare_expr_without_location (tree expr
)
1038 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1040 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
1044 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1045 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1046 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1047 EXPR is the location of the EXPR. */
1050 rexpr_location (tree expr
, location_t or_else
= UNKNOWN_LOCATION
)
1055 if (EXPR_HAS_LOCATION (expr
))
1056 return EXPR_LOCATION (expr
);
1058 if (TREE_CODE (expr
) != STATEMENT_LIST
)
1061 tree_stmt_iterator i
= tsi_start (expr
);
1064 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
1070 if (!found
|| !tsi_one_before_end_p (i
))
1073 return rexpr_location (tsi_stmt (i
), or_else
);
1076 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1077 rexpr_location for the potential recursion. */
1080 rexpr_has_location (tree expr
)
1082 return rexpr_location (expr
) != UNKNOWN_LOCATION
;
1086 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1087 contain statements and have a value. Assign its value to a temporary
1088 and give it void_type_node. Return the temporary, or NULL_TREE if
1089 WRAPPER was already void. */
1092 voidify_wrapper_expr (tree wrapper
, tree temp
)
1094 tree type
= TREE_TYPE (wrapper
);
1095 if (type
&& !VOID_TYPE_P (type
))
1099 /* Set p to point to the body of the wrapper. Loop until we find
1100 something that isn't a wrapper. */
1101 for (p
= &wrapper
; p
&& *p
; )
1103 switch (TREE_CODE (*p
))
1106 TREE_SIDE_EFFECTS (*p
) = 1;
1107 TREE_TYPE (*p
) = void_type_node
;
1108 /* For a BIND_EXPR, the body is operand 1. */
1109 p
= &BIND_EXPR_BODY (*p
);
1112 case CLEANUP_POINT_EXPR
:
1113 case TRY_FINALLY_EXPR
:
1114 case TRY_CATCH_EXPR
:
1115 TREE_SIDE_EFFECTS (*p
) = 1;
1116 TREE_TYPE (*p
) = void_type_node
;
1117 p
= &TREE_OPERAND (*p
, 0);
1120 case STATEMENT_LIST
:
1122 tree_stmt_iterator i
= tsi_last (*p
);
1123 TREE_SIDE_EFFECTS (*p
) = 1;
1124 TREE_TYPE (*p
) = void_type_node
;
1125 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1130 /* Advance to the last statement. Set all container types to
1132 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1134 TREE_SIDE_EFFECTS (*p
) = 1;
1135 TREE_TYPE (*p
) = void_type_node
;
1139 case TRANSACTION_EXPR
:
1140 TREE_SIDE_EFFECTS (*p
) = 1;
1141 TREE_TYPE (*p
) = void_type_node
;
1142 p
= &TRANSACTION_EXPR_BODY (*p
);
1146 /* Assume that any tree upon which voidify_wrapper_expr is
1147 directly called is a wrapper, and that its body is op0. */
1150 TREE_SIDE_EFFECTS (*p
) = 1;
1151 TREE_TYPE (*p
) = void_type_node
;
1152 p
= &TREE_OPERAND (*p
, 0);
1160 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1164 /* The wrapper is on the RHS of an assignment that we're pushing
1166 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1167 || TREE_CODE (temp
) == MODIFY_EXPR
);
1168 TREE_OPERAND (temp
, 1) = *p
;
1173 temp
= create_tmp_var (type
, "retval");
1174 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1183 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1184 a temporary through which they communicate. */
1187 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1191 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1192 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1193 gimple_call_set_lhs (*save
, tmp_var
);
1196 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1200 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1203 build_asan_poison_call_expr (tree decl
)
1205 /* Do not poison variables that have size equal to zero. */
1206 tree unit_size
= DECL_SIZE_UNIT (decl
);
1207 if (zerop (unit_size
))
1210 tree base
= build_fold_addr_expr (decl
);
1212 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1214 build_int_cst (integer_type_node
,
1219 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1220 on POISON flag, shadow memory of a DECL variable. The call will be
1221 put on location identified by IT iterator, where BEFORE flag drives
1222 position where the stmt will be put. */
1225 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1228 tree unit_size
= DECL_SIZE_UNIT (decl
);
1229 tree base
= build_fold_addr_expr (decl
);
1231 /* Do not poison variables that have size equal to zero. */
1232 if (zerop (unit_size
))
1235 /* It's necessary to have all stack variables aligned to ASAN granularity
1237 if (DECL_ALIGN_UNIT (decl
) <= ASAN_SHADOW_GRANULARITY
)
1238 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* ASAN_SHADOW_GRANULARITY
);
1240 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1243 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1244 build_int_cst (integer_type_node
, flags
),
1248 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1250 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1253 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1254 either poisons or unpoisons a DECL. Created statement is appended
1255 to SEQ_P gimple sequence. */
1258 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1260 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1261 bool before
= false;
1266 asan_poison_variable (decl
, poison
, &it
, before
);
1269 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1272 sort_by_decl_uid (const void *a
, const void *b
)
1274 const tree
*t1
= (const tree
*)a
;
1275 const tree
*t2
= (const tree
*)b
;
1277 int uid1
= DECL_UID (*t1
);
1278 int uid2
= DECL_UID (*t2
);
1282 else if (uid1
> uid2
)
1288 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1289 depending on POISON flag. Created statement is appended
1290 to SEQ_P gimple sequence. */
1293 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1295 unsigned c
= variables
->elements ();
1299 auto_vec
<tree
> sorted_variables (c
);
1301 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1302 it
!= variables
->end (); ++it
)
1303 sorted_variables
.safe_push (*it
);
1305 sorted_variables
.qsort (sort_by_decl_uid
);
1309 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1311 asan_poison_variable (var
, poison
, seq_p
);
1313 /* Add use_after_scope_memory attribute for the variable in order
1314 to prevent re-written into SSA. */
1315 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1316 DECL_ATTRIBUTES (var
)))
1317 DECL_ATTRIBUTES (var
)
1318 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1320 DECL_ATTRIBUTES (var
));
1324 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1326 static enum gimplify_status
1327 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1329 tree bind_expr
= *expr_p
;
1330 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1331 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1334 gimple_seq body
, cleanup
;
1336 location_t start_locus
= 0, end_locus
= 0;
1337 tree ret_clauses
= NULL
;
1339 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1341 /* Mark variables seen in this bind expr. */
1342 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1346 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1348 /* Mark variable as local. */
1349 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
))
1351 if (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1352 || splay_tree_lookup (ctx
->variables
,
1353 (splay_tree_key
) t
) == NULL
)
1355 int flag
= GOVD_LOCAL
;
1356 if (ctx
->region_type
== ORT_SIMD
1357 && TREE_ADDRESSABLE (t
)
1358 && !TREE_STATIC (t
))
1360 if (TREE_CODE (DECL_SIZE_UNIT (t
)) != INTEGER_CST
)
1361 ctx
->add_safelen1
= true;
1363 flag
= GOVD_PRIVATE
;
1365 omp_add_variable (ctx
, t
, flag
| GOVD_SEEN
);
1367 /* Static locals inside of target construct or offloaded
1368 routines need to be "omp declare target". */
1369 if (TREE_STATIC (t
))
1370 for (; ctx
; ctx
= ctx
->outer_context
)
1371 if ((ctx
->region_type
& ORT_TARGET
) != 0)
1373 if (!lookup_attribute ("omp declare target",
1374 DECL_ATTRIBUTES (t
)))
1376 tree id
= get_identifier ("omp declare target");
1378 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (t
));
1379 varpool_node
*node
= varpool_node::get (t
);
1382 node
->offloadable
= 1;
1383 if (ENABLE_OFFLOADING
&& !DECL_EXTERNAL (t
))
1385 g
->have_offload
= true;
1387 vec_safe_push (offload_vars
, t
);
1395 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1397 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1398 cfun
->has_local_explicit_reg_vars
= true;
1402 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1403 BIND_EXPR_BLOCK (bind_expr
));
1404 gimple_push_bind_expr (bind_stmt
);
1406 gimplify_ctxp
->keep_stack
= false;
1407 gimplify_ctxp
->save_stack
= false;
1409 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1411 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1412 gimple_bind_set_body (bind_stmt
, body
);
1414 /* Source location wise, the cleanup code (stack_restore and clobbers)
1415 belongs to the end of the block, so propagate what we have. The
1416 stack_save operation belongs to the beginning of block, which we can
1417 infer from the bind_expr directly if the block has no explicit
1419 if (BIND_EXPR_BLOCK (bind_expr
))
1421 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1422 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1424 if (start_locus
== 0)
1425 start_locus
= EXPR_LOCATION (bind_expr
);
1430 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1431 the stack space allocated to the VLAs. */
1432 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1434 gcall
*stack_restore
;
1436 /* Save stack on entry and restore it on exit. Add a try_finally
1437 block to achieve this. */
1438 build_stack_save_restore (&stack_save
, &stack_restore
);
1440 gimple_set_location (stack_save
, start_locus
);
1441 gimple_set_location (stack_restore
, end_locus
);
1443 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1446 /* Add clobbers for all variables that go out of scope. */
1447 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1450 && !is_global_var (t
)
1451 && DECL_CONTEXT (t
) == current_function_decl
)
1453 if (!DECL_HARD_REGISTER (t
)
1454 && !TREE_THIS_VOLATILE (t
)
1455 && !DECL_HAS_VALUE_EXPR_P (t
)
1456 /* Only care for variables that have to be in memory. Others
1457 will be rewritten into SSA names, hence moved to the
1459 && !is_gimple_reg (t
)
1460 && flag_stack_reuse
!= SR_NONE
)
1462 tree clobber
= build_clobber (TREE_TYPE (t
));
1463 gimple
*clobber_stmt
;
1464 clobber_stmt
= gimple_build_assign (t
, clobber
);
1465 gimple_set_location (clobber_stmt
, end_locus
);
1466 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1469 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1471 tree
*c
= oacc_declare_returns
->get (t
);
1475 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1479 oacc_declare_returns
->remove (t
);
1481 if (oacc_declare_returns
->is_empty ())
1483 delete oacc_declare_returns
;
1484 oacc_declare_returns
= NULL
;
1490 if (asan_poisoned_variables
!= NULL
1491 && asan_poisoned_variables
->contains (t
))
1493 asan_poisoned_variables
->remove (t
);
1494 asan_poison_variable (t
, true, &cleanup
);
1497 if (gimplify_ctxp
->live_switch_vars
!= NULL
1498 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1499 gimplify_ctxp
->live_switch_vars
->remove (t
);
1505 gimple_stmt_iterator si
= gsi_start (cleanup
);
1507 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1509 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1515 gimple_seq new_body
;
1518 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1519 GIMPLE_TRY_FINALLY
);
1522 gimplify_seq_add_stmt (&new_body
, stack_save
);
1523 gimplify_seq_add_stmt (&new_body
, gs
);
1524 gimple_bind_set_body (bind_stmt
, new_body
);
1527 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1528 if (!gimplify_ctxp
->keep_stack
)
1529 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1530 gimplify_ctxp
->save_stack
= old_save_stack
;
1532 gimple_pop_bind_expr ();
1534 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1542 *expr_p
= NULL_TREE
;
1546 /* Maybe add early return predict statement to PRE_P sequence. */
1549 maybe_add_early_return_predict_stmt (gimple_seq
*pre_p
)
1551 /* If we are not in a conditional context, add PREDICT statement. */
1552 if (gimple_conditional_context ())
1554 gimple
*predict
= gimple_build_predict (PRED_TREE_EARLY_RETURN
,
1556 gimplify_seq_add_stmt (pre_p
, predict
);
1560 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1561 GIMPLE value, it is assigned to a new temporary and the statement is
1562 re-written to return the temporary.
1564 PRE_P points to the sequence where side effects that must happen before
1565 STMT should be stored. */
1567 static enum gimplify_status
1568 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1571 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1572 tree result_decl
, result
;
1574 if (ret_expr
== error_mark_node
)
1578 || TREE_CODE (ret_expr
) == RESULT_DECL
)
1580 maybe_add_early_return_predict_stmt (pre_p
);
1581 greturn
*ret
= gimple_build_return (ret_expr
);
1582 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1583 gimplify_seq_add_stmt (pre_p
, ret
);
1587 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1588 result_decl
= NULL_TREE
;
1589 else if (TREE_CODE (ret_expr
) == COMPOUND_EXPR
)
1591 /* Used in C++ for handling EH cleanup of the return value if a local
1592 cleanup throws. Assume the front-end knows what it's doing. */
1593 result_decl
= DECL_RESULT (current_function_decl
);
1594 /* But crash if we end up trying to modify ret_expr below. */
1595 ret_expr
= NULL_TREE
;
1599 result_decl
= TREE_OPERAND (ret_expr
, 0);
1601 /* See through a return by reference. */
1602 if (TREE_CODE (result_decl
) == INDIRECT_REF
)
1603 result_decl
= TREE_OPERAND (result_decl
, 0);
1605 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1606 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1607 && TREE_CODE (result_decl
) == RESULT_DECL
);
1610 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1611 Recall that aggregate_value_p is FALSE for any aggregate type that is
1612 returned in registers. If we're returning values in registers, then
1613 we don't want to extend the lifetime of the RESULT_DECL, particularly
1614 across another call. In addition, for those aggregates for which
1615 hard_function_value generates a PARALLEL, we'll die during normal
1616 expansion of structure assignments; there's special code in expand_return
1617 to handle this case that does not exist in expand_expr. */
1620 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1622 if (!poly_int_tree_p (DECL_SIZE (result_decl
)))
1624 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1625 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1626 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1627 should be effectively allocated by the caller, i.e. all calls to
1628 this function must be subject to the Return Slot Optimization. */
1629 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1630 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1632 result
= result_decl
;
1634 else if (gimplify_ctxp
->return_temp
)
1635 result
= gimplify_ctxp
->return_temp
;
1638 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1640 /* ??? With complex control flow (usually involving abnormal edges),
1641 we can wind up warning about an uninitialized value for this. Due
1642 to how this variable is constructed and initialized, this is never
1643 true. Give up and never warn. */
1644 TREE_NO_WARNING (result
) = 1;
1646 gimplify_ctxp
->return_temp
= result
;
1649 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1650 Then gimplify the whole thing. */
1651 if (result
!= result_decl
)
1652 TREE_OPERAND (ret_expr
, 0) = result
;
1654 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1656 maybe_add_early_return_predict_stmt (pre_p
);
1657 ret
= gimple_build_return (result
);
1658 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1659 gimplify_seq_add_stmt (pre_p
, ret
);
1664 /* Gimplify a variable-length array DECL. */
1667 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1669 /* This is a variable-sized decl. Simplify its size and mark it
1670 for deferred expansion. */
1671 tree t
, addr
, ptr_type
;
1673 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1674 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1676 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1677 if (DECL_HAS_VALUE_EXPR_P (decl
))
1680 /* All occurrences of this decl in final gimplified code will be
1681 replaced by indirection. Setting DECL_VALUE_EXPR does two
1682 things: First, it lets the rest of the gimplifier know what
1683 replacement to use. Second, it lets the debug info know
1684 where to find the value. */
1685 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1686 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1687 DECL_IGNORED_P (addr
) = 0;
1688 t
= build_fold_indirect_ref (addr
);
1689 TREE_THIS_NOTRAP (t
) = 1;
1690 SET_DECL_VALUE_EXPR (decl
, t
);
1691 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1693 t
= build_alloca_call_expr (DECL_SIZE_UNIT (decl
), DECL_ALIGN (decl
),
1694 max_int_size_in_bytes (TREE_TYPE (decl
)));
1695 /* The call has been built for a variable-sized object. */
1696 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1697 t
= fold_convert (ptr_type
, t
);
1698 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1700 gimplify_and_add (t
, seq_p
);
1702 /* Record the dynamic allocation associated with DECL if requested. */
1703 if (flag_callgraph_info
& CALLGRAPH_INFO_DYNAMIC_ALLOC
)
1704 record_dynamic_alloc (decl
);
1707 /* A helper function to be called via walk_tree. Mark all labels under *TP
1708 as being forced. To be called for DECL_INITIAL of static variables. */
1711 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1715 if (TREE_CODE (*tp
) == LABEL_DECL
)
1717 FORCED_LABEL (*tp
) = 1;
1718 cfun
->has_forced_label_in_static
= 1;
1724 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1725 and initialization explicit. */
1727 static enum gimplify_status
1728 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
1730 tree stmt
= *stmt_p
;
1731 tree decl
= DECL_EXPR_DECL (stmt
);
1733 *stmt_p
= NULL_TREE
;
1735 if (TREE_TYPE (decl
) == error_mark_node
)
1738 if ((TREE_CODE (decl
) == TYPE_DECL
1740 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
1742 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
1743 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1744 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
1747 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1748 in case its size expressions contain problematic nodes like CALL_EXPR. */
1749 if (TREE_CODE (decl
) == TYPE_DECL
1750 && DECL_ORIGINAL_TYPE (decl
)
1751 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
1753 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
1754 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
1755 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
1758 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
1760 tree init
= DECL_INITIAL (decl
);
1761 bool is_vla
= false;
1764 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl
), &size
)
1765 || (!TREE_STATIC (decl
)
1766 && flag_stack_check
== GENERIC_STACK_CHECK
1768 (unsigned HOST_WIDE_INT
) STACK_CHECK_MAX_VAR_SIZE
)))
1770 gimplify_vla_decl (decl
, seq_p
);
1774 if (asan_poisoned_variables
1776 && TREE_ADDRESSABLE (decl
)
1777 && !TREE_STATIC (decl
)
1778 && !DECL_HAS_VALUE_EXPR_P (decl
)
1779 && DECL_ALIGN (decl
) <= MAX_SUPPORTED_STACK_ALIGNMENT
1780 && dbg_cnt (asan_use_after_scope
)
1781 && !gimplify_omp_ctxp
)
1783 asan_poisoned_variables
->add (decl
);
1784 asan_poison_variable (decl
, false, seq_p
);
1785 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
1786 gimplify_ctxp
->live_switch_vars
->add (decl
);
1789 /* Some front ends do not explicitly declare all anonymous
1790 artificial variables. We compensate here by declaring the
1791 variables, though it would be better if the front ends would
1792 explicitly declare them. */
1793 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
1794 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
1795 gimple_add_tmp_var (decl
);
1797 if (init
&& init
!= error_mark_node
)
1799 if (!TREE_STATIC (decl
))
1801 DECL_INITIAL (decl
) = NULL_TREE
;
1802 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
1803 gimplify_and_add (init
, seq_p
);
1807 /* We must still examine initializers for static variables
1808 as they may contain a label address. */
1809 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
1816 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1817 and replacing the LOOP_EXPR with goto, but if the loop contains an
1818 EXIT_EXPR, we need to append a label for it to jump to. */
1820 static enum gimplify_status
1821 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1823 tree saved_label
= gimplify_ctxp
->exit_label
;
1824 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
1826 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
1828 gimplify_ctxp
->exit_label
= NULL_TREE
;
1830 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
1832 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
1834 if (gimplify_ctxp
->exit_label
)
1835 gimplify_seq_add_stmt (pre_p
,
1836 gimple_build_label (gimplify_ctxp
->exit_label
));
1838 gimplify_ctxp
->exit_label
= saved_label
;
1844 /* Gimplify a statement list onto a sequence. These may be created either
1845 by an enlightened front-end, or by shortcut_cond_expr. */
1847 static enum gimplify_status
1848 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
1850 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
1852 tree_stmt_iterator i
= tsi_start (*expr_p
);
1854 while (!tsi_end_p (i
))
1856 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
1869 /* Callback for walk_gimple_seq. */
1872 warn_switch_unreachable_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
1873 struct walk_stmt_info
*wi
)
1875 gimple
*stmt
= gsi_stmt (*gsi_p
);
1877 *handled_ops_p
= true;
1878 switch (gimple_code (stmt
))
1881 /* A compiler-generated cleanup or a user-written try block.
1882 If it's empty, don't dive into it--that would result in
1883 worse location info. */
1884 if (gimple_try_eval (stmt
) == NULL
)
1887 return integer_zero_node
;
1892 case GIMPLE_EH_FILTER
:
1893 case GIMPLE_TRANSACTION
:
1894 /* Walk the sub-statements. */
1895 *handled_ops_p
= false;
1899 /* Ignore these. We may generate them before declarations that
1900 are never executed. If there's something to warn about,
1901 there will be non-debug stmts too, and we'll catch those. */
1905 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
1907 *handled_ops_p
= false;
1912 /* Save the first "real" statement (not a decl/lexical scope/...). */
1914 return integer_zero_node
;
1919 /* Possibly warn about unreachable statements between switch's controlling
1920 expression and the first case. SEQ is the body of a switch expression. */
1923 maybe_warn_switch_unreachable (gimple_seq seq
)
1925 if (!warn_switch_unreachable
1926 /* This warning doesn't play well with Fortran when optimizations
1928 || lang_GNU_Fortran ()
1932 struct walk_stmt_info wi
;
1933 memset (&wi
, 0, sizeof (wi
));
1934 walk_gimple_seq (seq
, warn_switch_unreachable_r
, NULL
, &wi
);
1935 gimple
*stmt
= (gimple
*) wi
.info
;
1937 if (stmt
&& gimple_code (stmt
) != GIMPLE_LABEL
)
1939 if (gimple_code (stmt
) == GIMPLE_GOTO
1940 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
1941 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
1942 /* Don't warn for compiler-generated gotos. These occur
1943 in Duff's devices, for example. */;
1945 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
1946 "statement will never be executed");
1951 /* A label entry that pairs label and a location. */
1958 /* Find LABEL in vector of label entries VEC. */
1960 static struct label_entry
*
1961 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
1964 struct label_entry
*l
;
1966 FOR_EACH_VEC_ELT (*vec
, i
, l
)
1967 if (l
->label
== label
)
1972 /* Return true if LABEL, a LABEL_DECL, represents a case label
1973 in a vector of labels CASES. */
1976 case_label_p (const vec
<tree
> *cases
, tree label
)
1981 FOR_EACH_VEC_ELT (*cases
, i
, l
)
1982 if (CASE_LABEL (l
) == label
)
1987 /* Find the last nondebug statement in a scope STMT. */
1990 last_stmt_in_scope (gimple
*stmt
)
1995 switch (gimple_code (stmt
))
1999 gbind
*bind
= as_a
<gbind
*> (stmt
);
2000 stmt
= gimple_seq_last_nondebug_stmt (gimple_bind_body (bind
));
2001 return last_stmt_in_scope (stmt
);
2006 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
2007 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt
));
2008 gimple
*last_eval
= last_stmt_in_scope (stmt
);
2009 if (gimple_stmt_may_fallthru (last_eval
)
2010 && (last_eval
== NULL
2011 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
2012 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
2014 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt
));
2015 return last_stmt_in_scope (stmt
);
2029 /* Collect interesting labels in LABELS and return the statement preceding
2030 another case label, or a user-defined label. Store a location useful
2031 to give warnings at *PREVLOC (usually the location of the returned
2032 statement or of its surrounding scope). */
2035 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
2036 auto_vec
<struct label_entry
> *labels
,
2037 location_t
*prevloc
)
2039 gimple
*prev
= NULL
;
2041 *prevloc
= UNKNOWN_LOCATION
;
2044 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
)
2046 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2047 which starts on a GIMPLE_SWITCH and ends with a break label.
2048 Handle that as a single statement that can fall through. */
2049 gbind
*bind
= as_a
<gbind
*> (gsi_stmt (*gsi_p
));
2050 gimple
*first
= gimple_seq_first_stmt (gimple_bind_body (bind
));
2051 gimple
*last
= gimple_seq_last_stmt (gimple_bind_body (bind
));
2053 && gimple_code (first
) == GIMPLE_SWITCH
2054 && gimple_code (last
) == GIMPLE_LABEL
)
2056 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2057 if (SWITCH_BREAK_LABEL_P (label
))
2065 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
2066 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
2068 /* Nested scope. Only look at the last statement of
2069 the innermost scope. */
2070 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
2071 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
2075 /* It might be a label without a location. Use the
2076 location of the scope then. */
2077 if (!gimple_has_location (prev
))
2078 *prevloc
= bind_loc
;
2084 /* Ifs are tricky. */
2085 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
2087 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
2088 tree false_lab
= gimple_cond_false_label (cond_stmt
);
2089 location_t if_loc
= gimple_location (cond_stmt
);
2092 if (i > 1) goto <D.2259>; else goto D;
2093 we can't do much with the else-branch. */
2094 if (!DECL_ARTIFICIAL (false_lab
))
2097 /* Go on until the false label, then one step back. */
2098 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
2100 gimple
*stmt
= gsi_stmt (*gsi_p
);
2101 if (gimple_code (stmt
) == GIMPLE_LABEL
2102 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
2106 /* Not found? Oops. */
2107 if (gsi_end_p (*gsi_p
))
2110 struct label_entry l
= { false_lab
, if_loc
};
2111 labels
->safe_push (l
);
2113 /* Go to the last statement of the then branch. */
2116 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2122 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
2123 && !gimple_has_location (gsi_stmt (*gsi_p
)))
2125 /* Look at the statement before, it might be
2126 attribute fallthrough, in which case don't warn. */
2128 bool fallthru_before_dest
2129 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
2131 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
2132 if (!fallthru_before_dest
)
2134 struct label_entry l
= { goto_dest
, if_loc
};
2135 labels
->safe_push (l
);
2138 /* And move back. */
2142 /* Remember the last statement. Skip labels that are of no interest
2144 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2146 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
2147 if (find_label_entry (labels
, label
))
2148 prev
= gsi_stmt (*gsi_p
);
2150 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
2152 else if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_PREDICT
)
2154 else if (!is_gimple_debug (gsi_stmt (*gsi_p
)))
2155 prev
= gsi_stmt (*gsi_p
);
2158 while (!gsi_end_p (*gsi_p
)
2159 /* Stop if we find a case or a user-defined label. */
2160 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
2161 || !gimple_has_location (gsi_stmt (*gsi_p
))));
2163 if (prev
&& gimple_has_location (prev
))
2164 *prevloc
= gimple_location (prev
);
2168 /* Return true if the switch fallthough warning should occur. LABEL is
2169 the label statement that we're falling through to. */
2172 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
2174 gimple_stmt_iterator gsi
= *gsi_p
;
2176 /* Don't warn if the label is marked with a "falls through" comment. */
2177 if (FALLTHROUGH_LABEL_P (label
))
2180 /* Don't warn for non-case labels followed by a statement:
2185 as these are likely intentional. */
2186 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
2189 while (!gsi_end_p (gsi
)
2190 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2191 && (l
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi
))))
2192 && !case_label_p (&gimplify_ctxp
->case_labels
, l
))
2193 gsi_next_nondebug (&gsi
);
2194 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2198 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2199 immediately breaks. */
2202 /* Skip all immediately following labels. */
2203 while (!gsi_end_p (gsi
)
2204 && (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2205 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_PREDICT
))
2206 gsi_next_nondebug (&gsi
);
2208 /* { ... something; default:; } */
2210 /* { ... something; default: break; } or
2211 { ... something; default: goto L; } */
2212 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2213 /* { ... something; default: return; } */
2214 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
2220 /* Callback for walk_gimple_seq. */
2223 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2224 struct walk_stmt_info
*)
2226 gimple
*stmt
= gsi_stmt (*gsi_p
);
2228 *handled_ops_p
= true;
2229 switch (gimple_code (stmt
))
2234 case GIMPLE_EH_FILTER
:
2235 case GIMPLE_TRANSACTION
:
2236 /* Walk the sub-statements. */
2237 *handled_ops_p
= false;
2240 /* Find a sequence of form:
2247 and possibly warn. */
2250 /* Found a label. Skip all immediately following labels. */
2251 while (!gsi_end_p (*gsi_p
)
2252 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2253 gsi_next_nondebug (gsi_p
);
2255 /* There might be no more statements. */
2256 if (gsi_end_p (*gsi_p
))
2257 return integer_zero_node
;
2259 /* Vector of labels that fall through. */
2260 auto_vec
<struct label_entry
> labels
;
2262 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
, &prevloc
);
2264 /* There might be no more statements. */
2265 if (gsi_end_p (*gsi_p
))
2266 return integer_zero_node
;
2268 gimple
*next
= gsi_stmt (*gsi_p
);
2270 /* If what follows is a label, then we may have a fallthrough. */
2271 if (gimple_code (next
) == GIMPLE_LABEL
2272 && gimple_has_location (next
)
2273 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2276 struct label_entry
*l
;
2277 bool warned_p
= false;
2278 auto_diagnostic_group d
;
2279 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
2281 else if (gimple_code (prev
) == GIMPLE_LABEL
2282 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2283 && (l
= find_label_entry (&labels
, label
)))
2284 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2285 "this statement may fall through");
2286 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2287 /* Try to be clever and don't warn when the statement
2288 can't actually fall through. */
2289 && gimple_stmt_may_fallthru (prev
)
2290 && prevloc
!= UNKNOWN_LOCATION
)
2291 warned_p
= warning_at (prevloc
,
2292 OPT_Wimplicit_fallthrough_
,
2293 "this statement may fall through");
2295 inform (gimple_location (next
), "here");
2297 /* Mark this label as processed so as to prevent multiple
2298 warnings in nested switches. */
2299 FALLTHROUGH_LABEL_P (label
) = true;
2301 /* So that next warn_implicit_fallthrough_r will start looking for
2302 a new sequence starting with this label. */
2313 /* Warn when a switch case falls through. */
2316 maybe_warn_implicit_fallthrough (gimple_seq seq
)
2318 if (!warn_implicit_fallthrough
)
2321 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2324 || lang_GNU_OBJC ()))
2327 struct walk_stmt_info wi
;
2328 memset (&wi
, 0, sizeof (wi
));
2329 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
2332 /* Callback for walk_gimple_seq. */
2335 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2336 struct walk_stmt_info
*wi
)
2338 gimple
*stmt
= gsi_stmt (*gsi_p
);
2340 *handled_ops_p
= true;
2341 switch (gimple_code (stmt
))
2346 case GIMPLE_EH_FILTER
:
2347 case GIMPLE_TRANSACTION
:
2348 /* Walk the sub-statements. */
2349 *handled_ops_p
= false;
2352 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2354 gsi_remove (gsi_p
, true);
2355 if (gsi_end_p (*gsi_p
))
2357 *static_cast<location_t
*>(wi
->info
) = gimple_location (stmt
);
2358 return integer_zero_node
;
2362 location_t loc
= gimple_location (stmt
);
2364 gimple_stmt_iterator gsi2
= *gsi_p
;
2365 stmt
= gsi_stmt (gsi2
);
2366 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2368 /* Go on until the artificial label. */
2369 tree goto_dest
= gimple_goto_dest (stmt
);
2370 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2372 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2373 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2378 /* Not found? Stop. */
2379 if (gsi_end_p (gsi2
))
2382 /* Look one past it. */
2386 /* We're looking for a case label or default label here. */
2387 while (!gsi_end_p (gsi2
))
2389 stmt
= gsi_stmt (gsi2
);
2390 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2392 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2393 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2399 else if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2401 else if (!is_gimple_debug (stmt
))
2402 /* Anything else is not expected. */
2407 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2408 "a case label or default label");
2417 /* Expand all FALLTHROUGH () calls in SEQ. */
2420 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2422 struct walk_stmt_info wi
;
2424 memset (&wi
, 0, sizeof (wi
));
2425 wi
.info
= (void *) &loc
;
2426 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2427 if (wi
.callback_result
== integer_zero_node
)
2428 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2429 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2430 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2431 "a case label or default label");
2435 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2438 static enum gimplify_status
2439 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2441 tree switch_expr
= *expr_p
;
2442 gimple_seq switch_body_seq
= NULL
;
2443 enum gimplify_status ret
;
2444 tree index_type
= TREE_TYPE (switch_expr
);
2445 if (index_type
== NULL_TREE
)
2446 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
2448 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2450 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2453 if (SWITCH_BODY (switch_expr
))
2456 vec
<tree
> saved_labels
;
2457 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2458 tree default_case
= NULL_TREE
;
2459 gswitch
*switch_stmt
;
2461 /* Save old labels, get new ones from body, then restore the old
2462 labels. Save all the things from the switch body to append after. */
2463 saved_labels
= gimplify_ctxp
->case_labels
;
2464 gimplify_ctxp
->case_labels
.create (8);
2466 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2467 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2468 tree_code body_type
= TREE_CODE (SWITCH_BODY (switch_expr
));
2469 if (body_type
== BIND_EXPR
|| body_type
== STATEMENT_LIST
)
2470 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2472 gimplify_ctxp
->live_switch_vars
= NULL
;
2474 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2475 gimplify_ctxp
->in_switch_expr
= true;
2477 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2479 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2480 maybe_warn_switch_unreachable (switch_body_seq
);
2481 maybe_warn_implicit_fallthrough (switch_body_seq
);
2482 /* Only do this for the outermost GIMPLE_SWITCH. */
2483 if (!gimplify_ctxp
->in_switch_expr
)
2484 expand_FALLTHROUGH (&switch_body_seq
);
2486 labels
= gimplify_ctxp
->case_labels
;
2487 gimplify_ctxp
->case_labels
= saved_labels
;
2489 if (gimplify_ctxp
->live_switch_vars
)
2491 gcc_assert (gimplify_ctxp
->live_switch_vars
->is_empty ());
2492 delete gimplify_ctxp
->live_switch_vars
;
2494 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2496 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2499 bool add_bind
= false;
2502 glabel
*new_default
;
2505 = build_case_label (NULL_TREE
, NULL_TREE
,
2506 create_artificial_label (UNKNOWN_LOCATION
));
2507 if (old_in_switch_expr
)
2509 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case
)) = 1;
2512 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2513 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2515 else if (old_in_switch_expr
)
2517 gimple
*last
= gimple_seq_last_stmt (switch_body_seq
);
2518 if (last
&& gimple_code (last
) == GIMPLE_LABEL
)
2520 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2521 if (SWITCH_BREAK_LABEL_P (label
))
2526 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2527 default_case
, labels
);
2528 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2529 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2530 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2531 so that we can easily find the start and end of the switch
2535 gimple_seq bind_body
= NULL
;
2536 gimplify_seq_add_stmt (&bind_body
, switch_stmt
);
2537 gimple_seq_add_seq (&bind_body
, switch_body_seq
);
2538 gbind
*bind
= gimple_build_bind (NULL_TREE
, bind_body
, NULL_TREE
);
2539 gimple_set_location (bind
, EXPR_LOCATION (switch_expr
));
2540 gimplify_seq_add_stmt (pre_p
, bind
);
2544 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
2545 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
2555 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2557 static enum gimplify_status
2558 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2560 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
2561 == current_function_decl
);
2563 tree label
= LABEL_EXPR_LABEL (*expr_p
);
2564 glabel
*label_stmt
= gimple_build_label (label
);
2565 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2566 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2568 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2569 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2571 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2572 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2578 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2580 static enum gimplify_status
2581 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2583 struct gimplify_ctx
*ctxp
;
2586 /* Invalid programs can play Duff's Device type games with, for example,
2587 #pragma omp parallel. At least in the C front end, we don't
2588 detect such invalid branches until after gimplification, in the
2589 diagnose_omp_blocks pass. */
2590 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
2591 if (ctxp
->case_labels
.exists ())
2594 tree label
= CASE_LABEL (*expr_p
);
2595 label_stmt
= gimple_build_label (label
);
2596 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2597 ctxp
->case_labels
.safe_push (*expr_p
);
2598 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2600 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2601 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2603 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2604 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2610 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2614 build_and_jump (tree
*label_p
)
2616 if (label_p
== NULL
)
2617 /* If there's nowhere to jump, just fall through. */
2620 if (*label_p
== NULL_TREE
)
2622 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
2626 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
2629 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2630 This also involves building a label to jump to and communicating it to
2631 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2633 static enum gimplify_status
2634 gimplify_exit_expr (tree
*expr_p
)
2636 tree cond
= TREE_OPERAND (*expr_p
, 0);
2639 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
2640 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
2646 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2647 different from its canonical type, wrap the whole thing inside a
2648 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2651 The canonical type of a COMPONENT_REF is the type of the field being
2652 referenced--unless the field is a bit-field which can be read directly
2653 in a smaller mode, in which case the canonical type is the
2654 sign-appropriate type corresponding to that mode. */
2657 canonicalize_component_ref (tree
*expr_p
)
2659 tree expr
= *expr_p
;
2662 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
2664 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
2665 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
2667 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
2669 /* One could argue that all the stuff below is not necessary for
2670 the non-bitfield case and declare it a FE error if type
2671 adjustment would be needed. */
2672 if (TREE_TYPE (expr
) != type
)
2674 #ifdef ENABLE_TYPES_CHECKING
2675 tree old_type
= TREE_TYPE (expr
);
2679 /* We need to preserve qualifiers and propagate them from
2681 type_quals
= TYPE_QUALS (type
)
2682 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
2683 if (TYPE_QUALS (type
) != type_quals
)
2684 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
2686 /* Set the type of the COMPONENT_REF to the underlying type. */
2687 TREE_TYPE (expr
) = type
;
2689 #ifdef ENABLE_TYPES_CHECKING
2690 /* It is now a FE error, if the conversion from the canonical
2691 type to the original expression type is not useless. */
2692 gcc_assert (useless_type_conversion_p (old_type
, type
));
2697 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2698 to foo, embed that change in the ADDR_EXPR by converting
2703 where L is the lower bound. For simplicity, only do this for constant
2705 The constraint is that the type of &array[L] is trivially convertible
2709 canonicalize_addr_expr (tree
*expr_p
)
2711 tree expr
= *expr_p
;
2712 tree addr_expr
= TREE_OPERAND (expr
, 0);
2713 tree datype
, ddatype
, pddatype
;
2715 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2716 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
2717 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
2720 /* The addr_expr type should be a pointer to an array. */
2721 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
2722 if (TREE_CODE (datype
) != ARRAY_TYPE
)
2725 /* The pointer to element type shall be trivially convertible to
2726 the expression pointer type. */
2727 ddatype
= TREE_TYPE (datype
);
2728 pddatype
= build_pointer_type (ddatype
);
2729 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
2733 /* The lower bound and element sizes must be constant. */
2734 if (!TYPE_SIZE_UNIT (ddatype
)
2735 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
2736 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
2737 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
2740 /* All checks succeeded. Build a new node to merge the cast. */
2741 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
2742 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
2743 NULL_TREE
, NULL_TREE
);
2744 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
2746 /* We can have stripped a required restrict qualifier above. */
2747 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
2748 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
2751 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2752 underneath as appropriate. */
2754 static enum gimplify_status
2755 gimplify_conversion (tree
*expr_p
)
2757 location_t loc
= EXPR_LOCATION (*expr_p
);
2758 gcc_assert (CONVERT_EXPR_P (*expr_p
));
2760 /* Then strip away all but the outermost conversion. */
2761 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
2763 /* And remove the outermost conversion if it's useless. */
2764 if (tree_ssa_useless_type_conversion (*expr_p
))
2765 *expr_p
= TREE_OPERAND (*expr_p
, 0);
2767 /* If we still have a conversion at the toplevel,
2768 then canonicalize some constructs. */
2769 if (CONVERT_EXPR_P (*expr_p
))
2771 tree sub
= TREE_OPERAND (*expr_p
, 0);
2773 /* If a NOP conversion is changing the type of a COMPONENT_REF
2774 expression, then canonicalize its type now in order to expose more
2775 redundant conversions. */
2776 if (TREE_CODE (sub
) == COMPONENT_REF
)
2777 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
2779 /* If a NOP conversion is changing a pointer to array of foo
2780 to a pointer to foo, embed that change in the ADDR_EXPR. */
2781 else if (TREE_CODE (sub
) == ADDR_EXPR
)
2782 canonicalize_addr_expr (expr_p
);
2785 /* If we have a conversion to a non-register type force the
2786 use of a VIEW_CONVERT_EXPR instead. */
2787 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
2788 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
2789 TREE_OPERAND (*expr_p
, 0));
2791 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2792 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
2793 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
2798 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2799 DECL_VALUE_EXPR, and it's worth re-examining things. */
2801 static enum gimplify_status
2802 gimplify_var_or_parm_decl (tree
*expr_p
)
2804 tree decl
= *expr_p
;
2806 /* ??? If this is a local variable, and it has not been seen in any
2807 outer BIND_EXPR, then it's probably the result of a duplicate
2808 declaration, for which we've already issued an error. It would
2809 be really nice if the front end wouldn't leak these at all.
2810 Currently the only known culprit is C++ destructors, as seen
2811 in g++.old-deja/g++.jason/binding.C. */
2813 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
2814 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
2815 && decl_function_context (decl
) == current_function_decl
)
2817 gcc_assert (seen_error ());
2821 /* When within an OMP context, notice uses of variables. */
2822 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
2825 /* If the decl is an alias for another expression, substitute it now. */
2826 if (DECL_HAS_VALUE_EXPR_P (decl
))
2828 *expr_p
= unshare_expr (DECL_VALUE_EXPR (decl
));
2835 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2838 recalculate_side_effects (tree t
)
2840 enum tree_code code
= TREE_CODE (t
);
2841 int len
= TREE_OPERAND_LENGTH (t
);
2844 switch (TREE_CODE_CLASS (code
))
2846 case tcc_expression
:
2852 case PREDECREMENT_EXPR
:
2853 case PREINCREMENT_EXPR
:
2854 case POSTDECREMENT_EXPR
:
2855 case POSTINCREMENT_EXPR
:
2856 /* All of these have side-effects, no matter what their
2865 case tcc_comparison
: /* a comparison expression */
2866 case tcc_unary
: /* a unary arithmetic expression */
2867 case tcc_binary
: /* a binary arithmetic expression */
2868 case tcc_reference
: /* a reference */
2869 case tcc_vl_exp
: /* a function call */
2870 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
2871 for (i
= 0; i
< len
; ++i
)
2873 tree op
= TREE_OPERAND (t
, i
);
2874 if (op
&& TREE_SIDE_EFFECTS (op
))
2875 TREE_SIDE_EFFECTS (t
) = 1;
2880 /* No side-effects. */
2888 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2892 : min_lval '[' val ']'
2894 | compound_lval '[' val ']'
2895 | compound_lval '.' ID
2897 This is not part of the original SIMPLE definition, which separates
2898 array and member references, but it seems reasonable to handle them
2899 together. Also, this way we don't run into problems with union
2900 aliasing; gcc requires that for accesses through a union to alias, the
2901 union reference must be explicit, which was not always the case when we
2902 were splitting up array and member refs.
2904 PRE_P points to the sequence where side effects that must happen before
2905 *EXPR_P should be stored.
2907 POST_P points to the sequence where side effects that must happen after
2908 *EXPR_P should be stored. */
2910 static enum gimplify_status
2911 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2912 fallback_t fallback
)
2915 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
2917 location_t loc
= EXPR_LOCATION (*expr_p
);
2918 tree expr
= *expr_p
;
2920 /* Create a stack of the subexpressions so later we can walk them in
2921 order from inner to outer. */
2922 auto_vec
<tree
, 10> expr_stack
;
2924 /* We can handle anything that get_inner_reference can deal with. */
2925 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
2928 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2929 if (TREE_CODE (*p
) == INDIRECT_REF
)
2930 *p
= fold_indirect_ref_loc (loc
, *p
);
2932 if (handled_component_p (*p
))
2934 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2935 additional COMPONENT_REFs. */
2936 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
2937 && gimplify_var_or_parm_decl (p
) == GS_OK
)
2942 expr_stack
.safe_push (*p
);
2945 gcc_assert (expr_stack
.length ());
2947 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2948 walked through and P points to the innermost expression.
2950 Java requires that we elaborated nodes in source order. That
2951 means we must gimplify the inner expression followed by each of
2952 the indices, in order. But we can't gimplify the inner
2953 expression until we deal with any variable bounds, sizes, or
2954 positions in order to deal with PLACEHOLDER_EXPRs.
2956 So we do this in three steps. First we deal with the annotations
2957 for any variables in the components, then we gimplify the base,
2958 then we gimplify any indices, from left to right. */
2959 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
2961 tree t
= expr_stack
[i
];
2963 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2965 /* Gimplify the low bound and element type size and put them into
2966 the ARRAY_REF. If these values are set, they have already been
2968 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2970 tree low
= unshare_expr (array_ref_low_bound (t
));
2971 if (!is_gimple_min_invariant (low
))
2973 TREE_OPERAND (t
, 2) = low
;
2974 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
2975 post_p
, is_gimple_reg
,
2977 ret
= MIN (ret
, tret
);
2982 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
2983 is_gimple_reg
, fb_rvalue
);
2984 ret
= MIN (ret
, tret
);
2987 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
2989 tree elmt_size
= array_ref_element_size (t
);
2990 if (!is_gimple_min_invariant (elmt_size
))
2992 elmt_size
= unshare_expr (elmt_size
);
2993 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
2994 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
2996 /* Divide the element size by the alignment of the element
2998 elmt_size
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3001 TREE_OPERAND (t
, 3) = elmt_size
;
3002 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
3003 post_p
, is_gimple_reg
,
3005 ret
= MIN (ret
, tret
);
3010 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
3011 is_gimple_reg
, fb_rvalue
);
3012 ret
= MIN (ret
, tret
);
3015 else if (TREE_CODE (t
) == COMPONENT_REF
)
3017 /* Set the field offset into T and gimplify it. */
3018 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
3020 tree offset
= component_ref_field_offset (t
);
3021 if (!is_gimple_min_invariant (offset
))
3023 offset
= unshare_expr (offset
);
3024 tree field
= TREE_OPERAND (t
, 1);
3026 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
3028 /* Divide the offset by its alignment. */
3029 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3032 TREE_OPERAND (t
, 2) = offset
;
3033 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
3034 post_p
, is_gimple_reg
,
3036 ret
= MIN (ret
, tret
);
3041 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3042 is_gimple_reg
, fb_rvalue
);
3043 ret
= MIN (ret
, tret
);
3048 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3049 so as to match the min_lval predicate. Failure to do so may result
3050 in the creation of large aggregate temporaries. */
3051 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
3052 fallback
| fb_lvalue
);
3053 ret
= MIN (ret
, tret
);
3055 /* And finally, the indices and operands of ARRAY_REF. During this
3056 loop we also remove any useless conversions. */
3057 for (; expr_stack
.length () > 0; )
3059 tree t
= expr_stack
.pop ();
3061 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3063 /* Gimplify the dimension. */
3064 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1)))
3066 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
3067 is_gimple_val
, fb_rvalue
);
3068 ret
= MIN (ret
, tret
);
3072 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
3074 /* The innermost expression P may have originally had
3075 TREE_SIDE_EFFECTS set which would have caused all the outer
3076 expressions in *EXPR_P leading to P to also have had
3077 TREE_SIDE_EFFECTS set. */
3078 recalculate_side_effects (t
);
3081 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3082 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
3084 canonicalize_component_ref (expr_p
);
3087 expr_stack
.release ();
3089 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
3094 /* Gimplify the self modifying expression pointed to by EXPR_P
3097 PRE_P points to the list where side effects that must happen before
3098 *EXPR_P should be stored.
3100 POST_P points to the list where side effects that must happen after
3101 *EXPR_P should be stored.
3103 WANT_VALUE is nonzero iff we want to use the value of this expression
3104 in another expression.
3106 ARITH_TYPE is the type the computation should be performed in. */
3108 enum gimplify_status
3109 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3110 bool want_value
, tree arith_type
)
3112 enum tree_code code
;
3113 tree lhs
, lvalue
, rhs
, t1
;
3114 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
3116 enum tree_code arith_code
;
3117 enum gimplify_status ret
;
3118 location_t loc
= EXPR_LOCATION (*expr_p
);
3120 code
= TREE_CODE (*expr_p
);
3122 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
3123 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
3125 /* Prefix or postfix? */
3126 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
3127 /* Faster to treat as prefix if result is not used. */
3128 postfix
= want_value
;
3132 /* For postfix, make sure the inner expression's post side effects
3133 are executed after side effects from this expression. */
3137 /* Add or subtract? */
3138 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
3139 arith_code
= PLUS_EXPR
;
3141 arith_code
= MINUS_EXPR
;
3143 /* Gimplify the LHS into a GIMPLE lvalue. */
3144 lvalue
= TREE_OPERAND (*expr_p
, 0);
3145 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
3146 if (ret
== GS_ERROR
)
3149 /* Extract the operands to the arithmetic operation. */
3151 rhs
= TREE_OPERAND (*expr_p
, 1);
3153 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3154 that as the result value and in the postqueue operation. */
3157 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
3158 if (ret
== GS_ERROR
)
3161 lhs
= get_initialized_tmp_var (lhs
, pre_p
);
3164 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3165 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
3167 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
3168 if (arith_code
== MINUS_EXPR
)
3169 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
3170 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
3173 t1
= fold_convert (TREE_TYPE (*expr_p
),
3174 fold_build2 (arith_code
, arith_type
,
3175 fold_convert (arith_type
, lhs
),
3176 fold_convert (arith_type
, rhs
)));
3180 gimplify_assign (lvalue
, t1
, pre_p
);
3181 gimplify_seq_add_seq (orig_post_p
, post
);
3187 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
3192 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3195 maybe_with_size_expr (tree
*expr_p
)
3197 tree expr
= *expr_p
;
3198 tree type
= TREE_TYPE (expr
);
3201 /* If we've already wrapped this or the type is error_mark_node, we can't do
3203 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
3204 || type
== error_mark_node
)
3207 /* If the size isn't known or is a constant, we have nothing to do. */
3208 size
= TYPE_SIZE_UNIT (type
);
3209 if (!size
|| poly_int_tree_p (size
))
3212 /* Otherwise, make a WITH_SIZE_EXPR. */
3213 size
= unshare_expr (size
);
3214 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
3215 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
3218 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3219 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3220 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3221 gimplified to an SSA name. */
3223 enum gimplify_status
3224 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
3227 bool (*test
) (tree
);
3230 /* In general, we allow lvalues for function arguments to avoid
3231 extra overhead of copying large aggregates out of even larger
3232 aggregates into temporaries only to copy the temporaries to
3233 the argument list. Make optimizers happy by pulling out to
3234 temporaries those types that fit in registers. */
3235 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3236 test
= is_gimple_val
, fb
= fb_rvalue
;
3239 test
= is_gimple_lvalue
, fb
= fb_either
;
3240 /* Also strip a TARGET_EXPR that would force an extra copy. */
3241 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3243 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
3245 && !VOID_TYPE_P (TREE_TYPE (init
)))
3250 /* If this is a variable sized type, we must remember the size. */
3251 maybe_with_size_expr (arg_p
);
3253 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3254 /* Make sure arguments have the same location as the function call
3256 protected_set_expr_location (*arg_p
, call_location
);
3258 /* There is a sequence point before a function call. Side effects in
3259 the argument list must occur before the actual call. So, when
3260 gimplifying arguments, force gimplify_expr to use an internal
3261 post queue which is then appended to the end of PRE_P. */
3262 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
3265 /* Don't fold inside offloading or taskreg regions: it can break code by
3266 adding decl references that weren't in the source. We'll do it during
3267 omplower pass instead. */
3270 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3272 struct gimplify_omp_ctx
*ctx
;
3273 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3274 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3276 else if ((ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
3278 /* Delay folding of builtins until the IL is in consistent state
3279 so the diagnostic machinery can do a better job. */
3280 if (gimple_call_builtin_p (gsi_stmt (*gsi
)))
3282 return fold_stmt (gsi
);
3285 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3286 WANT_VALUE is true if the result of the call is desired. */
3288 static enum gimplify_status
3289 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3291 tree fndecl
, parms
, p
, fnptrtype
;
3292 enum gimplify_status ret
;
3295 bool builtin_va_start_p
= false;
3296 location_t loc
= EXPR_LOCATION (*expr_p
);
3298 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3300 /* For reliable diagnostics during inlining, it is necessary that
3301 every call_expr be annotated with file and line. */
3302 if (! EXPR_HAS_LOCATION (*expr_p
))
3303 SET_EXPR_LOCATION (*expr_p
, input_location
);
3305 /* Gimplify internal functions created in the FEs. */
3306 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3311 nargs
= call_expr_nargs (*expr_p
);
3312 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3313 auto_vec
<tree
> vargs (nargs
);
3315 for (i
= 0; i
< nargs
; i
++)
3317 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3318 EXPR_LOCATION (*expr_p
));
3319 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3322 gcall
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3323 gimple_call_set_nothrow (call
, TREE_NOTHROW (*expr_p
));
3324 gimplify_seq_add_stmt (pre_p
, call
);
3328 /* This may be a call to a builtin function.
3330 Builtin function calls may be transformed into different
3331 (and more efficient) builtin function calls under certain
3332 circumstances. Unfortunately, gimplification can muck things
3333 up enough that the builtin expanders are not aware that certain
3334 transformations are still valid.
3336 So we attempt transformation/gimplification of the call before
3337 we gimplify the CALL_EXPR. At this time we do not manage to
3338 transform all calls in the same manner as the expanders do, but
3339 we do transform most of them. */
3340 fndecl
= get_callee_fndecl (*expr_p
);
3341 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3342 switch (DECL_FUNCTION_CODE (fndecl
))
3344 CASE_BUILT_IN_ALLOCA
:
3345 /* If the call has been built for a variable-sized object, then we
3346 want to restore the stack level when the enclosing BIND_EXPR is
3347 exited to reclaim the allocated space; otherwise, we precisely
3348 need to do the opposite and preserve the latest stack level. */
3349 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3350 gimplify_ctxp
->save_stack
= true;
3352 gimplify_ctxp
->keep_stack
= true;
3355 case BUILT_IN_VA_START
:
3357 builtin_va_start_p
= TRUE
;
3358 if (call_expr_nargs (*expr_p
) < 2)
3360 error ("too few arguments to function %<va_start%>");
3361 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3365 if (fold_builtin_next_arg (*expr_p
, true))
3367 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3373 case BUILT_IN_EH_RETURN
:
3374 cfun
->calls_eh_return
= true;
3380 if (fndecl
&& fndecl_built_in_p (fndecl
))
3382 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3383 if (new_tree
&& new_tree
!= *expr_p
)
3385 /* There was a transformation of this call which computes the
3386 same value, but in a more efficient way. Return and try
3393 /* Remember the original function pointer type. */
3394 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
3399 && (cfun
->curr_properties
& PROP_gimple_any
) == 0)
3401 tree variant
= omp_resolve_declare_variant (fndecl
);
3402 if (variant
!= fndecl
)
3403 CALL_EXPR_FN (*expr_p
) = build1 (ADDR_EXPR
, fnptrtype
, variant
);
3406 /* There is a sequence point before the call, so any side effects in
3407 the calling expression must occur before the actual call. Force
3408 gimplify_expr to use an internal post queue. */
3409 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3410 is_gimple_call_addr
, fb_rvalue
);
3412 nargs
= call_expr_nargs (*expr_p
);
3414 /* Get argument types for verification. */
3415 fndecl
= get_callee_fndecl (*expr_p
);
3418 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3420 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3422 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3423 p
= DECL_ARGUMENTS (fndecl
);
3428 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3431 /* If the last argument is __builtin_va_arg_pack () and it is not
3432 passed as a named argument, decrease the number of CALL_EXPR
3433 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3436 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3438 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3439 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3442 && fndecl_built_in_p (last_arg_fndecl
, BUILT_IN_VA_ARG_PACK
))
3444 tree call
= *expr_p
;
3447 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
3448 CALL_EXPR_FN (call
),
3449 nargs
, CALL_EXPR_ARGP (call
));
3451 /* Copy all CALL_EXPR flags, location and block, except
3452 CALL_EXPR_VA_ARG_PACK flag. */
3453 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
3454 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
3455 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
3456 = CALL_EXPR_RETURN_SLOT_OPT (call
);
3457 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
3458 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
3460 /* Set CALL_EXPR_VA_ARG_PACK. */
3461 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
3465 /* If the call returns twice then after building the CFG the call
3466 argument computations will no longer dominate the call because
3467 we add an abnormal incoming edge to the call. So do not use SSA
3469 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
3471 /* Gimplify the function arguments. */
3474 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
3475 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
3476 PUSH_ARGS_REVERSED
? i
-- : i
++)
3478 enum gimplify_status t
;
3480 /* Avoid gimplifying the second argument to va_start, which needs to
3481 be the plain PARM_DECL. */
3482 if ((i
!= 1) || !builtin_va_start_p
)
3484 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3485 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3493 /* Gimplify the static chain. */
3494 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
3496 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
3497 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
3500 enum gimplify_status t
;
3501 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
3502 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3508 /* Verify the function result. */
3509 if (want_value
&& fndecl
3510 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
3512 error_at (loc
, "using result of function returning %<void%>");
3516 /* Try this again in case gimplification exposed something. */
3517 if (ret
!= GS_ERROR
)
3519 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3521 if (new_tree
&& new_tree
!= *expr_p
)
3523 /* There was a transformation of this call which computes the
3524 same value, but in a more efficient way. Return and try
3532 *expr_p
= error_mark_node
;
3536 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3537 decl. This allows us to eliminate redundant or useless
3538 calls to "const" functions. */
3539 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
3541 int flags
= call_expr_flags (*expr_p
);
3542 if (flags
& (ECF_CONST
| ECF_PURE
)
3543 /* An infinite loop is considered a side effect. */
3544 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
3545 TREE_SIDE_EFFECTS (*expr_p
) = 0;
3548 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3549 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3550 form and delegate the creation of a GIMPLE_CALL to
3551 gimplify_modify_expr. This is always possible because when
3552 WANT_VALUE is true, the caller wants the result of this call into
3553 a temporary, which means that we will emit an INIT_EXPR in
3554 internal_get_tmp_var which will then be handled by
3555 gimplify_modify_expr. */
3558 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3559 have to do is replicate it as a GIMPLE_CALL tuple. */
3560 gimple_stmt_iterator gsi
;
3561 call
= gimple_build_call_from_tree (*expr_p
, fnptrtype
);
3562 notice_special_calls (call
);
3563 gimplify_seq_add_stmt (pre_p
, call
);
3564 gsi
= gsi_last (*pre_p
);
3565 maybe_fold_stmt (&gsi
);
3566 *expr_p
= NULL_TREE
;
3569 /* Remember the original function type. */
3570 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
3571 CALL_EXPR_FN (*expr_p
));
3576 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3577 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3579 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3580 condition is true or false, respectively. If null, we should generate
3581 our own to skip over the evaluation of this specific expression.
3583 LOCUS is the source location of the COND_EXPR.
3585 This function is the tree equivalent of do_jump.
3587 shortcut_cond_r should only be called by shortcut_cond_expr. */
3590 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
3593 tree local_label
= NULL_TREE
;
3594 tree t
, expr
= NULL
;
3596 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3597 retain the shortcut semantics. Just insert the gotos here;
3598 shortcut_cond_expr will append the real blocks later. */
3599 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3601 location_t new_locus
;
3603 /* Turn if (a && b) into
3605 if (a); else goto no;
3606 if (b) goto yes; else goto no;
3609 if (false_label_p
== NULL
)
3610 false_label_p
= &local_label
;
3612 /* Keep the original source location on the first 'if'. */
3613 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
3614 append_to_statement_list (t
, &expr
);
3616 /* Set the source location of the && on the second 'if'. */
3617 new_locus
= rexpr_location (pred
, locus
);
3618 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3620 append_to_statement_list (t
, &expr
);
3622 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3624 location_t new_locus
;
3626 /* Turn if (a || b) into
3629 if (b) goto yes; else goto no;
3632 if (true_label_p
== NULL
)
3633 true_label_p
= &local_label
;
3635 /* Keep the original source location on the first 'if'. */
3636 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
3637 append_to_statement_list (t
, &expr
);
3639 /* Set the source location of the || on the second 'if'. */
3640 new_locus
= rexpr_location (pred
, locus
);
3641 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3643 append_to_statement_list (t
, &expr
);
3645 else if (TREE_CODE (pred
) == COND_EXPR
3646 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
3647 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
3649 location_t new_locus
;
3651 /* As long as we're messing with gotos, turn if (a ? b : c) into
3653 if (b) goto yes; else goto no;
3655 if (c) goto yes; else goto no;
3657 Don't do this if one of the arms has void type, which can happen
3658 in C++ when the arm is throw. */
3660 /* Keep the original source location on the first 'if'. Set the source
3661 location of the ? on the second 'if'. */
3662 new_locus
= rexpr_location (pred
, locus
);
3663 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
3664 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
3665 false_label_p
, locus
),
3666 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
3667 false_label_p
, new_locus
));
3671 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
3672 build_and_jump (true_label_p
),
3673 build_and_jump (false_label_p
));
3674 SET_EXPR_LOCATION (expr
, locus
);
3679 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
3680 append_to_statement_list (t
, &expr
);
3686 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3687 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3688 statement, if it is the last one. Otherwise, return NULL. */
3691 find_goto (tree expr
)
3696 if (TREE_CODE (expr
) == GOTO_EXPR
)
3699 if (TREE_CODE (expr
) != STATEMENT_LIST
)
3702 tree_stmt_iterator i
= tsi_start (expr
);
3704 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
3707 if (!tsi_one_before_end_p (i
))
3710 return find_goto (tsi_stmt (i
));
3713 /* Same as find_goto, except that it returns NULL if the destination
3714 is not a LABEL_DECL. */
3717 find_goto_label (tree expr
)
3719 tree dest
= find_goto (expr
);
3720 if (dest
&& TREE_CODE (GOTO_DESTINATION (dest
)) == LABEL_DECL
)
3725 /* Given a conditional expression EXPR with short-circuit boolean
3726 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3727 predicate apart into the equivalent sequence of conditionals. */
3730 shortcut_cond_expr (tree expr
)
3732 tree pred
= TREE_OPERAND (expr
, 0);
3733 tree then_
= TREE_OPERAND (expr
, 1);
3734 tree else_
= TREE_OPERAND (expr
, 2);
3735 tree true_label
, false_label
, end_label
, t
;
3737 tree
*false_label_p
;
3738 bool emit_end
, emit_false
, jump_over_else
;
3739 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3740 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3742 /* First do simple transformations. */
3745 /* If there is no 'else', turn
3748 if (a) if (b) then c. */
3749 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3751 /* Keep the original source location on the first 'if'. */
3752 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3753 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3754 /* Set the source location of the && on the second 'if'. */
3755 if (rexpr_has_location (pred
))
3756 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
3757 then_
= shortcut_cond_expr (expr
);
3758 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3759 pred
= TREE_OPERAND (pred
, 0);
3760 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
3761 SET_EXPR_LOCATION (expr
, locus
);
3767 /* If there is no 'then', turn
3770 if (a); else if (b); else d. */
3771 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3773 /* Keep the original source location on the first 'if'. */
3774 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3775 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3776 /* Set the source location of the || on the second 'if'. */
3777 if (rexpr_has_location (pred
))
3778 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
3779 else_
= shortcut_cond_expr (expr
);
3780 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3781 pred
= TREE_OPERAND (pred
, 0);
3782 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
3783 SET_EXPR_LOCATION (expr
, locus
);
3787 /* If we're done, great. */
3788 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
3789 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
3792 /* Otherwise we need to mess with gotos. Change
3795 if (a); else goto no;
3798 and recursively gimplify the condition. */
3800 true_label
= false_label
= end_label
= NULL_TREE
;
3802 /* If our arms just jump somewhere, hijack those labels so we don't
3803 generate jumps to jumps. */
3805 if (tree then_goto
= find_goto_label (then_
))
3807 true_label
= GOTO_DESTINATION (then_goto
);
3812 if (tree else_goto
= find_goto_label (else_
))
3814 false_label
= GOTO_DESTINATION (else_goto
);
3819 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3821 true_label_p
= &true_label
;
3823 true_label_p
= NULL
;
3825 /* The 'else' branch also needs a label if it contains interesting code. */
3826 if (false_label
|| else_se
)
3827 false_label_p
= &false_label
;
3829 false_label_p
= NULL
;
3831 /* If there was nothing else in our arms, just forward the label(s). */
3832 if (!then_se
&& !else_se
)
3833 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3834 EXPR_LOC_OR_LOC (expr
, input_location
));
3836 /* If our last subexpression already has a terminal label, reuse it. */
3838 t
= expr_last (else_
);
3840 t
= expr_last (then_
);
3843 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
3844 end_label
= LABEL_EXPR_LABEL (t
);
3846 /* If we don't care about jumping to the 'else' branch, jump to the end
3847 if the condition is false. */
3849 false_label_p
= &end_label
;
3851 /* We only want to emit these labels if we aren't hijacking them. */
3852 emit_end
= (end_label
== NULL_TREE
);
3853 emit_false
= (false_label
== NULL_TREE
);
3855 /* We only emit the jump over the else clause if we have to--if the
3856 then clause may fall through. Otherwise we can wind up with a
3857 useless jump and a useless label at the end of gimplified code,
3858 which will cause us to think that this conditional as a whole
3859 falls through even if it doesn't. If we then inline a function
3860 which ends with such a condition, that can cause us to issue an
3861 inappropriate warning about control reaching the end of a
3862 non-void function. */
3863 jump_over_else
= block_may_fallthru (then_
);
3865 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3866 EXPR_LOC_OR_LOC (expr
, input_location
));
3869 append_to_statement_list (pred
, &expr
);
3871 append_to_statement_list (then_
, &expr
);
3876 tree last
= expr_last (expr
);
3877 t
= build_and_jump (&end_label
);
3878 if (rexpr_has_location (last
))
3879 SET_EXPR_LOCATION (t
, rexpr_location (last
));
3880 append_to_statement_list (t
, &expr
);
3884 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
3885 append_to_statement_list (t
, &expr
);
3887 append_to_statement_list (else_
, &expr
);
3889 if (emit_end
&& end_label
)
3891 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
3892 append_to_statement_list (t
, &expr
);
3898 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3901 gimple_boolify (tree expr
)
3903 tree type
= TREE_TYPE (expr
);
3904 location_t loc
= EXPR_LOCATION (expr
);
3906 if (TREE_CODE (expr
) == NE_EXPR
3907 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
3908 && integer_zerop (TREE_OPERAND (expr
, 1)))
3910 tree call
= TREE_OPERAND (expr
, 0);
3911 tree fn
= get_callee_fndecl (call
);
3913 /* For __builtin_expect ((long) (x), y) recurse into x as well
3914 if x is truth_value_p. */
3916 && fndecl_built_in_p (fn
, BUILT_IN_EXPECT
)
3917 && call_expr_nargs (call
) == 2)
3919 tree arg
= CALL_EXPR_ARG (call
, 0);
3922 if (TREE_CODE (arg
) == NOP_EXPR
3923 && TREE_TYPE (arg
) == TREE_TYPE (call
))
3924 arg
= TREE_OPERAND (arg
, 0);
3925 if (truth_value_p (TREE_CODE (arg
)))
3927 arg
= gimple_boolify (arg
);
3928 CALL_EXPR_ARG (call
, 0)
3929 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
3935 switch (TREE_CODE (expr
))
3937 case TRUTH_AND_EXPR
:
3939 case TRUTH_XOR_EXPR
:
3940 case TRUTH_ANDIF_EXPR
:
3941 case TRUTH_ORIF_EXPR
:
3942 /* Also boolify the arguments of truth exprs. */
3943 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
3946 case TRUTH_NOT_EXPR
:
3947 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3949 /* These expressions always produce boolean results. */
3950 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3951 TREE_TYPE (expr
) = boolean_type_node
;
3955 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
3957 case annot_expr_ivdep_kind
:
3958 case annot_expr_unroll_kind
:
3959 case annot_expr_no_vector_kind
:
3960 case annot_expr_vector_kind
:
3961 case annot_expr_parallel_kind
:
3962 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3963 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3964 TREE_TYPE (expr
) = boolean_type_node
;
3971 if (COMPARISON_CLASS_P (expr
))
3973 /* There expressions always prduce boolean results. */
3974 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3975 TREE_TYPE (expr
) = boolean_type_node
;
3978 /* Other expressions that get here must have boolean values, but
3979 might need to be converted to the appropriate mode. */
3980 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
3982 return fold_convert_loc (loc
, boolean_type_node
, expr
);
3986 /* Given a conditional expression *EXPR_P without side effects, gimplify
3987 its operands. New statements are inserted to PRE_P. */
3989 static enum gimplify_status
3990 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
3992 tree expr
= *expr_p
, cond
;
3993 enum gimplify_status ret
, tret
;
3994 enum tree_code code
;
3996 cond
= gimple_boolify (COND_EXPR_COND (expr
));
3998 /* We need to handle && and || specially, as their gimplification
3999 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4000 code
= TREE_CODE (cond
);
4001 if (code
== TRUTH_ANDIF_EXPR
)
4002 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
4003 else if (code
== TRUTH_ORIF_EXPR
)
4004 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
4005 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_condexpr
, fb_rvalue
);
4006 COND_EXPR_COND (*expr_p
) = cond
;
4008 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
4009 is_gimple_val
, fb_rvalue
);
4010 ret
= MIN (ret
, tret
);
4011 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
4012 is_gimple_val
, fb_rvalue
);
4014 return MIN (ret
, tret
);
4017 /* Return true if evaluating EXPR could trap.
4018 EXPR is GENERIC, while tree_could_trap_p can be called
4022 generic_expr_could_trap_p (tree expr
)
4026 if (!expr
|| is_gimple_val (expr
))
4029 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
4032 n
= TREE_OPERAND_LENGTH (expr
);
4033 for (i
= 0; i
< n
; i
++)
4034 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
4040 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4049 The second form is used when *EXPR_P is of type void.
4051 PRE_P points to the list where side effects that must happen before
4052 *EXPR_P should be stored. */
4054 static enum gimplify_status
4055 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
4057 tree expr
= *expr_p
;
4058 tree type
= TREE_TYPE (expr
);
4059 location_t loc
= EXPR_LOCATION (expr
);
4060 tree tmp
, arm1
, arm2
;
4061 enum gimplify_status ret
;
4062 tree label_true
, label_false
, label_cont
;
4063 bool have_then_clause_p
, have_else_clause_p
;
4065 enum tree_code pred_code
;
4066 gimple_seq seq
= NULL
;
4068 /* If this COND_EXPR has a value, copy the values into a temporary within
4070 if (!VOID_TYPE_P (type
))
4072 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
4075 /* If either an rvalue is ok or we do not require an lvalue, create the
4076 temporary. But we cannot do that if the type is addressable. */
4077 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
4078 && !TREE_ADDRESSABLE (type
))
4080 if (gimplify_ctxp
->allow_rhs_cond_expr
4081 /* If either branch has side effects or could trap, it can't be
4082 evaluated unconditionally. */
4083 && !TREE_SIDE_EFFECTS (then_
)
4084 && !generic_expr_could_trap_p (then_
)
4085 && !TREE_SIDE_EFFECTS (else_
)
4086 && !generic_expr_could_trap_p (else_
))
4087 return gimplify_pure_cond_expr (expr_p
, pre_p
);
4089 tmp
= create_tmp_var (type
, "iftmp");
4093 /* Otherwise, only create and copy references to the values. */
4096 type
= build_pointer_type (type
);
4098 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4099 then_
= build_fold_addr_expr_loc (loc
, then_
);
4101 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4102 else_
= build_fold_addr_expr_loc (loc
, else_
);
4105 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
4107 tmp
= create_tmp_var (type
, "iftmp");
4108 result
= build_simple_mem_ref_loc (loc
, tmp
);
4111 /* Build the new then clause, `tmp = then_;'. But don't build the
4112 assignment if the value is void; in C++ it can be if it's a throw. */
4113 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4114 TREE_OPERAND (expr
, 1) = build2 (INIT_EXPR
, type
, tmp
, then_
);
4116 /* Similarly, build the new else clause, `tmp = else_;'. */
4117 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4118 TREE_OPERAND (expr
, 2) = build2 (INIT_EXPR
, type
, tmp
, else_
);
4120 TREE_TYPE (expr
) = void_type_node
;
4121 recalculate_side_effects (expr
);
4123 /* Move the COND_EXPR to the prequeue. */
4124 gimplify_stmt (&expr
, pre_p
);
4130 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4131 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
4132 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
4133 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
4135 /* Make sure the condition has BOOLEAN_TYPE. */
4136 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4138 /* Break apart && and || conditions. */
4139 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
4140 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
4142 expr
= shortcut_cond_expr (expr
);
4144 if (expr
!= *expr_p
)
4148 /* We can't rely on gimplify_expr to re-gimplify the expanded
4149 form properly, as cleanups might cause the target labels to be
4150 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4151 set up a conditional context. */
4152 gimple_push_condition ();
4153 gimplify_stmt (expr_p
, &seq
);
4154 gimple_pop_condition (pre_p
);
4155 gimple_seq_add_seq (pre_p
, seq
);
4161 /* Now do the normal gimplification. */
4163 /* Gimplify condition. */
4164 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
,
4165 is_gimple_condexpr_for_cond
, fb_rvalue
);
4166 if (ret
== GS_ERROR
)
4168 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
4170 gimple_push_condition ();
4172 have_then_clause_p
= have_else_clause_p
= false;
4173 label_true
= find_goto_label (TREE_OPERAND (expr
, 1));
4175 && DECL_CONTEXT (GOTO_DESTINATION (label_true
)) == current_function_decl
4176 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4177 have different locations, otherwise we end up with incorrect
4178 location information on the branches. */
4180 || !EXPR_HAS_LOCATION (expr
)
4181 || !rexpr_has_location (label_true
)
4182 || EXPR_LOCATION (expr
) == rexpr_location (label_true
)))
4184 have_then_clause_p
= true;
4185 label_true
= GOTO_DESTINATION (label_true
);
4188 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
4189 label_false
= find_goto_label (TREE_OPERAND (expr
, 2));
4191 && DECL_CONTEXT (GOTO_DESTINATION (label_false
)) == current_function_decl
4192 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4193 have different locations, otherwise we end up with incorrect
4194 location information on the branches. */
4196 || !EXPR_HAS_LOCATION (expr
)
4197 || !rexpr_has_location (label_false
)
4198 || EXPR_LOCATION (expr
) == rexpr_location (label_false
)))
4200 have_else_clause_p
= true;
4201 label_false
= GOTO_DESTINATION (label_false
);
4204 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
4206 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
4208 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
4210 gimple_set_no_warning (cond_stmt
, TREE_NO_WARNING (COND_EXPR_COND (expr
)));
4211 gimplify_seq_add_stmt (&seq
, cond_stmt
);
4212 gimple_stmt_iterator gsi
= gsi_last (seq
);
4213 maybe_fold_stmt (&gsi
);
4215 label_cont
= NULL_TREE
;
4216 if (!have_then_clause_p
)
4218 /* For if (...) {} else { code; } put label_true after
4220 if (TREE_OPERAND (expr
, 1) == NULL_TREE
4221 && !have_else_clause_p
4222 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
4223 label_cont
= label_true
;
4226 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
4227 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
4228 /* For if (...) { code; } else {} or
4229 if (...) { code; } else goto label; or
4230 if (...) { code; return; } else { ... }
4231 label_cont isn't needed. */
4232 if (!have_else_clause_p
4233 && TREE_OPERAND (expr
, 2) != NULL_TREE
4234 && gimple_seq_may_fallthru (seq
))
4237 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
4239 g
= gimple_build_goto (label_cont
);
4241 /* GIMPLE_COND's are very low level; they have embedded
4242 gotos. This particular embedded goto should not be marked
4243 with the location of the original COND_EXPR, as it would
4244 correspond to the COND_EXPR's condition, not the ELSE or the
4245 THEN arms. To avoid marking it with the wrong location, flag
4246 it as "no location". */
4247 gimple_set_do_not_emit_location (g
);
4249 gimplify_seq_add_stmt (&seq
, g
);
4253 if (!have_else_clause_p
)
4255 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
4256 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
4259 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
4261 gimple_pop_condition (pre_p
);
4262 gimple_seq_add_seq (pre_p
, seq
);
4264 if (ret
== GS_ERROR
)
4266 else if (have_then_clause_p
|| have_else_clause_p
)
4270 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4271 expr
= TREE_OPERAND (expr
, 0);
4272 gimplify_stmt (&expr
, pre_p
);
4279 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4280 to be marked addressable.
4282 We cannot rely on such an expression being directly markable if a temporary
4283 has been created by the gimplification. In this case, we create another
4284 temporary and initialize it with a copy, which will become a store after we
4285 mark it addressable. This can happen if the front-end passed us something
4286 that it could not mark addressable yet, like a Fortran pass-by-reference
4287 parameter (int) floatvar. */
4290 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
4292 while (handled_component_p (*expr_p
))
4293 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4294 if (is_gimple_reg (*expr_p
))
4296 /* Do not allow an SSA name as the temporary. */
4297 tree var
= get_initialized_tmp_var (*expr_p
, seq_p
, NULL
, false);
4298 DECL_NOT_GIMPLE_REG_P (var
) = 1;
4303 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4304 a call to __builtin_memcpy. */
4306 static enum gimplify_status
4307 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4310 tree t
, to
, to_ptr
, from
, from_ptr
;
4312 location_t loc
= EXPR_LOCATION (*expr_p
);
4314 to
= TREE_OPERAND (*expr_p
, 0);
4315 from
= TREE_OPERAND (*expr_p
, 1);
4317 /* Mark the RHS addressable. Beware that it may not be possible to do so
4318 directly if a temporary has been created by the gimplification. */
4319 prepare_gimple_addressable (&from
, seq_p
);
4321 mark_addressable (from
);
4322 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4323 gimplify_arg (&from_ptr
, seq_p
, loc
);
4325 mark_addressable (to
);
4326 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4327 gimplify_arg (&to_ptr
, seq_p
, loc
);
4329 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4331 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
4335 /* tmp = memcpy() */
4336 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4337 gimple_call_set_lhs (gs
, t
);
4338 gimplify_seq_add_stmt (seq_p
, gs
);
4340 *expr_p
= build_simple_mem_ref (t
);
4344 gimplify_seq_add_stmt (seq_p
, gs
);
4349 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4350 a call to __builtin_memset. In this case we know that the RHS is
4351 a CONSTRUCTOR with an empty element list. */
4353 static enum gimplify_status
4354 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4357 tree t
, from
, to
, to_ptr
;
4359 location_t loc
= EXPR_LOCATION (*expr_p
);
4361 /* Assert our assumptions, to abort instead of producing wrong code
4362 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4363 not be immediately exposed. */
4364 from
= TREE_OPERAND (*expr_p
, 1);
4365 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4366 from
= TREE_OPERAND (from
, 0);
4368 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4369 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4372 to
= TREE_OPERAND (*expr_p
, 0);
4374 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4375 gimplify_arg (&to_ptr
, seq_p
, loc
);
4376 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4378 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4382 /* tmp = memset() */
4383 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4384 gimple_call_set_lhs (gs
, t
);
4385 gimplify_seq_add_stmt (seq_p
, gs
);
4387 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
4391 gimplify_seq_add_stmt (seq_p
, gs
);
4396 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4397 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4398 assignment. Return non-null if we detect a potential overlap. */
4400 struct gimplify_init_ctor_preeval_data
4402 /* The base decl of the lhs object. May be NULL, in which case we
4403 have to assume the lhs is indirect. */
4406 /* The alias set of the lhs object. */
4407 alias_set_type lhs_alias_set
;
4411 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4413 struct gimplify_init_ctor_preeval_data
*data
4414 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4417 /* If we find the base object, obviously we have overlap. */
4418 if (data
->lhs_base_decl
== t
)
4421 /* If the constructor component is indirect, determine if we have a
4422 potential overlap with the lhs. The only bits of information we
4423 have to go on at this point are addressability and alias sets. */
4424 if ((INDIRECT_REF_P (t
)
4425 || TREE_CODE (t
) == MEM_REF
)
4426 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4427 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
4430 /* If the constructor component is a call, determine if it can hide a
4431 potential overlap with the lhs through an INDIRECT_REF like above.
4432 ??? Ugh - this is completely broken. In fact this whole analysis
4433 doesn't look conservative. */
4434 if (TREE_CODE (t
) == CALL_EXPR
)
4436 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
4438 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
4439 if (POINTER_TYPE_P (TREE_VALUE (type
))
4440 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4441 && alias_sets_conflict_p (data
->lhs_alias_set
,
4443 (TREE_TYPE (TREE_VALUE (type
)))))
4447 if (IS_TYPE_OR_DECL_P (t
))
4452 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4453 force values that overlap with the lhs (as described by *DATA)
4454 into temporaries. */
4457 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4458 struct gimplify_init_ctor_preeval_data
*data
)
4460 enum gimplify_status one
;
4462 /* If the value is constant, then there's nothing to pre-evaluate. */
4463 if (TREE_CONSTANT (*expr_p
))
4465 /* Ensure it does not have side effects, it might contain a reference to
4466 the object we're initializing. */
4467 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
4471 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4472 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
4475 /* Recurse for nested constructors. */
4476 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
4478 unsigned HOST_WIDE_INT ix
;
4479 constructor_elt
*ce
;
4480 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
4482 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
4483 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
4488 /* If this is a variable sized type, we must remember the size. */
4489 maybe_with_size_expr (expr_p
);
4491 /* Gimplify the constructor element to something appropriate for the rhs
4492 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4493 the gimplifier will consider this a store to memory. Doing this
4494 gimplification now means that we won't have to deal with complicated
4495 language-specific trees, nor trees like SAVE_EXPR that can induce
4496 exponential search behavior. */
4497 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
4498 if (one
== GS_ERROR
)
4504 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4505 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4506 always be true for all scalars, since is_gimple_mem_rhs insists on a
4507 temporary variable for them. */
4508 if (DECL_P (*expr_p
))
4511 /* If this is of variable size, we have no choice but to assume it doesn't
4512 overlap since we can't make a temporary for it. */
4513 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
4516 /* Otherwise, we must search for overlap ... */
4517 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
4520 /* ... and if found, force the value into a temporary. */
4521 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
4524 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4525 a RANGE_EXPR in a CONSTRUCTOR for an array.
4529 object[var] = value;
4536 We increment var _after_ the loop exit check because we might otherwise
4537 fail if upper == TYPE_MAX_VALUE (type for upper).
4539 Note that we never have to deal with SAVE_EXPRs here, because this has
4540 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4542 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
4543 gimple_seq
*, bool);
4546 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
4547 tree value
, tree array_elt_type
,
4548 gimple_seq
*pre_p
, bool cleared
)
4550 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
4551 tree var
, var_type
, cref
, tmp
;
4553 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
4554 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
4555 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
4557 /* Create and initialize the index variable. */
4558 var_type
= TREE_TYPE (upper
);
4559 var
= create_tmp_var (var_type
);
4560 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
4562 /* Add the loop entry label. */
4563 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
4565 /* Build the reference. */
4566 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4567 var
, NULL_TREE
, NULL_TREE
);
4569 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4570 the store. Otherwise just assign value to the reference. */
4572 if (TREE_CODE (value
) == CONSTRUCTOR
)
4573 /* NB we might have to call ourself recursively through
4574 gimplify_init_ctor_eval if the value is a constructor. */
4575 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4578 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
4580 /* We exit the loop when the index var is equal to the upper bound. */
4581 gimplify_seq_add_stmt (pre_p
,
4582 gimple_build_cond (EQ_EXPR
, var
, upper
,
4583 loop_exit_label
, fall_thru_label
));
4585 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
4587 /* Otherwise, increment the index var... */
4588 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
4589 fold_convert (var_type
, integer_one_node
));
4590 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
4592 /* ...and jump back to the loop entry. */
4593 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
4595 /* Add the loop exit label. */
4596 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
4599 /* Return true if FDECL is accessing a field that is zero sized. */
4602 zero_sized_field_decl (const_tree fdecl
)
4604 if (TREE_CODE (fdecl
) == FIELD_DECL
&& DECL_SIZE (fdecl
)
4605 && integer_zerop (DECL_SIZE (fdecl
)))
4610 /* Return true if TYPE is zero sized. */
4613 zero_sized_type (const_tree type
)
4615 if (AGGREGATE_TYPE_P (type
) && TYPE_SIZE (type
)
4616 && integer_zerop (TYPE_SIZE (type
)))
4621 /* A subroutine of gimplify_init_constructor. Generate individual
4622 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4623 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4624 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4628 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
4629 gimple_seq
*pre_p
, bool cleared
)
4631 tree array_elt_type
= NULL
;
4632 unsigned HOST_WIDE_INT ix
;
4633 tree purpose
, value
;
4635 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
4636 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
4638 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
4642 /* NULL values are created above for gimplification errors. */
4646 if (cleared
&& initializer_zerop (value
))
4649 /* ??? Here's to hoping the front end fills in all of the indices,
4650 so we don't have to figure out what's missing ourselves. */
4651 gcc_assert (purpose
);
4653 /* Skip zero-sized fields, unless value has side-effects. This can
4654 happen with calls to functions returning a zero-sized type, which
4655 we shouldn't discard. As a number of downstream passes don't
4656 expect sets of zero-sized fields, we rely on the gimplification of
4657 the MODIFY_EXPR we make below to drop the assignment statement. */
4658 if (! TREE_SIDE_EFFECTS (value
) && zero_sized_field_decl (purpose
))
4661 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4663 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4665 tree lower
= TREE_OPERAND (purpose
, 0);
4666 tree upper
= TREE_OPERAND (purpose
, 1);
4668 /* If the lower bound is equal to upper, just treat it as if
4669 upper was the index. */
4670 if (simple_cst_equal (lower
, upper
))
4674 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
4675 array_elt_type
, pre_p
, cleared
);
4682 /* Do not use bitsizetype for ARRAY_REF indices. */
4683 if (TYPE_DOMAIN (TREE_TYPE (object
)))
4685 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
4687 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4688 purpose
, NULL_TREE
, NULL_TREE
);
4692 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
4693 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
4694 unshare_expr (object
), purpose
, NULL_TREE
);
4697 if (TREE_CODE (value
) == CONSTRUCTOR
4698 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
4699 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4703 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
4704 gimplify_and_add (init
, pre_p
);
4710 /* Return the appropriate RHS predicate for this LHS. */
4713 rhs_predicate_for (tree lhs
)
4715 if (is_gimple_reg (lhs
))
4716 return is_gimple_reg_rhs_or_call
;
4718 return is_gimple_mem_rhs_or_call
;
4721 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4722 before the LHS has been gimplified. */
4724 static gimple_predicate
4725 initial_rhs_predicate_for (tree lhs
)
4727 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
4728 return is_gimple_reg_rhs_or_call
;
4730 return is_gimple_mem_rhs_or_call
;
4733 /* Gimplify a C99 compound literal expression. This just means adding
4734 the DECL_EXPR before the current statement and using its anonymous
4737 static enum gimplify_status
4738 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
4739 bool (*gimple_test_f
) (tree
),
4740 fallback_t fallback
)
4742 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
4743 tree decl
= DECL_EXPR_DECL (decl_s
);
4744 tree init
= DECL_INITIAL (decl
);
4745 /* Mark the decl as addressable if the compound literal
4746 expression is addressable now, otherwise it is marked too late
4747 after we gimplify the initialization expression. */
4748 if (TREE_ADDRESSABLE (*expr_p
))
4749 TREE_ADDRESSABLE (decl
) = 1;
4750 /* Otherwise, if we don't need an lvalue and have a literal directly
4751 substitute it. Check if it matches the gimple predicate, as
4752 otherwise we'd generate a new temporary, and we can as well just
4753 use the decl we already have. */
4754 else if (!TREE_ADDRESSABLE (decl
)
4755 && !TREE_THIS_VOLATILE (decl
)
4757 && (fallback
& fb_lvalue
) == 0
4758 && gimple_test_f (init
))
4764 /* If the decl is not addressable, then it is being used in some
4765 expression or on the right hand side of a statement, and it can
4766 be put into a readonly data section. */
4767 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
4768 TREE_READONLY (decl
) = 1;
4770 /* This decl isn't mentioned in the enclosing block, so add it to the
4771 list of temps. FIXME it seems a bit of a kludge to say that
4772 anonymous artificial vars aren't pushed, but everything else is. */
4773 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
4774 gimple_add_tmp_var (decl
);
4776 gimplify_and_add (decl_s
, pre_p
);
4781 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4782 return a new CONSTRUCTOR if something changed. */
4785 optimize_compound_literals_in_ctor (tree orig_ctor
)
4787 tree ctor
= orig_ctor
;
4788 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
4789 unsigned int idx
, num
= vec_safe_length (elts
);
4791 for (idx
= 0; idx
< num
; idx
++)
4793 tree value
= (*elts
)[idx
].value
;
4794 tree newval
= value
;
4795 if (TREE_CODE (value
) == CONSTRUCTOR
)
4796 newval
= optimize_compound_literals_in_ctor (value
);
4797 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
4799 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
4800 tree decl
= DECL_EXPR_DECL (decl_s
);
4801 tree init
= DECL_INITIAL (decl
);
4803 if (!TREE_ADDRESSABLE (value
)
4804 && !TREE_ADDRESSABLE (decl
)
4806 && TREE_CODE (init
) == CONSTRUCTOR
)
4807 newval
= optimize_compound_literals_in_ctor (init
);
4809 if (newval
== value
)
4812 if (ctor
== orig_ctor
)
4814 ctor
= copy_node (orig_ctor
);
4815 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
4816 elts
= CONSTRUCTOR_ELTS (ctor
);
4818 (*elts
)[idx
].value
= newval
;
4823 /* A subroutine of gimplify_modify_expr. Break out elements of a
4824 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4826 Note that we still need to clear any elements that don't have explicit
4827 initializers, so if not all elements are initialized we keep the
4828 original MODIFY_EXPR, we just remove all of the constructor elements.
4830 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4831 GS_ERROR if we would have to create a temporary when gimplifying
4832 this constructor. Otherwise, return GS_OK.
4834 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4836 static enum gimplify_status
4837 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4838 bool want_value
, bool notify_temp_creation
)
4840 tree object
, ctor
, type
;
4841 enum gimplify_status ret
;
4842 vec
<constructor_elt
, va_gc
> *elts
;
4844 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
4846 if (!notify_temp_creation
)
4848 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
4849 is_gimple_lvalue
, fb_lvalue
);
4850 if (ret
== GS_ERROR
)
4854 object
= TREE_OPERAND (*expr_p
, 0);
4855 ctor
= TREE_OPERAND (*expr_p
, 1)
4856 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
4857 type
= TREE_TYPE (ctor
);
4858 elts
= CONSTRUCTOR_ELTS (ctor
);
4861 switch (TREE_CODE (type
))
4865 case QUAL_UNION_TYPE
:
4868 struct gimplify_init_ctor_preeval_data preeval_data
;
4869 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
4870 HOST_WIDE_INT num_unique_nonzero_elements
;
4871 bool cleared
, complete_p
, valid_const_initializer
;
4872 /* Use readonly data for initializers of this or smaller size
4873 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4875 const HOST_WIDE_INT min_unique_size
= 64;
4876 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4877 is smaller than this, use readonly data. */
4878 const int unique_nonzero_ratio
= 8;
4880 /* Aggregate types must lower constructors to initialization of
4881 individual elements. The exception is that a CONSTRUCTOR node
4882 with no elements indicates zero-initialization of the whole. */
4883 if (vec_safe_is_empty (elts
))
4885 if (notify_temp_creation
)
4890 /* Fetch information about the constructor to direct later processing.
4891 We might want to make static versions of it in various cases, and
4892 can only do so if it known to be a valid constant initializer. */
4893 valid_const_initializer
4894 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
4895 &num_unique_nonzero_elements
,
4896 &num_ctor_elements
, &complete_p
);
4898 /* If a const aggregate variable is being initialized, then it
4899 should never be a lose to promote the variable to be static. */
4900 if (valid_const_initializer
4901 && num_nonzero_elements
> 1
4902 && TREE_READONLY (object
)
4904 && !DECL_REGISTER (object
)
4905 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
))
4906 /* For ctors that have many repeated nonzero elements
4907 represented through RANGE_EXPRs, prefer initializing
4908 those through runtime loops over copies of large amounts
4909 of data from readonly data section. */
4910 && (num_unique_nonzero_elements
4911 > num_nonzero_elements
/ unique_nonzero_ratio
4912 || ((unsigned HOST_WIDE_INT
) int_size_in_bytes (type
)
4913 <= (unsigned HOST_WIDE_INT
) min_unique_size
)))
4915 if (notify_temp_creation
)
4917 DECL_INITIAL (object
) = ctor
;
4918 TREE_STATIC (object
) = 1;
4919 if (!DECL_NAME (object
))
4920 DECL_NAME (object
) = create_tmp_var_name ("C");
4921 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
4923 /* ??? C++ doesn't automatically append a .<number> to the
4924 assembler name, and even when it does, it looks at FE private
4925 data structures to figure out what that number should be,
4926 which are not set for this variable. I suppose this is
4927 important for local statics for inline functions, which aren't
4928 "local" in the object file sense. So in order to get a unique
4929 TU-local symbol, we must invoke the lhd version now. */
4930 lhd_set_decl_assembler_name (object
);
4932 *expr_p
= NULL_TREE
;
4936 /* If there are "lots" of initialized elements, even discounting
4937 those that are not address constants (and thus *must* be
4938 computed at runtime), then partition the constructor into
4939 constant and non-constant parts. Block copy the constant
4940 parts in, then generate code for the non-constant parts. */
4941 /* TODO. There's code in cp/typeck.c to do this. */
4943 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
4944 /* store_constructor will ignore the clearing of variable-sized
4945 objects. Initializers for such objects must explicitly set
4946 every field that needs to be set. */
4948 else if (!complete_p
)
4949 /* If the constructor isn't complete, clear the whole object
4950 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4952 ??? This ought not to be needed. For any element not present
4953 in the initializer, we should simply set them to zero. Except
4954 we'd need to *find* the elements that are not present, and that
4955 requires trickery to avoid quadratic compile-time behavior in
4956 large cases or excessive memory use in small cases. */
4957 cleared
= !CONSTRUCTOR_NO_CLEARING (ctor
);
4958 else if (num_ctor_elements
- num_nonzero_elements
4959 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
4960 && num_nonzero_elements
< num_ctor_elements
/ 4)
4961 /* If there are "lots" of zeros, it's more efficient to clear
4962 the memory and then set the nonzero elements. */
4967 /* If there are "lots" of initialized elements, and all of them
4968 are valid address constants, then the entire initializer can
4969 be dropped to memory, and then memcpy'd out. Don't do this
4970 for sparse arrays, though, as it's more efficient to follow
4971 the standard CONSTRUCTOR behavior of memset followed by
4972 individual element initialization. Also don't do this for small
4973 all-zero initializers (which aren't big enough to merit
4974 clearing), and don't try to make bitwise copies of
4975 TREE_ADDRESSABLE types. */
4976 if (valid_const_initializer
4978 && !(cleared
|| num_nonzero_elements
== 0)
4979 && !TREE_ADDRESSABLE (type
))
4981 HOST_WIDE_INT size
= int_size_in_bytes (type
);
4984 /* ??? We can still get unbounded array types, at least
4985 from the C++ front end. This seems wrong, but attempt
4986 to work around it for now. */
4989 size
= int_size_in_bytes (TREE_TYPE (object
));
4991 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
4994 /* Find the maximum alignment we can assume for the object. */
4995 /* ??? Make use of DECL_OFFSET_ALIGN. */
4996 if (DECL_P (object
))
4997 align
= DECL_ALIGN (object
);
4999 align
= TYPE_ALIGN (type
);
5001 /* Do a block move either if the size is so small as to make
5002 each individual move a sub-unit move on average, or if it
5003 is so large as to make individual moves inefficient. */
5005 && num_nonzero_elements
> 1
5006 /* For ctors that have many repeated nonzero elements
5007 represented through RANGE_EXPRs, prefer initializing
5008 those through runtime loops over copies of large amounts
5009 of data from readonly data section. */
5010 && (num_unique_nonzero_elements
5011 > num_nonzero_elements
/ unique_nonzero_ratio
5012 || size
<= min_unique_size
)
5013 && (size
< num_nonzero_elements
5014 || !can_move_by_pieces (size
, align
)))
5016 if (notify_temp_creation
)
5019 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
5020 ctor
= tree_output_constant_def (ctor
);
5021 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
5022 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
5023 TREE_OPERAND (*expr_p
, 1) = ctor
;
5025 /* This is no longer an assignment of a CONSTRUCTOR, but
5026 we still may have processing to do on the LHS. So
5027 pretend we didn't do anything here to let that happen. */
5028 return GS_UNHANDLED
;
5032 /* If the target is volatile, we have non-zero elements and more than
5033 one field to assign, initialize the target from a temporary. */
5034 if (TREE_THIS_VOLATILE (object
)
5035 && !TREE_ADDRESSABLE (type
)
5036 && (num_nonzero_elements
> 0 || !cleared
)
5037 && vec_safe_length (elts
) > 1)
5039 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
5040 TREE_OPERAND (*expr_p
, 0) = temp
;
5041 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
5043 build2 (MODIFY_EXPR
, void_type_node
,
5048 if (notify_temp_creation
)
5051 /* If there are nonzero elements and if needed, pre-evaluate to capture
5052 elements overlapping with the lhs into temporaries. We must do this
5053 before clearing to fetch the values before they are zeroed-out. */
5054 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
5056 preeval_data
.lhs_base_decl
= get_base_address (object
);
5057 if (!DECL_P (preeval_data
.lhs_base_decl
))
5058 preeval_data
.lhs_base_decl
= NULL
;
5059 preeval_data
.lhs_alias_set
= get_alias_set (object
);
5061 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
5062 pre_p
, post_p
, &preeval_data
);
5065 bool ctor_has_side_effects_p
5066 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
5070 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5071 Note that we still have to gimplify, in order to handle the
5072 case of variable sized types. Avoid shared tree structures. */
5073 CONSTRUCTOR_ELTS (ctor
) = NULL
;
5074 TREE_SIDE_EFFECTS (ctor
) = 0;
5075 object
= unshare_expr (object
);
5076 gimplify_stmt (expr_p
, pre_p
);
5079 /* If we have not block cleared the object, or if there are nonzero
5080 elements in the constructor, or if the constructor has side effects,
5081 add assignments to the individual scalar fields of the object. */
5083 || num_nonzero_elements
> 0
5084 || ctor_has_side_effects_p
)
5085 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
5087 *expr_p
= NULL_TREE
;
5095 if (notify_temp_creation
)
5098 /* Extract the real and imaginary parts out of the ctor. */
5099 gcc_assert (elts
->length () == 2);
5100 r
= (*elts
)[0].value
;
5101 i
= (*elts
)[1].value
;
5102 if (r
== NULL
|| i
== NULL
)
5104 tree zero
= build_zero_cst (TREE_TYPE (type
));
5111 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5112 represent creation of a complex value. */
5113 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
5115 ctor
= build_complex (type
, r
, i
);
5116 TREE_OPERAND (*expr_p
, 1) = ctor
;
5120 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
5121 TREE_OPERAND (*expr_p
, 1) = ctor
;
5122 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
5125 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
5133 unsigned HOST_WIDE_INT ix
;
5134 constructor_elt
*ce
;
5136 if (notify_temp_creation
)
5139 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5140 if (TREE_CONSTANT (ctor
))
5142 bool constant_p
= true;
5145 /* Even when ctor is constant, it might contain non-*_CST
5146 elements, such as addresses or trapping values like
5147 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5148 in VECTOR_CST nodes. */
5149 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
5150 if (!CONSTANT_CLASS_P (value
))
5158 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
5162 TREE_CONSTANT (ctor
) = 0;
5165 /* Vector types use CONSTRUCTOR all the way through gimple
5166 compilation as a general initializer. */
5167 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
5169 enum gimplify_status tret
;
5170 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
5172 if (tret
== GS_ERROR
)
5174 else if (TREE_STATIC (ctor
)
5175 && !initializer_constant_valid_p (ce
->value
,
5176 TREE_TYPE (ce
->value
)))
5177 TREE_STATIC (ctor
) = 0;
5179 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
5180 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
5185 /* So how did we get a CONSTRUCTOR for a scalar type? */
5189 if (ret
== GS_ERROR
)
5191 /* If we have gimplified both sides of the initializer but have
5192 not emitted an assignment, do so now. */
5195 tree lhs
= TREE_OPERAND (*expr_p
, 0);
5196 tree rhs
= TREE_OPERAND (*expr_p
, 1);
5197 if (want_value
&& object
== lhs
)
5198 lhs
= unshare_expr (lhs
);
5199 gassign
*init
= gimple_build_assign (lhs
, rhs
);
5200 gimplify_seq_add_stmt (pre_p
, init
);
5214 /* Given a pointer value OP0, return a simplified version of an
5215 indirection through OP0, or NULL_TREE if no simplification is
5216 possible. This may only be applied to a rhs of an expression.
5217 Note that the resulting type may be different from the type pointed
5218 to in the sense that it is still compatible from the langhooks
5222 gimple_fold_indirect_ref_rhs (tree t
)
5224 return gimple_fold_indirect_ref (t
);
5227 /* Subroutine of gimplify_modify_expr to do simplifications of
5228 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5229 something changes. */
5231 static enum gimplify_status
5232 gimplify_modify_expr_rhs (tree
*expr_p
, tree
*from_p
, tree
*to_p
,
5233 gimple_seq
*pre_p
, gimple_seq
*post_p
,
5236 enum gimplify_status ret
= GS_UNHANDLED
;
5242 switch (TREE_CODE (*from_p
))
5245 /* If we're assigning from a read-only variable initialized with
5246 a constructor, do the direct assignment from the constructor,
5247 but only if neither source nor target are volatile since this
5248 latter assignment might end up being done on a per-field basis. */
5249 if (DECL_INITIAL (*from_p
)
5250 && TREE_READONLY (*from_p
)
5251 && !TREE_THIS_VOLATILE (*from_p
)
5252 && !TREE_THIS_VOLATILE (*to_p
)
5253 && TREE_CODE (DECL_INITIAL (*from_p
)) == CONSTRUCTOR
)
5255 tree old_from
= *from_p
;
5256 enum gimplify_status subret
;
5258 /* Move the constructor into the RHS. */
5259 *from_p
= unshare_expr (DECL_INITIAL (*from_p
));
5261 /* Let's see if gimplify_init_constructor will need to put
5263 subret
= gimplify_init_constructor (expr_p
, NULL
, NULL
,
5265 if (subret
== GS_ERROR
)
5267 /* If so, revert the change. */
5279 /* If we have code like
5283 where the type of "x" is a (possibly cv-qualified variant
5284 of "A"), treat the entire expression as identical to "x".
5285 This kind of code arises in C++ when an object is bound
5286 to a const reference, and if "x" is a TARGET_EXPR we want
5287 to take advantage of the optimization below. */
5288 bool volatile_p
= TREE_THIS_VOLATILE (*from_p
);
5289 tree t
= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p
, 0));
5292 if (TREE_THIS_VOLATILE (t
) != volatile_p
)
5295 t
= build_simple_mem_ref_loc (EXPR_LOCATION (*from_p
),
5296 build_fold_addr_expr (t
));
5297 if (REFERENCE_CLASS_P (t
))
5298 TREE_THIS_VOLATILE (t
) = volatile_p
;
5309 /* If we are initializing something from a TARGET_EXPR, strip the
5310 TARGET_EXPR and initialize it directly, if possible. This can't
5311 be done if the initializer is void, since that implies that the
5312 temporary is set in some non-trivial way.
5314 ??? What about code that pulls out the temp and uses it
5315 elsewhere? I think that such code never uses the TARGET_EXPR as
5316 an initializer. If I'm wrong, we'll die because the temp won't
5317 have any RTL. In that case, I guess we'll need to replace
5318 references somehow. */
5319 tree init
= TARGET_EXPR_INITIAL (*from_p
);
5322 && (TREE_CODE (*expr_p
) != MODIFY_EXPR
5323 || !TARGET_EXPR_NO_ELIDE (*from_p
))
5324 && !VOID_TYPE_P (TREE_TYPE (init
)))
5334 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5336 gimplify_compound_expr (from_p
, pre_p
, true);
5342 /* If we already made some changes, let the front end have a
5343 crack at this before we break it down. */
5344 if (ret
!= GS_UNHANDLED
)
5346 /* If we're initializing from a CONSTRUCTOR, break this into
5347 individual MODIFY_EXPRs. */
5348 return gimplify_init_constructor (expr_p
, pre_p
, post_p
, want_value
,
5352 /* If we're assigning to a non-register type, push the assignment
5353 down into the branches. This is mandatory for ADDRESSABLE types,
5354 since we cannot generate temporaries for such, but it saves a
5355 copy in other cases as well. */
5356 if (!is_gimple_reg_type (TREE_TYPE (*from_p
)))
5358 /* This code should mirror the code in gimplify_cond_expr. */
5359 enum tree_code code
= TREE_CODE (*expr_p
);
5360 tree cond
= *from_p
;
5361 tree result
= *to_p
;
5363 ret
= gimplify_expr (&result
, pre_p
, post_p
,
5364 is_gimple_lvalue
, fb_lvalue
);
5365 if (ret
!= GS_ERROR
)
5368 /* If we are going to write RESULT more than once, clear
5369 TREE_READONLY flag, otherwise we might incorrectly promote
5370 the variable to static const and initialize it at compile
5371 time in one of the branches. */
5373 && TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
5374 && TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5375 TREE_READONLY (result
) = 0;
5376 if (TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
)
5377 TREE_OPERAND (cond
, 1)
5378 = build2 (code
, void_type_node
, result
,
5379 TREE_OPERAND (cond
, 1));
5380 if (TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5381 TREE_OPERAND (cond
, 2)
5382 = build2 (code
, void_type_node
, unshare_expr (result
),
5383 TREE_OPERAND (cond
, 2));
5385 TREE_TYPE (cond
) = void_type_node
;
5386 recalculate_side_effects (cond
);
5390 gimplify_and_add (cond
, pre_p
);
5391 *expr_p
= unshare_expr (result
);
5400 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5401 return slot so that we don't generate a temporary. */
5402 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p
)
5403 && aggregate_value_p (*from_p
, *from_p
))
5407 if (!(rhs_predicate_for (*to_p
))(*from_p
))
5408 /* If we need a temporary, *to_p isn't accurate. */
5410 /* It's OK to use the return slot directly unless it's an NRV. */
5411 else if (TREE_CODE (*to_p
) == RESULT_DECL
5412 && DECL_NAME (*to_p
) == NULL_TREE
5413 && needs_to_live_in_memory (*to_p
))
5415 else if (is_gimple_reg_type (TREE_TYPE (*to_p
))
5416 || (DECL_P (*to_p
) && DECL_REGISTER (*to_p
)))
5417 /* Don't force regs into memory. */
5419 else if (TREE_CODE (*expr_p
) == INIT_EXPR
)
5420 /* It's OK to use the target directly if it's being
5423 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p
)))
5425 /* Always use the target and thus RSO for variable-sized types.
5426 GIMPLE cannot deal with a variable-sized assignment
5427 embedded in a call statement. */
5429 else if (TREE_CODE (*to_p
) != SSA_NAME
5430 && (!is_gimple_variable (*to_p
)
5431 || needs_to_live_in_memory (*to_p
)))
5432 /* Don't use the original target if it's already addressable;
5433 if its address escapes, and the called function uses the
5434 NRV optimization, a conforming program could see *to_p
5435 change before the called function returns; see c++/19317.
5436 When optimizing, the return_slot pass marks more functions
5437 as safe after we have escape info. */
5444 CALL_EXPR_RETURN_SLOT_OPT (*from_p
) = 1;
5445 mark_addressable (*to_p
);
5450 case WITH_SIZE_EXPR
:
5451 /* Likewise for calls that return an aggregate of non-constant size,
5452 since we would not be able to generate a temporary at all. */
5453 if (TREE_CODE (TREE_OPERAND (*from_p
, 0)) == CALL_EXPR
)
5455 *from_p
= TREE_OPERAND (*from_p
, 0);
5456 /* We don't change ret in this case because the
5457 WITH_SIZE_EXPR might have been added in
5458 gimplify_modify_expr, so returning GS_OK would lead to an
5464 /* If we're initializing from a container, push the initialization
5466 case CLEANUP_POINT_EXPR
:
5468 case STATEMENT_LIST
:
5470 tree wrap
= *from_p
;
5473 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_min_lval
,
5475 if (ret
!= GS_ERROR
)
5478 t
= voidify_wrapper_expr (wrap
, *expr_p
);
5479 gcc_assert (t
== *expr_p
);
5483 gimplify_and_add (wrap
, pre_p
);
5484 *expr_p
= unshare_expr (*to_p
);
5491 case COMPOUND_LITERAL_EXPR
:
5493 tree complit
= TREE_OPERAND (*expr_p
, 1);
5494 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (complit
);
5495 tree decl
= DECL_EXPR_DECL (decl_s
);
5496 tree init
= DECL_INITIAL (decl
);
5498 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5499 into struct T x = { 0, 1, 2 } if the address of the
5500 compound literal has never been taken. */
5501 if (!TREE_ADDRESSABLE (complit
)
5502 && !TREE_ADDRESSABLE (decl
)
5505 *expr_p
= copy_node (*expr_p
);
5506 TREE_OPERAND (*expr_p
, 1) = init
;
5521 /* Return true if T looks like a valid GIMPLE statement. */
5524 is_gimple_stmt (tree t
)
5526 const enum tree_code code
= TREE_CODE (t
);
5531 /* The only valid NOP_EXPR is the empty statement. */
5532 return IS_EMPTY_STMT (t
);
5536 /* These are only valid if they're void. */
5537 return TREE_TYPE (t
) == NULL
|| VOID_TYPE_P (TREE_TYPE (t
));
5543 case CASE_LABEL_EXPR
:
5544 case TRY_CATCH_EXPR
:
5545 case TRY_FINALLY_EXPR
:
5546 case EH_FILTER_EXPR
:
5549 case STATEMENT_LIST
:
5554 case OACC_HOST_DATA
:
5557 case OACC_ENTER_DATA
:
5558 case OACC_EXIT_DATA
:
5563 case OMP_DISTRIBUTE
:
5576 case OMP_TARGET_DATA
:
5577 case OMP_TARGET_UPDATE
:
5578 case OMP_TARGET_ENTER_DATA
:
5579 case OMP_TARGET_EXIT_DATA
:
5582 /* These are always void. */
5588 /* These are valid regardless of their type. */
5597 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5598 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5600 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5601 other, unmodified part of the complex object just before the total store.
5602 As a consequence, if the object is still uninitialized, an undefined value
5603 will be loaded into a register, which may result in a spurious exception
5604 if the register is floating-point and the value happens to be a signaling
5605 NaN for example. Then the fully-fledged complex operations lowering pass
5606 followed by a DCE pass are necessary in order to fix things up. */
5608 static enum gimplify_status
5609 gimplify_modify_expr_complex_part (tree
*expr_p
, gimple_seq
*pre_p
,
5612 enum tree_code code
, ocode
;
5613 tree lhs
, rhs
, new_rhs
, other
, realpart
, imagpart
;
5615 lhs
= TREE_OPERAND (*expr_p
, 0);
5616 rhs
= TREE_OPERAND (*expr_p
, 1);
5617 code
= TREE_CODE (lhs
);
5618 lhs
= TREE_OPERAND (lhs
, 0);
5620 ocode
= code
== REALPART_EXPR
? IMAGPART_EXPR
: REALPART_EXPR
;
5621 other
= build1 (ocode
, TREE_TYPE (rhs
), lhs
);
5622 TREE_NO_WARNING (other
) = 1;
5623 other
= get_formal_tmp_var (other
, pre_p
);
5625 realpart
= code
== REALPART_EXPR
? rhs
: other
;
5626 imagpart
= code
== REALPART_EXPR
? other
: rhs
;
5628 if (TREE_CONSTANT (realpart
) && TREE_CONSTANT (imagpart
))
5629 new_rhs
= build_complex (TREE_TYPE (lhs
), realpart
, imagpart
);
5631 new_rhs
= build2 (COMPLEX_EXPR
, TREE_TYPE (lhs
), realpart
, imagpart
);
5633 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (lhs
, new_rhs
));
5634 *expr_p
= (want_value
) ? rhs
: NULL_TREE
;
5639 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5645 PRE_P points to the list where side effects that must happen before
5646 *EXPR_P should be stored.
5648 POST_P points to the list where side effects that must happen after
5649 *EXPR_P should be stored.
5651 WANT_VALUE is nonzero iff we want to use the value of this expression
5652 in another expression. */
5654 static enum gimplify_status
5655 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5658 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
5659 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
5660 enum gimplify_status ret
= GS_UNHANDLED
;
5662 location_t loc
= EXPR_LOCATION (*expr_p
);
5663 gimple_stmt_iterator gsi
;
5665 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
5666 || TREE_CODE (*expr_p
) == INIT_EXPR
);
5668 /* Trying to simplify a clobber using normal logic doesn't work,
5669 so handle it here. */
5670 if (TREE_CLOBBER_P (*from_p
))
5672 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5673 if (ret
== GS_ERROR
)
5675 gcc_assert (!want_value
);
5676 if (!VAR_P (*to_p
) && TREE_CODE (*to_p
) != MEM_REF
)
5678 tree addr
= get_initialized_tmp_var (build_fold_addr_expr (*to_p
),
5680 *to_p
= build_simple_mem_ref_loc (EXPR_LOCATION (*to_p
), addr
);
5682 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
5687 /* Insert pointer conversions required by the middle-end that are not
5688 required by the frontend. This fixes middle-end type checking for
5689 for example gcc.dg/redecl-6.c. */
5690 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
5692 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
5693 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
5694 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
5697 /* See if any simplifications can be done based on what the RHS is. */
5698 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5700 if (ret
!= GS_UNHANDLED
)
5703 /* For zero sized types only gimplify the left hand side and right hand
5704 side as statements and throw away the assignment. Do this after
5705 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5707 if (zero_sized_type (TREE_TYPE (*from_p
))
5709 /* Don't do this for calls that return addressable types, expand_call
5710 relies on those having a lhs. */
5711 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p
))
5712 && TREE_CODE (*from_p
) == CALL_EXPR
))
5714 gimplify_stmt (from_p
, pre_p
);
5715 gimplify_stmt (to_p
, pre_p
);
5716 *expr_p
= NULL_TREE
;
5720 /* If the value being copied is of variable width, compute the length
5721 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5722 before gimplifying any of the operands so that we can resolve any
5723 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5724 the size of the expression to be copied, not of the destination, so
5725 that is what we must do here. */
5726 maybe_with_size_expr (from_p
);
5728 /* As a special case, we have to temporarily allow for assignments
5729 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5730 a toplevel statement, when gimplifying the GENERIC expression
5731 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5732 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5734 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5735 prevent gimplify_expr from trying to create a new temporary for
5736 foo's LHS, we tell it that it should only gimplify until it
5737 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5738 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5739 and all we need to do here is set 'a' to be its LHS. */
5741 /* Gimplify the RHS first for C++17 and bug 71104. */
5742 gimple_predicate initial_pred
= initial_rhs_predicate_for (*to_p
);
5743 ret
= gimplify_expr (from_p
, pre_p
, post_p
, initial_pred
, fb_rvalue
);
5744 if (ret
== GS_ERROR
)
5747 /* Then gimplify the LHS. */
5748 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5749 twice we have to make sure to gimplify into non-SSA as otherwise
5750 the abnormal edge added later will make those defs not dominate
5752 ??? Technically this applies only to the registers used in the
5753 resulting non-register *TO_P. */
5754 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
5756 && TREE_CODE (*from_p
) == CALL_EXPR
5757 && call_expr_flags (*from_p
) & ECF_RETURNS_TWICE
)
5758 gimplify_ctxp
->into_ssa
= false;
5759 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5760 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
5761 if (ret
== GS_ERROR
)
5764 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5765 guess for the predicate was wrong. */
5766 gimple_predicate final_pred
= rhs_predicate_for (*to_p
);
5767 if (final_pred
!= initial_pred
)
5769 ret
= gimplify_expr (from_p
, pre_p
, post_p
, final_pred
, fb_rvalue
);
5770 if (ret
== GS_ERROR
)
5774 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5775 size as argument to the call. */
5776 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5778 tree call
= TREE_OPERAND (*from_p
, 0);
5779 tree vlasize
= TREE_OPERAND (*from_p
, 1);
5781 if (TREE_CODE (call
) == CALL_EXPR
5782 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
5784 int nargs
= call_expr_nargs (call
);
5785 tree type
= TREE_TYPE (call
);
5786 tree ap
= CALL_EXPR_ARG (call
, 0);
5787 tree tag
= CALL_EXPR_ARG (call
, 1);
5788 tree aptag
= CALL_EXPR_ARG (call
, 2);
5789 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
5793 TREE_OPERAND (*from_p
, 0) = newcall
;
5797 /* Now see if the above changed *from_p to something we handle specially. */
5798 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5800 if (ret
!= GS_UNHANDLED
)
5803 /* If we've got a variable sized assignment between two lvalues (i.e. does
5804 not involve a call), then we can make things a bit more straightforward
5805 by converting the assignment to memcpy or memset. */
5806 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5808 tree from
= TREE_OPERAND (*from_p
, 0);
5809 tree size
= TREE_OPERAND (*from_p
, 1);
5811 if (TREE_CODE (from
) == CONSTRUCTOR
)
5812 return gimplify_modify_expr_to_memset (expr_p
, size
, want_value
, pre_p
);
5814 if (is_gimple_addressable (from
))
5817 return gimplify_modify_expr_to_memcpy (expr_p
, size
, want_value
,
5822 /* Transform partial stores to non-addressable complex variables into
5823 total stores. This allows us to use real instead of virtual operands
5824 for these variables, which improves optimization. */
5825 if ((TREE_CODE (*to_p
) == REALPART_EXPR
5826 || TREE_CODE (*to_p
) == IMAGPART_EXPR
)
5827 && is_gimple_reg (TREE_OPERAND (*to_p
, 0)))
5828 return gimplify_modify_expr_complex_part (expr_p
, pre_p
, want_value
);
5830 /* Try to alleviate the effects of the gimplification creating artificial
5831 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5832 make sure not to create DECL_DEBUG_EXPR links across functions. */
5833 if (!gimplify_ctxp
->into_ssa
5835 && DECL_IGNORED_P (*from_p
)
5837 && !DECL_IGNORED_P (*to_p
)
5838 && decl_function_context (*to_p
) == current_function_decl
5839 && decl_function_context (*from_p
) == current_function_decl
)
5841 if (!DECL_NAME (*from_p
) && DECL_NAME (*to_p
))
5843 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p
)));
5844 DECL_HAS_DEBUG_EXPR_P (*from_p
) = 1;
5845 SET_DECL_DEBUG_EXPR (*from_p
, *to_p
);
5848 if (want_value
&& TREE_THIS_VOLATILE (*to_p
))
5849 *from_p
= get_initialized_tmp_var (*from_p
, pre_p
, post_p
);
5851 if (TREE_CODE (*from_p
) == CALL_EXPR
)
5853 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5854 instead of a GIMPLE_ASSIGN. */
5856 if (CALL_EXPR_FN (*from_p
) == NULL_TREE
)
5858 /* Gimplify internal functions created in the FEs. */
5859 int nargs
= call_expr_nargs (*from_p
), i
;
5860 enum internal_fn ifn
= CALL_EXPR_IFN (*from_p
);
5861 auto_vec
<tree
> vargs (nargs
);
5863 for (i
= 0; i
< nargs
; i
++)
5865 gimplify_arg (&CALL_EXPR_ARG (*from_p
, i
), pre_p
,
5866 EXPR_LOCATION (*from_p
));
5867 vargs
.quick_push (CALL_EXPR_ARG (*from_p
, i
));
5869 call_stmt
= gimple_build_call_internal_vec (ifn
, vargs
);
5870 gimple_call_set_nothrow (call_stmt
, TREE_NOTHROW (*from_p
));
5871 gimple_set_location (call_stmt
, EXPR_LOCATION (*expr_p
));
5875 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*from_p
));
5876 CALL_EXPR_FN (*from_p
) = TREE_OPERAND (CALL_EXPR_FN (*from_p
), 0);
5877 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p
));
5878 tree fndecl
= get_callee_fndecl (*from_p
);
5880 && fndecl_built_in_p (fndecl
, BUILT_IN_EXPECT
)
5881 && call_expr_nargs (*from_p
) == 3)
5882 call_stmt
= gimple_build_call_internal (IFN_BUILTIN_EXPECT
, 3,
5883 CALL_EXPR_ARG (*from_p
, 0),
5884 CALL_EXPR_ARG (*from_p
, 1),
5885 CALL_EXPR_ARG (*from_p
, 2));
5888 call_stmt
= gimple_build_call_from_tree (*from_p
, fnptrtype
);
5891 notice_special_calls (call_stmt
);
5892 if (!gimple_call_noreturn_p (call_stmt
) || !should_remove_lhs_p (*to_p
))
5893 gimple_call_set_lhs (call_stmt
, *to_p
);
5894 else if (TREE_CODE (*to_p
) == SSA_NAME
)
5895 /* The above is somewhat premature, avoid ICEing later for a
5896 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5897 ??? This doesn't make it a default-def. */
5898 SSA_NAME_DEF_STMT (*to_p
) = gimple_build_nop ();
5904 assign
= gimple_build_assign (*to_p
, *from_p
);
5905 gimple_set_location (assign
, EXPR_LOCATION (*expr_p
));
5906 if (COMPARISON_CLASS_P (*from_p
))
5907 gimple_set_no_warning (assign
, TREE_NO_WARNING (*from_p
));
5910 if (gimplify_ctxp
->into_ssa
&& is_gimple_reg (*to_p
))
5912 /* We should have got an SSA name from the start. */
5913 gcc_assert (TREE_CODE (*to_p
) == SSA_NAME
5914 || ! gimple_in_ssa_p (cfun
));
5917 gimplify_seq_add_stmt (pre_p
, assign
);
5918 gsi
= gsi_last (*pre_p
);
5919 maybe_fold_stmt (&gsi
);
5923 *expr_p
= TREE_THIS_VOLATILE (*to_p
) ? *from_p
: unshare_expr (*to_p
);
5932 /* Gimplify a comparison between two variable-sized objects. Do this
5933 with a call to BUILT_IN_MEMCMP. */
5935 static enum gimplify_status
5936 gimplify_variable_sized_compare (tree
*expr_p
)
5938 location_t loc
= EXPR_LOCATION (*expr_p
);
5939 tree op0
= TREE_OPERAND (*expr_p
, 0);
5940 tree op1
= TREE_OPERAND (*expr_p
, 1);
5941 tree t
, arg
, dest
, src
, expr
;
5943 arg
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
5944 arg
= unshare_expr (arg
);
5945 arg
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg
, op0
);
5946 src
= build_fold_addr_expr_loc (loc
, op1
);
5947 dest
= build_fold_addr_expr_loc (loc
, op0
);
5948 t
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
5949 t
= build_call_expr_loc (loc
, t
, 3, dest
, src
, arg
);
5952 = build2 (TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), t
, integer_zero_node
);
5953 SET_EXPR_LOCATION (expr
, loc
);
5959 /* Gimplify a comparison between two aggregate objects of integral scalar
5960 mode as a comparison between the bitwise equivalent scalar values. */
5962 static enum gimplify_status
5963 gimplify_scalar_mode_aggregate_compare (tree
*expr_p
)
5965 location_t loc
= EXPR_LOCATION (*expr_p
);
5966 tree op0
= TREE_OPERAND (*expr_p
, 0);
5967 tree op1
= TREE_OPERAND (*expr_p
, 1);
5969 tree type
= TREE_TYPE (op0
);
5970 tree scalar_type
= lang_hooks
.types
.type_for_mode (TYPE_MODE (type
), 1);
5972 op0
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op0
);
5973 op1
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op1
);
5976 = fold_build2_loc (loc
, TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), op0
, op1
);
5981 /* Gimplify an expression sequence. This function gimplifies each
5982 expression and rewrites the original expression with the last
5983 expression of the sequence in GIMPLE form.
5985 PRE_P points to the list where the side effects for all the
5986 expressions in the sequence will be emitted.
5988 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5990 static enum gimplify_status
5991 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
5997 tree
*sub_p
= &TREE_OPERAND (t
, 0);
5999 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
6000 gimplify_compound_expr (sub_p
, pre_p
, false);
6002 gimplify_stmt (sub_p
, pre_p
);
6004 t
= TREE_OPERAND (t
, 1);
6006 while (TREE_CODE (t
) == COMPOUND_EXPR
);
6013 gimplify_stmt (expr_p
, pre_p
);
6018 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6019 gimplify. After gimplification, EXPR_P will point to a new temporary
6020 that holds the original value of the SAVE_EXPR node.
6022 PRE_P points to the list where side effects that must happen before
6023 *EXPR_P should be stored. */
6025 static enum gimplify_status
6026 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6028 enum gimplify_status ret
= GS_ALL_DONE
;
6031 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
6032 val
= TREE_OPERAND (*expr_p
, 0);
6034 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6035 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
6037 /* The operand may be a void-valued expression. It is
6038 being executed only for its side-effects. */
6039 if (TREE_TYPE (val
) == void_type_node
)
6041 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
6042 is_gimple_stmt
, fb_none
);
6046 /* The temporary may not be an SSA name as later abnormal and EH
6047 control flow may invalidate use/def domination. When in SSA
6048 form then assume there are no such issues and SAVE_EXPRs only
6049 appear via GENERIC foldings. */
6050 val
= get_initialized_tmp_var (val
, pre_p
, post_p
,
6051 gimple_in_ssa_p (cfun
));
6053 TREE_OPERAND (*expr_p
, 0) = val
;
6054 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
6062 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6069 PRE_P points to the list where side effects that must happen before
6070 *EXPR_P should be stored.
6072 POST_P points to the list where side effects that must happen after
6073 *EXPR_P should be stored. */
6075 static enum gimplify_status
6076 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6078 tree expr
= *expr_p
;
6079 tree op0
= TREE_OPERAND (expr
, 0);
6080 enum gimplify_status ret
;
6081 location_t loc
= EXPR_LOCATION (*expr_p
);
6083 switch (TREE_CODE (op0
))
6087 /* Check if we are dealing with an expression of the form '&*ptr'.
6088 While the front end folds away '&*ptr' into 'ptr', these
6089 expressions may be generated internally by the compiler (e.g.,
6090 builtins like __builtin_va_end). */
6091 /* Caution: the silent array decomposition semantics we allow for
6092 ADDR_EXPR means we can't always discard the pair. */
6093 /* Gimplification of the ADDR_EXPR operand may drop
6094 cv-qualification conversions, so make sure we add them if
6097 tree op00
= TREE_OPERAND (op0
, 0);
6098 tree t_expr
= TREE_TYPE (expr
);
6099 tree t_op00
= TREE_TYPE (op00
);
6101 if (!useless_type_conversion_p (t_expr
, t_op00
))
6102 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
6108 case VIEW_CONVERT_EXPR
:
6109 /* Take the address of our operand and then convert it to the type of
6112 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6113 all clear. The impact of this transformation is even less clear. */
6115 /* If the operand is a useless conversion, look through it. Doing so
6116 guarantees that the ADDR_EXPR and its operand will remain of the
6118 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
6119 op0
= TREE_OPERAND (op0
, 0);
6121 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
6122 build_fold_addr_expr_loc (loc
,
6123 TREE_OPERAND (op0
, 0)));
6128 if (integer_zerop (TREE_OPERAND (op0
, 1)))
6129 goto do_indirect_ref
;
6134 /* If we see a call to a declared builtin or see its address
6135 being taken (we can unify those cases here) then we can mark
6136 the builtin for implicit generation by GCC. */
6137 if (TREE_CODE (op0
) == FUNCTION_DECL
6138 && fndecl_built_in_p (op0
, BUILT_IN_NORMAL
)
6139 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
6140 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
6142 /* We use fb_either here because the C frontend sometimes takes
6143 the address of a call that returns a struct; see
6144 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6145 the implied temporary explicit. */
6147 /* Make the operand addressable. */
6148 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
6149 is_gimple_addressable
, fb_either
);
6150 if (ret
== GS_ERROR
)
6153 /* Then mark it. Beware that it may not be possible to do so directly
6154 if a temporary has been created by the gimplification. */
6155 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
6157 op0
= TREE_OPERAND (expr
, 0);
6159 /* For various reasons, the gimplification of the expression
6160 may have made a new INDIRECT_REF. */
6161 if (TREE_CODE (op0
) == INDIRECT_REF
6162 || (TREE_CODE (op0
) == MEM_REF
6163 && integer_zerop (TREE_OPERAND (op0
, 1))))
6164 goto do_indirect_ref
;
6166 mark_addressable (TREE_OPERAND (expr
, 0));
6168 /* The FEs may end up building ADDR_EXPRs early on a decl with
6169 an incomplete type. Re-build ADDR_EXPRs in canonical form
6171 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
6172 *expr_p
= build_fold_addr_expr (op0
);
6174 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6175 recompute_tree_invariant_for_addr_expr (*expr_p
);
6177 /* If we re-built the ADDR_EXPR add a conversion to the original type
6179 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
6180 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
6188 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6189 value; output operands should be a gimple lvalue. */
6191 static enum gimplify_status
6192 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6196 const char **oconstraints
;
6199 const char *constraint
;
6200 bool allows_mem
, allows_reg
, is_inout
;
6201 enum gimplify_status ret
, tret
;
6203 vec
<tree
, va_gc
> *inputs
;
6204 vec
<tree
, va_gc
> *outputs
;
6205 vec
<tree
, va_gc
> *clobbers
;
6206 vec
<tree
, va_gc
> *labels
;
6210 noutputs
= list_length (ASM_OUTPUTS (expr
));
6211 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
6219 link_next
= NULL_TREE
;
6220 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
6223 size_t constraint_len
;
6225 link_next
= TREE_CHAIN (link
);
6229 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6230 constraint_len
= strlen (constraint
);
6231 if (constraint_len
== 0)
6234 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
6235 &allows_mem
, &allows_reg
, &is_inout
);
6242 /* If we can't make copies, we can only accept memory.
6243 Similarly for VLAs. */
6244 tree outtype
= TREE_TYPE (TREE_VALUE (link
));
6245 if (outtype
!= error_mark_node
6246 && (TREE_ADDRESSABLE (outtype
)
6247 || !COMPLETE_TYPE_P (outtype
)
6248 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype
))))
6254 error ("impossible constraint in %<asm%>");
6255 error ("non-memory output %d must stay in memory", i
);
6260 if (!allows_reg
&& allows_mem
)
6261 mark_addressable (TREE_VALUE (link
));
6263 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6264 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6265 fb_lvalue
| fb_mayfail
);
6266 if (tret
== GS_ERROR
)
6268 error ("invalid lvalue in %<asm%> output %d", i
);
6272 /* If the constraint does not allow memory make sure we gimplify
6273 it to a register if it is not already but its base is. This
6274 happens for complex and vector components. */
6277 tree op
= TREE_VALUE (link
);
6278 if (! is_gimple_val (op
)
6279 && is_gimple_reg_type (TREE_TYPE (op
))
6280 && is_gimple_reg (get_base_address (op
)))
6282 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6286 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6287 tem
, unshare_expr (op
));
6288 gimplify_and_add (ass
, pre_p
);
6290 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6291 gimplify_and_add (ass
, post_p
);
6293 TREE_VALUE (link
) = tem
;
6298 vec_safe_push (outputs
, link
);
6299 TREE_CHAIN (link
) = NULL_TREE
;
6303 /* An input/output operand. To give the optimizers more
6304 flexibility, split it into separate input and output
6307 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6310 /* Turn the in/out constraint into an output constraint. */
6311 char *p
= xstrdup (constraint
);
6313 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
6315 /* And add a matching input constraint. */
6318 sprintf (buf
, "%u", i
);
6320 /* If there are multiple alternatives in the constraint,
6321 handle each of them individually. Those that allow register
6322 will be replaced with operand number, the others will stay
6324 if (strchr (p
, ',') != NULL
)
6326 size_t len
= 0, buflen
= strlen (buf
);
6327 char *beg
, *end
, *str
, *dst
;
6331 end
= strchr (beg
, ',');
6333 end
= strchr (beg
, '\0');
6334 if ((size_t) (end
- beg
) < buflen
)
6337 len
+= end
- beg
+ 1;
6344 str
= (char *) alloca (len
);
6345 for (beg
= p
+ 1, dst
= str
;;)
6348 bool mem_p
, reg_p
, inout_p
;
6350 end
= strchr (beg
, ',');
6355 parse_output_constraint (&tem
, i
, 0, 0,
6356 &mem_p
, ®_p
, &inout_p
);
6361 memcpy (dst
, buf
, buflen
);
6370 memcpy (dst
, beg
, len
);
6379 input
= build_string (dst
- str
, str
);
6382 input
= build_string (strlen (buf
), buf
);
6385 input
= build_string (constraint_len
- 1, constraint
+ 1);
6389 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
6390 unshare_expr (TREE_VALUE (link
)));
6391 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
6395 link_next
= NULL_TREE
;
6396 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
6398 link_next
= TREE_CHAIN (link
);
6399 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6400 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
6401 oconstraints
, &allows_mem
, &allows_reg
);
6403 /* If we can't make copies, we can only accept memory. */
6404 tree intype
= TREE_TYPE (TREE_VALUE (link
));
6405 if (intype
!= error_mark_node
6406 && (TREE_ADDRESSABLE (intype
)
6407 || !COMPLETE_TYPE_P (intype
)
6408 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype
))))
6414 error ("impossible constraint in %<asm%>");
6415 error ("non-memory input %d must stay in memory", i
);
6420 /* If the operand is a memory input, it should be an lvalue. */
6421 if (!allows_reg
&& allows_mem
)
6423 tree inputv
= TREE_VALUE (link
);
6424 STRIP_NOPS (inputv
);
6425 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
6426 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
6427 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
6428 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
6429 || TREE_CODE (inputv
) == MODIFY_EXPR
)
6430 TREE_VALUE (link
) = error_mark_node
;
6431 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6432 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
6433 if (tret
!= GS_ERROR
)
6435 /* Unlike output operands, memory inputs are not guaranteed
6436 to be lvalues by the FE, and while the expressions are
6437 marked addressable there, if it is e.g. a statement
6438 expression, temporaries in it might not end up being
6439 addressable. They might be already used in the IL and thus
6440 it is too late to make them addressable now though. */
6441 tree x
= TREE_VALUE (link
);
6442 while (handled_component_p (x
))
6443 x
= TREE_OPERAND (x
, 0);
6444 if (TREE_CODE (x
) == MEM_REF
6445 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
6446 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
6448 || TREE_CODE (x
) == PARM_DECL
6449 || TREE_CODE (x
) == RESULT_DECL
)
6450 && !TREE_ADDRESSABLE (x
)
6451 && is_gimple_reg (x
))
6453 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
6455 "memory input %d is not directly addressable",
6457 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
6460 mark_addressable (TREE_VALUE (link
));
6461 if (tret
== GS_ERROR
)
6463 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
6464 "memory input %d is not directly addressable", i
);
6470 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6471 is_gimple_asm_val
, fb_rvalue
);
6472 if (tret
== GS_ERROR
)
6476 TREE_CHAIN (link
) = NULL_TREE
;
6477 vec_safe_push (inputs
, link
);
6480 link_next
= NULL_TREE
;
6481 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
6483 link_next
= TREE_CHAIN (link
);
6484 TREE_CHAIN (link
) = NULL_TREE
;
6485 vec_safe_push (clobbers
, link
);
6488 link_next
= NULL_TREE
;
6489 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
6491 link_next
= TREE_CHAIN (link
);
6492 TREE_CHAIN (link
) = NULL_TREE
;
6493 vec_safe_push (labels
, link
);
6496 /* Do not add ASMs with errors to the gimple IL stream. */
6497 if (ret
!= GS_ERROR
)
6499 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
6500 inputs
, outputs
, clobbers
, labels
);
6502 gimple_asm_set_volatile (stmt
, ASM_VOLATILE_P (expr
) || noutputs
== 0);
6503 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
6504 gimple_asm_set_inline (stmt
, ASM_INLINE_P (expr
));
6506 gimplify_seq_add_stmt (pre_p
, stmt
);
6512 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6513 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6514 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6515 return to this function.
6517 FIXME should we complexify the prequeue handling instead? Or use flags
6518 for all the cleanups and let the optimizer tighten them up? The current
6519 code seems pretty fragile; it will break on a cleanup within any
6520 non-conditional nesting. But any such nesting would be broken, anyway;
6521 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6522 and continues out of it. We can do that at the RTL level, though, so
6523 having an optimizer to tighten up try/finally regions would be a Good
6526 static enum gimplify_status
6527 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
6529 gimple_stmt_iterator iter
;
6530 gimple_seq body_sequence
= NULL
;
6532 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
6534 /* We only care about the number of conditions between the innermost
6535 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6536 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6537 int old_conds
= gimplify_ctxp
->conditions
;
6538 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
6539 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
6540 gimplify_ctxp
->conditions
= 0;
6541 gimplify_ctxp
->conditional_cleanups
= NULL
;
6542 gimplify_ctxp
->in_cleanup_point_expr
= true;
6544 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
6546 gimplify_ctxp
->conditions
= old_conds
;
6547 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
6548 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
6550 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
6552 gimple
*wce
= gsi_stmt (iter
);
6554 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
6556 if (gsi_one_before_end_p (iter
))
6558 /* Note that gsi_insert_seq_before and gsi_remove do not
6559 scan operands, unlike some other sequence mutators. */
6560 if (!gimple_wce_cleanup_eh_only (wce
))
6561 gsi_insert_seq_before_without_update (&iter
,
6562 gimple_wce_cleanup (wce
),
6564 gsi_remove (&iter
, true);
6571 enum gimple_try_flags kind
;
6573 if (gimple_wce_cleanup_eh_only (wce
))
6574 kind
= GIMPLE_TRY_CATCH
;
6576 kind
= GIMPLE_TRY_FINALLY
;
6577 seq
= gsi_split_seq_after (iter
);
6579 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
6580 /* Do not use gsi_replace here, as it may scan operands.
6581 We want to do a simple structural modification only. */
6582 gsi_set_stmt (&iter
, gtry
);
6583 iter
= gsi_start (gtry
->eval
);
6590 gimplify_seq_add_seq (pre_p
, body_sequence
);
6603 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6604 is the cleanup action required. EH_ONLY is true if the cleanup should
6605 only be executed if an exception is thrown, not on normal exit.
6606 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6607 only valid for clobbers. */
6610 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
,
6611 bool force_uncond
= false)
6614 gimple_seq cleanup_stmts
= NULL
;
6616 /* Errors can result in improperly nested cleanups. Which results in
6617 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6621 if (gimple_conditional_context ())
6623 /* If we're in a conditional context, this is more complex. We only
6624 want to run the cleanup if we actually ran the initialization that
6625 necessitates it, but we want to run it after the end of the
6626 conditional context. So we wrap the try/finally around the
6627 condition and use a flag to determine whether or not to actually
6628 run the destructor. Thus
6632 becomes (approximately)
6636 if (test) { A::A(temp); flag = 1; val = f(temp); }
6639 if (flag) A::~A(temp);
6645 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6646 wce
= gimple_build_wce (cleanup_stmts
);
6647 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6651 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
6652 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
6653 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
6655 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
6656 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6657 wce
= gimple_build_wce (cleanup_stmts
);
6659 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
6660 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6661 gimplify_seq_add_stmt (pre_p
, ftrue
);
6663 /* Because of this manipulation, and the EH edges that jump
6664 threading cannot redirect, the temporary (VAR) will appear
6665 to be used uninitialized. Don't warn. */
6666 TREE_NO_WARNING (var
) = 1;
6671 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6672 wce
= gimple_build_wce (cleanup_stmts
);
6673 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
6674 gimplify_seq_add_stmt (pre_p
, wce
);
6678 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6680 static enum gimplify_status
6681 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6683 tree targ
= *expr_p
;
6684 tree temp
= TARGET_EXPR_SLOT (targ
);
6685 tree init
= TARGET_EXPR_INITIAL (targ
);
6686 enum gimplify_status ret
;
6688 bool unpoison_empty_seq
= false;
6689 gimple_stmt_iterator unpoison_it
;
6693 tree cleanup
= NULL_TREE
;
6695 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6696 to the temps list. Handle also variable length TARGET_EXPRs. */
6697 if (!poly_int_tree_p (DECL_SIZE (temp
)))
6699 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
6700 gimplify_type_sizes (TREE_TYPE (temp
), pre_p
);
6701 gimplify_vla_decl (temp
, pre_p
);
6705 /* Save location where we need to place unpoisoning. It's possible
6706 that a variable will be converted to needs_to_live_in_memory. */
6707 unpoison_it
= gsi_last (*pre_p
);
6708 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
6710 gimple_add_tmp_var (temp
);
6713 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6714 expression is supposed to initialize the slot. */
6715 if (VOID_TYPE_P (TREE_TYPE (init
)))
6716 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6719 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
6721 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6723 ggc_free (init_expr
);
6725 if (ret
== GS_ERROR
)
6727 /* PR c++/28266 Make sure this is expanded only once. */
6728 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6732 gimplify_and_add (init
, pre_p
);
6734 /* If needed, push the cleanup for the temp. */
6735 if (TARGET_EXPR_CLEANUP (targ
))
6737 if (CLEANUP_EH_ONLY (targ
))
6738 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
6739 CLEANUP_EH_ONLY (targ
), pre_p
);
6741 cleanup
= TARGET_EXPR_CLEANUP (targ
);
6744 /* Add a clobber for the temporary going out of scope, like
6745 gimplify_bind_expr. */
6746 if (gimplify_ctxp
->in_cleanup_point_expr
6747 && needs_to_live_in_memory (temp
))
6749 if (flag_stack_reuse
== SR_ALL
)
6751 tree clobber
= build_clobber (TREE_TYPE (temp
));
6752 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
6753 gimple_push_cleanup (temp
, clobber
, false, pre_p
, true);
6755 if (asan_poisoned_variables
6756 && DECL_ALIGN (temp
) <= MAX_SUPPORTED_STACK_ALIGNMENT
6757 && !TREE_STATIC (temp
)
6758 && dbg_cnt (asan_use_after_scope
)
6759 && !gimplify_omp_ctxp
)
6761 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
6764 if (unpoison_empty_seq
)
6765 unpoison_it
= gsi_start (*pre_p
);
6767 asan_poison_variable (temp
, false, &unpoison_it
,
6768 unpoison_empty_seq
);
6769 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
6774 gimple_push_cleanup (temp
, cleanup
, false, pre_p
);
6776 /* Only expand this once. */
6777 TREE_OPERAND (targ
, 3) = init
;
6778 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6781 /* We should have expanded this before. */
6782 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
6788 /* Gimplification of expression trees. */
6790 /* Gimplify an expression which appears at statement context. The
6791 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6792 NULL, a new sequence is allocated.
6794 Return true if we actually added a statement to the queue. */
6797 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
6799 gimple_seq_node last
;
6801 last
= gimple_seq_last (*seq_p
);
6802 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
6803 return last
!= gimple_seq_last (*seq_p
);
6806 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6807 to CTX. If entries already exist, force them to be some flavor of private.
6808 If there is no enclosing parallel, do nothing. */
6811 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
6815 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
6820 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6823 if (n
->value
& GOVD_SHARED
)
6824 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
6825 else if (n
->value
& GOVD_MAP
)
6826 n
->value
|= GOVD_MAP_TO_ONLY
;
6830 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
6832 if (ctx
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
6833 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6835 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
6837 else if (ctx
->region_type
!= ORT_WORKSHARE
6838 && ctx
->region_type
!= ORT_TASKGROUP
6839 && ctx
->region_type
!= ORT_SIMD
6840 && ctx
->region_type
!= ORT_ACC
6841 && !(ctx
->region_type
& ORT_TARGET_DATA
))
6842 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6844 ctx
= ctx
->outer_context
;
6849 /* Similarly for each of the type sizes of TYPE. */
6852 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
6854 if (type
== NULL
|| type
== error_mark_node
)
6856 type
= TYPE_MAIN_VARIANT (type
);
6858 if (ctx
->privatized_types
->add (type
))
6861 switch (TREE_CODE (type
))
6867 case FIXED_POINT_TYPE
:
6868 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
6869 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
6873 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6874 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
6879 case QUAL_UNION_TYPE
:
6882 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
6883 if (TREE_CODE (field
) == FIELD_DECL
)
6885 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
6886 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
6892 case REFERENCE_TYPE
:
6893 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6900 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
6901 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
6902 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
/* NOTE(review): the text below is a line-fragmented extraction of
   omp_add_variable.  The leading numeric tokens are original file line
   numbers, and several original lines (braces, `else` arms, declarations
   such as the splay_tree_node N, `return`s) are absent from this view.
   Tokens are preserved verbatim; only comments have been added.  */
6905 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6908 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
6911 unsigned int nflags
;
/* Bail out for erroneous decls or when not inside an OMP region
   (presumably an early `return;` follows — the line is missing here).  */
6914 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
6917 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6918 there are constructors involved somewhere. Exception is a shared clause,
6919 there is nothing privatized in that case. */
6920 if ((flags
& GOVD_SHARED
) == 0
6921 && (TREE_ADDRESSABLE (TREE_TYPE (decl
))
6922 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
))))
/* Look up any existing data-sharing entry for DECL in this context.  */
6925 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6926 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
6928 /* We shouldn't be re-adding the decl with the same data
6930 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
6931 nflags
= n
->value
| flags
;
6932 /* The only combination of data sharing classes we should see is
6933 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6934 reduction variables to be used in data sharing clauses. */
6935 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
6936 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
6937 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
6938 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
6943 /* When adding a variable-sized variable, we have to handle all sorts
6944 of additional bits of data: the pointer replacement variable, and
6945 the parameters of the type. */
6946 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
6948 /* Add the pointer replacement variable as PRIVATE if the variable
6949 replacement is private, else FIRSTPRIVATE since we'll need the
6950 address of the original variable either for SHARED, or for the
6951 copy into or out of the context. */
6952 if (!(flags
& GOVD_LOCAL
) && ctx
->region_type
!= ORT_TASKGROUP
)
6954 if (flags
& GOVD_MAP
)
6955 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
6956 else if (flags
& GOVD_PRIVATE
)
6957 nflags
= GOVD_PRIVATE
;
6958 else if (((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
6959 && (flags
& GOVD_FIRSTPRIVATE
))
6960 || (ctx
->region_type
== ORT_TARGET_DATA
6961 && (flags
& GOVD_DATA_SHARE_CLASS
) == 0))
6962 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
/* Fallback arm (its `else` line is missing from this extraction).  */
6964 nflags
= GOVD_FIRSTPRIVATE
;
6965 nflags
|= flags
& GOVD_SEEN
;
/* The VLA's DECL_VALUE_EXPR is *ptr; recurse on the pointer decl.  */
6966 t
= DECL_VALUE_EXPR (decl
);
6967 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
6968 t
= TREE_OPERAND (t
, 0);
6969 gcc_assert (DECL_P (t
));
6970 omp_add_variable (ctx
, t
, nflags
);
6973 /* Add all of the variable and type parameters (which should have
6974 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6975 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
6976 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
6977 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
6979 /* The variable-sized variable itself is never SHARED, only some form
6980 of PRIVATE. The sharing would take place via the pointer variable
6981 which we remapped above. */
6982 if (flags
& GOVD_SHARED
)
6983 flags
= GOVD_SHARED
| GOVD_DEBUG_PRIVATE
6984 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
6986 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6987 alloca statement we generate for the variable, so make sure it
6988 is available. This isn't automatically needed for the SHARED
6989 case, since we won't be allocating local storage then.
6990 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6991 in this case omp_notice_variable will be called later
6992 on when it is gimplified. */
6993 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
6994 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
6995 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
6997 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
6998 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7000 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7002 /* Similar to the direct variable sized case above, we'll need the
7003 size of references being privatized. */
7004 if ((flags
& GOVD_SHARED
) == 0)
7006 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7008 omp_notice_variable (ctx
, t
, true);
/* Record the (possibly adjusted) FLAGS for DECL in this context.  */
7015 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
7017 /* For reductions clauses in OpenACC loop directives, by default create a
7018 copy clause on the enclosing parallel construct for carrying back the
7020 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
7022 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
/* NOTE(review): a loop over outer_ctx presumably surrounds what
   follows — its header line is missing from this extraction.  */
7025 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
7028 /* Ignore local variables and explicitly declared clauses. */
7029 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
7031 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
7033 /* According to the OpenACC spec, such a reduction variable
7034 should already have a copy map on a kernels construct,
7035 verify that here. */
7036 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
7037 && (n
->value
& GOVD_MAP
));
7039 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7041 /* Remove firstprivate and make it a copy map. */
7042 n
->value
&= ~GOVD_FIRSTPRIVATE
;
7043 n
->value
|= GOVD_MAP
;
7046 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7048 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
7049 GOVD_MAP
| GOVD_SEEN
);
7052 outer_ctx
= outer_ctx
->outer_context
;
/* NOTE(review): line-fragmented extraction of
   omp_notice_threadprivate_variable; the numeric tokens are original line
   numbers and several original lines are missing (including the DECL2
   parameter line and NULL checks on the lookups).  Tokens preserved
   verbatim; comments only added.  */
7057 /* Notice a threadprivate variable DECL used in OMP context CTX.
7058 This just prints out diagnostics about threadprivate variable uses
7059 in untied tasks. If DECL2 is non-NULL, prevent this warning
7060 on that variable. */
7063 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
7067 struct gimplify_omp_ctx
*octx
;
/* Diagnose use inside target regions or order(concurrent) regions,
   walking every enclosing context.  */
7069 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
7070 if ((octx
->region_type
& ORT_TARGET
) != 0
7071 || octx
->order_concurrent
)
7073 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
7076 if (octx
->order_concurrent
)
7078 error ("threadprivate variable %qE used in a region with"
7079 " %<order(concurrent)%> clause", DECL_NAME (decl
));
7080 error_at (octx
->location
, "enclosing region");
7084 error ("threadprivate variable %qE used in target region",
7086 error_at (octx
->location
, "enclosing target region");
/* Remember the decl (and DECL2, if any) so the error isn't repeated.  */
7088 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
7091 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
/* Only untied tasks get the diagnostic below (a `return false;` for
   other region types is presumably among the missing lines).  */
7094 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
7096 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7099 error ("threadprivate variable %qE used in untied task",
7101 error_at (ctx
->location
, "enclosing task");
7102 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
7105 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
7109 /* Return true if global var DECL is device resident. */
7112 device_resident_p (tree decl
)
7114 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
7119 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
7121 tree c
= TREE_VALUE (t
);
7122 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
7129 /* Return true if DECL has an ACC DECLARE attribute. */
7132 is_oacc_declared (tree decl
)
7134 tree t
= TREE_CODE (decl
) == MEM_REF
? TREE_OPERAND (decl
, 0) : decl
;
7135 tree declared
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t
));
7136 return declared
!= NULL_TREE
;
/* NOTE(review): line-fragmented extraction of omp_default_clause; numeric
   tokens are original line numbers, and many original lines are missing
   (case bodies assigning `rtype`, `break`s, the final `return flags;`).
   Tokens preserved verbatim; comments only added.  */
7139 /* Determine outer default flags for DECL mentioned in an OMP region
7140 but not declared in an enclosing clause.
7142 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7143 remapped firstprivate instead of shared. To some extent this is
7144 addressed in omp_firstprivatize_type_sizes, but not
7148 omp_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
,
7149 bool in_code
, unsigned flags
)
7151 enum omp_clause_default_kind default_kind
= ctx
->default_kind
;
7152 enum omp_clause_default_kind kind
;
/* A front-end-predetermined sharing kind overrides the region default;
   constant-pool statics are always shared.  */
7154 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
7155 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
7156 default_kind
= kind
;
7157 else if (VAR_P (decl
) && TREE_STATIC (decl
) && DECL_IN_CONSTANT_POOL (decl
))
7158 default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
7160 switch (default_kind
)
7162 case OMP_CLAUSE_DEFAULT_NONE
:
/* default(none): pick the construct name for the diagnostic (the
   lines assigning `rtype` per region type are missing here).  */
7166 if (ctx
->region_type
& ORT_PARALLEL
)
7168 else if ((ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
)
7170 else if (ctx
->region_type
& ORT_TASK
)
7172 else if (ctx
->region_type
& ORT_TEAMS
)
7177 error ("%qE not specified in enclosing %qs",
7178 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rtype
);
7179 error_at (ctx
->location
, "enclosing %qs", rtype
);
7182 case OMP_CLAUSE_DEFAULT_SHARED
:
7183 flags
|= GOVD_SHARED
;
7185 case OMP_CLAUSE_DEFAULT_PRIVATE
:
7186 flags
|= GOVD_PRIVATE
;
7188 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
7189 flags
|= GOVD_FIRSTPRIVATE
;
7191 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
7192 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7193 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
/* Implicit task rules: consult outer contexts to decide between
   firstprivate and shared.  */
7194 if (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
)
7196 omp_notice_variable (octx
, decl
, in_code
);
7197 for (; octx
; octx
= octx
->outer_context
)
7201 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
7202 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0
7203 && (n2
== NULL
|| (n2
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7205 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
7207 flags
|= GOVD_FIRSTPRIVATE
;
7210 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
7212 flags
|= GOVD_SHARED
;
/* Fall-back when no outer context decided: locals/parms of the
   current function become firstprivate, everything else shared.  */
7218 if (TREE_CODE (decl
) == PARM_DECL
7219 || (!is_global_var (decl
)
7220 && DECL_CONTEXT (decl
) == current_function_decl
))
7221 flags
|= GOVD_FIRSTPRIVATE
;
7223 flags
|= GOVD_SHARED
;
/* NOTE(review): line-fragmented extraction of oacc_default_clause; numeric
   tokens are original line numbers, and several original lines are missing
   (computation of `on_device`, the `rkind` declaration, braces, `break`s,
   and the final return).  Tokens preserved verbatim; comments only
   added.  */
7235 /* Determine outer default flags for DECL mentioned in an OACC region
7236 but not declared in an enclosing clause. */
7239 oacc_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned flags
)
7242 bool on_device
= false;
7243 bool is_private
= false;
7244 bool declared
= is_oacc_declared (decl
);
7245 tree type
= TREE_TYPE (decl
);
/* For by-reference privatization look through the reference type.  */
7247 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7248 type
= TREE_TYPE (type
);
7250 /* For Fortran COMMON blocks, only used variables in those blocks are
7251 transfered and remapped. The block itself will have a private clause to
7252 avoid transfering the data twice.
7253 The hook evaluates to false by default. For a variable in Fortran's COMMON
7254 or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
7255 the variables in such a COMMON/EQUIVALENCE block shall be privatized not
7256 the whole block. For C++ and Fortran, it can also be true under certain
7257 other conditions, if DECL_HAS_VALUE_EXPR. */
7258 if (RECORD_OR_UNION_TYPE_P (type
))
7259 is_private
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
/* Device-resident globals on parallel/kernels get mapped to-only
   (presumably setting on_device too — those lines are missing).  */
7261 if ((ctx
->region_type
& (ORT_ACC_PARALLEL
| ORT_ACC_KERNELS
)) != 0
7262 && is_global_var (decl
)
7263 && device_resident_p (decl
)
7267 flags
|= GOVD_MAP_TO_ONLY
;
7270 switch (ctx
->region_type
)
7272 case ORT_ACC_KERNELS
:
7276 flags
|= GOVD_FIRSTPRIVATE
;
7277 else if (AGGREGATE_TYPE_P (type
))
7279 /* Aggregates default to 'present_or_copy', or 'present'. */
7280 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7283 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7286 /* Scalars default to 'copy'. */
7287 flags
|= GOVD_MAP
| GOVD_MAP_FORCE
;
7291 case ORT_ACC_PARALLEL
:
7292 case ORT_ACC_SERIAL
:
7293 rkind
= ctx
->region_type
== ORT_ACC_PARALLEL
? "parallel" : "serial";
7296 flags
|= GOVD_FIRSTPRIVATE
;
7297 else if (on_device
|| declared
)
7299 else if (AGGREGATE_TYPE_P (type
))
7301 /* Aggregates default to 'present_or_copy', or 'present'. */
7302 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7305 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7308 /* Scalars default to 'firstprivate'. */
7309 flags
|= GOVD_FIRSTPRIVATE
;
/* default(none)/default(present) diagnostics.  */
7317 if (DECL_ARTIFICIAL (decl
))
7318 ; /* We can get compiler-generated decls, and should not complain
7320 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_NONE
)
7322 error ("%qE not specified in enclosing OpenACC %qs construct",
7323 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rkind
);
7324 inform (ctx
->location
, "enclosing OpenACC %qs construct", rkind
);
7326 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_PRESENT
)
7327 ; /* Handled above. */
7329 gcc_checking_assert (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
);
/* NOTE(review): line-fragmented extraction of omp_notice_variable — the
   largest function in this chunk.  The numeric tokens are original line
   numbers; many original lines (declarations of `n`, `n2`, `gdmk`,
   `return` statements, `else` arms, braces) are missing from this view.
   Tokens preserved verbatim; comments only added.  */
7334 /* Record the fact that DECL was used within the OMP context CTX.
7335 IN_CODE is true when real code uses DECL, and false when we should
7336 merely emit default(none) errors. Return true if DECL is going to
7337 be remapped and thus DECL shouldn't be gimplified into its
7338 DECL_VALUE_EXPR (if any). */
7341 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
7344 unsigned flags
= in_code
? GOVD_SEEN
: 0;
7345 bool ret
= false, shared
;
7347 if (error_operand_p (decl
))
7350 if (ctx
->region_type
== ORT_NONE
)
7351 return lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7353 if (is_global_var (decl
))
7355 /* Threadprivate variables are predetermined. */
7356 if (DECL_THREAD_LOCAL_P (decl
))
7357 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
7359 if (DECL_HAS_VALUE_EXPR_P (decl
))
7361 if (ctx
->region_type
& ORT_ACC
)
7362 /* For OpenACC, defer expansion of value to avoid transfering
7363 privatized common block data instead of im-/explicitly transfered
7364 variables which are in common blocks. */
7368 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
7370 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
7371 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
/* Diagnostics for globals referenced from OpenACC `routine'
   functions without a matching declare directive.  */
7375 if (gimplify_omp_ctxp
->outer_context
== NULL
7377 && oacc_get_fn_attrib (current_function_decl
))
7379 location_t loc
= DECL_SOURCE_LOCATION (decl
);
7381 if (lookup_attribute ("omp declare target link",
7382 DECL_ATTRIBUTES (decl
)))
7385 "%qE with %<link%> clause used in %<routine%> function",
7389 else if (!lookup_attribute ("omp declare target",
7390 DECL_ATTRIBUTES (decl
)))
7393 "%qE requires a %<declare%> directive for use "
7394 "in a %<routine%> function", DECL_NAME (decl
));
7400 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
/* Target (OpenMP target / OpenACC compute) regions.  */
7401 if ((ctx
->region_type
& ORT_TARGET
) != 0)
7403 if (ctx
->region_type
& ORT_ACC
)
7404 /* For OpenACC, as remarked above, defer expansion. */
7409 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7412 unsigned nflags
= flags
;
7413 if ((ctx
->region_type
& ORT_ACC
) == 0)
/* OpenMP target: apply defaultmap rules, unless DECL is an
   offloadable "declare target" global.  */
7415 bool is_declare_target
= false;
7416 if (is_global_var (decl
)
7417 && varpool_node::get_create (decl
)->offloadable
)
7419 struct gimplify_omp_ctx
*octx
;
7420 for (octx
= ctx
->outer_context
;
7421 octx
; octx
= octx
->outer_context
)
7423 n
= splay_tree_lookup (octx
->variables
,
7424 (splay_tree_key
)decl
);
7426 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
7427 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7430 is_declare_target
= octx
== NULL
;
7432 if (!is_declare_target
)
/* Classify DECL for defaultmap: pointer / scalar / aggregate
   (the GDMK_SCALAR assignment line is missing here).  */
7435 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
7436 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
7437 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
7439 gdmk
= GDMK_POINTER
;
7440 else if (lang_hooks
.decls
.omp_scalar_p (decl
))
7443 gdmk
= GDMK_AGGREGATE
;
7444 if (ctx
->defaultmap
[gdmk
] == 0)
7446 tree d
= lang_hooks
.decls
.omp_report_decl (decl
);
7447 error ("%qE not specified in enclosing %<target%>",
7449 error_at (ctx
->location
, "enclosing %<target%>");
7451 else if (ctx
->defaultmap
[gdmk
]
7452 & (GOVD_MAP_0LEN_ARRAY
| GOVD_FIRSTPRIVATE
))
7453 nflags
|= ctx
->defaultmap
[gdmk
];
7456 gcc_assert (ctx
->defaultmap
[gdmk
] & GOVD_MAP
);
7457 nflags
|= ctx
->defaultmap
[gdmk
] & ~GOVD_MAP
;
7462 struct gimplify_omp_ctx
*octx
= ctx
->outer_context
;
7463 if ((ctx
->region_type
& ORT_ACC
) && octx
)
7465 /* Look in outer OpenACC contexts, to see if there's a
7466 data attribute for this variable. */
7467 omp_notice_variable (octx
, decl
, in_code
);
7469 for (; octx
; octx
= octx
->outer_context
)
7471 if (!(octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)))
7474 = splay_tree_lookup (octx
->variables
,
7475 (splay_tree_key
) decl
);
7478 if (octx
->region_type
== ORT_ACC_HOST_DATA
)
7479 error ("variable %qE declared in enclosing "
7480 "%<host_data%> region", DECL_NAME (decl
));
7482 if (octx
->region_type
== ORT_ACC_DATA
7483 && (n2
->value
& GOVD_MAP_0LEN_ARRAY
))
7484 nflags
|= GOVD_MAP_0LEN_ARRAY
;
/* If no outer data clause decided the mapping, check the type is
   mappable and fall back to an implicit map / OpenACC default.  */
7490 if ((nflags
& ~(GOVD_MAP_TO_ONLY
| GOVD_MAP_FROM_ONLY
7491 | GOVD_MAP_ALLOC_ONLY
)) == flags
)
7493 tree type
= TREE_TYPE (decl
);
7495 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
7496 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7497 type
= TREE_TYPE (type
);
7498 if (!lang_hooks
.types
.omp_mappable_type (type
))
7500 error ("%qD referenced in target region does not have "
7501 "a mappable type", decl
);
7502 nflags
|= GOVD_MAP
| GOVD_EXPLICIT
;
7506 if ((ctx
->region_type
& ORT_ACC
) != 0)
7507 nflags
= oacc_default_clause (ctx
, decl
, flags
);
7513 omp_add_variable (ctx
, decl
, nflags
);
7517 /* If nothing changed, there's nothing left to do. */
7518 if ((n
->value
& flags
) == flags
)
/* Non-target regions: no entry yet means compute the default
   data-sharing (worksharing-ish regions are handled separately).  */
7528 if (ctx
->region_type
== ORT_WORKSHARE
7529 || ctx
->region_type
== ORT_TASKGROUP
7530 || ctx
->region_type
== ORT_SIMD
7531 || ctx
->region_type
== ORT_ACC
7532 || (ctx
->region_type
& ORT_TARGET_DATA
) != 0)
7535 flags
= omp_default_clause (ctx
, decl
, in_code
, flags
);
7537 if ((flags
& GOVD_PRIVATE
)
7538 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
7539 flags
|= GOVD_PRIVATE_OUTER_REF
;
7541 omp_add_variable (ctx
, decl
, flags
);
7543 shared
= (flags
& GOVD_SHARED
) != 0;
7544 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
/* First real use of a previously-only-defaulted entry: also mark
   the size/pointer replacement decls as seen.  */
7548 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
7549 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
7550 && DECL_SIZE (decl
))
7552 if (TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7555 tree t
= DECL_VALUE_EXPR (decl
);
7556 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7557 t
= TREE_OPERAND (t
, 0);
7558 gcc_assert (DECL_P (t
));
7559 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7560 n2
->value
|= GOVD_SEEN
;
7562 else if (lang_hooks
.decls
.omp_privatize_by_reference (decl
)
7563 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
7564 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
7568 tree t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7569 gcc_assert (DECL_P (t
));
7570 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7572 omp_notice_variable (ctx
, t
, true);
7576 if (ctx
->region_type
& ORT_ACC
)
7577 /* For OpenACC, as remarked above, defer expansion. */
7580 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
7581 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7583 /* If nothing changed, there's nothing left to do. */
7584 if ((n
->value
& flags
) == flags
)
7590 /* If the variable is private in the current context, then we don't
7591 need to propagate anything to an outer context. */
7592 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
7594 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7595 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7597 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
7598 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7599 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
/* Otherwise, propagate the notice into the enclosing context.  */
7601 if (ctx
->outer_context
7602 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
/* NOTE(review): line-fragmented extraction of omp_is_private; numeric
   tokens are original line numbers, and original lines are missing
   (declaration of `n`, the DECL_NAME arguments of the error calls,
   NULL-check/branch structure, returns).  Tokens preserved verbatim;
   comments only added.  */
7607 /* Verify that DECL is private within CTX. If there's specific information
7608 to the contrary in the innermost scope, generate an error. */
7611 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
7615 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
/* An explicit SHARED entry for an iteration variable is an error in
   the innermost scope; repair it to PRIVATE after diagnosing.  */
7618 if (n
->value
& GOVD_SHARED
)
7620 if (ctx
== gimplify_omp_ctxp
)
7623 error ("iteration variable %qE is predetermined linear",
7626 error ("iteration variable %qE should be private",
7628 n
->value
= GOVD_PRIVATE
;
/* Explicit clauses in the innermost (or combined-parallel) scope
   must be a private flavor appropriate for a loop iterator.  */
7634 else if ((n
->value
& GOVD_EXPLICIT
) != 0
7635 && (ctx
== gimplify_omp_ctxp
7636 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7637 && gimplify_omp_ctxp
->outer_context
== ctx
)))
7639 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
7640 error ("iteration variable %qE should not be firstprivate",
7642 else if ((n
->value
& GOVD_REDUCTION
) != 0)
7643 error ("iteration variable %qE should not be reduction",
7645 else if (simd
!= 1 && (n
->value
& GOVD_LINEAR
) != 0)
7646 error ("iteration variable %qE should not be linear",
7649 return (ctx
== gimplify_omp_ctxp
7650 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7651 && gimplify_omp_ctxp
->outer_context
== ctx
));
/* No entry here: stop at region kinds that bind privately, else
   recurse into the enclosing context.  */
7654 if (ctx
->region_type
!= ORT_WORKSHARE
7655 && ctx
->region_type
!= ORT_TASKGROUP
7656 && ctx
->region_type
!= ORT_SIMD
7657 && ctx
->region_type
!= ORT_ACC
)
7659 else if (ctx
->outer_context
)
7660 return omp_is_private (ctx
->outer_context
, decl
, simd
);
/* NOTE(review): line-fragmented extraction of omp_check_private; numeric
   tokens are original line numbers, and original lines are missing
   (the `do`, declaration of `n`, several `return` statements, branch
   bodies).  Tokens preserved verbatim; comments only added.  */
7664 /* Return true if DECL is private within a parallel region
7665 that binds to the current construct's context or in parallel
7666 region's REDUCTION clause. */
7669 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
/* Walk outwards while the context is a worksharing-like region
   (see the `while` condition at the bottom).  */
7675 ctx
= ctx
->outer_context
;
7678 if (is_global_var (decl
))
7681 /* References might be private, but might be shared too,
7682 when checking for copyprivate, assume they might be
7683 private, otherwise assume they might be shared. */
7687 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7690 /* Treat C++ privatized non-static data members outside
7691 of the privatization the same. */
7692 if (omp_member_access_dummy_var (decl
))
7698 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
7700 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7701 && (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
/* An entry that is not SHARED means DECL is private here.  */
7706 if ((n
->value
& GOVD_LOCAL
) != 0
7707 && omp_member_access_dummy_var (decl
))
7709 return (n
->value
& GOVD_SHARED
) == 0;
7712 while (ctx
->region_type
== ORT_WORKSHARE
7713 || ctx
->region_type
== ORT_TASKGROUP
7714 || ctx
->region_type
== ORT_SIMD
7715 || ctx
->region_type
== ORT_ACC
);
7719 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7722 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
7726 /* If this node has been visited, unmark it and keep looking. */
7727 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
7730 if (IS_TYPE_OR_DECL_P (t
))
7735 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7736 lower all the depend clauses by populating corresponding depend
7737 array. Returns 0 if there are no such depend clauses, or
7738 2 if all depend clauses should be removed, 1 otherwise. */
7741 gimplify_omp_depend (tree
*list_p
, gimple_seq
*pre_p
)
7745 size_t n
[4] = { 0, 0, 0, 0 };
7747 tree counts
[4] = { NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
};
7748 tree last_iter
= NULL_TREE
, last_count
= NULL_TREE
;
7750 location_t first_loc
= UNKNOWN_LOCATION
;
7752 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7753 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
7755 switch (OMP_CLAUSE_DEPEND_KIND (c
))
7757 case OMP_CLAUSE_DEPEND_IN
:
7760 case OMP_CLAUSE_DEPEND_OUT
:
7761 case OMP_CLAUSE_DEPEND_INOUT
:
7764 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
7767 case OMP_CLAUSE_DEPEND_DEPOBJ
:
7770 case OMP_CLAUSE_DEPEND_SOURCE
:
7771 case OMP_CLAUSE_DEPEND_SINK
:
7776 tree t
= OMP_CLAUSE_DECL (c
);
7777 if (first_loc
== UNKNOWN_LOCATION
)
7778 first_loc
= OMP_CLAUSE_LOCATION (c
);
7779 if (TREE_CODE (t
) == TREE_LIST
7781 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
7783 if (TREE_PURPOSE (t
) != last_iter
)
7785 tree tcnt
= size_one_node
;
7786 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
7788 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
7789 is_gimple_val
, fb_rvalue
) == GS_ERROR
7790 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
7791 is_gimple_val
, fb_rvalue
) == GS_ERROR
7792 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
7793 is_gimple_val
, fb_rvalue
) == GS_ERROR
7794 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
7795 is_gimple_val
, fb_rvalue
)
7798 tree var
= TREE_VEC_ELT (it
, 0);
7799 tree begin
= TREE_VEC_ELT (it
, 1);
7800 tree end
= TREE_VEC_ELT (it
, 2);
7801 tree step
= TREE_VEC_ELT (it
, 3);
7802 tree orig_step
= TREE_VEC_ELT (it
, 4);
7803 tree type
= TREE_TYPE (var
);
7804 tree stype
= TREE_TYPE (step
);
7805 location_t loc
= DECL_SOURCE_LOCATION (var
);
7807 /* Compute count for this iterator as
7809 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7810 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7811 and compute product of those for the entire depend
7813 if (POINTER_TYPE_P (type
))
7814 endmbegin
= fold_build2_loc (loc
, POINTER_DIFF_EXPR
,
7817 endmbegin
= fold_build2_loc (loc
, MINUS_EXPR
, type
,
7819 tree stepm1
= fold_build2_loc (loc
, MINUS_EXPR
, stype
,
7821 build_int_cst (stype
, 1));
7822 tree stepp1
= fold_build2_loc (loc
, PLUS_EXPR
, stype
, step
,
7823 build_int_cst (stype
, 1));
7824 tree pos
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
7825 unshare_expr (endmbegin
),
7827 pos
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
7829 tree neg
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
7831 if (TYPE_UNSIGNED (stype
))
7833 neg
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, neg
);
7834 step
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, step
);
7836 neg
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
7839 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
7842 pos
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, pos
,
7843 build_int_cst (stype
, 0));
7844 cond
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
,
7846 neg
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, neg
,
7847 build_int_cst (stype
, 0));
7848 tree osteptype
= TREE_TYPE (orig_step
);
7849 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
7851 build_int_cst (osteptype
, 0));
7852 tree cnt
= fold_build3_loc (loc
, COND_EXPR
, stype
,
7854 cnt
= fold_convert_loc (loc
, sizetype
, cnt
);
7855 if (gimplify_expr (&cnt
, pre_p
, NULL
, is_gimple_val
,
7856 fb_rvalue
) == GS_ERROR
)
7858 tcnt
= size_binop_loc (loc
, MULT_EXPR
, tcnt
, cnt
);
7860 if (gimplify_expr (&tcnt
, pre_p
, NULL
, is_gimple_val
,
7861 fb_rvalue
) == GS_ERROR
)
7863 last_iter
= TREE_PURPOSE (t
);
7866 if (counts
[i
] == NULL_TREE
)
7867 counts
[i
] = last_count
;
7869 counts
[i
] = size_binop_loc (OMP_CLAUSE_LOCATION (c
),
7870 PLUS_EXPR
, counts
[i
], last_count
);
7875 for (i
= 0; i
< 4; i
++)
7881 tree total
= size_zero_node
;
7882 for (i
= 0; i
< 4; i
++)
7884 unused
[i
] = counts
[i
] == NULL_TREE
&& n
[i
] == 0;
7885 if (counts
[i
] == NULL_TREE
)
7886 counts
[i
] = size_zero_node
;
7888 counts
[i
] = size_binop (PLUS_EXPR
, counts
[i
], size_int (n
[i
]));
7889 if (gimplify_expr (&counts
[i
], pre_p
, NULL
, is_gimple_val
,
7890 fb_rvalue
) == GS_ERROR
)
7892 total
= size_binop (PLUS_EXPR
, total
, counts
[i
]);
7895 if (gimplify_expr (&total
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
7898 bool is_old
= unused
[1] && unused
[3];
7899 tree totalpx
= size_binop (PLUS_EXPR
, unshare_expr (total
),
7900 size_int (is_old
? 1 : 4));
7901 tree type
= build_array_type (ptr_type_node
, build_index_type (totalpx
));
7902 tree array
= create_tmp_var_raw (type
);
7903 TREE_ADDRESSABLE (array
) = 1;
7904 if (!poly_int_tree_p (totalpx
))
7906 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array
)))
7907 gimplify_type_sizes (TREE_TYPE (array
), pre_p
);
7908 if (gimplify_omp_ctxp
)
7910 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
7912 && (ctx
->region_type
== ORT_WORKSHARE
7913 || ctx
->region_type
== ORT_TASKGROUP
7914 || ctx
->region_type
== ORT_SIMD
7915 || ctx
->region_type
== ORT_ACC
))
7916 ctx
= ctx
->outer_context
;
7918 omp_add_variable (ctx
, array
, GOVD_LOCAL
| GOVD_SEEN
);
7920 gimplify_vla_decl (array
, pre_p
);
7923 gimple_add_tmp_var (array
);
7924 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
7929 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
7930 build_int_cst (ptr_type_node
, 0));
7931 gimplify_and_add (tem
, pre_p
);
7932 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
7935 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
7936 fold_convert (ptr_type_node
, total
));
7937 gimplify_and_add (tem
, pre_p
);
7938 for (i
= 1; i
< (is_old
? 2 : 4); i
++)
7940 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (i
+ !is_old
),
7941 NULL_TREE
, NULL_TREE
);
7942 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, counts
[i
- 1]);
7943 gimplify_and_add (tem
, pre_p
);
7950 for (i
= 0; i
< 4; i
++)
7952 if (i
&& (i
>= j
|| unused
[i
- 1]))
7954 cnts
[i
] = cnts
[i
- 1];
7957 cnts
[i
] = create_tmp_var (sizetype
);
7959 g
= gimple_build_assign (cnts
[i
], size_int (is_old
? 2 : 5));
7964 t
= size_binop (PLUS_EXPR
, counts
[0], size_int (2));
7966 t
= size_binop (PLUS_EXPR
, cnts
[i
- 1], counts
[i
- 1]);
7967 if (gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
7970 g
= gimple_build_assign (cnts
[i
], t
);
7972 gimple_seq_add_stmt (pre_p
, g
);
7975 last_iter
= NULL_TREE
;
7976 tree last_bind
= NULL_TREE
;
7977 tree
*last_body
= NULL
;
7978 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7979 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
7981 switch (OMP_CLAUSE_DEPEND_KIND (c
))
7983 case OMP_CLAUSE_DEPEND_IN
:
7986 case OMP_CLAUSE_DEPEND_OUT
:
7987 case OMP_CLAUSE_DEPEND_INOUT
:
7990 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
7993 case OMP_CLAUSE_DEPEND_DEPOBJ
:
7996 case OMP_CLAUSE_DEPEND_SOURCE
:
7997 case OMP_CLAUSE_DEPEND_SINK
:
8002 tree t
= OMP_CLAUSE_DECL (c
);
8003 if (TREE_CODE (t
) == TREE_LIST
8005 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8007 if (TREE_PURPOSE (t
) != last_iter
)
8010 gimplify_and_add (last_bind
, pre_p
);
8011 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
8012 last_bind
= build3 (BIND_EXPR
, void_type_node
,
8013 BLOCK_VARS (block
), NULL
, block
);
8014 TREE_SIDE_EFFECTS (last_bind
) = 1;
8015 SET_EXPR_LOCATION (last_bind
, OMP_CLAUSE_LOCATION (c
));
8016 tree
*p
= &BIND_EXPR_BODY (last_bind
);
8017 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8019 tree var
= TREE_VEC_ELT (it
, 0);
8020 tree begin
= TREE_VEC_ELT (it
, 1);
8021 tree end
= TREE_VEC_ELT (it
, 2);
8022 tree step
= TREE_VEC_ELT (it
, 3);
8023 tree orig_step
= TREE_VEC_ELT (it
, 4);
8024 tree type
= TREE_TYPE (var
);
8025 location_t loc
= DECL_SOURCE_LOCATION (var
);
8033 if (orig_step > 0) {
8034 if (var < end) goto beg_label;
8036 if (var > end) goto beg_label;
8038 for each iterator, with inner iterators added to
8040 tree beg_label
= create_artificial_label (loc
);
8041 tree cond_label
= NULL_TREE
;
8042 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8044 append_to_statement_list_force (tem
, p
);
8045 tem
= build_and_jump (&cond_label
);
8046 append_to_statement_list_force (tem
, p
);
8047 tem
= build1 (LABEL_EXPR
, void_type_node
, beg_label
);
8048 append_to_statement_list (tem
, p
);
8049 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
8050 NULL_TREE
, NULL_TREE
);
8051 TREE_SIDE_EFFECTS (bind
) = 1;
8052 SET_EXPR_LOCATION (bind
, loc
);
8053 append_to_statement_list_force (bind
, p
);
8054 if (POINTER_TYPE_P (type
))
8055 tem
= build2_loc (loc
, POINTER_PLUS_EXPR
, type
,
8056 var
, fold_convert_loc (loc
, sizetype
,
8059 tem
= build2_loc (loc
, PLUS_EXPR
, type
, var
, step
);
8060 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8062 append_to_statement_list_force (tem
, p
);
8063 tem
= build1 (LABEL_EXPR
, void_type_node
, cond_label
);
8064 append_to_statement_list (tem
, p
);
8065 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8069 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8070 cond
, build_and_jump (&beg_label
),
8072 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8075 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8076 cond
, build_and_jump (&beg_label
),
8078 tree osteptype
= TREE_TYPE (orig_step
);
8079 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8081 build_int_cst (osteptype
, 0));
8082 tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8084 append_to_statement_list_force (tem
, p
);
8085 p
= &BIND_EXPR_BODY (bind
);
8089 last_iter
= TREE_PURPOSE (t
);
8090 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
8092 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
),
8094 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
8096 if (error_operand_p (TREE_VALUE (t
)))
8098 TREE_VALUE (t
) = build_fold_addr_expr (TREE_VALUE (t
));
8099 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8100 NULL_TREE
, NULL_TREE
);
8101 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8102 void_type_node
, r
, TREE_VALUE (t
));
8103 append_to_statement_list_force (tem
, last_body
);
8104 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8105 void_type_node
, cnts
[i
],
8106 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)));
8107 append_to_statement_list_force (tem
, last_body
);
8108 TREE_VALUE (t
) = null_pointer_node
;
8114 gimplify_and_add (last_bind
, pre_p
);
8115 last_bind
= NULL_TREE
;
8117 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8119 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8120 NULL
, is_gimple_val
, fb_rvalue
);
8121 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8123 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8125 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8126 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8127 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8129 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8130 NULL_TREE
, NULL_TREE
);
8131 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, OMP_CLAUSE_DECL (c
));
8132 gimplify_and_add (tem
, pre_p
);
8133 g
= gimple_build_assign (cnts
[i
], size_binop (PLUS_EXPR
, cnts
[i
],
8135 gimple_seq_add_stmt (pre_p
, g
);
8139 gimplify_and_add (last_bind
, pre_p
);
8140 tree cond
= boolean_false_node
;
8144 cond
= build2_loc (first_loc
, NE_EXPR
, boolean_type_node
, cnts
[0],
8145 size_binop_loc (first_loc
, PLUS_EXPR
, counts
[0],
8148 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8149 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8151 size_binop_loc (first_loc
, PLUS_EXPR
,
8157 tree prev
= size_int (5);
8158 for (i
= 0; i
< 4; i
++)
8162 prev
= size_binop_loc (first_loc
, PLUS_EXPR
, counts
[i
], prev
);
8163 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8164 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8165 cnts
[i
], unshare_expr (prev
)));
8168 tem
= build3_loc (first_loc
, COND_EXPR
, void_type_node
, cond
,
8169 build_call_expr_loc (first_loc
,
8170 builtin_decl_explicit (BUILT_IN_TRAP
),
8172 gimplify_and_add (tem
, pre_p
);
8173 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
8174 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
8175 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
8176 OMP_CLAUSE_CHAIN (c
) = *list_p
;
8181 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8182 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8183 the struct node to insert the new mapping after (when the struct node is
8184 initially created). PREV_NODE is the first of two or three mappings for a
8185 pointer, and is either:
8186 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8188 - not the node before C. This is true when we have a reference-to-pointer
8189 type (with a mapping for the reference and for the pointer), or for
8190 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8191 If SCP is non-null, the new node is inserted before *SCP.
8192 if SCP is null, the new node is inserted before PREV_NODE.
8194 - PREV_NODE, if SCP is non-null.
8195 - The newly-created ALLOC or RELEASE node, if SCP is null.
8196 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8197 reference to a pointer. */
/* NOTE(review): this listing is an extraction with several original lines
   elided (e.g. the return type/brace lines and the 'else' arms around
   original lines 8217 and 8238-8248); the comments below describe only the
   statements that are visible here.  */
8200 insert_struct_comp_map (enum tree_code code
, tree c
, tree struct_node
,
8201 tree prev_node
, tree
*scp
)
/* Exit-data-like constructs release the slot; everything else allocates.  */
8203 enum gomp_map_kind mkind
8204 = (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
)
8205 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
/* Build the new ALLOC/RELEASE map clause C2, duplicating C's decl.  */
8207 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
8208 tree cl
= scp
? prev_node
: c2
;
8209 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8210 OMP_CLAUSE_DECL (c2
) = unshare_expr (OMP_CLAUSE_DECL (c
));
/* Chain C2 in before *SCP when SCP is given, else before PREV_NODE.  */
8211 OMP_CLAUSE_CHAIN (c2
) = scp
? *scp
: prev_node
;
/* If a GOMP_MAP_TO_PSET node sits between PREV_NODE and C (the Fortran
   array-descriptor case), C2 inherits that node's size.  */
8212 if (OMP_CLAUSE_CHAIN (prev_node
) != c
8213 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node
)) == OMP_CLAUSE_MAP
8214 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8215 == GOMP_MAP_TO_PSET
))
8216 OMP_CLAUSE_SIZE (c2
) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node
));
/* Otherwise (elided 'else' in this listing) a plain pointer-sized slot.  */
8218 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (ptr_type_node
);
/* Hook the new node directly after the GOMP_MAP_STRUCT node.  */
8220 OMP_CLAUSE_CHAIN (struct_node
) = c2
;
8222 /* We might need to create an additional mapping if we have a reference to a
8223 pointer (in C++). Don't do this if we have something other than a
8224 GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET. */
8225 if (OMP_CLAUSE_CHAIN (prev_node
) != c
8226 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node
)) == OMP_CLAUSE_MAP
8227 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8228 == GOMP_MAP_ALWAYS_POINTER
)
8229 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8230 == GOMP_MAP_ATTACH_DETACH
)))
/* Second ALLOC/RELEASE node C3 for the intermediate pointer mapping C4,
   chained between C2 and PREV_NODE.  */
8232 tree c4
= OMP_CLAUSE_CHAIN (prev_node
);
8233 tree c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
8234 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
8235 OMP_CLAUSE_DECL (c3
) = unshare_expr (OMP_CLAUSE_DECL (c4
));
8236 OMP_CLAUSE_SIZE (c3
) = TYPE_SIZE_UNIT (ptr_type_node
);
8237 OMP_CLAUSE_CHAIN (c3
) = prev_node
;
8239 OMP_CLAUSE_CHAIN (c2
) = c3
;
8250 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8251 and set *BITPOSP and *POFFSETP to the bit offset of the access.
8252 If BASE_REF is non-NULL and the containing object is a reference, set
8253 *BASE_REF to that reference before dereferencing the object.
8254 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8255 has array type, else return NULL. */
/* NOTE(review): this listing elides several original lines (the return type
   and braces, the declarations of 'offset' and 'mode' used at line 8294, and
   the early-return statements); the annotations below cover only the visible
   statements.  */
8258 extract_base_bit_offset (tree base
, tree
*base_ref
, poly_int64
*bitposp
,
8259 poly_offset_int
*poffsetp
)
8262 poly_int64 bitsize
, bitpos
;
8264 int unsignedp
, reversep
, volatilep
= 0;
8265 poly_offset_int poffset
;
/* (Visible only under a BASE_REF guard in the full source.)  */
8269 *base_ref
= NULL_TREE
;
/* Peel trailing ARRAY_REFs, then at most one INDIRECT_REF, off BASE.  */
8271 while (TREE_CODE (base
) == ARRAY_REF
)
8272 base
= TREE_OPERAND (base
, 0);
8274 if (TREE_CODE (base
) == INDIRECT_REF
)
8275 base
= TREE_OPERAND (base
, 0);
/* BASE_REF == NULL path: insist the stripped object is a COMPONENT_REF of
   array type, or a dereferenced COMPONENT_REF; otherwise bail (the return
   is elided in this listing).  */
8279 if (TREE_CODE (base
) == ARRAY_REF
)
8281 while (TREE_CODE (base
) == ARRAY_REF
)
8282 base
= TREE_OPERAND (base
, 0);
8283 if (TREE_CODE (base
) != COMPONENT_REF
8284 || TREE_CODE (TREE_TYPE (base
)) != ARRAY_TYPE
)
8287 else if (TREE_CODE (base
) == INDIRECT_REF
8288 && TREE_CODE (TREE_OPERAND (base
, 0)) == COMPONENT_REF
8289 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
8291 base
= TREE_OPERAND (base
, 0);
/* Resolve the innermost containing object plus bit position/size.
   'offset' and 'mode' are declared on lines elided from this listing.  */
8294 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
, &mode
,
8295 &unsignedp
, &reversep
, &volatilep
);
8297 tree orig_base
= base
;
/* If BASE is a dereference of a REFERENCE_TYPE decl (possibly spelled as a
   zero-offset MEM_REF), step back to the reference decl itself.  */
8299 if ((TREE_CODE (base
) == INDIRECT_REF
8300 || (TREE_CODE (base
) == MEM_REF
8301 && integer_zerop (TREE_OPERAND (base
, 1))))
8302 && DECL_P (TREE_OPERAND (base
, 0))
8303 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0))) == REFERENCE_TYPE
)
8304 base
= TREE_OPERAND (base
, 0);
/* Any byte offset from get_inner_reference must be a known poly-int here.  */
8306 gcc_assert (offset
== NULL_TREE
|| poly_int_tree_p (offset
));
8309 poffset
= wi::to_poly_offset (offset
);
/* Fold whole bytes of the bit position into the byte offset.  */
8313 if (maybe_ne (bitpos
, 0))
8314 poffset
+= bits_to_bytes_round_down (bitpos
);
8317 *poffsetp
= poffset
;
8319 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8320 if (base_ref
&& orig_base
!= base
)
8321 *base_ref
= orig_base
;
8326 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8327 and previous omp contexts. */
8330 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
8331 enum omp_region_type region_type
,
8332 enum tree_code code
)
8334 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
8336 hash_map
<tree
, tree
> *struct_map_to_clause
= NULL
;
8337 hash_set
<tree
> *struct_deref_set
= NULL
;
8338 tree
*prev_list_p
= NULL
, *orig_list_p
= list_p
;
8339 int handled_depend_iterators
= -1;
8342 ctx
= new_omp_context (region_type
);
8344 outer_ctx
= ctx
->outer_context
;
8345 if (code
== OMP_TARGET
)
8347 if (!lang_GNU_Fortran ())
8348 ctx
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
8349 ctx
->defaultmap
[GDMK_SCALAR
] = GOVD_FIRSTPRIVATE
;
8351 if (!lang_GNU_Fortran ())
8355 case OMP_TARGET_DATA
:
8356 case OMP_TARGET_ENTER_DATA
:
8357 case OMP_TARGET_EXIT_DATA
:
8359 case OACC_HOST_DATA
:
8362 ctx
->target_firstprivatize_array_bases
= true;
8367 while ((c
= *list_p
) != NULL
)
8369 bool remove
= false;
8370 bool notice_outer
= true;
8371 const char *check_non_private
= NULL
;
8375 switch (OMP_CLAUSE_CODE (c
))
8377 case OMP_CLAUSE_PRIVATE
:
8378 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
8379 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
8381 flags
|= GOVD_PRIVATE_OUTER_REF
;
8382 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
8385 notice_outer
= false;
8387 case OMP_CLAUSE_SHARED
:
8388 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
8390 case OMP_CLAUSE_FIRSTPRIVATE
:
8391 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8392 check_non_private
= "firstprivate";
8394 case OMP_CLAUSE_LASTPRIVATE
:
8395 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
8398 case OMP_DISTRIBUTE
:
8399 error_at (OMP_CLAUSE_LOCATION (c
),
8400 "conditional %<lastprivate%> clause on "
8401 "%qs construct", "distribute");
8402 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8405 error_at (OMP_CLAUSE_LOCATION (c
),
8406 "conditional %<lastprivate%> clause on "
8407 "%qs construct", "taskloop");
8408 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8413 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
8414 if (code
!= OMP_LOOP
)
8415 check_non_private
= "lastprivate";
8416 decl
= OMP_CLAUSE_DECL (c
);
8417 if (error_operand_p (decl
))
8419 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
8420 && !lang_hooks
.decls
.omp_scalar_p (decl
))
8422 error_at (OMP_CLAUSE_LOCATION (c
),
8423 "non-scalar variable %qD in conditional "
8424 "%<lastprivate%> clause", decl
);
8425 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8427 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
8428 flags
|= GOVD_LASTPRIVATE_CONDITIONAL
;
8430 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
8431 || ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
8432 == ORT_COMBINED_TEAMS
))
8433 && splay_tree_lookup (outer_ctx
->variables
,
8434 (splay_tree_key
) decl
) == NULL
)
8436 omp_add_variable (outer_ctx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
8437 if (outer_ctx
->outer_context
)
8438 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8441 && (outer_ctx
->region_type
& ORT_TASK
) != 0
8442 && outer_ctx
->combined_loop
8443 && splay_tree_lookup (outer_ctx
->variables
,
8444 (splay_tree_key
) decl
) == NULL
)
8446 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8447 if (outer_ctx
->outer_context
)
8448 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8451 && (outer_ctx
->region_type
== ORT_WORKSHARE
8452 || outer_ctx
->region_type
== ORT_ACC
)
8453 && outer_ctx
->combined_loop
8454 && splay_tree_lookup (outer_ctx
->variables
,
8455 (splay_tree_key
) decl
) == NULL
8456 && !omp_check_private (outer_ctx
, decl
, false))
8458 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8459 if (outer_ctx
->outer_context
8460 && (outer_ctx
->outer_context
->region_type
8461 == ORT_COMBINED_PARALLEL
)
8462 && splay_tree_lookup (outer_ctx
->outer_context
->variables
,
8463 (splay_tree_key
) decl
) == NULL
)
8465 struct gimplify_omp_ctx
*octx
= outer_ctx
->outer_context
;
8466 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
8467 if (octx
->outer_context
)
8469 octx
= octx
->outer_context
;
8470 if (octx
->region_type
== ORT_WORKSHARE
8471 && octx
->combined_loop
8472 && splay_tree_lookup (octx
->variables
,
8473 (splay_tree_key
) decl
) == NULL
8474 && !omp_check_private (octx
, decl
, false))
8476 omp_add_variable (octx
, decl
,
8477 GOVD_LASTPRIVATE
| GOVD_SEEN
);
8478 octx
= octx
->outer_context
;
8480 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
8481 == ORT_COMBINED_TEAMS
)
8482 && (splay_tree_lookup (octx
->variables
,
8483 (splay_tree_key
) decl
)
8486 omp_add_variable (octx
, decl
,
8487 GOVD_SHARED
| GOVD_SEEN
);
8488 octx
= octx
->outer_context
;
8492 omp_notice_variable (octx
, decl
, true);
8495 else if (outer_ctx
->outer_context
)
8496 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8499 case OMP_CLAUSE_REDUCTION
:
8500 if (OMP_CLAUSE_REDUCTION_TASK (c
))
8502 if (region_type
== ORT_WORKSHARE
)
8505 nowait
= omp_find_clause (*list_p
,
8506 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8508 && (outer_ctx
== NULL
8509 || outer_ctx
->region_type
!= ORT_COMBINED_PARALLEL
))
8511 error_at (OMP_CLAUSE_LOCATION (c
),
8512 "%<task%> reduction modifier on a construct "
8513 "with a %<nowait%> clause");
8514 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
8517 else if ((region_type
& ORT_PARALLEL
) != ORT_PARALLEL
)
8519 error_at (OMP_CLAUSE_LOCATION (c
),
8520 "invalid %<task%> reduction modifier on construct "
8521 "other than %<parallel%>, %<for%> or %<sections%>");
8522 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
8525 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
8529 error_at (OMP_CLAUSE_LOCATION (c
),
8530 "%<inscan%> %<reduction%> clause on "
8531 "%qs construct", "sections");
8532 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8535 error_at (OMP_CLAUSE_LOCATION (c
),
8536 "%<inscan%> %<reduction%> clause on "
8537 "%qs construct", "parallel");
8538 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8541 error_at (OMP_CLAUSE_LOCATION (c
),
8542 "%<inscan%> %<reduction%> clause on "
8543 "%qs construct", "teams");
8544 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8547 error_at (OMP_CLAUSE_LOCATION (c
),
8548 "%<inscan%> %<reduction%> clause on "
8549 "%qs construct", "taskloop");
8550 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8556 case OMP_CLAUSE_IN_REDUCTION
:
8557 case OMP_CLAUSE_TASK_REDUCTION
:
8558 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
8559 /* OpenACC permits reductions on private variables. */
8560 if (!(region_type
& ORT_ACC
)
8561 /* taskgroup is actually not a worksharing region. */
8562 && code
!= OMP_TASKGROUP
)
8563 check_non_private
= omp_clause_code_name
[OMP_CLAUSE_CODE (c
)];
8564 decl
= OMP_CLAUSE_DECL (c
);
8565 if (TREE_CODE (decl
) == MEM_REF
)
8567 tree type
= TREE_TYPE (decl
);
8568 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
8569 NULL
, is_gimple_val
, fb_rvalue
, false)
8575 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8578 omp_firstprivatize_variable (ctx
, v
);
8579 omp_notice_variable (ctx
, v
, true);
8581 decl
= TREE_OPERAND (decl
, 0);
8582 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
8584 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
8585 NULL
, is_gimple_val
, fb_rvalue
, false)
8591 v
= TREE_OPERAND (decl
, 1);
8594 omp_firstprivatize_variable (ctx
, v
);
8595 omp_notice_variable (ctx
, v
, true);
8597 decl
= TREE_OPERAND (decl
, 0);
8599 if (TREE_CODE (decl
) == ADDR_EXPR
8600 || TREE_CODE (decl
) == INDIRECT_REF
)
8601 decl
= TREE_OPERAND (decl
, 0);
8604 case OMP_CLAUSE_LINEAR
:
8605 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
8606 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8613 if (code
== OMP_SIMD
8614 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8616 struct gimplify_omp_ctx
*octx
= outer_ctx
;
8618 && octx
->region_type
== ORT_WORKSHARE
8619 && octx
->combined_loop
8620 && !octx
->distribute
)
8622 if (octx
->outer_context
8623 && (octx
->outer_context
->region_type
8624 == ORT_COMBINED_PARALLEL
))
8625 octx
= octx
->outer_context
->outer_context
;
8627 octx
= octx
->outer_context
;
8630 && octx
->region_type
== ORT_WORKSHARE
8631 && octx
->combined_loop
8632 && octx
->distribute
)
8634 error_at (OMP_CLAUSE_LOCATION (c
),
8635 "%<linear%> clause for variable other than "
8636 "loop iterator specified on construct "
8637 "combined with %<distribute%>");
8642 /* For combined #pragma omp parallel for simd, need to put
8643 lastprivate and perhaps firstprivate too on the
8644 parallel. Similarly for #pragma omp for simd. */
8645 struct gimplify_omp_ctx
*octx
= outer_ctx
;
8649 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8650 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8652 decl
= OMP_CLAUSE_DECL (c
);
8653 if (error_operand_p (decl
))
8659 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8660 flags
|= GOVD_FIRSTPRIVATE
;
8661 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8662 flags
|= GOVD_LASTPRIVATE
;
8664 && octx
->region_type
== ORT_WORKSHARE
8665 && octx
->combined_loop
)
8667 if (octx
->outer_context
8668 && (octx
->outer_context
->region_type
8669 == ORT_COMBINED_PARALLEL
))
8670 octx
= octx
->outer_context
;
8671 else if (omp_check_private (octx
, decl
, false))
8675 && (octx
->region_type
& ORT_TASK
) != 0
8676 && octx
->combined_loop
)
8679 && octx
->region_type
== ORT_COMBINED_PARALLEL
8680 && ctx
->region_type
== ORT_WORKSHARE
8681 && octx
== outer_ctx
)
8682 flags
= GOVD_SEEN
| GOVD_SHARED
;
8684 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
8685 == ORT_COMBINED_TEAMS
))
8686 flags
= GOVD_SEEN
| GOVD_SHARED
;
8688 && octx
->region_type
== ORT_COMBINED_TARGET
)
8690 flags
&= ~GOVD_LASTPRIVATE
;
8691 if (flags
== GOVD_SEEN
)
8697 = splay_tree_lookup (octx
->variables
,
8698 (splay_tree_key
) decl
);
8699 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
8704 omp_add_variable (octx
, decl
, flags
);
8705 if (octx
->outer_context
== NULL
)
8707 octx
= octx
->outer_context
;
8712 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8713 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
8714 omp_notice_variable (octx
, decl
, true);
8716 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
8717 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8718 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8720 notice_outer
= false;
8721 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
8725 case OMP_CLAUSE_MAP
:
8726 decl
= OMP_CLAUSE_DECL (c
);
8727 if (error_operand_p (decl
))
8734 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
8737 case OMP_TARGET_DATA
:
8738 case OMP_TARGET_ENTER_DATA
:
8739 case OMP_TARGET_EXIT_DATA
:
8740 case OACC_HOST_DATA
:
8741 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8742 || (OMP_CLAUSE_MAP_KIND (c
)
8743 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
8744 /* For target {,enter ,exit }data only the array slice is
8745 mapped, but not the pointer to it. */
8748 case OACC_ENTER_DATA
:
8749 case OACC_EXIT_DATA
:
8750 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
8751 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
8752 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8753 || (OMP_CLAUSE_MAP_KIND (c
)
8754 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
8760 /* For Fortran, not only the pointer to the data is mapped but also
8761 the address of the pointer, the array descriptor etc.; for
8762 'exit data' - and in particular for 'delete:' - having an 'alloc:'
8763 does not make sense. Likewise, for 'update' only transferring the
8764 data itself is needed as the rest has been handled in previous
8765 directives. However, for 'exit data', the array descriptor needs
8766 to be delete; hence, we turn the MAP_TO_PSET into a MAP_DELETE. */
8767 if (code
== OMP_TARGET_EXIT_DATA
8768 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
)
8769 OMP_CLAUSE_SET_MAP_KIND (c
, OMP_CLAUSE_MAP_KIND (*prev_list_p
)
8771 ? GOMP_MAP_DELETE
: GOMP_MAP_RELEASE
);
8772 else if ((code
== OMP_TARGET_EXIT_DATA
|| code
== OMP_TARGET_UPDATE
)
8773 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
8774 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
))
8779 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
8781 struct gimplify_omp_ctx
*octx
;
8782 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
8784 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
8787 = splay_tree_lookup (octx
->variables
,
8788 (splay_tree_key
) decl
);
8790 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
8791 "declared in enclosing %<host_data%> region",
8795 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
8796 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
8797 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8798 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
8799 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8804 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8805 || (OMP_CLAUSE_MAP_KIND (c
)
8806 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
8807 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
8810 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
8812 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
8813 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
8818 if (TREE_CODE (d
) == ARRAY_REF
)
8820 while (TREE_CODE (d
) == ARRAY_REF
)
8821 d
= TREE_OPERAND (d
, 0);
8822 if (TREE_CODE (d
) == COMPONENT_REF
8823 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
8826 pd
= &OMP_CLAUSE_DECL (c
);
8828 && TREE_CODE (decl
) == INDIRECT_REF
8829 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
8830 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
8833 pd
= &TREE_OPERAND (decl
, 0);
8834 decl
= TREE_OPERAND (decl
, 0);
8836 bool indir_p
= false;
8837 tree orig_decl
= decl
;
8838 tree decl_ref
= NULL_TREE
;
8839 if ((region_type
& ORT_ACC
) != 0
8840 && TREE_CODE (*pd
) == COMPONENT_REF
8841 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
8842 && code
!= OACC_UPDATE
)
8844 while (TREE_CODE (decl
) == COMPONENT_REF
)
8846 decl
= TREE_OPERAND (decl
, 0);
8847 if ((TREE_CODE (decl
) == MEM_REF
8848 && integer_zerop (TREE_OPERAND (decl
, 1)))
8849 || INDIRECT_REF_P (decl
))
8852 decl
= TREE_OPERAND (decl
, 0);
8854 if (TREE_CODE (decl
) == INDIRECT_REF
8855 && DECL_P (TREE_OPERAND (decl
, 0))
8856 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
8860 decl
= TREE_OPERAND (decl
, 0);
8864 else if (TREE_CODE (decl
) == COMPONENT_REF
)
8866 while (TREE_CODE (decl
) == COMPONENT_REF
)
8867 decl
= TREE_OPERAND (decl
, 0);
8868 if (TREE_CODE (decl
) == INDIRECT_REF
8869 && DECL_P (TREE_OPERAND (decl
, 0))
8870 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
8872 decl
= TREE_OPERAND (decl
, 0);
8874 if (decl
!= orig_decl
&& DECL_P (decl
) && indir_p
)
8876 gomp_map_kind k
= (code
== OACC_EXIT_DATA
) ? GOMP_MAP_DETACH
8878 /* We have a dereference of a struct member. Make this an
8879 attach/detach operation, and ensure the base pointer is
8880 mapped as a FIRSTPRIVATE_POINTER. */
8881 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
8882 flags
= GOVD_MAP
| GOVD_SEEN
| GOVD_EXPLICIT
;
8883 tree next_clause
= OMP_CLAUSE_CHAIN (c
);
8884 if (k
== GOMP_MAP_ATTACH
8885 && code
!= OACC_ENTER_DATA
8887 || (OMP_CLAUSE_CODE (next_clause
) != OMP_CLAUSE_MAP
)
8888 || (OMP_CLAUSE_MAP_KIND (next_clause
)
8889 != GOMP_MAP_POINTER
)
8890 || OMP_CLAUSE_DECL (next_clause
) != decl
)
8891 && (!struct_deref_set
8892 || !struct_deref_set
->contains (decl
)))
8894 if (!struct_deref_set
)
8895 struct_deref_set
= new hash_set
<tree
> ();
8896 /* As well as the attach, we also need a
8897 FIRSTPRIVATE_POINTER clause to properly map the
8898 pointer to the struct base. */
8899 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8901 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ALLOC
);
8902 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2
)
8905 = build_int_cst (build_pointer_type (char_type_node
),
8907 OMP_CLAUSE_DECL (c2
)
8908 = build2 (MEM_REF
, char_type_node
,
8909 decl_ref
? decl_ref
: decl
, charptr_zero
);
8910 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
8911 tree c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8913 OMP_CLAUSE_SET_MAP_KIND (c3
,
8914 GOMP_MAP_FIRSTPRIVATE_POINTER
);
8915 OMP_CLAUSE_DECL (c3
) = decl
;
8916 OMP_CLAUSE_SIZE (c3
) = size_zero_node
;
8917 tree mapgrp
= *prev_list_p
;
8919 OMP_CLAUSE_CHAIN (c3
) = mapgrp
;
8920 OMP_CLAUSE_CHAIN (c2
) = c3
;
8922 struct_deref_set
->add (decl
);
8926 /* An "attach/detach" operation on an update directive should
8927 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
8928 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
8929 depends on the previous mapping. */
8930 if (code
== OACC_UPDATE
8931 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
8932 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_ALWAYS_POINTER
);
8933 if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
)
8940 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
8941 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
8942 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
8943 && code
!= OACC_UPDATE
)
8945 if (error_operand_p (decl
))
8951 tree stype
= TREE_TYPE (decl
);
8952 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
8953 stype
= TREE_TYPE (stype
);
8954 if (TYPE_SIZE_UNIT (stype
) == NULL
8955 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
8957 error_at (OMP_CLAUSE_LOCATION (c
),
8958 "mapping field %qE of variable length "
8959 "structure", OMP_CLAUSE_DECL (c
));
8964 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
8965 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
8967 /* Error recovery. */
8968 if (prev_list_p
== NULL
)
8973 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
8975 tree ch
= OMP_CLAUSE_CHAIN (*prev_list_p
);
8976 if (ch
== NULL_TREE
|| OMP_CLAUSE_CHAIN (ch
) != c
)
8984 poly_offset_int offset1
;
8989 = extract_base_bit_offset (OMP_CLAUSE_DECL (c
), &base_ref
,
8990 &bitpos1
, &offset1
);
8992 gcc_assert (base
== decl
);
8995 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
8996 bool ptr
= (OMP_CLAUSE_MAP_KIND (c
)
8997 == GOMP_MAP_ALWAYS_POINTER
);
8998 bool attach_detach
= (OMP_CLAUSE_MAP_KIND (c
)
8999 == GOMP_MAP_ATTACH_DETACH
);
9000 bool attach
= OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
9001 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
;
9002 bool has_attachments
= false;
9003 /* For OpenACC, pointers in structs should trigger an
9005 if (attach_detach
&& (region_type
& ORT_ACC
) != 0)
9007 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9008 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9009 have detected a case that needs a GOMP_MAP_STRUCT
9012 = (code
== OACC_EXIT_DATA
) ? GOMP_MAP_DETACH
9014 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
9015 has_attachments
= true;
9017 if (n
== NULL
|| (n
->value
& GOVD_MAP
) == 0)
9019 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9021 gomp_map_kind k
= attach
? GOMP_MAP_FORCE_PRESENT
9024 OMP_CLAUSE_SET_MAP_KIND (l
, k
);
9026 OMP_CLAUSE_DECL (l
) = unshare_expr (base_ref
);
9028 OMP_CLAUSE_DECL (l
) = decl
;
9032 : DECL_P (OMP_CLAUSE_DECL (l
))
9033 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l
))
9034 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l
))));
9035 if (struct_map_to_clause
== NULL
)
9036 struct_map_to_clause
= new hash_map
<tree
, tree
>;
9037 struct_map_to_clause
->put (decl
, l
);
9038 if (ptr
|| attach_detach
)
9040 insert_struct_comp_map (code
, c
, l
, *prev_list_p
,
9047 OMP_CLAUSE_CHAIN (l
) = c
;
9049 list_p
= &OMP_CLAUSE_CHAIN (l
);
9051 if (base_ref
&& code
== OMP_TARGET
)
9053 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9055 enum gomp_map_kind mkind
9056 = GOMP_MAP_FIRSTPRIVATE_REFERENCE
;
9057 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
9058 OMP_CLAUSE_DECL (c2
) = decl
;
9059 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
9060 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
9061 OMP_CLAUSE_CHAIN (l
) = c2
;
9063 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
9064 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
9068 if (has_attachments
)
9069 flags
|= GOVD_MAP_HAS_ATTACHMENTS
;
9072 else if (struct_map_to_clause
)
9074 tree
*osc
= struct_map_to_clause
->get (decl
);
9075 tree
*sc
= NULL
, *scp
= NULL
;
9076 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
9079 n
->value
|= GOVD_SEEN
;
9080 sc
= &OMP_CLAUSE_CHAIN (*osc
);
9082 && (OMP_CLAUSE_MAP_KIND (*sc
)
9083 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9084 sc
= &OMP_CLAUSE_CHAIN (*sc
);
9085 /* Here "prev_list_p" is the end of the inserted
9086 alloc/release nodes after the struct node, OSC. */
9087 for (; *sc
!= c
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
9088 if ((ptr
|| attach_detach
) && sc
== prev_list_p
)
9090 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9092 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9094 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9099 tree sc_decl
= OMP_CLAUSE_DECL (*sc
);
9100 poly_offset_int offsetn
;
9103 = extract_base_bit_offset (sc_decl
, NULL
,
9104 &bitposn
, &offsetn
);
9109 tree d1
= OMP_CLAUSE_DECL (*sc
);
9110 tree d2
= OMP_CLAUSE_DECL (c
);
9111 while (TREE_CODE (d1
) == ARRAY_REF
)
9112 d1
= TREE_OPERAND (d1
, 0);
9113 while (TREE_CODE (d2
) == ARRAY_REF
)
9114 d2
= TREE_OPERAND (d2
, 0);
9115 if (TREE_CODE (d1
) == INDIRECT_REF
)
9116 d1
= TREE_OPERAND (d1
, 0);
9117 if (TREE_CODE (d2
) == INDIRECT_REF
)
9118 d2
= TREE_OPERAND (d2
, 0);
9119 while (TREE_CODE (d1
) == COMPONENT_REF
)
9120 if (TREE_CODE (d2
) == COMPONENT_REF
9121 && TREE_OPERAND (d1
, 1)
9122 == TREE_OPERAND (d2
, 1))
9124 d1
= TREE_OPERAND (d1
, 0);
9125 d2
= TREE_OPERAND (d2
, 0);
9131 error_at (OMP_CLAUSE_LOCATION (c
),
9132 "%qE appears more than once in map "
9133 "clauses", OMP_CLAUSE_DECL (c
));
9137 if (maybe_lt (offset1
, offsetn
)
9138 || (known_eq (offset1
, offsetn
)
9139 && maybe_lt (bitpos1
, bitposn
)))
9141 if (ptr
|| attach_detach
)
9150 OMP_CLAUSE_SIZE (*osc
)
9151 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
),
9153 if (ptr
|| attach_detach
)
9155 tree cl
= insert_struct_comp_map (code
, c
, NULL
,
9157 if (sc
== prev_list_p
)
9164 *prev_list_p
= OMP_CLAUSE_CHAIN (c
);
9165 list_p
= prev_list_p
;
9167 OMP_CLAUSE_CHAIN (c
) = *sc
;
9174 *list_p
= OMP_CLAUSE_CHAIN (c
);
9175 OMP_CLAUSE_CHAIN (c
) = *sc
;
9182 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_POINTER
9183 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH_DETACH
9184 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
9185 && OMP_CLAUSE_CHAIN (c
)
9186 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
)) == OMP_CLAUSE_MAP
9187 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
9188 == GOMP_MAP_ALWAYS_POINTER
)
9189 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
9190 == GOMP_MAP_ATTACH_DETACH
)
9191 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
9192 == GOMP_MAP_TO_PSET
)))
9193 prev_list_p
= list_p
;
9197 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
9198 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
9199 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
9200 flags
|= GOVD_MAP_ALWAYS_TO
;
9203 case OMP_CLAUSE_DEPEND
:
9204 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
9206 tree deps
= OMP_CLAUSE_DECL (c
);
9207 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
9209 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
9210 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
9211 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
9212 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
9213 deps
= TREE_CHAIN (deps
);
9217 else if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
9219 if (handled_depend_iterators
== -1)
9220 handled_depend_iterators
= gimplify_omp_depend (list_p
, pre_p
);
9221 if (handled_depend_iterators
)
9223 if (handled_depend_iterators
== 2)
9227 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
9229 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
9230 NULL
, is_gimple_val
, fb_rvalue
);
9231 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
9233 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
9238 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
9239 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
9240 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9248 case OMP_CLAUSE_FROM
:
9249 case OMP_CLAUSE__CACHE_
:
9250 decl
= OMP_CLAUSE_DECL (c
);
9251 if (error_operand_p (decl
))
9256 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
9257 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
9258 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9259 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
9260 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9267 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
9268 NULL
, is_gimple_lvalue
, fb_lvalue
)
9278 case OMP_CLAUSE_USE_DEVICE_PTR
:
9279 case OMP_CLAUSE_USE_DEVICE_ADDR
:
9280 flags
= GOVD_EXPLICIT
;
9283 case OMP_CLAUSE_IS_DEVICE_PTR
:
9284 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
9288 decl
= OMP_CLAUSE_DECL (c
);
9290 if (error_operand_p (decl
))
9295 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
9297 tree t
= omp_member_access_dummy_var (decl
);
9300 tree v
= DECL_VALUE_EXPR (decl
);
9301 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
9303 omp_notice_variable (outer_ctx
, t
, true);
9306 if (code
== OACC_DATA
9307 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9308 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
9309 flags
|= GOVD_MAP_0LEN_ARRAY
;
9310 omp_add_variable (ctx
, decl
, flags
);
9311 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9312 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
9313 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9314 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9316 omp_add_variable (ctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
9317 GOVD_LOCAL
| GOVD_SEEN
);
9318 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
9319 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
9321 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
9323 omp_add_variable (ctx
,
9324 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
9325 GOVD_LOCAL
| GOVD_SEEN
);
9326 gimplify_omp_ctxp
= ctx
;
9327 push_gimplify_context ();
9329 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9330 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9332 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
9333 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
9334 pop_gimplify_context
9335 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
9336 push_gimplify_context ();
9337 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
9338 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9339 pop_gimplify_context
9340 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
9341 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
9342 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
9344 gimplify_omp_ctxp
= outer_ctx
;
9346 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
9347 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
9349 gimplify_omp_ctxp
= ctx
;
9350 push_gimplify_context ();
9351 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
9353 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
9355 TREE_SIDE_EFFECTS (bind
) = 1;
9356 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
9357 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
9359 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
9360 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
9361 pop_gimplify_context
9362 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
9363 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
9365 gimplify_omp_ctxp
= outer_ctx
;
9367 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
9368 && OMP_CLAUSE_LINEAR_STMT (c
))
9370 gimplify_omp_ctxp
= ctx
;
9371 push_gimplify_context ();
9372 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
9374 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
9376 TREE_SIDE_EFFECTS (bind
) = 1;
9377 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
9378 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
9380 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
9381 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
9382 pop_gimplify_context
9383 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
9384 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
9386 gimplify_omp_ctxp
= outer_ctx
;
9392 case OMP_CLAUSE_COPYIN
:
9393 case OMP_CLAUSE_COPYPRIVATE
:
9394 decl
= OMP_CLAUSE_DECL (c
);
9395 if (error_operand_p (decl
))
9400 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
9402 && !omp_check_private (ctx
, decl
, true))
9405 if (is_global_var (decl
))
9407 if (DECL_THREAD_LOCAL_P (decl
))
9409 else if (DECL_HAS_VALUE_EXPR_P (decl
))
9411 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
9415 && DECL_THREAD_LOCAL_P (value
))
9420 error_at (OMP_CLAUSE_LOCATION (c
),
9421 "copyprivate variable %qE is not threadprivate"
9422 " or private in outer context", DECL_NAME (decl
));
9425 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9426 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
9427 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
9429 && ((region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
9430 || (region_type
== ORT_WORKSHARE
9431 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9432 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
9433 || code
== OMP_LOOP
)))
9434 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
9435 || (code
== OMP_LOOP
9436 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9437 && ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
9438 == ORT_COMBINED_TEAMS
))))
9441 = splay_tree_lookup (outer_ctx
->variables
,
9442 (splay_tree_key
)decl
);
9443 if (on
== NULL
|| (on
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9445 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9446 && TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
9447 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
9448 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
9449 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
9451 omp_firstprivatize_variable (outer_ctx
, decl
);
9454 omp_add_variable (outer_ctx
, decl
,
9455 GOVD_SEEN
| GOVD_SHARED
);
9456 if (outer_ctx
->outer_context
)
9457 omp_notice_variable (outer_ctx
->outer_context
, decl
,
9463 omp_notice_variable (outer_ctx
, decl
, true);
9464 if (check_non_private
9465 && region_type
== ORT_WORKSHARE
9466 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
9467 || decl
== OMP_CLAUSE_DECL (c
)
9468 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
9469 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
9471 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
9472 == POINTER_PLUS_EXPR
9473 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9474 (OMP_CLAUSE_DECL (c
), 0), 0))
9476 && omp_check_private (ctx
, decl
, false))
9478 error ("%s variable %qE is private in outer context",
9479 check_non_private
, DECL_NAME (decl
));
9485 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
9486 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
9489 for (int i
= 0; i
< 2; i
++)
9490 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
9492 case VOID_CST
: p
[i
] = "cancel"; break;
9493 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
9494 case OMP_SIMD
: p
[i
] = "simd"; break;
9495 case OMP_TASK
: p
[i
] = "task"; break;
9496 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
9497 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
9498 case OMP_TARGET
: p
[i
] = "target"; break;
9499 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
9500 case OMP_TARGET_ENTER_DATA
:
9501 p
[i
] = "target enter data"; break;
9502 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
9503 default: gcc_unreachable ();
9505 error_at (OMP_CLAUSE_LOCATION (c
),
9506 "expected %qs %<if%> clause modifier rather than %qs",
9512 case OMP_CLAUSE_FINAL
:
9513 OMP_CLAUSE_OPERAND (c
, 0)
9514 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
9517 case OMP_CLAUSE_SCHEDULE
:
9518 case OMP_CLAUSE_NUM_THREADS
:
9519 case OMP_CLAUSE_NUM_TEAMS
:
9520 case OMP_CLAUSE_THREAD_LIMIT
:
9521 case OMP_CLAUSE_DIST_SCHEDULE
:
9522 case OMP_CLAUSE_DEVICE
:
9523 case OMP_CLAUSE_PRIORITY
:
9524 case OMP_CLAUSE_GRAINSIZE
:
9525 case OMP_CLAUSE_NUM_TASKS
:
9526 case OMP_CLAUSE_HINT
:
9527 case OMP_CLAUSE_ASYNC
:
9528 case OMP_CLAUSE_WAIT
:
9529 case OMP_CLAUSE_NUM_GANGS
:
9530 case OMP_CLAUSE_NUM_WORKERS
:
9531 case OMP_CLAUSE_VECTOR_LENGTH
:
9532 case OMP_CLAUSE_WORKER
:
9533 case OMP_CLAUSE_VECTOR
:
9534 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
9535 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9539 case OMP_CLAUSE_GANG
:
9540 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
9541 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9543 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
9544 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9548 case OMP_CLAUSE_NOWAIT
:
9552 case OMP_CLAUSE_ORDERED
:
9553 case OMP_CLAUSE_UNTIED
:
9554 case OMP_CLAUSE_COLLAPSE
:
9555 case OMP_CLAUSE_TILE
:
9556 case OMP_CLAUSE_AUTO
:
9557 case OMP_CLAUSE_SEQ
:
9558 case OMP_CLAUSE_INDEPENDENT
:
9559 case OMP_CLAUSE_MERGEABLE
:
9560 case OMP_CLAUSE_PROC_BIND
:
9561 case OMP_CLAUSE_SAFELEN
:
9562 case OMP_CLAUSE_SIMDLEN
:
9563 case OMP_CLAUSE_NOGROUP
:
9564 case OMP_CLAUSE_THREADS
:
9565 case OMP_CLAUSE_SIMD
:
9566 case OMP_CLAUSE_BIND
:
9567 case OMP_CLAUSE_IF_PRESENT
:
9568 case OMP_CLAUSE_FINALIZE
:
9571 case OMP_CLAUSE_ORDER
:
9572 ctx
->order_concurrent
= true;
9575 case OMP_CLAUSE_DEFAULTMAP
:
9576 enum gimplify_defaultmap_kind gdmkmin
, gdmkmax
;
9577 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c
))
9579 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
:
9580 gdmkmin
= GDMK_SCALAR
;
9581 gdmkmax
= GDMK_POINTER
;
9583 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
:
9584 gdmkmin
= gdmkmax
= GDMK_SCALAR
;
9586 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
:
9587 gdmkmin
= gdmkmax
= GDMK_AGGREGATE
;
9589 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
:
9590 gdmkmin
= gdmkmax
= GDMK_ALLOCATABLE
;
9592 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
:
9593 gdmkmin
= gdmkmax
= GDMK_POINTER
;
9598 for (int gdmk
= gdmkmin
; gdmk
<= gdmkmax
; gdmk
++)
9599 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c
))
9601 case OMP_CLAUSE_DEFAULTMAP_ALLOC
:
9602 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_ALLOC_ONLY
;
9604 case OMP_CLAUSE_DEFAULTMAP_TO
:
9605 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_TO_ONLY
;
9607 case OMP_CLAUSE_DEFAULTMAP_FROM
:
9608 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FROM_ONLY
;
9610 case OMP_CLAUSE_DEFAULTMAP_TOFROM
:
9611 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
9613 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
:
9614 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
9616 case OMP_CLAUSE_DEFAULTMAP_NONE
:
9617 ctx
->defaultmap
[gdmk
] = 0;
9619 case OMP_CLAUSE_DEFAULTMAP_DEFAULT
:
9623 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
9625 case GDMK_AGGREGATE
:
9626 case GDMK_ALLOCATABLE
:
9627 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
9630 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
9641 case OMP_CLAUSE_ALIGNED
:
9642 decl
= OMP_CLAUSE_DECL (c
);
9643 if (error_operand_p (decl
))
9648 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
9649 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9654 if (!is_global_var (decl
)
9655 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
9656 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
9659 case OMP_CLAUSE_NONTEMPORAL
:
9660 decl
= OMP_CLAUSE_DECL (c
);
9661 if (error_operand_p (decl
))
9666 omp_add_variable (ctx
, decl
, GOVD_NONTEMPORAL
);
9669 case OMP_CLAUSE_DEFAULT
:
9670 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
9673 case OMP_CLAUSE_INCLUSIVE
:
9674 case OMP_CLAUSE_EXCLUSIVE
:
9675 decl
= OMP_CLAUSE_DECL (c
);
9677 splay_tree_node n
= splay_tree_lookup (outer_ctx
->variables
,
9678 (splay_tree_key
) decl
);
9679 if (n
== NULL
|| (n
->value
& GOVD_REDUCTION
) == 0)
9681 error_at (OMP_CLAUSE_LOCATION (c
),
9682 "%qD specified in %qs clause but not in %<inscan%> "
9683 "%<reduction%> clause on the containing construct",
9684 decl
, omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
9689 n
->value
|= GOVD_REDUCTION_INSCAN
;
9690 if (outer_ctx
->region_type
== ORT_SIMD
9691 && outer_ctx
->outer_context
9692 && outer_ctx
->outer_context
->region_type
== ORT_WORKSHARE
)
9694 n
= splay_tree_lookup (outer_ctx
->outer_context
->variables
,
9695 (splay_tree_key
) decl
);
9696 if (n
&& (n
->value
& GOVD_REDUCTION
) != 0)
9697 n
->value
|= GOVD_REDUCTION_INSCAN
;
9707 if (code
== OACC_DATA
9708 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9709 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9710 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9713 *list_p
= OMP_CLAUSE_CHAIN (c
);
9715 list_p
= &OMP_CLAUSE_CHAIN (c
);
9718 ctx
->clauses
= *orig_list_p
;
9719 gimplify_omp_ctxp
= ctx
;
9720 if (struct_map_to_clause
)
9721 delete struct_map_to_clause
;
9722 if (struct_deref_set
)
9723 delete struct_deref_set
;
9726 /* Return true if DECL is a candidate for shared to firstprivate
9727 optimization. We only consider non-addressable scalars, not
9728 too big, and not references. */
9731 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
9733 if (TREE_ADDRESSABLE (decl
))
9735 tree type
= TREE_TYPE (decl
);
9736 if (!is_gimple_reg_type (type
)
9737 || TREE_CODE (type
) == REFERENCE_TYPE
9738 || TREE_ADDRESSABLE (type
))
9740 /* Don't optimize too large decls, as each thread/task will have
9742 HOST_WIDE_INT len
= int_size_in_bytes (type
);
9743 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
9745 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
9750 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9751 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
9752 GOVD_WRITTEN in outer contexts. */
9755 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
9757 for (; ctx
; ctx
= ctx
->outer_context
)
9759 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
9760 (splay_tree_key
) decl
);
9763 else if (n
->value
& GOVD_SHARED
)
9765 n
->value
|= GOVD_WRITTEN
;
9768 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
9773 /* Helper callback for walk_gimple_seq to discover possible stores
9774 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9775 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9779 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
9781 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
9790 if (handled_component_p (op
))
9791 op
= TREE_OPERAND (op
, 0);
9792 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
9793 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
9794 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
9799 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
9802 omp_mark_stores (gimplify_omp_ctxp
, op
);
9806 /* Helper callback for walk_gimple_seq to discover possible stores
9807 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9808 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9812 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
9813 bool *handled_ops_p
,
9814 struct walk_stmt_info
*wi
)
9816 gimple
*stmt
= gsi_stmt (*gsi_p
);
9817 switch (gimple_code (stmt
))
9819 /* Don't recurse on OpenMP constructs for which
9820 gimplify_adjust_omp_clauses already handled the bodies,
9821 except handle gimple_omp_for_pre_body. */
9822 case GIMPLE_OMP_FOR
:
9823 *handled_ops_p
= true;
9824 if (gimple_omp_for_pre_body (stmt
))
9825 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
9826 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
9828 case GIMPLE_OMP_PARALLEL
:
9829 case GIMPLE_OMP_TASK
:
9830 case GIMPLE_OMP_SECTIONS
:
9831 case GIMPLE_OMP_SINGLE
:
9832 case GIMPLE_OMP_TARGET
:
9833 case GIMPLE_OMP_TEAMS
:
9834 case GIMPLE_OMP_CRITICAL
:
9835 *handled_ops_p
= true;
9843 struct gimplify_adjust_omp_clauses_data
9849 /* For all variables that were not actually used within the context,
9850 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
9853 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
9855 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
9857 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
9858 tree decl
= (tree
) n
->key
;
9859 unsigned flags
= n
->value
;
9860 enum omp_clause_code code
;
9864 if (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
9865 && (flags
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0)
9866 flags
= GOVD_SHARED
| GOVD_SEEN
| GOVD_WRITTEN
;
9867 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
9869 if ((flags
& GOVD_SEEN
) == 0)
9871 if ((flags
& GOVD_MAP_HAS_ATTACHMENTS
) != 0)
9873 if (flags
& GOVD_DEBUG_PRIVATE
)
9875 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_SHARED
);
9876 private_debug
= true;
9878 else if (flags
& GOVD_MAP
)
9879 private_debug
= false;
9882 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
9883 !!(flags
& GOVD_SHARED
));
9885 code
= OMP_CLAUSE_PRIVATE
;
9886 else if (flags
& GOVD_MAP
)
9888 code
= OMP_CLAUSE_MAP
;
9889 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
9890 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
9892 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
9896 && DECL_IN_CONSTANT_POOL (decl
)
9897 && !lookup_attribute ("omp declare target",
9898 DECL_ATTRIBUTES (decl
)))
9900 tree id
= get_identifier ("omp declare target");
9901 DECL_ATTRIBUTES (decl
)
9902 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
9903 varpool_node
*node
= varpool_node::get (decl
);
9906 node
->offloadable
= 1;
9907 if (ENABLE_OFFLOADING
)
9908 g
->have_offload
= true;
9912 else if (flags
& GOVD_SHARED
)
9914 if (is_global_var (decl
))
9916 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
9920 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9921 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
9922 | GOVD_PRIVATE
| GOVD_REDUCTION
9923 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
9925 ctx
= ctx
->outer_context
;
9930 code
= OMP_CLAUSE_SHARED
;
9932 else if (flags
& GOVD_PRIVATE
)
9933 code
= OMP_CLAUSE_PRIVATE
;
9934 else if (flags
& GOVD_FIRSTPRIVATE
)
9936 code
= OMP_CLAUSE_FIRSTPRIVATE
;
9937 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
9938 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
9939 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
9941 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9942 "%<target%> construct", decl
);
9946 else if (flags
& GOVD_LASTPRIVATE
)
9947 code
= OMP_CLAUSE_LASTPRIVATE
;
9948 else if (flags
& (GOVD_ALIGNED
| GOVD_NONTEMPORAL
))
9950 else if (flags
& GOVD_CONDTEMP
)
9952 code
= OMP_CLAUSE__CONDTEMP_
;
9953 gimple_add_tmp_var (decl
);
9958 if (((flags
& GOVD_LASTPRIVATE
)
9959 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
9960 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9961 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
9963 tree chain
= *list_p
;
9964 clause
= build_omp_clause (input_location
, code
);
9965 OMP_CLAUSE_DECL (clause
) = decl
;
9966 OMP_CLAUSE_CHAIN (clause
) = chain
;
9968 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
9969 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
9970 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
9971 else if (code
== OMP_CLAUSE_SHARED
9972 && (flags
& GOVD_WRITTEN
) == 0
9973 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9974 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
9975 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
9976 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
9977 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
9979 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
9980 OMP_CLAUSE_DECL (nc
) = decl
;
9981 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
9982 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
9983 OMP_CLAUSE_DECL (clause
)
9984 = build_simple_mem_ref_loc (input_location
, decl
);
9985 OMP_CLAUSE_DECL (clause
)
9986 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
9987 build_int_cst (build_pointer_type (char_type_node
), 0));
9988 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
9989 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
9990 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
9991 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
9992 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
9993 OMP_CLAUSE_CHAIN (nc
) = chain
;
9994 OMP_CLAUSE_CHAIN (clause
) = nc
;
9995 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9996 gimplify_omp_ctxp
= ctx
->outer_context
;
9997 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
9998 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
9999 gimplify_omp_ctxp
= ctx
;
10001 else if (code
== OMP_CLAUSE_MAP
)
10004 /* Not all combinations of these GOVD_MAP flags are actually valid. */
10005 switch (flags
& (GOVD_MAP_TO_ONLY
10007 | GOVD_MAP_FORCE_PRESENT
10008 | GOVD_MAP_ALLOC_ONLY
10009 | GOVD_MAP_FROM_ONLY
))
10012 kind
= GOMP_MAP_TOFROM
;
10014 case GOVD_MAP_FORCE
:
10015 kind
= GOMP_MAP_TOFROM
| GOMP_MAP_FLAG_FORCE
;
10017 case GOVD_MAP_TO_ONLY
:
10018 kind
= GOMP_MAP_TO
;
10020 case GOVD_MAP_FROM_ONLY
:
10021 kind
= GOMP_MAP_FROM
;
10023 case GOVD_MAP_ALLOC_ONLY
:
10024 kind
= GOMP_MAP_ALLOC
;
10026 case GOVD_MAP_TO_ONLY
| GOVD_MAP_FORCE
:
10027 kind
= GOMP_MAP_TO
| GOMP_MAP_FLAG_FORCE
;
10029 case GOVD_MAP_FORCE_PRESENT
:
10030 kind
= GOMP_MAP_FORCE_PRESENT
;
10033 gcc_unreachable ();
10035 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
10036 if (DECL_SIZE (decl
)
10037 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
10039 tree decl2
= DECL_VALUE_EXPR (decl
);
10040 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10041 decl2
= TREE_OPERAND (decl2
, 0);
10042 gcc_assert (DECL_P (decl2
));
10043 tree mem
= build_simple_mem_ref (decl2
);
10044 OMP_CLAUSE_DECL (clause
) = mem
;
10045 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10046 if (gimplify_omp_ctxp
->outer_context
)
10048 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
10049 omp_notice_variable (ctx
, decl2
, true);
10050 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
10052 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
10054 OMP_CLAUSE_DECL (nc
) = decl
;
10055 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10056 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
10057 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
10059 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
10060 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
10061 OMP_CLAUSE_CHAIN (clause
) = nc
;
10063 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
10064 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
10066 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
10067 OMP_CLAUSE_SIZE (clause
)
10068 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
10069 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10070 gimplify_omp_ctxp
= ctx
->outer_context
;
10071 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
10072 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
10073 gimplify_omp_ctxp
= ctx
;
10074 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
10076 OMP_CLAUSE_DECL (nc
) = decl
;
10077 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10078 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
10079 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
10080 OMP_CLAUSE_CHAIN (clause
) = nc
;
10083 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
10085 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
10087 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
10088 OMP_CLAUSE_DECL (nc
) = decl
;
10089 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
10090 OMP_CLAUSE_CHAIN (nc
) = chain
;
10091 OMP_CLAUSE_CHAIN (clause
) = nc
;
10092 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10093 gimplify_omp_ctxp
= ctx
->outer_context
;
10094 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
10095 gimplify_omp_ctxp
= ctx
;
10098 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10099 gimplify_omp_ctxp
= ctx
->outer_context
;
10100 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
);
10101 if (gimplify_omp_ctxp
)
10102 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
10103 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
10104 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
10105 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
10107 gimplify_omp_ctxp
= ctx
;
10112 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
10113 enum tree_code code
)
10115 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10116 tree
*orig_list_p
= list_p
;
10118 bool has_inscan_reductions
= false;
10122 struct gimplify_omp_ctx
*octx
;
10123 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
10124 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
10128 struct walk_stmt_info wi
;
10129 memset (&wi
, 0, sizeof (wi
));
10130 walk_gimple_seq (body
, omp_find_stores_stmt
,
10131 omp_find_stores_op
, &wi
);
10135 if (ctx
->add_safelen1
)
10137 /* If there are VLAs in the body of simd loop, prevent
10139 gcc_assert (ctx
->region_type
== ORT_SIMD
);
10140 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
10141 OMP_CLAUSE_SAFELEN_EXPR (c
) = integer_one_node
;
10142 OMP_CLAUSE_CHAIN (c
) = *list_p
;
10144 list_p
= &OMP_CLAUSE_CHAIN (c
);
10147 if (ctx
->region_type
== ORT_WORKSHARE
10148 && ctx
->outer_context
10149 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
)
10151 for (c
= ctx
->outer_context
->clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10152 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
10153 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
10155 decl
= OMP_CLAUSE_DECL (c
);
10157 = splay_tree_lookup (ctx
->outer_context
->variables
,
10158 (splay_tree_key
) decl
);
10159 gcc_checking_assert (!splay_tree_lookup (ctx
->variables
,
10160 (splay_tree_key
) decl
));
10161 omp_add_variable (ctx
, decl
, n
->value
);
10162 tree c2
= copy_node (c
);
10163 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
10165 if ((n
->value
& GOVD_FIRSTPRIVATE
) == 0)
10167 c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10168 OMP_CLAUSE_FIRSTPRIVATE
);
10169 OMP_CLAUSE_DECL (c2
) = decl
;
10170 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
10174 while ((c
= *list_p
) != NULL
)
10177 bool remove
= false;
10179 switch (OMP_CLAUSE_CODE (c
))
10181 case OMP_CLAUSE_FIRSTPRIVATE
:
10182 if ((ctx
->region_type
& ORT_TARGET
)
10183 && (ctx
->region_type
& ORT_ACC
) == 0
10184 && TYPE_ATOMIC (strip_array_types
10185 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
10187 error_at (OMP_CLAUSE_LOCATION (c
),
10188 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10189 "%<target%> construct", OMP_CLAUSE_DECL (c
));
10194 case OMP_CLAUSE_PRIVATE
:
10195 case OMP_CLAUSE_SHARED
:
10196 case OMP_CLAUSE_LINEAR
:
10197 decl
= OMP_CLAUSE_DECL (c
);
10198 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10199 remove
= !(n
->value
& GOVD_SEEN
);
10200 if ((n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0
10201 && code
== OMP_PARALLEL
10202 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10206 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
10207 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
10208 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
10210 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
10211 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
10213 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
10214 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
10216 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
10217 && (n
->value
& GOVD_WRITTEN
) == 0
10219 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10220 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
10221 else if (DECL_P (decl
)
10222 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
10223 && (n
->value
& GOVD_WRITTEN
) != 0)
10224 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10225 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
10226 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10227 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10231 case OMP_CLAUSE_LASTPRIVATE
:
10232 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10233 accurately reflect the presence of a FIRSTPRIVATE clause. */
10234 decl
= OMP_CLAUSE_DECL (c
);
10235 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10236 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
10237 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
10238 if (code
== OMP_DISTRIBUTE
10239 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
10242 error_at (OMP_CLAUSE_LOCATION (c
),
10243 "same variable used in %<firstprivate%> and "
10244 "%<lastprivate%> clauses on %<distribute%> "
10248 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
10250 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10251 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10252 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) && code
== OMP_PARALLEL
)
10256 case OMP_CLAUSE_ALIGNED
:
10257 decl
= OMP_CLAUSE_DECL (c
);
10258 if (!is_global_var (decl
))
10260 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10261 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
10262 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
10264 struct gimplify_omp_ctx
*octx
;
10266 && (n
->value
& (GOVD_DATA_SHARE_CLASS
10267 & ~GOVD_FIRSTPRIVATE
)))
10270 for (octx
= ctx
->outer_context
; octx
;
10271 octx
= octx
->outer_context
)
10273 n
= splay_tree_lookup (octx
->variables
,
10274 (splay_tree_key
) decl
);
10277 if (n
->value
& GOVD_LOCAL
)
10279 /* We have to avoid assigning a shared variable
10280 to itself when trying to add
10281 __builtin_assume_aligned. */
10282 if (n
->value
& GOVD_SHARED
)
10290 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
10292 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10293 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
10298 case OMP_CLAUSE_NONTEMPORAL
:
10299 decl
= OMP_CLAUSE_DECL (c
);
10300 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10301 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
10304 case OMP_CLAUSE_MAP
:
10305 if (code
== OMP_TARGET_EXIT_DATA
10306 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
10311 decl
= OMP_CLAUSE_DECL (c
);
10312 /* Data clauses associated with reductions must be
10313 compatible with present_or_copy. Warn and adjust the clause
10314 if that is not the case. */
10315 if (ctx
->region_type
== ORT_ACC_PARALLEL
10316 || ctx
->region_type
== ORT_ACC_SERIAL
)
10318 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
10322 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
10324 if (n
&& (n
->value
& GOVD_REDUCTION
))
10326 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
10328 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
10329 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
10330 && kind
!= GOMP_MAP_FORCE_PRESENT
10331 && kind
!= GOMP_MAP_POINTER
)
10333 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
10334 "incompatible data clause with reduction "
10335 "on %qE; promoting to %<present_or_copy%>",
10337 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
10341 if (!DECL_P (decl
))
10343 if ((ctx
->region_type
& ORT_TARGET
) != 0
10344 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
10346 if (TREE_CODE (decl
) == INDIRECT_REF
10347 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
10348 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
10349 == REFERENCE_TYPE
))
10350 decl
= TREE_OPERAND (decl
, 0);
10351 if (TREE_CODE (decl
) == COMPONENT_REF
)
10353 while (TREE_CODE (decl
) == COMPONENT_REF
)
10354 decl
= TREE_OPERAND (decl
, 0);
10357 n
= splay_tree_lookup (ctx
->variables
,
10358 (splay_tree_key
) decl
);
10359 if (!(n
->value
& GOVD_SEEN
))
10366 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10367 if ((ctx
->region_type
& ORT_TARGET
) != 0
10368 && !(n
->value
& GOVD_SEEN
)
10369 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
10370 && (!is_global_var (decl
)
10371 || !lookup_attribute ("omp declare target link",
10372 DECL_ATTRIBUTES (decl
))))
10375 /* For struct element mapping, if struct is never referenced
10376 in target block and none of the mapping has always modifier,
10377 remove all the struct element mappings, which immediately
10378 follow the GOMP_MAP_STRUCT map clause. */
10379 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
10381 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
10383 OMP_CLAUSE_CHAIN (c
)
10384 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
10387 else if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
10388 && code
== OMP_TARGET_EXIT_DATA
)
10390 else if (DECL_SIZE (decl
)
10391 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
10392 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
10393 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
10394 && (OMP_CLAUSE_MAP_KIND (c
)
10395 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10397 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10398 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10400 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
10402 tree decl2
= DECL_VALUE_EXPR (decl
);
10403 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10404 decl2
= TREE_OPERAND (decl2
, 0);
10405 gcc_assert (DECL_P (decl2
));
10406 tree mem
= build_simple_mem_ref (decl2
);
10407 OMP_CLAUSE_DECL (c
) = mem
;
10408 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10409 if (ctx
->outer_context
)
10411 omp_notice_variable (ctx
->outer_context
, decl2
, true);
10412 omp_notice_variable (ctx
->outer_context
,
10413 OMP_CLAUSE_SIZE (c
), true);
10415 if (((ctx
->region_type
& ORT_TARGET
) != 0
10416 || !ctx
->target_firstprivatize_array_bases
)
10417 && ((n
->value
& GOVD_SEEN
) == 0
10418 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
10420 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10422 OMP_CLAUSE_DECL (nc
) = decl
;
10423 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10424 if (ctx
->target_firstprivatize_array_bases
)
10425 OMP_CLAUSE_SET_MAP_KIND (nc
,
10426 GOMP_MAP_FIRSTPRIVATE_POINTER
);
10428 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
10429 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
10430 OMP_CLAUSE_CHAIN (c
) = nc
;
10436 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
10437 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
10438 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
10439 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
10444 case OMP_CLAUSE_TO
:
10445 case OMP_CLAUSE_FROM
:
10446 case OMP_CLAUSE__CACHE_
:
10447 decl
= OMP_CLAUSE_DECL (c
);
10448 if (!DECL_P (decl
))
10450 if (DECL_SIZE (decl
)
10451 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
10453 tree decl2
= DECL_VALUE_EXPR (decl
);
10454 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10455 decl2
= TREE_OPERAND (decl2
, 0);
10456 gcc_assert (DECL_P (decl2
));
10457 tree mem
= build_simple_mem_ref (decl2
);
10458 OMP_CLAUSE_DECL (c
) = mem
;
10459 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10460 if (ctx
->outer_context
)
10462 omp_notice_variable (ctx
->outer_context
, decl2
, true);
10463 omp_notice_variable (ctx
->outer_context
,
10464 OMP_CLAUSE_SIZE (c
), true);
10467 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
10468 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
10471 case OMP_CLAUSE_REDUCTION
:
10472 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
10474 decl
= OMP_CLAUSE_DECL (c
);
10475 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10476 if ((n
->value
& GOVD_REDUCTION_INSCAN
) == 0)
10479 error_at (OMP_CLAUSE_LOCATION (c
),
10480 "%qD specified in %<inscan%> %<reduction%> clause "
10481 "but not in %<scan%> directive clause", decl
);
10484 has_inscan_reductions
= true;
10487 case OMP_CLAUSE_IN_REDUCTION
:
10488 case OMP_CLAUSE_TASK_REDUCTION
:
10489 decl
= OMP_CLAUSE_DECL (c
);
10490 /* OpenACC reductions need a present_or_copy data clause.
10491 Add one if necessary. Emit error when the reduction is private. */
10492 if (ctx
->region_type
== ORT_ACC_PARALLEL
10493 || ctx
->region_type
== ORT_ACC_SERIAL
)
10495 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10496 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
10499 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
10500 "reduction on %qE", DECL_NAME (decl
));
10502 else if ((n
->value
& GOVD_MAP
) == 0)
10504 tree next
= OMP_CLAUSE_CHAIN (c
);
10505 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
10506 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
10507 OMP_CLAUSE_DECL (nc
) = decl
;
10508 OMP_CLAUSE_CHAIN (c
) = nc
;
10509 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
10512 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
10513 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
10515 nc
= OMP_CLAUSE_CHAIN (nc
);
10517 OMP_CLAUSE_CHAIN (nc
) = next
;
10518 n
->value
|= GOVD_MAP
;
10522 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10523 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10525 case OMP_CLAUSE_COPYIN
:
10526 case OMP_CLAUSE_COPYPRIVATE
:
10527 case OMP_CLAUSE_IF
:
10528 case OMP_CLAUSE_NUM_THREADS
:
10529 case OMP_CLAUSE_NUM_TEAMS
:
10530 case OMP_CLAUSE_THREAD_LIMIT
:
10531 case OMP_CLAUSE_DIST_SCHEDULE
:
10532 case OMP_CLAUSE_DEVICE
:
10533 case OMP_CLAUSE_SCHEDULE
:
10534 case OMP_CLAUSE_NOWAIT
:
10535 case OMP_CLAUSE_ORDERED
:
10536 case OMP_CLAUSE_DEFAULT
:
10537 case OMP_CLAUSE_UNTIED
:
10538 case OMP_CLAUSE_COLLAPSE
:
10539 case OMP_CLAUSE_FINAL
:
10540 case OMP_CLAUSE_MERGEABLE
:
10541 case OMP_CLAUSE_PROC_BIND
:
10542 case OMP_CLAUSE_SAFELEN
:
10543 case OMP_CLAUSE_SIMDLEN
:
10544 case OMP_CLAUSE_DEPEND
:
10545 case OMP_CLAUSE_PRIORITY
:
10546 case OMP_CLAUSE_GRAINSIZE
:
10547 case OMP_CLAUSE_NUM_TASKS
:
10548 case OMP_CLAUSE_NOGROUP
:
10549 case OMP_CLAUSE_THREADS
:
10550 case OMP_CLAUSE_SIMD
:
10551 case OMP_CLAUSE_HINT
:
10552 case OMP_CLAUSE_DEFAULTMAP
:
10553 case OMP_CLAUSE_ORDER
:
10554 case OMP_CLAUSE_BIND
:
10555 case OMP_CLAUSE_USE_DEVICE_PTR
:
10556 case OMP_CLAUSE_USE_DEVICE_ADDR
:
10557 case OMP_CLAUSE_IS_DEVICE_PTR
:
10558 case OMP_CLAUSE_ASYNC
:
10559 case OMP_CLAUSE_WAIT
:
10560 case OMP_CLAUSE_INDEPENDENT
:
10561 case OMP_CLAUSE_NUM_GANGS
:
10562 case OMP_CLAUSE_NUM_WORKERS
:
10563 case OMP_CLAUSE_VECTOR_LENGTH
:
10564 case OMP_CLAUSE_GANG
:
10565 case OMP_CLAUSE_WORKER
:
10566 case OMP_CLAUSE_VECTOR
:
10567 case OMP_CLAUSE_AUTO
:
10568 case OMP_CLAUSE_SEQ
:
10569 case OMP_CLAUSE_TILE
:
10570 case OMP_CLAUSE_IF_PRESENT
:
10571 case OMP_CLAUSE_FINALIZE
:
10572 case OMP_CLAUSE_INCLUSIVE
:
10573 case OMP_CLAUSE_EXCLUSIVE
:
10577 gcc_unreachable ();
10581 *list_p
= OMP_CLAUSE_CHAIN (c
);
10583 list_p
= &OMP_CLAUSE_CHAIN (c
);
10586 /* Add in any implicit data sharing. */
10587 struct gimplify_adjust_omp_clauses_data data
;
10588 data
.list_p
= list_p
;
10589 data
.pre_p
= pre_p
;
10590 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
10592 if (has_inscan_reductions
)
10593 for (c
= *orig_list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10594 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10595 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10597 error_at (OMP_CLAUSE_LOCATION (c
),
10598 "%<inscan%> %<reduction%> clause used together with "
10599 "%<linear%> clause for a variable other than loop "
10604 gimplify_omp_ctxp
= ctx
->outer_context
;
10605 delete_omp_context (ctx
);
10608 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
10609 -1 if unknown yet (simd is involved, won't be known until vectorization)
10610 and 1 if they do. If SCORES is non-NULL, it should point to an array
10611 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
10612 of the CONSTRUCTS (position -1 if it will never match) followed by
10613 number of constructs in the OpenMP context construct trait. If the
10614 score depends on whether it will be in a declare simd clone or not,
10615 the function returns 2 and there will be two sets of the scores, the first
10616 one for the case that it is not in a declare simd clone, the other
10617 that it is in a declare simd clone. */
10620 omp_construct_selector_matches (enum tree_code
*constructs
, int nconstructs
,
10623 int matched
= 0, cnt
= 0;
10624 bool simd_seen
= false;
10625 bool target_seen
= false;
10626 int declare_simd_cnt
= -1;
10627 auto_vec
<enum tree_code
, 16> codes
;
10628 for (struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
; ctx
;)
10630 if (((ctx
->region_type
& ORT_PARALLEL
) && ctx
->code
== OMP_PARALLEL
)
10631 || ((ctx
->region_type
& (ORT_TARGET
| ORT_IMPLICIT_TARGET
| ORT_ACC
))
10632 == ORT_TARGET
&& ctx
->code
== OMP_TARGET
)
10633 || ((ctx
->region_type
& ORT_TEAMS
) && ctx
->code
== OMP_TEAMS
)
10634 || (ctx
->region_type
== ORT_WORKSHARE
&& ctx
->code
== OMP_FOR
)
10635 || (ctx
->region_type
== ORT_SIMD
10636 && ctx
->code
== OMP_SIMD
10637 && !omp_find_clause (ctx
->clauses
, OMP_CLAUSE_BIND
)))
10641 codes
.safe_push (ctx
->code
);
10642 else if (matched
< nconstructs
&& ctx
->code
== constructs
[matched
])
10644 if (ctx
->code
== OMP_SIMD
)
10652 if (ctx
->code
== OMP_TARGET
)
10654 if (scores
== NULL
)
10655 return matched
< nconstructs
? 0 : simd_seen
? -1 : 1;
10656 target_seen
= true;
10660 else if (ctx
->region_type
== ORT_WORKSHARE
10661 && ctx
->code
== OMP_LOOP
10662 && ctx
->outer_context
10663 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
10664 && ctx
->outer_context
->outer_context
10665 && ctx
->outer_context
->outer_context
->code
== OMP_LOOP
10666 && ctx
->outer_context
->outer_context
->distribute
)
10667 ctx
= ctx
->outer_context
->outer_context
;
10668 ctx
= ctx
->outer_context
;
10671 && lookup_attribute ("omp declare simd",
10672 DECL_ATTRIBUTES (current_function_decl
)))
10674 /* Declare simd is a maybe case, it is supposed to be added only to the
10675 omp-simd-clone.c added clones and not to the base function. */
10676 declare_simd_cnt
= cnt
++;
10678 codes
.safe_push (OMP_SIMD
);
10680 && constructs
[0] == OMP_SIMD
)
10682 gcc_assert (matched
== 0);
10684 if (++matched
== nconstructs
)
10688 if (tree attr
= lookup_attribute ("omp declare variant variant",
10689 DECL_ATTRIBUTES (current_function_decl
)))
10691 enum tree_code variant_constructs
[5];
10692 int variant_nconstructs
= 0;
10694 variant_nconstructs
10695 = omp_constructor_traits_to_codes (TREE_VALUE (attr
),
10696 variant_constructs
);
10697 for (int i
= 0; i
< variant_nconstructs
; i
++)
10701 codes
.safe_push (variant_constructs
[i
]);
10702 else if (matched
< nconstructs
10703 && variant_constructs
[i
] == constructs
[matched
])
10705 if (variant_constructs
[i
] == OMP_SIMD
)
10716 && lookup_attribute ("omp declare target block",
10717 DECL_ATTRIBUTES (current_function_decl
)))
10720 codes
.safe_push (OMP_TARGET
);
10721 else if (matched
< nconstructs
&& constructs
[matched
] == OMP_TARGET
)
10726 for (int pass
= 0; pass
< (declare_simd_cnt
== -1 ? 1 : 2); pass
++)
10728 int j
= codes
.length () - 1;
10729 for (int i
= nconstructs
- 1; i
>= 0; i
--)
10732 && (pass
!= 0 || declare_simd_cnt
!= j
)
10733 && constructs
[i
] != codes
[j
])
10735 if (pass
== 0 && declare_simd_cnt
!= -1 && j
> declare_simd_cnt
)
10740 *scores
++ = ((pass
== 0 && declare_simd_cnt
!= -1)
10741 ? codes
.length () - 1 : codes
.length ());
10743 return declare_simd_cnt
== -1 ? 1 : 2;
10745 if (matched
== nconstructs
)
10746 return simd_seen
? -1 : 1;
10750 /* Gimplify OACC_CACHE. */
10753 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
10755 tree expr
= *expr_p
;
10757 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
10759 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
10762 /* TODO: Do something sensible with this information. */
10764 *expr_p
= NULL_TREE
;
10767 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
10768 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
10769 kind. The entry kind will replace the one in CLAUSE, while the exit
10770 kind will be used in a new omp_clause and returned to the caller. */
10773 gimplify_oacc_declare_1 (tree clause
)
10775 HOST_WIDE_INT kind
, new_op
;
10779 kind
= OMP_CLAUSE_MAP_KIND (clause
);
10783 case GOMP_MAP_ALLOC
:
10784 new_op
= GOMP_MAP_RELEASE
;
10788 case GOMP_MAP_FROM
:
10789 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
10790 new_op
= GOMP_MAP_FROM
;
10794 case GOMP_MAP_TOFROM
:
10795 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
10796 new_op
= GOMP_MAP_FROM
;
10800 case GOMP_MAP_DEVICE_RESIDENT
:
10801 case GOMP_MAP_FORCE_DEVICEPTR
:
10802 case GOMP_MAP_FORCE_PRESENT
:
10803 case GOMP_MAP_LINK
:
10804 case GOMP_MAP_POINTER
:
10809 gcc_unreachable ();
10815 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
10816 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
10817 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
10823 /* Gimplify OACC_DECLARE. */
10826 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
10828 tree expr
= *expr_p
;
10830 tree clauses
, t
, decl
;
10832 clauses
= OACC_DECLARE_CLAUSES (expr
);
10834 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
10835 gimplify_adjust_omp_clauses (pre_p
, NULL
, &clauses
, OACC_DECLARE
);
10837 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
10839 decl
= OMP_CLAUSE_DECL (t
);
10841 if (TREE_CODE (decl
) == MEM_REF
)
10842 decl
= TREE_OPERAND (decl
, 0);
10844 if (VAR_P (decl
) && !is_oacc_declared (decl
))
10846 tree attr
= get_identifier ("oacc declare target");
10847 DECL_ATTRIBUTES (decl
) = tree_cons (attr
, NULL_TREE
,
10848 DECL_ATTRIBUTES (decl
));
10852 && !is_global_var (decl
)
10853 && DECL_CONTEXT (decl
) == current_function_decl
)
10855 tree c
= gimplify_oacc_declare_1 (t
);
10858 if (oacc_declare_returns
== NULL
)
10859 oacc_declare_returns
= new hash_map
<tree
, tree
>;
10861 oacc_declare_returns
->put (decl
, c
);
10865 if (gimplify_omp_ctxp
)
10866 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
10869 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
10872 gimplify_seq_add_stmt (pre_p
, stmt
);
10874 *expr_p
= NULL_TREE
;
10877 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10878 gimplification of the body, as well as scanning the body for used
10879 variables. We need to do this scan now, because variable-sized
10880 decls will be decomposed during gimplification. */
10883 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
10885 tree expr
= *expr_p
;
10887 gimple_seq body
= NULL
;
10889 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
10890 OMP_PARALLEL_COMBINED (expr
)
10891 ? ORT_COMBINED_PARALLEL
10892 : ORT_PARALLEL
, OMP_PARALLEL
);
10894 push_gimplify_context ();
10896 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
10897 if (gimple_code (g
) == GIMPLE_BIND
)
10898 pop_gimplify_context (g
);
10900 pop_gimplify_context (NULL
);
10902 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
10905 g
= gimple_build_omp_parallel (body
,
10906 OMP_PARALLEL_CLAUSES (expr
),
10907 NULL_TREE
, NULL_TREE
);
10908 if (OMP_PARALLEL_COMBINED (expr
))
10909 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
10910 gimplify_seq_add_stmt (pre_p
, g
);
10911 *expr_p
= NULL_TREE
;
10914 /* Gimplify the contents of an OMP_TASK statement. This involves
10915 gimplification of the body, as well as scanning the body for used
10916 variables. We need to do this scan now, because variable-sized
10917 decls will be decomposed during gimplification. */
10920 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
10922 tree expr
= *expr_p
;
10924 gimple_seq body
= NULL
;
10926 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
10927 for (tree c
= OMP_TASK_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10928 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
10929 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET
)
10931 error_at (OMP_CLAUSE_LOCATION (c
),
10932 "%<mutexinoutset%> kind in %<depend%> clause on a "
10933 "%<taskwait%> construct");
10937 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
10938 omp_find_clause (OMP_TASK_CLAUSES (expr
),
10940 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
10942 if (OMP_TASK_BODY (expr
))
10944 push_gimplify_context ();
10946 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
10947 if (gimple_code (g
) == GIMPLE_BIND
)
10948 pop_gimplify_context (g
);
10950 pop_gimplify_context (NULL
);
10953 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
10956 g
= gimple_build_omp_task (body
,
10957 OMP_TASK_CLAUSES (expr
),
10958 NULL_TREE
, NULL_TREE
,
10959 NULL_TREE
, NULL_TREE
, NULL_TREE
);
10960 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
10961 gimple_omp_task_set_taskwait_p (g
, true);
10962 gimplify_seq_add_stmt (pre_p
, g
);
10963 *expr_p
= NULL_TREE
;
10966 /* Gimplify the gross structure of an OMP_FOR statement. */
10968 static enum gimplify_status
10969 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
10971 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
10972 enum gimplify_status ret
= GS_ALL_DONE
;
10973 enum gimplify_status tret
;
10975 gimple_seq for_body
, for_pre_body
;
10977 bitmap has_decl_expr
= NULL
;
10978 enum omp_region_type ort
= ORT_WORKSHARE
;
10980 orig_for_stmt
= for_stmt
= *expr_p
;
10982 bool loop_p
= (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_BIND
)
10984 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
10986 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
10987 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
10988 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
10989 find_combined_omp_for
, data
, NULL
);
10990 if (inner_for_stmt
== NULL_TREE
)
10992 gcc_assert (seen_error ());
10993 *expr_p
= NULL_TREE
;
10996 if (data
[2] && OMP_FOR_PRE_BODY (*data
[2]))
10998 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data
[2]),
10999 &OMP_FOR_PRE_BODY (for_stmt
));
11000 OMP_FOR_PRE_BODY (*data
[2]) = NULL_TREE
;
11002 if (OMP_FOR_PRE_BODY (inner_for_stmt
))
11004 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt
),
11005 &OMP_FOR_PRE_BODY (for_stmt
));
11006 OMP_FOR_PRE_BODY (inner_for_stmt
) = NULL_TREE
;
11011 /* We have some statements or variable declarations in between
11012 the composite construct directives. Move them around the
11015 for (i
= 0; i
< 3; i
++)
11019 if (i
< 2 && data
[i
+ 1] == &OMP_BODY (t
))
11020 data
[i
+ 1] = data
[i
];
11021 *data
[i
] = OMP_BODY (t
);
11022 tree body
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
11023 NULL_TREE
, make_node (BLOCK
));
11024 OMP_BODY (t
) = body
;
11025 append_to_statement_list_force (inner_for_stmt
,
11026 &BIND_EXPR_BODY (body
));
11028 data
[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body
)));
11029 gcc_assert (*data
[3] == inner_for_stmt
);
11034 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
11036 && OMP_FOR_ORIG_DECLS (inner_for_stmt
)
11037 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11039 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11042 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
11043 /* Class iterators aren't allowed on OMP_SIMD, so the only
11044 case we need to solve is distribute parallel for. They are
11045 allowed on the loop construct, but that is already handled
11046 in gimplify_omp_loop. */
11047 gcc_assert (TREE_CODE (inner_for_stmt
) == OMP_FOR
11048 && TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
11050 tree orig_decl
= TREE_PURPOSE (orig
);
11051 tree last
= TREE_VALUE (orig
);
11053 for (pc
= &OMP_FOR_CLAUSES (inner_for_stmt
);
11054 *pc
; pc
= &OMP_CLAUSE_CHAIN (*pc
))
11055 if ((OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
11056 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LASTPRIVATE
)
11057 && OMP_CLAUSE_DECL (*pc
) == orig_decl
)
11059 if (*pc
== NULL_TREE
)
11062 for (spc
= &OMP_PARALLEL_CLAUSES (*data
[1]);
11063 *spc
; spc
= &OMP_CLAUSE_CHAIN (*spc
))
11064 if (OMP_CLAUSE_CODE (*spc
) == OMP_CLAUSE_PRIVATE
11065 && OMP_CLAUSE_DECL (*spc
) == orig_decl
)
11070 *spc
= OMP_CLAUSE_CHAIN (c
);
11071 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
11075 if (*pc
== NULL_TREE
)
11077 else if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
)
11079 /* private clause will appear only on inner_for_stmt.
11080 Change it into firstprivate, and add private clause
11082 tree c
= copy_node (*pc
);
11083 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11084 OMP_FOR_CLAUSES (for_stmt
) = c
;
11085 OMP_CLAUSE_CODE (*pc
) = OMP_CLAUSE_FIRSTPRIVATE
;
11086 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
);
11090 /* lastprivate clause will appear on both inner_for_stmt
11091 and for_stmt. Add firstprivate clause to
11093 tree c
= build_omp_clause (OMP_CLAUSE_LOCATION (*pc
),
11094 OMP_CLAUSE_FIRSTPRIVATE
);
11095 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (*pc
);
11096 OMP_CLAUSE_CHAIN (c
) = *pc
;
11098 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
);
11100 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
11101 OMP_CLAUSE_FIRSTPRIVATE
);
11102 OMP_CLAUSE_DECL (c
) = last
;
11103 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11104 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11105 c
= build_omp_clause (UNKNOWN_LOCATION
,
11106 *pc
? OMP_CLAUSE_SHARED
11107 : OMP_CLAUSE_FIRSTPRIVATE
);
11108 OMP_CLAUSE_DECL (c
) = orig_decl
;
11109 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11110 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11112 /* Similarly, take care of C++ range for temporaries, those should
11113 be firstprivate on OMP_PARALLEL if any. */
11115 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
11116 if (OMP_FOR_ORIG_DECLS (inner_for_stmt
)
11117 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11119 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11123 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
11124 tree v
= TREE_CHAIN (orig
);
11125 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
11126 OMP_CLAUSE_FIRSTPRIVATE
);
11127 /* First add firstprivate clause for the __for_end artificial
11129 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 1);
11130 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
11132 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
11133 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11134 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11135 if (TREE_VEC_ELT (v
, 0))
11137 /* And now the same for __for_range artificial decl if it
11139 c
= build_omp_clause (UNKNOWN_LOCATION
,
11140 OMP_CLAUSE_FIRSTPRIVATE
);
11141 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 0);
11142 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
11144 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
11145 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11146 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11151 switch (TREE_CODE (for_stmt
))
11154 case OMP_DISTRIBUTE
:
11160 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
11161 ort
= ORT_UNTIED_TASKLOOP
;
11163 ort
= ORT_TASKLOOP
;
11169 gcc_unreachable ();
11172 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11173 clause for the IV. */
11174 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
11176 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
11177 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
11178 decl
= TREE_OPERAND (t
, 0);
11179 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11180 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11181 && OMP_CLAUSE_DECL (c
) == decl
)
11183 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
11188 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
11189 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
11190 loop_p
&& TREE_CODE (for_stmt
) != OMP_SIMD
11191 ? OMP_LOOP
: TREE_CODE (for_stmt
));
11193 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
11194 gimplify_omp_ctxp
->distribute
= true;
11196 /* Handle OMP_FOR_INIT. */
11197 for_pre_body
= NULL
;
11198 if ((ort
== ORT_SIMD
11199 || (inner_for_stmt
&& TREE_CODE (inner_for_stmt
) == OMP_SIMD
))
11200 && OMP_FOR_PRE_BODY (for_stmt
))
11202 has_decl_expr
= BITMAP_ALLOC (NULL
);
11203 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
11204 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
11207 t
= OMP_FOR_PRE_BODY (for_stmt
);
11208 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
11210 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
11212 tree_stmt_iterator si
;
11213 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
11217 if (TREE_CODE (t
) == DECL_EXPR
11218 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
11219 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
11223 if (OMP_FOR_PRE_BODY (for_stmt
))
11225 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
11226 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
11229 struct gimplify_omp_ctx ctx
;
11230 memset (&ctx
, 0, sizeof (ctx
));
11231 ctx
.region_type
= ORT_NONE
;
11232 gimplify_omp_ctxp
= &ctx
;
11233 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
11234 gimplify_omp_ctxp
= NULL
;
11237 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
11239 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
11240 for_stmt
= inner_for_stmt
;
11242 /* For taskloop, need to gimplify the start, end and step before the
11243 taskloop, outside of the taskloop omp context. */
11244 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
11246 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11248 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11249 if (!is_gimple_constant (TREE_OPERAND (t
, 1)))
11251 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
11252 TREE_OPERAND (t
, 1)
11253 = get_initialized_tmp_var (TREE_OPERAND (t
, 1),
11254 gimple_seq_empty_p (for_pre_body
)
11255 ? pre_p
: &for_pre_body
, NULL
,
11257 /* Reference to pointer conversion is considered useless,
11258 but is significant for firstprivate clause. Force it
11260 if (TREE_CODE (type
) == POINTER_TYPE
11261 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t
, 1)))
11262 == REFERENCE_TYPE
))
11264 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
11265 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
,
11266 TREE_OPERAND (t
, 1));
11267 gimplify_and_add (m
, gimple_seq_empty_p (for_pre_body
)
11268 ? pre_p
: &for_pre_body
);
11269 TREE_OPERAND (t
, 1) = v
;
11271 tree c
= build_omp_clause (input_location
,
11272 OMP_CLAUSE_FIRSTPRIVATE
);
11273 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (t
, 1);
11274 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
11275 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
11278 /* Handle OMP_FOR_COND. */
11279 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
11280 if (!is_gimple_constant (TREE_OPERAND (t
, 1)))
11282 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
11283 TREE_OPERAND (t
, 1)
11284 = get_initialized_tmp_var (TREE_OPERAND (t
, 1),
11285 gimple_seq_empty_p (for_pre_body
)
11286 ? pre_p
: &for_pre_body
, NULL
,
11288 /* Reference to pointer conversion is considered useless,
11289 but is significant for firstprivate clause. Force it
11291 if (TREE_CODE (type
) == POINTER_TYPE
11292 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t
, 1)))
11293 == REFERENCE_TYPE
))
11295 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
11296 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
,
11297 TREE_OPERAND (t
, 1));
11298 gimplify_and_add (m
, gimple_seq_empty_p (for_pre_body
)
11299 ? pre_p
: &for_pre_body
);
11300 TREE_OPERAND (t
, 1) = v
;
11302 tree c
= build_omp_clause (input_location
,
11303 OMP_CLAUSE_FIRSTPRIVATE
);
11304 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (t
, 1);
11305 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
11306 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
11309 /* Handle OMP_FOR_INCR. */
11310 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11311 if (TREE_CODE (t
) == MODIFY_EXPR
)
11313 decl
= TREE_OPERAND (t
, 0);
11314 t
= TREE_OPERAND (t
, 1);
11315 tree
*tp
= &TREE_OPERAND (t
, 1);
11316 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
11317 tp
= &TREE_OPERAND (t
, 0);
11319 if (!is_gimple_constant (*tp
))
11321 gimple_seq
*seq
= gimple_seq_empty_p (for_pre_body
)
11322 ? pre_p
: &for_pre_body
;
11323 *tp
= get_initialized_tmp_var (*tp
, seq
, NULL
, false);
11324 tree c
= build_omp_clause (input_location
,
11325 OMP_CLAUSE_FIRSTPRIVATE
);
11326 OMP_CLAUSE_DECL (c
) = *tp
;
11327 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
11328 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
11333 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
11337 if (orig_for_stmt
!= for_stmt
)
11338 gimplify_omp_ctxp
->combined_loop
= true;
11341 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
11342 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
11343 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
11344 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
11346 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
11347 bool is_doacross
= false;
11348 if (c
&& OMP_CLAUSE_ORDERED_EXPR (c
))
11350 is_doacross
= true;
11351 gimplify_omp_ctxp
->loop_iter_var
.create (TREE_VEC_LENGTH
11352 (OMP_FOR_INIT (for_stmt
))
11355 int collapse
= 1, tile
= 0;
11356 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
11358 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
11359 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
11361 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
11362 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11364 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11365 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
11366 decl
= TREE_OPERAND (t
, 0);
11367 gcc_assert (DECL_P (decl
));
11368 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
11369 || POINTER_TYPE_P (TREE_TYPE (decl
)));
11372 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
11374 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
11375 if (TREE_CODE (orig_decl
) == TREE_LIST
)
11377 orig_decl
= TREE_PURPOSE (orig_decl
);
11381 gimplify_omp_ctxp
->loop_iter_var
.quick_push (orig_decl
);
11384 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
11385 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
11388 /* Make sure the iteration variable is private. */
11389 tree c
= NULL_TREE
;
11390 tree c2
= NULL_TREE
;
11391 if (orig_for_stmt
!= for_stmt
)
11393 /* Preserve this information until we gimplify the inner simd. */
11395 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
11396 TREE_PRIVATE (t
) = 1;
11398 else if (ort
== ORT_SIMD
)
11400 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
11401 (splay_tree_key
) decl
);
11402 omp_is_private (gimplify_omp_ctxp
, decl
,
11403 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
11405 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
11407 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
11408 if (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
)
11409 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11410 OMP_CLAUSE_LASTPRIVATE
);
11411 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
11412 OMP_CLAUSE_LASTPRIVATE
))
11413 if (OMP_CLAUSE_DECL (c3
) == decl
)
11415 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
11416 "conditional %<lastprivate%> on loop "
11417 "iterator %qD ignored", decl
);
11418 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
11419 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
11422 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1 && !loop_p
)
11424 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
11425 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
11426 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
11428 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
11429 || TREE_PRIVATE (t
))
11431 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11432 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11434 struct gimplify_omp_ctx
*outer
11435 = gimplify_omp_ctxp
->outer_context
;
11436 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11438 if (outer
->region_type
== ORT_WORKSHARE
11439 && outer
->combined_loop
)
11441 n
= splay_tree_lookup (outer
->variables
,
11442 (splay_tree_key
)decl
);
11443 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11445 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11446 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11450 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
11452 && octx
->region_type
== ORT_COMBINED_PARALLEL
11453 && octx
->outer_context
11454 && (octx
->outer_context
->region_type
11456 && octx
->outer_context
->combined_loop
)
11458 octx
= octx
->outer_context
;
11459 n
= splay_tree_lookup (octx
->variables
,
11460 (splay_tree_key
)decl
);
11461 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11463 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11464 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11471 OMP_CLAUSE_DECL (c
) = decl
;
11472 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11473 OMP_FOR_CLAUSES (for_stmt
) = c
;
11474 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
11475 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11477 if (outer
->region_type
== ORT_WORKSHARE
11478 && outer
->combined_loop
)
11480 if (outer
->outer_context
11481 && (outer
->outer_context
->region_type
11482 == ORT_COMBINED_PARALLEL
))
11483 outer
= outer
->outer_context
;
11484 else if (omp_check_private (outer
, decl
, false))
11487 else if (((outer
->region_type
& ORT_TASKLOOP
)
11489 && outer
->combined_loop
11490 && !omp_check_private (gimplify_omp_ctxp
,
11493 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
11495 omp_notice_variable (outer
, decl
, true);
11500 n
= splay_tree_lookup (outer
->variables
,
11501 (splay_tree_key
)decl
);
11502 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11504 omp_add_variable (outer
, decl
,
11505 GOVD_LASTPRIVATE
| GOVD_SEEN
);
11506 if (outer
->region_type
== ORT_COMBINED_PARALLEL
11507 && outer
->outer_context
11508 && (outer
->outer_context
->region_type
11510 && outer
->outer_context
->combined_loop
)
11512 outer
= outer
->outer_context
;
11513 n
= splay_tree_lookup (outer
->variables
,
11514 (splay_tree_key
)decl
);
11515 if (omp_check_private (outer
, decl
, false))
11518 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
11520 omp_add_variable (outer
, decl
,
11526 if (outer
&& outer
->outer_context
11527 && ((outer
->outer_context
->region_type
11528 & ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
11529 || (((outer
->region_type
& ORT_TASKLOOP
)
11531 && (outer
->outer_context
->region_type
11532 == ORT_COMBINED_PARALLEL
))))
11534 outer
= outer
->outer_context
;
11535 n
= splay_tree_lookup (outer
->variables
,
11536 (splay_tree_key
)decl
);
11538 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11539 omp_add_variable (outer
, decl
,
11540 GOVD_SHARED
| GOVD_SEEN
);
11544 if (outer
&& outer
->outer_context
)
11545 omp_notice_variable (outer
->outer_context
, decl
,
11555 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
11556 if (TREE_PRIVATE (t
))
11557 lastprivate
= false;
11558 if (loop_p
&& OMP_FOR_ORIG_DECLS (for_stmt
))
11560 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
11561 if (TREE_CODE (elt
) == TREE_LIST
&& TREE_PURPOSE (elt
))
11562 lastprivate
= false;
11565 struct gimplify_omp_ctx
*outer
11566 = gimplify_omp_ctxp
->outer_context
;
11567 if (outer
&& lastprivate
)
11569 if (outer
->region_type
== ORT_WORKSHARE
11570 && outer
->combined_loop
)
11572 n
= splay_tree_lookup (outer
->variables
,
11573 (splay_tree_key
)decl
);
11574 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11576 lastprivate
= false;
11579 else if (outer
->outer_context
11580 && (outer
->outer_context
->region_type
11581 == ORT_COMBINED_PARALLEL
))
11582 outer
= outer
->outer_context
;
11583 else if (omp_check_private (outer
, decl
, false))
11586 else if (((outer
->region_type
& ORT_TASKLOOP
)
11588 && outer
->combined_loop
11589 && !omp_check_private (gimplify_omp_ctxp
,
11592 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
11594 omp_notice_variable (outer
, decl
, true);
11599 n
= splay_tree_lookup (outer
->variables
,
11600 (splay_tree_key
)decl
);
11601 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11603 omp_add_variable (outer
, decl
,
11604 GOVD_LASTPRIVATE
| GOVD_SEEN
);
11605 if (outer
->region_type
== ORT_COMBINED_PARALLEL
11606 && outer
->outer_context
11607 && (outer
->outer_context
->region_type
11609 && outer
->outer_context
->combined_loop
)
11611 outer
= outer
->outer_context
;
11612 n
= splay_tree_lookup (outer
->variables
,
11613 (splay_tree_key
)decl
);
11614 if (omp_check_private (outer
, decl
, false))
11617 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
11619 omp_add_variable (outer
, decl
,
11625 if (outer
&& outer
->outer_context
11626 && ((outer
->outer_context
->region_type
11627 & ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
11628 || (((outer
->region_type
& ORT_TASKLOOP
)
11630 && (outer
->outer_context
->region_type
11631 == ORT_COMBINED_PARALLEL
))))
11633 outer
= outer
->outer_context
;
11634 n
= splay_tree_lookup (outer
->variables
,
11635 (splay_tree_key
)decl
);
11637 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11638 omp_add_variable (outer
, decl
,
11639 GOVD_SHARED
| GOVD_SEEN
);
11643 if (outer
&& outer
->outer_context
)
11644 omp_notice_variable (outer
->outer_context
, decl
,
11650 c
= build_omp_clause (input_location
,
11651 lastprivate
? OMP_CLAUSE_LASTPRIVATE
11652 : OMP_CLAUSE_PRIVATE
);
11653 OMP_CLAUSE_DECL (c
) = decl
;
11654 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11655 OMP_FOR_CLAUSES (for_stmt
) = c
;
11656 omp_add_variable (gimplify_omp_ctxp
, decl
,
11657 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
11658 | GOVD_EXPLICIT
| GOVD_SEEN
);
11662 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
11664 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
11665 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
11666 (splay_tree_key
) decl
);
11667 if (n
&& (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
))
11668 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11669 OMP_CLAUSE_LASTPRIVATE
);
11670 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
11671 OMP_CLAUSE_LASTPRIVATE
))
11672 if (OMP_CLAUSE_DECL (c3
) == decl
)
11674 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
11675 "conditional %<lastprivate%> on loop "
11676 "iterator %qD ignored", decl
);
11677 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
11678 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
11682 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
11684 /* If DECL is not a gimple register, create a temporary variable to act
11685 as an iteration counter. This is valid, since DECL cannot be
11686 modified in the body of the loop. Similarly for any iteration vars
11687 in simd with collapse > 1 where the iterator vars must be
11689 if (orig_for_stmt
!= for_stmt
)
11691 else if (!is_gimple_reg (decl
)
11692 || (ort
== ORT_SIMD
11693 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1))
11695 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11696 /* Make sure omp_add_variable is not called on it prematurely.
11697 We call it ourselves a few lines later. */
11698 gimplify_omp_ctxp
= NULL
;
11699 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
11700 gimplify_omp_ctxp
= ctx
;
11701 TREE_OPERAND (t
, 0) = var
;
11703 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
11705 if (ort
== ORT_SIMD
11706 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
11708 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
11709 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
11710 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
11711 OMP_CLAUSE_DECL (c2
) = var
;
11712 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
11713 OMP_FOR_CLAUSES (for_stmt
) = c2
;
11714 omp_add_variable (gimplify_omp_ctxp
, var
,
11715 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
11716 if (c
== NULL_TREE
)
11723 omp_add_variable (gimplify_omp_ctxp
, var
,
11724 GOVD_PRIVATE
| GOVD_SEEN
);
11729 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
11730 is_gimple_val
, fb_rvalue
, false);
11731 ret
= MIN (ret
, tret
);
11732 if (ret
== GS_ERROR
)
11735 /* Handle OMP_FOR_COND. */
11736 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
11737 gcc_assert (COMPARISON_CLASS_P (t
));
11738 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
11740 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
11741 is_gimple_val
, fb_rvalue
, false);
11742 ret
= MIN (ret
, tret
);
11744 /* Handle OMP_FOR_INCR. */
11745 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11746 switch (TREE_CODE (t
))
11748 case PREINCREMENT_EXPR
:
11749 case POSTINCREMENT_EXPR
:
11751 tree decl
= TREE_OPERAND (t
, 0);
11752 /* c_omp_for_incr_canonicalize_ptr() should have been
11753 called to massage things appropriately. */
11754 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
11756 if (orig_for_stmt
!= for_stmt
)
11758 t
= build_int_cst (TREE_TYPE (decl
), 1);
11760 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
11761 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
11762 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
11763 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
11767 case PREDECREMENT_EXPR
:
11768 case POSTDECREMENT_EXPR
:
11769 /* c_omp_for_incr_canonicalize_ptr() should have been
11770 called to massage things appropriately. */
11771 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
11772 if (orig_for_stmt
!= for_stmt
)
11774 t
= build_int_cst (TREE_TYPE (decl
), -1);
11776 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
11777 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
11778 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
11779 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
11783 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
11784 TREE_OPERAND (t
, 0) = var
;
11786 t
= TREE_OPERAND (t
, 1);
11787 switch (TREE_CODE (t
))
11790 if (TREE_OPERAND (t
, 1) == decl
)
11792 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
11793 TREE_OPERAND (t
, 0) = var
;
11799 case POINTER_PLUS_EXPR
:
11800 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
11801 TREE_OPERAND (t
, 0) = var
;
11804 gcc_unreachable ();
11807 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
11808 is_gimple_val
, fb_rvalue
, false);
11809 ret
= MIN (ret
, tret
);
11812 tree step
= TREE_OPERAND (t
, 1);
11813 tree stept
= TREE_TYPE (decl
);
11814 if (POINTER_TYPE_P (stept
))
11816 step
= fold_convert (stept
, step
);
11817 if (TREE_CODE (t
) == MINUS_EXPR
)
11818 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
11819 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
11820 if (step
!= TREE_OPERAND (t
, 1))
11822 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
11823 &for_pre_body
, NULL
,
11824 is_gimple_val
, fb_rvalue
, false);
11825 ret
= MIN (ret
, tret
);
11831 gcc_unreachable ();
11837 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
11840 if ((var
!= decl
|| collapse
> 1 || tile
) && orig_for_stmt
== for_stmt
)
11842 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11843 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
11844 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
11845 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11846 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
11847 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
11848 && OMP_CLAUSE_DECL (c
) == decl
)
11850 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
11854 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11855 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
11856 gcc_assert (TREE_OPERAND (t
, 0) == var
);
11857 t
= TREE_OPERAND (t
, 1);
11858 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
11859 || TREE_CODE (t
) == MINUS_EXPR
11860 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
11861 gcc_assert (TREE_OPERAND (t
, 0) == var
);
11862 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
11863 is_doacross
? var
: decl
,
11864 TREE_OPERAND (t
, 1));
11867 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
11868 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
11870 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
11871 push_gimplify_context ();
11872 gimplify_assign (decl
, t
, seq
);
11873 gimple
*bind
= NULL
;
11874 if (gimplify_ctxp
->temps
)
11876 bind
= gimple_build_bind (NULL_TREE
, *seq
, NULL_TREE
);
11878 gimplify_seq_add_stmt (seq
, bind
);
11880 pop_gimplify_context (bind
);
11885 BITMAP_FREE (has_decl_expr
);
11887 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
11888 || (loop_p
&& orig_for_stmt
== for_stmt
))
11890 push_gimplify_context ();
11891 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
11893 OMP_FOR_BODY (orig_for_stmt
)
11894 = build3 (BIND_EXPR
, void_type_node
, NULL
,
11895 OMP_FOR_BODY (orig_for_stmt
), NULL
);
11896 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
11900 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
11903 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
11904 || (loop_p
&& orig_for_stmt
== for_stmt
))
11906 if (gimple_code (g
) == GIMPLE_BIND
)
11907 pop_gimplify_context (g
);
11909 pop_gimplify_context (NULL
);
11912 if (orig_for_stmt
!= for_stmt
)
11913 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11915 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11916 decl
= TREE_OPERAND (t
, 0);
11917 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11918 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
11919 gimplify_omp_ctxp
= ctx
->outer_context
;
11920 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
11921 gimplify_omp_ctxp
= ctx
;
11922 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
11923 TREE_OPERAND (t
, 0) = var
;
11924 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11925 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
11926 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
11929 gimplify_adjust_omp_clauses (pre_p
, for_body
,
11930 &OMP_FOR_CLAUSES (orig_for_stmt
),
11931 TREE_CODE (orig_for_stmt
));
11934 switch (TREE_CODE (orig_for_stmt
))
11936 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
11937 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
11938 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
11939 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
11940 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
11942 gcc_unreachable ();
11944 if (loop_p
&& kind
== GF_OMP_FOR_KIND_SIMD
)
11946 gimplify_seq_add_seq (pre_p
, for_pre_body
);
11947 for_pre_body
= NULL
;
11949 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
11950 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
11952 if (orig_for_stmt
!= for_stmt
)
11953 gimple_omp_for_set_combined_p (gfor
, true);
11954 if (gimplify_omp_ctxp
11955 && (gimplify_omp_ctxp
->combined_loop
11956 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
11957 && gimplify_omp_ctxp
->outer_context
11958 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
11960 gimple_omp_for_set_combined_into_p (gfor
, true);
11961 if (gimplify_omp_ctxp
->combined_loop
)
11962 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
11964 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
11967 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11969 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11970 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
11971 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
11972 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
11973 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
11974 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
11975 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11976 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
11979 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
11980 constructs with GIMPLE_OMP_TASK sandwiched in between them.
11981 The outer taskloop stands for computing the number of iterations,
11982 counts for collapsed loops and holding taskloop specific clauses.
11983 The task construct stands for the effect of data sharing on the
11984 explicit task it creates and the inner taskloop stands for expansion
11985 of the static loop inside of the explicit task construct. */
11986 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
11988 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
11989 tree task_clauses
= NULL_TREE
;
11990 tree c
= *gfor_clauses_ptr
;
11991 tree
*gtask_clauses_ptr
= &task_clauses
;
11992 tree outer_for_clauses
= NULL_TREE
;
11993 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
11994 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
11995 switch (OMP_CLAUSE_CODE (c
))
11997 /* These clauses are allowed on task, move them there. */
11998 case OMP_CLAUSE_SHARED
:
11999 case OMP_CLAUSE_FIRSTPRIVATE
:
12000 case OMP_CLAUSE_DEFAULT
:
12001 case OMP_CLAUSE_IF
:
12002 case OMP_CLAUSE_UNTIED
:
12003 case OMP_CLAUSE_FINAL
:
12004 case OMP_CLAUSE_MERGEABLE
:
12005 case OMP_CLAUSE_PRIORITY
:
12006 case OMP_CLAUSE_REDUCTION
:
12007 case OMP_CLAUSE_IN_REDUCTION
:
12008 *gtask_clauses_ptr
= c
;
12009 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12011 case OMP_CLAUSE_PRIVATE
:
12012 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
12014 /* We want private on outer for and firstprivate
12017 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12018 OMP_CLAUSE_FIRSTPRIVATE
);
12019 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12020 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
);
12021 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12022 *gforo_clauses_ptr
= c
;
12023 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12027 *gtask_clauses_ptr
= c
;
12028 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12031 /* These clauses go into outer taskloop clauses. */
12032 case OMP_CLAUSE_GRAINSIZE
:
12033 case OMP_CLAUSE_NUM_TASKS
:
12034 case OMP_CLAUSE_NOGROUP
:
12035 *gforo_clauses_ptr
= c
;
12036 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12038 /* Taskloop clause we duplicate on both taskloops. */
12039 case OMP_CLAUSE_COLLAPSE
:
12040 *gfor_clauses_ptr
= c
;
12041 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12042 *gforo_clauses_ptr
= copy_node (c
);
12043 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
12045 /* For lastprivate, keep the clause on inner taskloop, and add
12046 a shared clause on task. If the same decl is also firstprivate,
12047 add also firstprivate clause on the inner taskloop. */
12048 case OMP_CLAUSE_LASTPRIVATE
:
12049 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
12051 /* For taskloop C++ lastprivate IVs, we want:
12052 1) private on outer taskloop
12053 2) firstprivate and shared on task
12054 3) lastprivate on inner taskloop */
12056 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12057 OMP_CLAUSE_FIRSTPRIVATE
);
12058 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12059 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
);
12060 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12061 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
12062 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12063 OMP_CLAUSE_PRIVATE
);
12064 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12065 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
12066 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
12067 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
12069 *gfor_clauses_ptr
= c
;
12070 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12072 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
12073 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12074 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
12075 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
12077 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12080 gcc_unreachable ();
12082 *gfor_clauses_ptr
= NULL_TREE
;
12083 *gtask_clauses_ptr
= NULL_TREE
;
12084 *gforo_clauses_ptr
= NULL_TREE
;
12085 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
12086 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
12087 NULL_TREE
, NULL_TREE
, NULL_TREE
);
12088 gimple_omp_task_set_taskloop_p (g
, true);
12089 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
12091 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
12092 gimple_omp_for_collapse (gfor
),
12093 gimple_omp_for_pre_body (gfor
));
12094 gimple_omp_for_set_pre_body (gfor
, NULL
);
12095 gimple_omp_for_set_combined_p (gforo
, true);
12096 gimple_omp_for_set_combined_into_p (gfor
, true);
12097 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
12099 tree type
= TREE_TYPE (gimple_omp_for_index (gfor
, i
));
12100 tree v
= create_tmp_var (type
);
12101 gimple_omp_for_set_index (gforo
, i
, v
);
12102 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
12103 gimple_omp_for_set_initial (gforo
, i
, t
);
12104 gimple_omp_for_set_cond (gforo
, i
,
12105 gimple_omp_for_cond (gfor
, i
));
12106 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
12107 gimple_omp_for_set_final (gforo
, i
, t
);
12108 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
12109 gcc_assert (TREE_OPERAND (t
, 0) == gimple_omp_for_index (gfor
, i
));
12110 TREE_OPERAND (t
, 0) = v
;
12111 gimple_omp_for_set_incr (gforo
, i
, t
);
12112 t
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
12113 OMP_CLAUSE_DECL (t
) = v
;
12114 OMP_CLAUSE_CHAIN (t
) = gimple_omp_for_clauses (gforo
);
12115 gimple_omp_for_set_clauses (gforo
, t
);
12117 gimplify_seq_add_stmt (pre_p
, gforo
);
12120 gimplify_seq_add_stmt (pre_p
, gfor
);
12122 if (TREE_CODE (orig_for_stmt
) == OMP_FOR
)
12124 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12125 unsigned lastprivate_conditional
= 0;
12127 && (ctx
->region_type
== ORT_TARGET_DATA
12128 || ctx
->region_type
== ORT_TASKGROUP
))
12129 ctx
= ctx
->outer_context
;
12130 if (ctx
&& (ctx
->region_type
& ORT_PARALLEL
) != 0)
12131 for (tree c
= gimple_omp_for_clauses (gfor
);
12132 c
; c
= OMP_CLAUSE_CHAIN (c
))
12133 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12134 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
12135 ++lastprivate_conditional
;
12136 if (lastprivate_conditional
)
12138 struct omp_for_data fd
;
12139 omp_extract_for_data (gfor
, &fd
, NULL
);
12140 tree type
= build_array_type_nelts (unsigned_type_for (fd
.iter_type
),
12141 lastprivate_conditional
);
12142 tree var
= create_tmp_var_raw (type
);
12143 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
12144 OMP_CLAUSE_DECL (c
) = var
;
12145 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
12146 gimple_omp_for_set_clauses (gfor
, c
);
12147 omp_add_variable (ctx
, var
, GOVD_CONDTEMP
| GOVD_SEEN
);
12150 else if (TREE_CODE (orig_for_stmt
) == OMP_SIMD
)
12152 unsigned lastprivate_conditional
= 0;
12153 for (tree c
= gimple_omp_for_clauses (gfor
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12154 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12155 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
12156 ++lastprivate_conditional
;
12157 if (lastprivate_conditional
)
12159 struct omp_for_data fd
;
12160 omp_extract_for_data (gfor
, &fd
, NULL
);
12161 tree type
= unsigned_type_for (fd
.iter_type
);
12162 while (lastprivate_conditional
--)
12164 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
12165 OMP_CLAUSE__CONDTEMP_
);
12166 OMP_CLAUSE_DECL (c
) = create_tmp_var (type
);
12167 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
12168 gimple_omp_for_set_clauses (gfor
, c
);
12173 if (ret
!= GS_ALL_DONE
)
12175 *expr_p
= NULL_TREE
;
12176 return GS_ALL_DONE
;
12179 /* Helper for gimplify_omp_loop, called through walk_tree. */
12182 replace_reduction_placeholders (tree
*tp
, int *walk_subtrees
, void *data
)
12186 tree
*d
= (tree
*) data
;
12187 if (*tp
== OMP_CLAUSE_REDUCTION_PLACEHOLDER (d
[0]))
12189 *tp
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (d
[1]);
12190 *walk_subtrees
= 0;
12192 else if (*tp
== OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d
[0]))
12194 *tp
= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d
[1]);
12195 *walk_subtrees
= 0;
12201 /* Gimplify the gross structure of an OMP_LOOP statement. */
12203 static enum gimplify_status
12204 gimplify_omp_loop (tree
*expr_p
, gimple_seq
*pre_p
)
12206 tree for_stmt
= *expr_p
;
12207 tree clauses
= OMP_FOR_CLAUSES (for_stmt
);
12208 struct gimplify_omp_ctx
*octx
= gimplify_omp_ctxp
;
12209 enum omp_clause_bind_kind kind
= OMP_CLAUSE_BIND_THREAD
;
12212 /* If order is not present, the behavior is as if order(concurrent)
12214 tree order
= omp_find_clause (clauses
, OMP_CLAUSE_ORDER
);
12215 if (order
== NULL_TREE
)
12217 order
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_ORDER
);
12218 OMP_CLAUSE_CHAIN (order
) = clauses
;
12219 OMP_FOR_CLAUSES (for_stmt
) = clauses
= order
;
12222 tree bind
= omp_find_clause (clauses
, OMP_CLAUSE_BIND
);
12223 if (bind
== NULL_TREE
)
12225 if (!flag_openmp
) /* flag_openmp_simd */
12227 else if (octx
&& (octx
->region_type
& ORT_TEAMS
) != 0)
12228 kind
= OMP_CLAUSE_BIND_TEAMS
;
12229 else if (octx
&& (octx
->region_type
& ORT_PARALLEL
) != 0)
12230 kind
= OMP_CLAUSE_BIND_PARALLEL
;
12233 for (; octx
; octx
= octx
->outer_context
)
12235 if ((octx
->region_type
& ORT_ACC
) != 0
12236 || octx
->region_type
== ORT_NONE
12237 || octx
->region_type
== ORT_IMPLICIT_TARGET
)
12241 if (octx
== NULL
&& !in_omp_construct
)
12242 error_at (EXPR_LOCATION (for_stmt
),
12243 "%<bind%> clause not specified on a %<loop%> "
12244 "construct not nested inside another OpenMP construct");
12246 bind
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_BIND
);
12247 OMP_CLAUSE_CHAIN (bind
) = clauses
;
12248 OMP_CLAUSE_BIND_KIND (bind
) = kind
;
12249 OMP_FOR_CLAUSES (for_stmt
) = bind
;
12252 switch (OMP_CLAUSE_BIND_KIND (bind
))
12254 case OMP_CLAUSE_BIND_THREAD
:
12256 case OMP_CLAUSE_BIND_PARALLEL
:
12257 if (!flag_openmp
) /* flag_openmp_simd */
12259 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12262 for (; octx
; octx
= octx
->outer_context
)
12263 if (octx
->region_type
== ORT_SIMD
12264 && omp_find_clause (octx
->clauses
, OMP_CLAUSE_BIND
) == NULL_TREE
)
12266 error_at (EXPR_LOCATION (for_stmt
),
12267 "%<bind(parallel)%> on a %<loop%> construct nested "
12268 "inside %<simd%> construct");
12269 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12272 kind
= OMP_CLAUSE_BIND_PARALLEL
;
12274 case OMP_CLAUSE_BIND_TEAMS
:
12275 if (!flag_openmp
) /* flag_openmp_simd */
12277 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12281 && octx
->region_type
!= ORT_IMPLICIT_TARGET
12282 && octx
->region_type
!= ORT_NONE
12283 && (octx
->region_type
& ORT_TEAMS
) == 0)
12284 || in_omp_construct
)
12286 error_at (EXPR_LOCATION (for_stmt
),
12287 "%<bind(teams)%> on a %<loop%> region not strictly "
12288 "nested inside of a %<teams%> region");
12289 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12292 kind
= OMP_CLAUSE_BIND_TEAMS
;
12295 gcc_unreachable ();
12298 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
12299 switch (OMP_CLAUSE_CODE (*pc
))
12301 case OMP_CLAUSE_REDUCTION
:
12302 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc
))
12304 error_at (OMP_CLAUSE_LOCATION (*pc
),
12305 "%<inscan%> %<reduction%> clause on "
12306 "%qs construct", "loop");
12307 OMP_CLAUSE_REDUCTION_INSCAN (*pc
) = 0;
12309 if (OMP_CLAUSE_REDUCTION_TASK (*pc
))
12311 error_at (OMP_CLAUSE_LOCATION (*pc
),
12312 "invalid %<task%> reduction modifier on construct "
12313 "other than %<parallel%>, %<for%> or %<sections%>");
12314 OMP_CLAUSE_REDUCTION_TASK (*pc
) = 0;
12316 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12318 case OMP_CLAUSE_LASTPRIVATE
:
12319 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12321 tree t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12322 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12323 if (OMP_CLAUSE_DECL (*pc
) == TREE_OPERAND (t
, 0))
12325 if (OMP_FOR_ORIG_DECLS (for_stmt
)
12326 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
12328 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
12331 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
12332 if (OMP_CLAUSE_DECL (*pc
) == TREE_PURPOSE (orig
))
12336 if (i
== TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)))
12338 error_at (OMP_CLAUSE_LOCATION (*pc
),
12339 "%<lastprivate%> clause on a %<loop%> construct refers "
12340 "to a variable %qD which is not the loop iterator",
12341 OMP_CLAUSE_DECL (*pc
));
12342 *pc
= OMP_CLAUSE_CHAIN (*pc
);
12345 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12348 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12352 TREE_SET_CODE (for_stmt
, OMP_SIMD
);
12357 case OMP_CLAUSE_BIND_THREAD
: last
= 0; break;
12358 case OMP_CLAUSE_BIND_PARALLEL
: last
= 1; break;
12359 case OMP_CLAUSE_BIND_TEAMS
: last
= 2; break;
12361 for (int pass
= 1; pass
<= last
; pass
++)
12365 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
12366 append_to_statement_list (*expr_p
, &BIND_EXPR_BODY (bind
));
12367 *expr_p
= make_node (OMP_PARALLEL
);
12368 TREE_TYPE (*expr_p
) = void_type_node
;
12369 OMP_PARALLEL_BODY (*expr_p
) = bind
;
12370 OMP_PARALLEL_COMBINED (*expr_p
) = 1;
12371 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (for_stmt
));
12372 tree
*pc
= &OMP_PARALLEL_CLAUSES (*expr_p
);
12373 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12374 if (OMP_FOR_ORIG_DECLS (for_stmt
)
12375 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
))
12378 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
12379 if (TREE_PURPOSE (elt
) && TREE_VALUE (elt
))
12381 *pc
= build_omp_clause (UNKNOWN_LOCATION
,
12382 OMP_CLAUSE_FIRSTPRIVATE
);
12383 OMP_CLAUSE_DECL (*pc
) = TREE_VALUE (elt
);
12384 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12388 tree t
= make_node (pass
== 2 ? OMP_DISTRIBUTE
: OMP_FOR
);
12389 tree
*pc
= &OMP_FOR_CLAUSES (t
);
12390 TREE_TYPE (t
) = void_type_node
;
12391 OMP_FOR_BODY (t
) = *expr_p
;
12392 SET_EXPR_LOCATION (t
, EXPR_LOCATION (for_stmt
));
12393 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12394 switch (OMP_CLAUSE_CODE (c
))
12396 case OMP_CLAUSE_BIND
:
12397 case OMP_CLAUSE_ORDER
:
12398 case OMP_CLAUSE_COLLAPSE
:
12399 *pc
= copy_node (c
);
12400 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12402 case OMP_CLAUSE_PRIVATE
:
12403 case OMP_CLAUSE_FIRSTPRIVATE
:
12404 /* Only needed on innermost. */
12406 case OMP_CLAUSE_LASTPRIVATE
:
12407 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
) && pass
!= last
)
12409 *pc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12410 OMP_CLAUSE_FIRSTPRIVATE
);
12411 OMP_CLAUSE_DECL (*pc
) = OMP_CLAUSE_DECL (c
);
12412 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
);
12413 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12415 *pc
= copy_node (c
);
12416 OMP_CLAUSE_LASTPRIVATE_STMT (*pc
) = NULL_TREE
;
12417 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
12418 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
12421 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc
) = 1;
12423 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
);
12424 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc
) = 0;
12426 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12428 case OMP_CLAUSE_REDUCTION
:
12429 *pc
= copy_node (c
);
12430 OMP_CLAUSE_DECL (*pc
) = unshare_expr (OMP_CLAUSE_DECL (c
));
12431 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
12432 OMP_CLAUSE_REDUCTION_INIT (*pc
)
12433 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c
));
12434 OMP_CLAUSE_REDUCTION_MERGE (*pc
)
12435 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c
));
12436 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
))
12438 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
)
12439 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
));
12440 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
12441 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
)
12442 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
));
12444 tree data
[2] = { c
, nc
};
12445 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc
),
12446 replace_reduction_placeholders
,
12448 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc
),
12449 replace_reduction_placeholders
,
12452 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12455 gcc_unreachable ();
12460 return gimplify_omp_for (expr_p
, pre_p
);
12464 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
12465 of OMP_TARGET's body. */
12468 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
12470 *walk_subtrees
= 0;
12471 switch (TREE_CODE (*tp
))
12476 case STATEMENT_LIST
:
12477 *walk_subtrees
= 1;
12485 /* Helper function of optimize_target_teams, determine if the expression
12486 can be computed safely before the target construct on the host. */
12489 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
12495 *walk_subtrees
= 0;
12498 switch (TREE_CODE (*tp
))
12503 *walk_subtrees
= 0;
12504 if (error_operand_p (*tp
)
12505 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
12506 || DECL_HAS_VALUE_EXPR_P (*tp
)
12507 || DECL_THREAD_LOCAL_P (*tp
)
12508 || TREE_SIDE_EFFECTS (*tp
)
12509 || TREE_THIS_VOLATILE (*tp
))
12511 if (is_global_var (*tp
)
12512 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
12513 || lookup_attribute ("omp declare target link",
12514 DECL_ATTRIBUTES (*tp
))))
12517 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
12518 && !is_global_var (*tp
)
12519 && decl_function_context (*tp
) == current_function_decl
)
12521 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
12522 (splay_tree_key
) *tp
);
12525 if (gimplify_omp_ctxp
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
12529 else if (n
->value
& GOVD_LOCAL
)
12531 else if (n
->value
& GOVD_FIRSTPRIVATE
)
12533 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
12534 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
12538 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
12542 if (TARGET_EXPR_INITIAL (*tp
)
12543 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
12545 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
12546 walk_subtrees
, NULL
);
12547 /* Allow some reasonable subset of integral arithmetics. */
12551 case TRUNC_DIV_EXPR
:
12552 case CEIL_DIV_EXPR
:
12553 case FLOOR_DIV_EXPR
:
12554 case ROUND_DIV_EXPR
:
12555 case TRUNC_MOD_EXPR
:
12556 case CEIL_MOD_EXPR
:
12557 case FLOOR_MOD_EXPR
:
12558 case ROUND_MOD_EXPR
:
12560 case EXACT_DIV_EXPR
:
12571 case NON_LVALUE_EXPR
:
12573 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
12576 /* And disallow anything else, except for comparisons. */
12578 if (COMPARISON_CLASS_P (*tp
))
12584 /* Try to determine if the num_teams and/or thread_limit expressions
12585 can have their values determined already before entering the
12587 INTEGER_CSTs trivially are,
12588 integral decls that are firstprivate (explicitly or implicitly)
12589 or explicitly map(always, to:) or map(always, tofrom:) on the target
12590 region too, and expressions involving simple arithmetics on those
12591 too, function calls are not ok, dereferencing something neither etc.
12592 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
12593 EXPR based on what we find:
12594 0 stands for clause not specified at all, use implementation default
12595 -1 stands for value that can't be determined easily before entering
12596 the target construct.
12597 If teams construct is not present at all, use 1 for num_teams
12598 and 0 for thread_limit (only one team is involved, and the thread
12599 limit is implementation defined. */
12602 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
12604 tree body
= OMP_BODY (target
);
12605 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
12606 tree num_teams
= integer_zero_node
;
12607 tree thread_limit
= integer_zero_node
;
12608 location_t num_teams_loc
= EXPR_LOCATION (target
);
12609 location_t thread_limit_loc
= EXPR_LOCATION (target
);
12611 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
12613 if (teams
== NULL_TREE
)
12614 num_teams
= integer_one_node
;
12616 for (c
= OMP_TEAMS_CLAUSES (teams
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12618 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
12621 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
12623 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
12626 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
12630 expr
= OMP_CLAUSE_OPERAND (c
, 0);
12631 if (TREE_CODE (expr
) == INTEGER_CST
)
12636 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
12638 *p
= integer_minus_one_node
;
12642 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
12643 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
12646 gimplify_omp_ctxp
= target_ctx
;
12647 *p
= integer_minus_one_node
;
12650 gimplify_omp_ctxp
= target_ctx
;
12651 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
12652 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
12654 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
12655 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
12656 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
12657 OMP_TARGET_CLAUSES (target
) = c
;
12658 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
12659 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
12660 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
12661 OMP_TARGET_CLAUSES (target
) = c
;
12664 /* Gimplify the gross structure of several OMP constructs. */
12667 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
12669 tree expr
= *expr_p
;
12671 gimple_seq body
= NULL
;
12672 enum omp_region_type ort
;
12674 switch (TREE_CODE (expr
))
12678 ort
= ORT_WORKSHARE
;
12681 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
12684 ort
= ORT_ACC_KERNELS
;
12686 case OACC_PARALLEL
:
12687 ort
= ORT_ACC_PARALLEL
;
12690 ort
= ORT_ACC_SERIAL
;
12693 ort
= ORT_ACC_DATA
;
12695 case OMP_TARGET_DATA
:
12696 ort
= ORT_TARGET_DATA
;
12699 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
12700 if (gimplify_omp_ctxp
== NULL
12701 || gimplify_omp_ctxp
->region_type
== ORT_IMPLICIT_TARGET
)
12702 ort
= (enum omp_region_type
) (ort
| ORT_HOST_TEAMS
);
12704 case OACC_HOST_DATA
:
12705 ort
= ORT_ACC_HOST_DATA
;
12708 gcc_unreachable ();
12711 bool save_in_omp_construct
= in_omp_construct
;
12712 if ((ort
& ORT_ACC
) == 0)
12713 in_omp_construct
= false;
12714 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
12716 if (TREE_CODE (expr
) == OMP_TARGET
)
12717 optimize_target_teams (expr
, pre_p
);
12718 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
12719 || (ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
12721 push_gimplify_context ();
12722 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
12723 if (gimple_code (g
) == GIMPLE_BIND
)
12724 pop_gimplify_context (g
);
12726 pop_gimplify_context (NULL
);
12727 if ((ort
& ORT_TARGET_DATA
) != 0)
12729 enum built_in_function end_ix
;
12730 switch (TREE_CODE (expr
))
12733 case OACC_HOST_DATA
:
12734 end_ix
= BUILT_IN_GOACC_DATA_END
;
12736 case OMP_TARGET_DATA
:
12737 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
12740 gcc_unreachable ();
12742 tree fn
= builtin_decl_explicit (end_ix
);
12743 g
= gimple_build_call (fn
, 0);
12744 gimple_seq cleanup
= NULL
;
12745 gimple_seq_add_stmt (&cleanup
, g
);
12746 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
12748 gimple_seq_add_stmt (&body
, g
);
12752 gimplify_and_add (OMP_BODY (expr
), &body
);
12753 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
12755 in_omp_construct
= save_in_omp_construct
;
12757 switch (TREE_CODE (expr
))
12760 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
12761 OMP_CLAUSES (expr
));
12763 case OACC_HOST_DATA
:
12764 if (omp_find_clause (OMP_CLAUSES (expr
), OMP_CLAUSE_IF_PRESENT
))
12766 for (tree c
= OMP_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12767 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
12768 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
) = 1;
12771 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
12772 OMP_CLAUSES (expr
));
12775 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
12776 OMP_CLAUSES (expr
));
12778 case OACC_PARALLEL
:
12779 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
12780 OMP_CLAUSES (expr
));
12783 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_SERIAL
,
12784 OMP_CLAUSES (expr
));
12787 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
12790 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
12793 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
12794 OMP_CLAUSES (expr
));
12796 case OMP_TARGET_DATA
:
12797 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
12798 to be evaluated before the use_device_{ptr,addr} clauses if they
12799 refer to the same variables. */
12801 tree use_device_clauses
;
12802 tree
*pc
, *uc
= &use_device_clauses
;
12803 for (pc
= &OMP_CLAUSES (expr
); *pc
; )
12804 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_PTR
12805 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
12808 *pc
= OMP_CLAUSE_CHAIN (*pc
);
12809 uc
= &OMP_CLAUSE_CHAIN (*uc
);
12812 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12814 *pc
= use_device_clauses
;
12815 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
12816 OMP_CLAUSES (expr
));
12820 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
12821 if ((ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
12822 gimple_omp_teams_set_host (as_a
<gomp_teams
*> (stmt
), true);
12825 gcc_unreachable ();
12828 gimplify_seq_add_stmt (pre_p
, stmt
);
12829 *expr_p
= NULL_TREE
;
12832 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
12833 target update constructs. */
12836 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
12838 tree expr
= *expr_p
;
12841 enum omp_region_type ort
= ORT_WORKSHARE
;
12843 switch (TREE_CODE (expr
))
12845 case OACC_ENTER_DATA
:
12846 case OACC_EXIT_DATA
:
12847 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
;
12851 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
12854 case OMP_TARGET_UPDATE
:
12855 kind
= GF_OMP_TARGET_KIND_UPDATE
;
12857 case OMP_TARGET_ENTER_DATA
:
12858 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
12860 case OMP_TARGET_EXIT_DATA
:
12861 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
12864 gcc_unreachable ();
12866 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
12867 ort
, TREE_CODE (expr
));
12868 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
12870 if (TREE_CODE (expr
) == OACC_UPDATE
12871 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
12872 OMP_CLAUSE_IF_PRESENT
))
12874 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
12876 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12877 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
12878 switch (OMP_CLAUSE_MAP_KIND (c
))
12880 case GOMP_MAP_FORCE_TO
:
12881 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
12883 case GOMP_MAP_FORCE_FROM
:
12884 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FROM
);
12890 else if (TREE_CODE (expr
) == OACC_EXIT_DATA
12891 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
12892 OMP_CLAUSE_FINALIZE
))
12894 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
12896 bool have_clause
= false;
12897 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12898 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
12899 switch (OMP_CLAUSE_MAP_KIND (c
))
12901 case GOMP_MAP_FROM
:
12902 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_FROM
);
12903 have_clause
= true;
12905 case GOMP_MAP_RELEASE
:
12906 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_DELETE
);
12907 have_clause
= true;
12909 case GOMP_MAP_POINTER
:
12910 case GOMP_MAP_TO_PSET
:
12911 /* TODO PR92929: we may see these here, but they'll always follow
12912 one of the clauses above, and will be handled by libgomp as
12913 one group, so no handling required here. */
12914 gcc_assert (have_clause
);
12916 case GOMP_MAP_DETACH
:
12917 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_DETACH
);
12918 have_clause
= false;
12920 case GOMP_MAP_STRUCT
:
12921 have_clause
= false;
12924 gcc_unreachable ();
12927 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
12929 gimplify_seq_add_stmt (pre_p
, stmt
);
12930 *expr_p
= NULL_TREE
;
12933 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
12934 stabilized the lhs of the atomic operation as *ADDR. Return true if
12935 EXPR is this stabilized form. */
12938 goa_lhs_expr_p (tree expr
, tree addr
)
12940 /* Also include casts to other type variants. The C front end is fond
12941 of adding these for e.g. volatile variables. This is like
12942 STRIP_TYPE_NOPS but includes the main variant lookup. */
12943 STRIP_USELESS_TYPE_CONVERSION (expr
);
12945 if (TREE_CODE (expr
) == INDIRECT_REF
)
12947 expr
= TREE_OPERAND (expr
, 0);
12948 while (expr
!= addr
12949 && (CONVERT_EXPR_P (expr
)
12950 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
12951 && TREE_CODE (expr
) == TREE_CODE (addr
)
12952 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
12954 expr
= TREE_OPERAND (expr
, 0);
12955 addr
= TREE_OPERAND (addr
, 0);
12959 return (TREE_CODE (addr
) == ADDR_EXPR
12960 && TREE_CODE (expr
) == ADDR_EXPR
12961 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
12963 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
12968 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
12969 expression does not involve the lhs, evaluate it into a temporary.
12970 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
12971 or -1 if an error was encountered. */
12974 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
12977 tree expr
= *expr_p
;
12980 if (goa_lhs_expr_p (expr
, lhs_addr
))
12985 if (is_gimple_val (expr
))
12989 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
12992 case tcc_comparison
:
12993 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
12997 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
13000 case tcc_expression
:
13001 switch (TREE_CODE (expr
))
13003 case TRUTH_ANDIF_EXPR
:
13004 case TRUTH_ORIF_EXPR
:
13005 case TRUTH_AND_EXPR
:
13006 case TRUTH_OR_EXPR
:
13007 case TRUTH_XOR_EXPR
:
13008 case BIT_INSERT_EXPR
:
13009 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
13010 lhs_addr
, lhs_var
);
13012 case TRUTH_NOT_EXPR
:
13013 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
13014 lhs_addr
, lhs_var
);
13016 case COMPOUND_EXPR
:
13017 /* Break out any preevaluations from cp_build_modify_expr. */
13018 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
13019 expr
= TREE_OPERAND (expr
, 1))
13020 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
13022 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
);
13027 case tcc_reference
:
13028 if (TREE_CODE (expr
) == BIT_FIELD_REF
)
13029 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
13030 lhs_addr
, lhs_var
);
13038 enum gimplify_status gs
;
13039 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
13040 if (gs
!= GS_ALL_DONE
)
13047 /* Gimplify an OMP_ATOMIC statement. */
13049 static enum gimplify_status
13050 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
13052 tree addr
= TREE_OPERAND (*expr_p
, 0);
13053 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
13054 ? NULL
: TREE_OPERAND (*expr_p
, 1);
13055 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
13057 gomp_atomic_load
*loadstmt
;
13058 gomp_atomic_store
*storestmt
;
13060 tmp_load
= create_tmp_reg (type
);
13061 if (rhs
&& goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
) < 0)
13064 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
13068 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
,
13069 OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
13070 gimplify_seq_add_stmt (pre_p
, loadstmt
);
13073 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13074 representatives. Use BIT_FIELD_REF on the lhs instead. */
13075 if (TREE_CODE (rhs
) == BIT_INSERT_EXPR
13076 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load
)))
13078 tree bitpos
= TREE_OPERAND (rhs
, 2);
13079 tree op1
= TREE_OPERAND (rhs
, 1);
13081 tree tmp_store
= tmp_load
;
13082 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_CAPTURE_OLD
)
13083 tmp_store
= get_initialized_tmp_var (tmp_load
, pre_p
);
13084 if (INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
13085 bitsize
= bitsize_int (TYPE_PRECISION (TREE_TYPE (op1
)));
13087 bitsize
= TYPE_SIZE (TREE_TYPE (op1
));
13088 gcc_assert (TREE_OPERAND (rhs
, 0) == tmp_load
);
13089 tree t
= build2_loc (EXPR_LOCATION (rhs
),
13090 MODIFY_EXPR
, void_type_node
,
13091 build3_loc (EXPR_LOCATION (rhs
), BIT_FIELD_REF
,
13092 TREE_TYPE (op1
), tmp_store
, bitsize
,
13094 gimplify_and_add (t
, pre_p
);
13097 if (gimplify_expr (&rhs
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
13102 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
13105 = gimple_build_omp_atomic_store (rhs
, OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
13106 gimplify_seq_add_stmt (pre_p
, storestmt
);
13107 switch (TREE_CODE (*expr_p
))
13109 case OMP_ATOMIC_READ
:
13110 case OMP_ATOMIC_CAPTURE_OLD
:
13111 *expr_p
= tmp_load
;
13112 gimple_omp_atomic_set_need_value (loadstmt
);
13114 case OMP_ATOMIC_CAPTURE_NEW
:
13116 gimple_omp_atomic_set_need_value (storestmt
);
13123 return GS_ALL_DONE
;
13126 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13127 body, and adding some EH bits. */
13129 static enum gimplify_status
13130 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
13132 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
13134 gtransaction
*trans_stmt
;
13135 gimple_seq body
= NULL
;
13138 /* Wrap the transaction body in a BIND_EXPR so we have a context
13139 where to put decls for OMP. */
13140 if (TREE_CODE (tbody
) != BIND_EXPR
)
13142 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
13143 TREE_SIDE_EFFECTS (bind
) = 1;
13144 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
13145 TRANSACTION_EXPR_BODY (expr
) = bind
;
13148 push_gimplify_context ();
13149 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
13151 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
13152 pop_gimplify_context (body_stmt
);
13154 trans_stmt
= gimple_build_transaction (body
);
13155 if (TRANSACTION_EXPR_OUTER (expr
))
13156 subcode
= GTMA_IS_OUTER
;
13157 else if (TRANSACTION_EXPR_RELAXED (expr
))
13158 subcode
= GTMA_IS_RELAXED
;
13159 gimple_transaction_set_subcode (trans_stmt
, subcode
);
13161 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
13169 *expr_p
= NULL_TREE
;
13170 return GS_ALL_DONE
;
13173 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13174 is the OMP_BODY of the original EXPR (which has already been
13175 gimplified so it's not present in the EXPR).
13177 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
13180 gimplify_omp_ordered (tree expr
, gimple_seq body
)
13185 tree source_c
= NULL_TREE
;
13186 tree sink_c
= NULL_TREE
;
13188 if (gimplify_omp_ctxp
)
13190 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13191 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
13192 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ()
13193 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
13194 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
))
13196 error_at (OMP_CLAUSE_LOCATION (c
),
13197 "%<ordered%> construct with %<depend%> clause must be "
13198 "closely nested inside a loop with %<ordered%> clause "
13199 "with a parameter");
13202 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
13203 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
13206 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
13207 decls
&& TREE_CODE (decls
) == TREE_LIST
;
13208 decls
= TREE_CHAIN (decls
), ++i
)
13209 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
13211 else if (TREE_VALUE (decls
)
13212 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
13214 error_at (OMP_CLAUSE_LOCATION (c
),
13215 "variable %qE is not an iteration "
13216 "of outermost loop %d, expected %qE",
13217 TREE_VALUE (decls
), i
+ 1,
13218 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
13224 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
13225 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
13227 error_at (OMP_CLAUSE_LOCATION (c
),
13228 "number of variables in %<depend%> clause with "
13229 "%<sink%> modifier does not match number of "
13230 "iteration variables");
13235 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
13236 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
13240 error_at (OMP_CLAUSE_LOCATION (c
),
13241 "more than one %<depend%> clause with %<source%> "
13242 "modifier on an %<ordered%> construct");
13249 if (source_c
&& sink_c
)
13251 error_at (OMP_CLAUSE_LOCATION (source_c
),
13252 "%<depend%> clause with %<source%> modifier specified "
13253 "together with %<depend%> clauses with %<sink%> modifier "
13254 "on the same construct");
13259 return gimple_build_nop ();
13260 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
13263 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13264 expression produces a value to be used as an operand inside a GIMPLE
13265 statement, the value will be stored back in *EXPR_P. This value will
13266 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13267 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13268 emitted in PRE_P and POST_P.
13270 Additionally, this process may overwrite parts of the input
13271 expression during gimplification. Ideally, it should be
13272 possible to do non-destructive gimplification.
13274 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13275 the expression needs to evaluate to a value to be used as
13276 an operand in a GIMPLE statement, this value will be stored in
13277 *EXPR_P on exit. This happens when the caller specifies one
13278 of fb_lvalue or fb_rvalue fallback flags.
13280 PRE_P will contain the sequence of GIMPLE statements corresponding
13281 to the evaluation of EXPR and all the side-effects that must
13282 be executed before the main expression. On exit, the last
13283 statement of PRE_P is the core statement being gimplified. For
13284 instance, when gimplifying 'if (++a)' the last statement in
13285 PRE_P will be 'if (t.1)' where t.1 is the result of
13286 pre-incrementing 'a'.
13288 POST_P will contain the sequence of GIMPLE statements corresponding
13289 to the evaluation of all the side-effects that must be executed
13290 after the main expression. If this is NULL, the post
13291 side-effects are stored at the end of PRE_P.
13293 The reason why the output is split in two is to handle post
13294 side-effects explicitly. In some cases, an expression may have
13295 inner and outer post side-effects which need to be emitted in
13296 an order different from the one given by the recursive
13297 traversal. For instance, for the expression (*p--)++ the post
13298 side-effects of '--' must actually occur *after* the post
13299 side-effects of '++'. However, gimplification will first visit
13300 the inner expression, so if a separate POST sequence was not
13301 used, the resulting sequence would be:
13308 However, the post-decrement operation in line #2 must not be
13309 evaluated until after the store to *p at line #4, so the
13310 correct sequence should be:
13317 So, by specifying a separate post queue, it is possible
13318 to emit the post side-effects in the correct order.
13319 If POST_P is NULL, an internal queue will be used. Before
13320 returning to the caller, the sequence POST_P is appended to
13321 the main output sequence PRE_P.
13323 GIMPLE_TEST_F points to a function that takes a tree T and
13324 returns nonzero if T is in the GIMPLE form requested by the
13325 caller. The GIMPLE predicates are in gimple.c.
13327 FALLBACK tells the function what sort of a temporary we want if
13328 gimplification cannot produce an expression that complies with
13331 fb_none means that no temporary should be generated
13332 fb_rvalue means that an rvalue is OK to generate
13333 fb_lvalue means that an lvalue is OK to generate
13334 fb_either means that either is OK, but an lvalue is preferable.
13335 fb_mayfail means that gimplification may fail (in which case
13336 GS_ERROR will be returned)
13338 The return value is either GS_ERROR or GS_ALL_DONE, since this
13339 function iterates until EXPR is completely gimplified or an error
13342 enum gimplify_status
13343 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
13344 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
13347 gimple_seq internal_pre
= NULL
;
13348 gimple_seq internal_post
= NULL
;
13351 location_t saved_location
;
13352 enum gimplify_status ret
;
13353 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
13356 save_expr
= *expr_p
;
13357 if (save_expr
== NULL_TREE
)
13358 return GS_ALL_DONE
;
13360 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
13361 is_statement
= gimple_test_f
== is_gimple_stmt
;
13363 gcc_assert (pre_p
);
13365 /* Consistency checks. */
13366 if (gimple_test_f
== is_gimple_reg
)
13367 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
13368 else if (gimple_test_f
== is_gimple_val
13369 || gimple_test_f
== is_gimple_call_addr
13370 || gimple_test_f
== is_gimple_condexpr
13371 || gimple_test_f
== is_gimple_condexpr_for_cond
13372 || gimple_test_f
== is_gimple_mem_rhs
13373 || gimple_test_f
== is_gimple_mem_rhs_or_call
13374 || gimple_test_f
== is_gimple_reg_rhs
13375 || gimple_test_f
== is_gimple_reg_rhs_or_call
13376 || gimple_test_f
== is_gimple_asm_val
13377 || gimple_test_f
== is_gimple_mem_ref_addr
)
13378 gcc_assert (fallback
& fb_rvalue
);
13379 else if (gimple_test_f
== is_gimple_min_lval
13380 || gimple_test_f
== is_gimple_lvalue
)
13381 gcc_assert (fallback
& fb_lvalue
);
13382 else if (gimple_test_f
== is_gimple_addressable
)
13383 gcc_assert (fallback
& fb_either
);
13384 else if (gimple_test_f
== is_gimple_stmt
)
13385 gcc_assert (fallback
== fb_none
);
13388 /* We should have recognized the GIMPLE_TEST_F predicate to
13389 know what kind of fallback to use in case a temporary is
13390 needed to hold the value or address of *EXPR_P. */
13391 gcc_unreachable ();
13394 /* We used to check the predicate here and return immediately if it
13395 succeeds. This is wrong; the design is for gimplification to be
13396 idempotent, and for the predicates to only test for valid forms, not
13397 whether they are fully simplified. */
13399 pre_p
= &internal_pre
;
13401 if (post_p
== NULL
)
13402 post_p
= &internal_post
;
13404 /* Remember the last statements added to PRE_P and POST_P. Every
13405 new statement added by the gimplification helpers needs to be
13406 annotated with location information. To centralize the
13407 responsibility, we remember the last statement that had been
13408 added to both queues before gimplifying *EXPR_P. If
13409 gimplification produces new statements in PRE_P and POST_P, those
13410 statements will be annotated with the same location information
13412 pre_last_gsi
= gsi_last (*pre_p
);
13413 post_last_gsi
= gsi_last (*post_p
);
13415 saved_location
= input_location
;
13416 if (save_expr
!= error_mark_node
13417 && EXPR_HAS_LOCATION (*expr_p
))
13418 input_location
= EXPR_LOCATION (*expr_p
);
13420 /* Loop over the specific gimplifiers until the toplevel node
13421 remains the same. */
13424 /* Strip away as many useless type conversions as possible
13425 at the toplevel. */
13426 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
13428 /* Remember the expr. */
13429 save_expr
= *expr_p
;
13431 /* Die, die, die, my darling. */
13432 if (error_operand_p (save_expr
))
13438 /* Do any language-specific gimplification. */
13439 ret
= ((enum gimplify_status
)
13440 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
13443 if (*expr_p
== NULL_TREE
)
13445 if (*expr_p
!= save_expr
)
13448 else if (ret
!= GS_UNHANDLED
)
13451 /* Make sure that all the cases set 'ret' appropriately. */
13452 ret
= GS_UNHANDLED
;
13453 switch (TREE_CODE (*expr_p
))
13455 /* First deal with the special cases. */
13457 case POSTINCREMENT_EXPR
:
13458 case POSTDECREMENT_EXPR
:
13459 case PREINCREMENT_EXPR
:
13460 case PREDECREMENT_EXPR
:
13461 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
13462 fallback
!= fb_none
,
13463 TREE_TYPE (*expr_p
));
13466 case VIEW_CONVERT_EXPR
:
13467 if ((fallback
& fb_rvalue
)
13468 && is_gimple_reg_type (TREE_TYPE (*expr_p
))
13469 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
13471 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13472 post_p
, is_gimple_val
, fb_rvalue
);
13473 recalculate_side_effects (*expr_p
);
13479 case ARRAY_RANGE_REF
:
13480 case REALPART_EXPR
:
13481 case IMAGPART_EXPR
:
13482 case COMPONENT_REF
:
13483 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
13484 fallback
? fallback
: fb_rvalue
);
13488 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
13490 /* C99 code may assign to an array in a structure value of a
13491 conditional expression, and this has undefined behavior
13492 only on execution, so create a temporary if an lvalue is
13494 if (fallback
== fb_lvalue
)
13496 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
13497 mark_addressable (*expr_p
);
13503 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
13505 /* C99 code may assign to an array in a structure returned
13506 from a function, and this has undefined behavior only on
13507 execution, so create a temporary if an lvalue is
13509 if (fallback
== fb_lvalue
)
13511 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
13512 mark_addressable (*expr_p
);
13518 gcc_unreachable ();
13520 case COMPOUND_EXPR
:
13521 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
13524 case COMPOUND_LITERAL_EXPR
:
13525 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
13526 gimple_test_f
, fallback
);
13531 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
13532 fallback
!= fb_none
);
13535 case TRUTH_ANDIF_EXPR
:
13536 case TRUTH_ORIF_EXPR
:
13538 /* Preserve the original type of the expression and the
13539 source location of the outer expression. */
13540 tree org_type
= TREE_TYPE (*expr_p
);
13541 *expr_p
= gimple_boolify (*expr_p
);
13542 *expr_p
= build3_loc (input_location
, COND_EXPR
,
13546 org_type
, boolean_true_node
),
13549 org_type
, boolean_false_node
));
13554 case TRUTH_NOT_EXPR
:
13556 tree type
= TREE_TYPE (*expr_p
);
13557 /* The parsers are careful to generate TRUTH_NOT_EXPR
13558 only with operands that are always zero or one.
13559 We do not fold here but handle the only interesting case
13560 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
13561 *expr_p
= gimple_boolify (*expr_p
);
13562 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
13563 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
13564 TREE_TYPE (*expr_p
),
13565 TREE_OPERAND (*expr_p
, 0));
13567 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
13568 TREE_TYPE (*expr_p
),
13569 TREE_OPERAND (*expr_p
, 0),
13570 build_int_cst (TREE_TYPE (*expr_p
), 1));
13571 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
13572 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
13578 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
13581 case ANNOTATE_EXPR
:
13583 tree cond
= TREE_OPERAND (*expr_p
, 0);
13584 tree kind
= TREE_OPERAND (*expr_p
, 1);
13585 tree data
= TREE_OPERAND (*expr_p
, 2);
13586 tree type
= TREE_TYPE (cond
);
13587 if (!INTEGRAL_TYPE_P (type
))
13593 tree tmp
= create_tmp_var (type
);
13594 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
13596 = gimple_build_call_internal (IFN_ANNOTATE
, 3, cond
, kind
, data
);
13597 gimple_call_set_lhs (call
, tmp
);
13598 gimplify_seq_add_stmt (pre_p
, call
);
13605 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
13609 if (IS_EMPTY_STMT (*expr_p
))
13615 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
13616 || fallback
== fb_none
)
13618 /* Just strip a conversion to void (or in void context) and
13620 *expr_p
= TREE_OPERAND (*expr_p
, 0);
13625 ret
= gimplify_conversion (expr_p
);
13626 if (ret
== GS_ERROR
)
13628 if (*expr_p
!= save_expr
)
13632 case FIX_TRUNC_EXPR
:
13633 /* unary_expr: ... | '(' cast ')' val | ... */
13634 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
13635 is_gimple_val
, fb_rvalue
);
13636 recalculate_side_effects (*expr_p
);
13641 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
13642 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
13643 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
13645 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
13646 if (*expr_p
!= save_expr
)
13652 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
13653 is_gimple_reg
, fb_rvalue
);
13654 if (ret
== GS_ERROR
)
13657 recalculate_side_effects (*expr_p
);
13658 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
13659 TREE_TYPE (*expr_p
),
13660 TREE_OPERAND (*expr_p
, 0),
13661 build_int_cst (saved_ptr_type
, 0));
13662 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
13663 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
13668 /* We arrive here through the various re-gimplifcation paths. */
13670 /* First try re-folding the whole thing. */
13671 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
13672 TREE_OPERAND (*expr_p
, 0),
13673 TREE_OPERAND (*expr_p
, 1));
13676 REF_REVERSE_STORAGE_ORDER (tmp
)
13677 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
13679 recalculate_side_effects (*expr_p
);
13683 /* Avoid re-gimplifying the address operand if it is already
13684 in suitable form. Re-gimplifying would mark the address
13685 operand addressable. Always gimplify when not in SSA form
13686 as we still may have to gimplify decls with value-exprs. */
13687 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
13688 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
13690 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
13691 is_gimple_mem_ref_addr
, fb_rvalue
);
13692 if (ret
== GS_ERROR
)
13695 recalculate_side_effects (*expr_p
);
13699 /* Constants need not be gimplified. */
13706 /* Drop the overflow flag on constants, we do not want
13707 that in the GIMPLE IL. */
13708 if (TREE_OVERFLOW_P (*expr_p
))
13709 *expr_p
= drop_tree_overflow (*expr_p
);
13714 /* If we require an lvalue, such as for ADDR_EXPR, retain the
13715 CONST_DECL node. Otherwise the decl is replaceable by its
13717 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
13718 if (fallback
& fb_lvalue
)
13722 *expr_p
= DECL_INITIAL (*expr_p
);
13728 ret
= gimplify_decl_expr (expr_p
, pre_p
);
13732 ret
= gimplify_bind_expr (expr_p
, pre_p
);
13736 ret
= gimplify_loop_expr (expr_p
, pre_p
);
13740 ret
= gimplify_switch_expr (expr_p
, pre_p
);
13744 ret
= gimplify_exit_expr (expr_p
);
13748 /* If the target is not LABEL, then it is a computed jump
13749 and the target needs to be gimplified. */
13750 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
13752 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
13753 NULL
, is_gimple_val
, fb_rvalue
);
13754 if (ret
== GS_ERROR
)
13757 gimplify_seq_add_stmt (pre_p
,
13758 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
13763 gimplify_seq_add_stmt (pre_p
,
13764 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
13765 PREDICT_EXPR_OUTCOME (*expr_p
)));
13770 ret
= gimplify_label_expr (expr_p
, pre_p
);
13771 label
= LABEL_EXPR_LABEL (*expr_p
);
13772 gcc_assert (decl_function_context (label
) == current_function_decl
);
13774 /* If the label is used in a goto statement, or address of the label
13775 is taken, we need to unpoison all variables that were seen so far.
13776 Doing so would prevent us from reporting a false positives. */
13777 if (asan_poisoned_variables
13778 && asan_used_labels
!= NULL
13779 && asan_used_labels
->contains (label
))
13780 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
13783 case CASE_LABEL_EXPR
:
13784 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
13786 if (gimplify_ctxp
->live_switch_vars
)
13787 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
13792 ret
= gimplify_return_expr (*expr_p
, pre_p
);
13796 /* Don't reduce this in place; let gimplify_init_constructor work its
13797 magic. Buf if we're just elaborating this for side effects, just
13798 gimplify any element that has side-effects. */
13799 if (fallback
== fb_none
)
13801 unsigned HOST_WIDE_INT ix
;
13803 tree temp
= NULL_TREE
;
13804 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
13805 if (TREE_SIDE_EFFECTS (val
))
13806 append_to_statement_list (val
, &temp
);
13809 ret
= temp
? GS_OK
: GS_ALL_DONE
;
13811 /* C99 code may assign to an array in a constructed
13812 structure or union, and this has undefined behavior only
13813 on execution, so create a temporary if an lvalue is
13815 else if (fallback
== fb_lvalue
)
13817 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
13818 mark_addressable (*expr_p
);
13825 /* The following are special cases that are not handled by the
13826 original GIMPLE grammar. */
13828 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
13831 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
13834 case BIT_FIELD_REF
:
13835 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13836 post_p
, is_gimple_lvalue
, fb_either
);
13837 recalculate_side_effects (*expr_p
);
13840 case TARGET_MEM_REF
:
13842 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
13844 if (TMR_BASE (*expr_p
))
13845 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
13846 post_p
, is_gimple_mem_ref_addr
, fb_either
);
13847 if (TMR_INDEX (*expr_p
))
13848 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
13849 post_p
, is_gimple_val
, fb_rvalue
);
13850 if (TMR_INDEX2 (*expr_p
))
13851 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
13852 post_p
, is_gimple_val
, fb_rvalue
);
13853 /* TMR_STEP and TMR_OFFSET are always integer constants. */
13854 ret
= MIN (r0
, r1
);
13858 case NON_LVALUE_EXPR
:
13859 /* This should have been stripped above. */
13860 gcc_unreachable ();
13863 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
13866 case TRY_FINALLY_EXPR
:
13867 case TRY_CATCH_EXPR
:
13869 gimple_seq eval
, cleanup
;
13872 /* Calls to destructors are generated automatically in FINALLY/CATCH
13873 block. They should have location as UNKNOWN_LOCATION. However,
13874 gimplify_call_expr will reset these call stmts to input_location
13875 if it finds stmt's location is unknown. To prevent resetting for
13876 destructors, we set the input_location to unknown.
13877 Note that this only affects the destructor calls in FINALLY/CATCH
13878 block, and will automatically reset to its original value by the
13879 end of gimplify_expr. */
13880 input_location
= UNKNOWN_LOCATION
;
13881 eval
= cleanup
= NULL
;
13882 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
13883 if (TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
13884 && TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == EH_ELSE_EXPR
)
13886 gimple_seq n
= NULL
, e
= NULL
;
13887 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
13889 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
13891 if (!gimple_seq_empty_p (n
) && !gimple_seq_empty_p (e
))
13893 geh_else
*stmt
= gimple_build_eh_else (n
, e
);
13894 gimple_seq_add_stmt (&cleanup
, stmt
);
13898 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
13899 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
13900 if (gimple_seq_empty_p (cleanup
))
13902 gimple_seq_add_seq (pre_p
, eval
);
13906 try_
= gimple_build_try (eval
, cleanup
,
13907 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
13908 ? GIMPLE_TRY_FINALLY
13909 : GIMPLE_TRY_CATCH
);
13910 if (EXPR_HAS_LOCATION (save_expr
))
13911 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
13912 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
13913 gimple_set_location (try_
, saved_location
);
13914 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
13915 gimple_try_set_catch_is_cleanup (try_
,
13916 TRY_CATCH_IS_CLEANUP (*expr_p
));
13917 gimplify_seq_add_stmt (pre_p
, try_
);
13922 case CLEANUP_POINT_EXPR
:
13923 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
13927 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
13933 gimple_seq handler
= NULL
;
13934 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
13935 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
13936 gimplify_seq_add_stmt (pre_p
, c
);
13941 case EH_FILTER_EXPR
:
13944 gimple_seq failure
= NULL
;
13946 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
13947 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
13948 gimple_set_no_warning (ehf
, TREE_NO_WARNING (*expr_p
));
13949 gimplify_seq_add_stmt (pre_p
, ehf
);
13956 enum gimplify_status r0
, r1
;
13957 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
13958 post_p
, is_gimple_val
, fb_rvalue
);
13959 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
13960 post_p
, is_gimple_val
, fb_rvalue
);
13961 TREE_SIDE_EFFECTS (*expr_p
) = 0;
13962 ret
= MIN (r0
, r1
);
13967 /* We get here when taking the address of a label. We mark
13968 the label as "forced"; meaning it can never be removed and
13969 it is a potential target for any computed goto. */
13970 FORCED_LABEL (*expr_p
) = 1;
13974 case STATEMENT_LIST
:
13975 ret
= gimplify_statement_list (expr_p
, pre_p
);
13978 case WITH_SIZE_EXPR
:
13980 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13981 post_p
== &internal_post
? NULL
: post_p
,
13982 gimple_test_f
, fallback
);
13983 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
13984 is_gimple_val
, fb_rvalue
);
13991 ret
= gimplify_var_or_parm_decl (expr_p
);
13995 /* When within an OMP context, notice uses of variables. */
13996 if (gimplify_omp_ctxp
)
13997 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
14001 case DEBUG_EXPR_DECL
:
14002 gcc_unreachable ();
14004 case DEBUG_BEGIN_STMT
:
14005 gimplify_seq_add_stmt (pre_p
,
14006 gimple_build_debug_begin_stmt
14007 (TREE_BLOCK (*expr_p
),
14008 EXPR_LOCATION (*expr_p
)));
14014 /* Allow callbacks into the gimplifier during optimization. */
14019 gimplify_omp_parallel (expr_p
, pre_p
);
14024 gimplify_omp_task (expr_p
, pre_p
);
14030 case OMP_DISTRIBUTE
:
14033 ret
= gimplify_omp_for (expr_p
, pre_p
);
14037 ret
= gimplify_omp_loop (expr_p
, pre_p
);
14041 gimplify_oacc_cache (expr_p
, pre_p
);
14046 gimplify_oacc_declare (expr_p
, pre_p
);
14050 case OACC_HOST_DATA
:
14053 case OACC_PARALLEL
:
14058 case OMP_TARGET_DATA
:
14060 gimplify_omp_workshare (expr_p
, pre_p
);
14064 case OACC_ENTER_DATA
:
14065 case OACC_EXIT_DATA
:
14067 case OMP_TARGET_UPDATE
:
14068 case OMP_TARGET_ENTER_DATA
:
14069 case OMP_TARGET_EXIT_DATA
:
14070 gimplify_omp_target_update (expr_p
, pre_p
);
14080 gimple_seq body
= NULL
;
14082 bool saved_in_omp_construct
= in_omp_construct
;
14084 in_omp_construct
= true;
14085 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
14086 in_omp_construct
= saved_in_omp_construct
;
14087 switch (TREE_CODE (*expr_p
))
14090 g
= gimple_build_omp_section (body
);
14093 g
= gimple_build_omp_master (body
);
14096 g
= gimplify_omp_ordered (*expr_p
, body
);
14099 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
14100 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
14101 gimplify_adjust_omp_clauses (pre_p
, body
,
14102 &OMP_CRITICAL_CLAUSES (*expr_p
),
14104 g
= gimple_build_omp_critical (body
,
14105 OMP_CRITICAL_NAME (*expr_p
),
14106 OMP_CRITICAL_CLAUSES (*expr_p
));
14109 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p
),
14110 pre_p
, ORT_WORKSHARE
, OMP_SCAN
);
14111 gimplify_adjust_omp_clauses (pre_p
, body
,
14112 &OMP_SCAN_CLAUSES (*expr_p
),
14114 g
= gimple_build_omp_scan (body
, OMP_SCAN_CLAUSES (*expr_p
));
14117 gcc_unreachable ();
14119 gimplify_seq_add_stmt (pre_p
, g
);
14124 case OMP_TASKGROUP
:
14126 gimple_seq body
= NULL
;
14128 tree
*pclauses
= &OMP_TASKGROUP_CLAUSES (*expr_p
);
14129 bool saved_in_omp_construct
= in_omp_construct
;
14130 gimplify_scan_omp_clauses (pclauses
, pre_p
, ORT_TASKGROUP
,
14132 gimplify_adjust_omp_clauses (pre_p
, NULL
, pclauses
, OMP_TASKGROUP
);
14134 in_omp_construct
= true;
14135 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
14136 in_omp_construct
= saved_in_omp_construct
;
14137 gimple_seq cleanup
= NULL
;
14138 tree fn
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
14139 gimple
*g
= gimple_build_call (fn
, 0);
14140 gimple_seq_add_stmt (&cleanup
, g
);
14141 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
14143 gimple_seq_add_stmt (&body
, g
);
14144 g
= gimple_build_omp_taskgroup (body
, *pclauses
);
14145 gimplify_seq_add_stmt (pre_p
, g
);
14151 case OMP_ATOMIC_READ
:
14152 case OMP_ATOMIC_CAPTURE_OLD
:
14153 case OMP_ATOMIC_CAPTURE_NEW
:
14154 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
14157 case TRANSACTION_EXPR
:
14158 ret
= gimplify_transaction (expr_p
, pre_p
);
14161 case TRUTH_AND_EXPR
:
14162 case TRUTH_OR_EXPR
:
14163 case TRUTH_XOR_EXPR
:
14165 tree orig_type
= TREE_TYPE (*expr_p
);
14166 tree new_type
, xop0
, xop1
;
14167 *expr_p
= gimple_boolify (*expr_p
);
14168 new_type
= TREE_TYPE (*expr_p
);
14169 if (!useless_type_conversion_p (orig_type
, new_type
))
14171 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
14176 /* Boolified binary truth expressions are semantically equivalent
14177 to bitwise binary expressions. Canonicalize them to the
14178 bitwise variant. */
14179 switch (TREE_CODE (*expr_p
))
14181 case TRUTH_AND_EXPR
:
14182 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
14184 case TRUTH_OR_EXPR
:
14185 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
14187 case TRUTH_XOR_EXPR
:
14188 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
14193 /* Now make sure that operands have compatible type to
14194 expression's new_type. */
14195 xop0
= TREE_OPERAND (*expr_p
, 0);
14196 xop1
= TREE_OPERAND (*expr_p
, 1);
14197 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
14198 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
14201 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
14202 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
14205 /* Continue classified as tcc_binary. */
14209 case VEC_COND_EXPR
:
14211 enum gimplify_status r0
, r1
, r2
;
14213 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14214 post_p
, is_gimple_condexpr
, fb_rvalue
);
14215 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14216 post_p
, is_gimple_val
, fb_rvalue
);
14217 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
14218 post_p
, is_gimple_val
, fb_rvalue
);
14220 ret
= MIN (MIN (r0
, r1
), r2
);
14221 recalculate_side_effects (*expr_p
);
14225 case VEC_PERM_EXPR
:
14226 /* Classified as tcc_expression. */
14229 case BIT_INSERT_EXPR
:
14230 /* Argument 3 is a constant. */
14233 case POINTER_PLUS_EXPR
:
14235 enum gimplify_status r0
, r1
;
14236 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14237 post_p
, is_gimple_val
, fb_rvalue
);
14238 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14239 post_p
, is_gimple_val
, fb_rvalue
);
14240 recalculate_side_effects (*expr_p
);
14241 ret
= MIN (r0
, r1
);
14246 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
14248 case tcc_comparison
:
14249 /* Handle comparison of objects of non scalar mode aggregates
14250 with a call to memcmp. It would be nice to only have to do
14251 this for variable-sized objects, but then we'd have to allow
14252 the same nest of reference nodes we allow for MODIFY_EXPR and
14253 that's too complex.
14255 Compare scalar mode aggregates as scalar mode values. Using
14256 memcmp for them would be very inefficient at best, and is
14257 plain wrong if bitfields are involved. */
14259 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
14261 /* Vector comparisons need no boolification. */
14262 if (TREE_CODE (type
) == VECTOR_TYPE
)
14264 else if (!AGGREGATE_TYPE_P (type
))
14266 tree org_type
= TREE_TYPE (*expr_p
);
14267 *expr_p
= gimple_boolify (*expr_p
);
14268 if (!useless_type_conversion_p (org_type
,
14269 TREE_TYPE (*expr_p
)))
14271 *expr_p
= fold_convert_loc (input_location
,
14272 org_type
, *expr_p
);
14278 else if (TYPE_MODE (type
) != BLKmode
)
14279 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
14281 ret
= gimplify_variable_sized_compare (expr_p
);
14286 /* If *EXPR_P does not need to be special-cased, handle it
14287 according to its class. */
14289 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14290 post_p
, is_gimple_val
, fb_rvalue
);
14296 enum gimplify_status r0
, r1
;
14298 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14299 post_p
, is_gimple_val
, fb_rvalue
);
14300 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14301 post_p
, is_gimple_val
, fb_rvalue
);
14303 ret
= MIN (r0
, r1
);
14309 enum gimplify_status r0
, r1
, r2
;
14311 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14312 post_p
, is_gimple_val
, fb_rvalue
);
14313 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14314 post_p
, is_gimple_val
, fb_rvalue
);
14315 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
14316 post_p
, is_gimple_val
, fb_rvalue
);
14318 ret
= MIN (MIN (r0
, r1
), r2
);
14322 case tcc_declaration
:
14325 goto dont_recalculate
;
14328 gcc_unreachable ();
14331 recalculate_side_effects (*expr_p
);
14337 gcc_assert (*expr_p
|| ret
!= GS_OK
);
14339 while (ret
== GS_OK
);
14341 /* If we encountered an error_mark somewhere nested inside, either
14342 stub out the statement or propagate the error back out. */
14343 if (ret
== GS_ERROR
)
14350 /* This was only valid as a return value from the langhook, which
14351 we handled. Make sure it doesn't escape from any other context. */
14352 gcc_assert (ret
!= GS_UNHANDLED
);
14354 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
14356 /* We aren't looking for a value, and we don't have a valid
14357 statement. If it doesn't have side-effects, throw it away.
14358 We can also get here with code such as "*&&L;", where L is
14359 a LABEL_DECL that is marked as FORCED_LABEL. */
14360 if (TREE_CODE (*expr_p
) == LABEL_DECL
14361 || !TREE_SIDE_EFFECTS (*expr_p
))
14363 else if (!TREE_THIS_VOLATILE (*expr_p
))
14365 /* This is probably a _REF that contains something nested that
14366 has side effects. Recurse through the operands to find it. */
14367 enum tree_code code
= TREE_CODE (*expr_p
);
14371 case COMPONENT_REF
:
14372 case REALPART_EXPR
:
14373 case IMAGPART_EXPR
:
14374 case VIEW_CONVERT_EXPR
:
14375 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14376 gimple_test_f
, fallback
);
14380 case ARRAY_RANGE_REF
:
14381 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14382 gimple_test_f
, fallback
);
14383 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
14384 gimple_test_f
, fallback
);
14388 /* Anything else with side-effects must be converted to
14389 a valid statement before we get here. */
14390 gcc_unreachable ();
14395 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
14396 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
)
14398 /* Historically, the compiler has treated a bare reference
14399 to a non-BLKmode volatile lvalue as forcing a load. */
14400 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
14402 /* Normally, we do not want to create a temporary for a
14403 TREE_ADDRESSABLE type because such a type should not be
14404 copied by bitwise-assignment. However, we make an
14405 exception here, as all we are doing here is ensuring that
14406 we read the bytes that make up the type. We use
14407 create_tmp_var_raw because create_tmp_var will abort when
14408 given a TREE_ADDRESSABLE type. */
14409 tree tmp
= create_tmp_var_raw (type
, "vol");
14410 gimple_add_tmp_var (tmp
);
14411 gimplify_assign (tmp
, *expr_p
, pre_p
);
14415 /* We can't do anything useful with a volatile reference to
14416 an incomplete type, so just throw it away. Likewise for
14417 a BLKmode type, since any implicit inner load should
14418 already have been turned into an explicit one by the
14419 gimplification process. */
14423 /* If we are gimplifying at the statement level, we're done. Tack
14424 everything together and return. */
14425 if (fallback
== fb_none
|| is_statement
)
14427 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14428 it out for GC to reclaim it. */
14429 *expr_p
= NULL_TREE
;
14431 if (!gimple_seq_empty_p (internal_pre
)
14432 || !gimple_seq_empty_p (internal_post
))
14434 gimplify_seq_add_seq (&internal_pre
, internal_post
);
14435 gimplify_seq_add_seq (pre_p
, internal_pre
);
14438 /* The result of gimplifying *EXPR_P is going to be the last few
14439 statements in *PRE_P and *POST_P. Add location information
14440 to all the statements that were added by the gimplification
14442 if (!gimple_seq_empty_p (*pre_p
))
14443 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
14445 if (!gimple_seq_empty_p (*post_p
))
14446 annotate_all_with_location_after (*post_p
, post_last_gsi
,
14452 #ifdef ENABLE_GIMPLE_CHECKING
14455 enum tree_code code
= TREE_CODE (*expr_p
);
14456 /* These expressions should already be in gimple IR form. */
14457 gcc_assert (code
!= MODIFY_EXPR
14458 && code
!= ASM_EXPR
14459 && code
!= BIND_EXPR
14460 && code
!= CATCH_EXPR
14461 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
14462 && code
!= EH_FILTER_EXPR
14463 && code
!= GOTO_EXPR
14464 && code
!= LABEL_EXPR
14465 && code
!= LOOP_EXPR
14466 && code
!= SWITCH_EXPR
14467 && code
!= TRY_FINALLY_EXPR
14468 && code
!= EH_ELSE_EXPR
14469 && code
!= OACC_PARALLEL
14470 && code
!= OACC_KERNELS
14471 && code
!= OACC_SERIAL
14472 && code
!= OACC_DATA
14473 && code
!= OACC_HOST_DATA
14474 && code
!= OACC_DECLARE
14475 && code
!= OACC_UPDATE
14476 && code
!= OACC_ENTER_DATA
14477 && code
!= OACC_EXIT_DATA
14478 && code
!= OACC_CACHE
14479 && code
!= OMP_CRITICAL
14481 && code
!= OACC_LOOP
14482 && code
!= OMP_MASTER
14483 && code
!= OMP_TASKGROUP
14484 && code
!= OMP_ORDERED
14485 && code
!= OMP_PARALLEL
14486 && code
!= OMP_SCAN
14487 && code
!= OMP_SECTIONS
14488 && code
!= OMP_SECTION
14489 && code
!= OMP_SINGLE
);
14493 /* Otherwise we're gimplifying a subexpression, so the resulting
14494 value is interesting. If it's a valid operand that matches
14495 GIMPLE_TEST_F, we're done. Unless we are handling some
14496 post-effects internally; if that's the case, we need to copy into
14497 a temporary before adding the post-effects to POST_P. */
14498 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
14501 /* Otherwise, we need to create a new temporary for the gimplified
14504 /* We can't return an lvalue if we have an internal postqueue. The
14505 object the lvalue refers to would (probably) be modified by the
14506 postqueue; we need to copy the value out first, which means an
14508 if ((fallback
& fb_lvalue
)
14509 && gimple_seq_empty_p (internal_post
)
14510 && is_gimple_addressable (*expr_p
))
14512 /* An lvalue will do. Take the address of the expression, store it
14513 in a temporary, and replace the expression with an INDIRECT_REF of
14515 tree ref_alias_type
= reference_alias_ptr_type (*expr_p
);
14516 unsigned int ref_align
= get_object_alignment (*expr_p
);
14517 tree ref_type
= TREE_TYPE (*expr_p
);
14518 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
14519 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
14520 if (TYPE_ALIGN (ref_type
) != ref_align
)
14521 ref_type
= build_aligned_type (ref_type
, ref_align
);
14522 *expr_p
= build2 (MEM_REF
, ref_type
,
14523 tmp
, build_zero_cst (ref_alias_type
));
14525 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
14527 /* An rvalue will do. Assign the gimplified expression into a
14528 new temporary TMP and replace the original expression with
14529 TMP. First, make sure that the expression has a type so that
14530 it can be assigned into a temporary. */
14531 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
14532 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
14536 #ifdef ENABLE_GIMPLE_CHECKING
14537 if (!(fallback
& fb_mayfail
))
14539 fprintf (stderr
, "gimplification failed:\n");
14540 print_generic_expr (stderr
, *expr_p
);
14541 debug_tree (*expr_p
);
14542 internal_error ("gimplification failed");
14545 gcc_assert (fallback
& fb_mayfail
);
14547 /* If this is an asm statement, and the user asked for the
14548 impossible, don't die. Fail and let gimplify_asm_expr
14554 /* Make sure the temporary matches our predicate. */
14555 gcc_assert ((*gimple_test_f
) (*expr_p
));
14557 if (!gimple_seq_empty_p (internal_post
))
14559 annotate_all_with_location (internal_post
, input_location
);
14560 gimplify_seq_add_seq (pre_p
, internal_post
);
14564 input_location
= saved_location
;
14568 /* Like gimplify_expr but make sure the gimplified result is not itself
14569 a SSA name (but a decl if it were). Temporaries required by
14570 evaluating *EXPR_P may be still SSA names. */
/* NOTE(review): this region is a lossy extraction of GCC's gimplify.c.
   The integers at the start of many lines (14568, 14572, ...) are the
   original file's line numbers fused into the text, single statements
   are split across several physical lines, and gaps in that numbering
   (e.g. 14574 -> 14577, 14585 -> 14588, 14592 -> 14598) show that whole
   source lines are missing (the function's closing "return ret;" among
   them).  The code tokens below are kept byte-identical; only comments
   are added.  Do not compile this text as-is -- recover the real source
   from the gimplify.c revision it was extracted from.  */
14572 static enum gimplify_status
14573 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
14574 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
/* Record whether the incoming expression was already an SSA_NAME, so
   the code below knows whether to copy it into an initialized
   temporary or merely redirect its defining statement.  */
14577 bool was_ssa_name_p
= TREE_CODE (*expr_p
) == SSA_NAME
;
/* Delegate the actual gimplification to the main gimplify_expr
   overload.  */
14578 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
14579 gimple_test_f
, fallback
);
/* Guard fragment: the first operand of this condition (original line
   14580) is missing from the extraction; the surviving part tests
   that the gimplified result is an SSA_NAME.  */
14581 && TREE_CODE (*expr_p
) == SSA_NAME
)
14583 tree name
= *expr_p
;
14584 if (was_ssa_name_p
)
/* The input was itself an SSA name: materialize an initialized
   temporary for it.  */
14585 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
14588 /* Avoid the extra copy if possible. */
14589 *expr_p
= create_tmp_reg (TREE_TYPE (name
));
/* Point the SSA name's defining statement at the new temporary (when
   one exists) and release the now-unused SSA name.  */
14590 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name
)))
14591 gimple_set_lhs (SSA_NAME_DEF_STMT (name
), *expr_p
);
14592 release_ssa_name (name
);
14598 /* Look through TYPE for variable-sized objects and gimplify each such
14599 size that we find. Add to LIST_P any statements generated. */
/* NOTE(review): lossy extraction of GCC's gimplify.c -- the leading
   integers are the original file's line numbers fused into the text,
   statements are split across physical lines, and gaps in that
   numbering (14603-14605, 14611, 14614-14615, 14617, 14619-14620,
   14622-14623, 14627, 14629, 14632-14635, 14641, 14647, 14654-14658,
   14662, 14667-14670, 14675, 14684-14689, 14692, 14694) show that
   whole source lines are missing: several case labels, braces, break
   statements, and notably the line closing the comment opened at
   14639.  Code tokens below are kept byte-identical; only comments are
   added, and none are placed inside the unterminated 14639 comment
   region so the (already broken) comment nesting is unchanged.  */
14602 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
/* Nothing to do for a missing or erroneous type.  */
14606 if (type
== NULL
|| type
== error_mark_node
)
14609 /* We first do the main variant, then copy into any other variants. */
14610 type
= TYPE_MAIN_VARIANT (type
);
14612 /* Avoid infinite recursion. */
14613 if (TYPE_SIZES_GIMPLIFIED (type
))
14616 TYPE_SIZES_GIMPLIFIED (type
) = 1;
/* Dispatch on the kind of type.  Several case labels (e.g. the
   integer and ARRAY_TYPE labels, judging by the numbering gaps) were
   lost in extraction.  */
14618 switch (TREE_CODE (type
))
14621 case ENUMERAL_TYPE
:
14624 case FIXED_POINT_TYPE
:
/* Scalar-like types: gimplify the min/max bounds, then propagate the
   gimplified bounds to every variant of the type.  */
14625 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
14626 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
14628 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
14630 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
14631 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
/* Array handling (its case label is among the lines lost in
   extraction): recurse on the element type and the index domain.  */
14636 /* These types may not have declarations, so handle them here. */
14637 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
14638 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
14639 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
14640 with assigned stack slots, for -O1+ -g they should be tracked
14642 if (!(TYPE_NAME (type
)
14643 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
14644 && DECL_IGNORED_P (TYPE_NAME (type
)))
14645 && TYPE_DOMAIN (type
)
14646 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
14648 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
14649 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
14650 DECL_IGNORED_P (t
) = 0;
14651 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
14652 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
14653 DECL_IGNORED_P (t
) = 0;
14659 case QUAL_UNION_TYPE
:
14660 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
14661 if (TREE_CODE (field
) == FIELD_DECL
)
14663 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
14664 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
14665 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
14666 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
14671 case REFERENCE_TYPE
:
14672 /* We used to recurse on the pointed-to type here, which turned out to
14673 be incorrect because its definition might refer to variables not
14674 yet initialized at this point if a forward declaration is involved.
14676 It was actually useful for anonymous pointed-to types to ensure
14677 that the sizes evaluation dominates every possible later use of the
14678 values. Restricting to such types here would be safe since there
14679 is no possible forward declaration around, but would introduce an
14680 undesirable middle-end semantic to anonymity. We then defer to
14681 front-ends the responsibility of ensuring that the sizes are
14682 evaluated both early and late enough, e.g. by attaching artificial
14683 type declarations to the tree. */
/* After the switch: gimplify the type's overall size and size-in-units,
   then copy the gimplified sizes into every variant and mark each
   variant's sizes as gimplified.  */
14690 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
14691 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
14693 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
14695 TYPE_SIZE (t
) = TYPE_SIZE (type
);
14696 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
14697 TYPE_SIZES_GIMPLIFIED (t
) = 1;
14701 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
14702 a size or position, has had all of its SAVE_EXPRs evaluated.
14703 We add any required statements to *STMT_P. */
/* NOTE(review): lossy extraction of GCC's gimplify.c -- the leading
   integers are the original file's line numbers fused into the text,
   statements are split across physical lines, and gaps in that
   numbering (14704-14705, 14707, 14709, 14719-14720, 14722, 14726,
   14732-14733) show that whole source lines are missing (the return
   type line and the early-return body among them).  Code tokens below
   are kept byte-identical; only comments are added.  */
14706 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
14708 tree expr
= *expr_p
;
14710 /* We don't do anything if the value isn't there, is constant, or contains
14711 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
14712 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
14713 will want to replace it with a new variable, but that will cause problems
14714 if this type is from outside the function. It's OK to have that here. */
14715 if (expr
== NULL_TREE
14716 || is_gimple_constant (expr
)
14717 || TREE_CODE (expr
) == VAR_DECL
14718 || CONTAINS_PLACEHOLDER_P (expr
))
/* Work on a private copy so trees shared across types are not
   modified in place.  */
14721 *expr_p
= unshare_expr (expr
);
14723 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
14724 if the def vanishes. */
14725 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
14727 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
14728 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
14729 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
14730 if (is_gimple_constant (*expr_p
))
14731 *expr_p
= get_initialized_tmp_var (*expr_p
, stmt_p
, NULL
, false);
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* There must be no outstanding gimplification context: this function
     owns the whole gimplification of FNDECL.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      /* "omp declare target" functions get an implicit target context so
	 that clauses are resolved as if inside a target region.  */
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_nondebug_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs one statement to hang the bind on.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && (gimple_seq_first_nondebug_stmt (seq)
	  == gimple_seq_last_nondebug_stmt (seq)))
    {
      outer_bind = as_a <gbind *> (outer_stmt);
      if (gimple_seq_first_stmt (seq) != outer_stmt
	  || gimple_seq_last_stmt (seq) != outer_stmt)
	{
	  /* If there are debug stmts before or after outer_stmt, move them
	     inside of outer_bind body.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
	  gimple_seq second_seq = NULL;
	  if (gimple_seq_first_stmt (seq) != outer_stmt
	      && gimple_seq_last_stmt (seq) != outer_stmt)
	    {
	      /* Debug stmts on both sides: split SEQ around OUTER_STMT.  */
	      second_seq = gsi_split_seq_after (gsi);
	      gsi_remove (&gsi, false);
	    }
	  else if (gimple_seq_first_stmt (seq) != outer_stmt)
	    /* Debug stmts only before OUTER_STMT.  */
	    gsi_remove (&gsi, false);
	  else
	    {
	      /* Debug stmts only after OUTER_STMT.  */
	      gsi_remove (&gsi, false);
	      second_seq = seq;
	      seq = NULL;
	    }
	  gimple_seq_add_seq_without_update (&seq,
					     gimple_bind_body (outer_bind));
	  gimple_seq_add_seq_without_update (&seq, second_seq);
	  gimple_bind_set_body (outer_bind, seq);
	}
    }
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  /* Wrap the body in a try/finally so the parameter cleanups run
	     on every exit path.  */
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
typedef char *char_p; /* For DEF_VEC_P.  */

/* Return whether we should exclude FNDECL from instrumentation.  */

static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
  vec<char_p> *v;

  /* Substring-match the printable function name against the
     user-supplied list of function names to exclude.  */
  v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = lang_hooks.decl_printable_name (fndecl, 1);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  /* Likewise for the source file the declaration came from, against the
     user-supplied list of file names to exclude.  */
  v = (vec<char_p> *) flag_instrument_functions_exclude_files;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = DECL_SOURCE_FILE (fndecl);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  return false;
}
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var, this_fn_addr;
      gcall *call;

      /* The instrumentation hooks aren't going to call the instrumented
	 function and the address they receive is expected to be matchable
	 against symbol addresses.  Make sure we don't create a trampoline,
	 in case the current function is nested.  */
      this_fn_addr = build_fold_addr_expr (current_function_decl);
      TREE_NO_TRAMPOLINE (this_fn_addr) = 1;

      /* Exit hook: __builtin_return_address (0) feeds
	 __cyg_profile_func_exit; runs as the "finally" cleanup.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Entry hook: same shape, prepended before the wrapped body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  if (sanitize_flags_p (SANITIZE_THREAD))
    {
      /* For -fsanitize=thread, arrange for __tsan_func_exit to run on
	 every exit path via a try/finally around the whole body.  */
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
15022 /* Return a dummy expression of type TYPE in order to keep going after an
15026 dummy_object (tree type
)
15028 tree t
= build_int_cst (build_pointer_type (type
), 0);
15029 return build2 (MEM_REF
, type
, t
, t
);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      /* The follow-up hint is emitted only once per compilation.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* Lower to the IFN_VA_ARG internal function; TAG carries the requested
     type and APTAG the va_list type, both as zero pointer constants.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  /* The MODIFY_EXPR tree was only a vehicle for gimplification; free it
     immediately so GC can recycle it.  */
  ggc_free (t);

  return gimple_seq_last_stmt (*seq_p);
}
15131 gimplify_hasher::hash (const elt_t
*p
)
15134 return iterative_hash_expr (t
, 0);
15138 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
15142 enum tree_code code
= TREE_CODE (t1
);
15144 if (TREE_CODE (t2
) != code
15145 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
15148 if (!operand_equal_p (t1
, t2
, 0))
15151 /* Only allow them to compare equal if they also hash equal; otherwise
15152 results are nondeterminate, and we fail bootstrap comparison. */
15153 gcc_checking_assert (hash (p1
) == hash (p2
));