1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
38 #include "fold-const.h"
43 #include "gimple-fold.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
54 #include "omp-general.h"
56 #include "gimple-low.h"
58 #include "gomp-constants.h"
59 #include "splay-tree.h"
60 #include "gimple-walk.h"
61 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
66 /* Hash set of poisoned variables in a bind expr. */
67 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
69 enum gimplify_omp_var_data
75 GOVD_FIRSTPRIVATE
= 16,
76 GOVD_LASTPRIVATE
= 32,
80 GOVD_DEBUG_PRIVATE
= 512,
81 GOVD_PRIVATE_OUTER_REF
= 1024,
85 /* Flag for GOVD_MAP: don't copy back. */
86 GOVD_MAP_TO_ONLY
= 8192,
88 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
89 GOVD_LINEAR_LASTPRIVATE_NO_OUTER
= 16384,
91 GOVD_MAP_0LEN_ARRAY
= 32768,
93 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
94 GOVD_MAP_ALWAYS_TO
= 65536,
96 /* Flag for shared vars that are or might be stored to in the region. */
97 GOVD_WRITTEN
= 131072,
99 /* Flag for GOVD_MAP, if it is a forced mapping. */
100 GOVD_MAP_FORCE
= 262144,
102 /* Flag for GOVD_MAP: must be present already. */
103 GOVD_MAP_FORCE_PRESENT
= 524288,
105 GOVD_DATA_SHARE_CLASS
= (GOVD_SHARED
| GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
106 | GOVD_LASTPRIVATE
| GOVD_REDUCTION
| GOVD_LINEAR
113 ORT_WORKSHARE
= 0x00,
117 ORT_COMBINED_PARALLEL
= 0x03,
120 ORT_UNTIED_TASK
= 0x05,
123 ORT_COMBINED_TEAMS
= 0x09,
126 ORT_TARGET_DATA
= 0x10,
128 /* Data region with offloading. */
130 ORT_COMBINED_TARGET
= 0x21,
132 /* OpenACC variants. */
133 ORT_ACC
= 0x40, /* A generic OpenACC region. */
134 ORT_ACC_DATA
= ORT_ACC
| ORT_TARGET_DATA
, /* Data construct. */
135 ORT_ACC_PARALLEL
= ORT_ACC
| ORT_TARGET
, /* Parallel construct */
136 ORT_ACC_KERNELS
= ORT_ACC
| ORT_TARGET
| 0x80, /* Kernels construct. */
137 ORT_ACC_HOST_DATA
= ORT_ACC
| ORT_TARGET_DATA
| 0x80, /* Host data. */
139 /* Dummy OpenMP region, used to disable expansion of
140 DECL_VALUE_EXPRs in taskloop pre body. */
144 /* Gimplify hashtable helper. */
146 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
148 static inline hashval_t
hash (const elt_t
*);
149 static inline bool equal (const elt_t
*, const elt_t
*);
154 struct gimplify_ctx
*prev_context
;
156 vec
<gbind
*> bind_expr_stack
;
158 gimple_seq conditional_cleanups
;
162 vec
<tree
> case_labels
;
163 hash_set
<tree
> *live_switch_vars
;
164 /* The formal temporary table. Should this be persistent? */
165 hash_table
<gimplify_hasher
> *temp_htab
;
168 unsigned into_ssa
: 1;
169 unsigned allow_rhs_cond_expr
: 1;
170 unsigned in_cleanup_point_expr
: 1;
171 unsigned keep_stack
: 1;
172 unsigned save_stack
: 1;
173 unsigned in_switch_expr
: 1;
176 struct gimplify_omp_ctx
178 struct gimplify_omp_ctx
*outer_context
;
179 splay_tree variables
;
180 hash_set
<tree
> *privatized_types
;
181 /* Iteration variables in an OMP_FOR. */
182 vec
<tree
> loop_iter_var
;
184 enum omp_clause_default_kind default_kind
;
185 enum omp_region_type region_type
;
188 bool target_map_scalars_firstprivate
;
189 bool target_map_pointers_as_0len_arrays
;
190 bool target_firstprivatize_array_bases
;
/* Current gimplification context: innermost pushed gimplify_ctx, and the
   innermost enclosing OMP region context (NULL outside OMP regions).  */
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
196 /* Forward declaration. */
197 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
198 static hash_map
<tree
, tree
> *oacc_declare_returns
;
199 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
200 bool (*) (tree
), fallback_t
, bool);
202 /* Shorter alias name for the above function for use in gimplify.c
206 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
208 gimple_seq_add_stmt_without_update (seq_p
, gs
);
211 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
212 NULL, a new sequence is allocated. This function is
213 similar to gimple_seq_add_seq, but does not scan the operands.
214 During gimplification, we need to manipulate statement sequences
215 before the def/use vectors have been constructed. */
218 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
220 gimple_stmt_iterator si
;
225 si
= gsi_last (*dst_p
);
226 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
230 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
231 and popping gimplify contexts. */
233 static struct gimplify_ctx
*ctx_pool
= NULL
;
235 /* Return a gimplify context struct from the pool. */
237 static inline struct gimplify_ctx
*
240 struct gimplify_ctx
* c
= ctx_pool
;
243 ctx_pool
= c
->prev_context
;
245 c
= XNEW (struct gimplify_ctx
);
247 memset (c
, '\0', sizeof (*c
));
251 /* Put gimplify context C back into the pool. */
254 ctx_free (struct gimplify_ctx
*c
)
256 c
->prev_context
= ctx_pool
;
260 /* Free allocated ctx stack memory. */
263 free_gimplify_stack (void)
265 struct gimplify_ctx
*c
;
267 while ((c
= ctx_pool
))
269 ctx_pool
= c
->prev_context
;
275 /* Set up a context for the gimplifier. */
278 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
280 struct gimplify_ctx
*c
= ctx_alloc ();
282 c
->prev_context
= gimplify_ctxp
;
284 gimplify_ctxp
->into_ssa
= in_ssa
;
285 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
288 /* Tear down a context for the gimplifier. If BODY is non-null, then
289 put the temporaries into the outer BIND_EXPR. Otherwise, put them
292 BODY is not a sequence, but the first tuple in a sequence. */
295 pop_gimplify_context (gimple
*body
)
297 struct gimplify_ctx
*c
= gimplify_ctxp
;
300 && (!c
->bind_expr_stack
.exists ()
301 || c
->bind_expr_stack
.is_empty ()));
302 c
->bind_expr_stack
.release ();
303 gimplify_ctxp
= c
->prev_context
;
306 declare_vars (c
->temps
, body
, false);
308 record_vars (c
->temps
);
315 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
318 gimple_push_bind_expr (gbind
*bind_stmt
)
320 gimplify_ctxp
->bind_expr_stack
.reserve (8);
321 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
324 /* Pop the first element off the stack of bindings. */
327 gimple_pop_bind_expr (void)
329 gimplify_ctxp
->bind_expr_stack
.pop ();
332 /* Return the first element of the stack of bindings. */
335 gimple_current_bind_expr (void)
337 return gimplify_ctxp
->bind_expr_stack
.last ();
340 /* Return the stack of bindings created during gimplification. */
343 gimple_bind_expr_stack (void)
345 return gimplify_ctxp
->bind_expr_stack
;
348 /* Return true iff there is a COND_EXPR between us and the innermost
349 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
352 gimple_conditional_context (void)
354 return gimplify_ctxp
->conditions
> 0;
357 /* Note that we've entered a COND_EXPR. */
360 gimple_push_condition (void)
362 #ifdef ENABLE_GIMPLE_CHECKING
363 if (gimplify_ctxp
->conditions
== 0)
364 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
366 ++(gimplify_ctxp
->conditions
);
369 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
370 now, add any conditional cleanups we've seen to the prequeue. */
373 gimple_pop_condition (gimple_seq
*pre_p
)
375 int conds
= --(gimplify_ctxp
->conditions
);
377 gcc_assert (conds
>= 0);
380 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
381 gimplify_ctxp
->conditional_cleanups
= NULL
;
385 /* A stable comparison routine for use with splay trees and DECLs. */
388 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
393 return DECL_UID (a
) - DECL_UID (b
);
396 /* Create a new omp construct that deals with variable remapping. */
398 static struct gimplify_omp_ctx
*
399 new_omp_context (enum omp_region_type region_type
)
401 struct gimplify_omp_ctx
*c
;
403 c
= XCNEW (struct gimplify_omp_ctx
);
404 c
->outer_context
= gimplify_omp_ctxp
;
405 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
406 c
->privatized_types
= new hash_set
<tree
>;
407 c
->location
= input_location
;
408 c
->region_type
= region_type
;
409 if ((region_type
& ORT_TASK
) == 0)
410 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
412 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
417 /* Destroy an omp construct that deals with variable remapping. */
420 delete_omp_context (struct gimplify_omp_ctx
*c
)
422 splay_tree_delete (c
->variables
);
423 delete c
->privatized_types
;
424 c
->loop_iter_var
.release ();
428 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
429 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
431 /* Both gimplify the statement T and append it to *SEQ_P. This function
432 behaves exactly as gimplify_stmt, but you don't have to pass T as a
436 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
438 gimplify_stmt (&t
, seq_p
);
441 /* Gimplify statement T into sequence *SEQ_P, and return the first
442 tuple in the sequence of generated tuples for this statement.
443 Return NULL if gimplifying T produced no tuples. */
446 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
448 gimple_stmt_iterator last
= gsi_last (*seq_p
);
450 gimplify_and_add (t
, seq_p
);
452 if (!gsi_end_p (last
))
455 return gsi_stmt (last
);
458 return gimple_seq_first_stmt (*seq_p
);
461 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
462 LHS, or for a call argument. */
465 is_gimple_mem_rhs (tree t
)
467 /* If we're dealing with a renamable type, either source or dest must be
468 a renamed variable. */
469 if (is_gimple_reg_type (TREE_TYPE (t
)))
470 return is_gimple_val (t
);
472 return is_gimple_val (t
) || is_gimple_lvalue (t
);
475 /* Return true if T is a CALL_EXPR or an expression that can be
476 assigned to a temporary. Note that this predicate should only be
477 used during gimplification. See the rationale for this in
478 gimplify_modify_expr. */
481 is_gimple_reg_rhs_or_call (tree t
)
483 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
484 || TREE_CODE (t
) == CALL_EXPR
);
487 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
488 this predicate should only be used during gimplification. See the
489 rationale for this in gimplify_modify_expr. */
492 is_gimple_mem_rhs_or_call (tree t
)
494 /* If we're dealing with a renamable type, either source or dest must be
495 a renamed variable. */
496 if (is_gimple_reg_type (TREE_TYPE (t
)))
497 return is_gimple_val (t
);
499 return (is_gimple_val (t
)
500 || is_gimple_lvalue (t
)
501 || TREE_CLOBBER_P (t
)
502 || TREE_CODE (t
) == CALL_EXPR
);
505 /* Create a temporary with a name derived from VAL. Subroutine of
506 lookup_tmp_var; nobody else should call this function. */
509 create_tmp_from_val (tree val
)
511 /* Drop all qualifiers and address-space information from the value type. */
512 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
513 tree var
= create_tmp_var (type
, get_name (val
));
514 if (TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
515 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
)
516 DECL_GIMPLE_REG_P (var
) = 1;
520 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
521 an existing expression temporary. */
524 lookup_tmp_var (tree val
, bool is_formal
)
528 /* If not optimizing, never really reuse a temporary. local-alloc
529 won't allocate any variable that is used in more than one basic
530 block, which means it will go into memory, causing much extra
531 work in reload and final and poorer code generation, outweighing
532 the extra memory allocation here. */
533 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
534 ret
= create_tmp_from_val (val
);
541 if (!gimplify_ctxp
->temp_htab
)
542 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
543 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
546 elt_p
= XNEW (elt_t
);
548 elt_p
->temp
= ret
= create_tmp_from_val (val
);
561 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
564 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
565 bool is_formal
, bool allow_ssa
)
569 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
570 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
571 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
575 && gimplify_ctxp
->into_ssa
576 && is_gimple_reg_type (TREE_TYPE (val
)))
578 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
579 if (! gimple_in_ssa_p (cfun
))
581 const char *name
= get_name (val
);
583 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
587 t
= lookup_tmp_var (val
, is_formal
);
589 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
591 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
593 /* gimplify_modify_expr might want to reduce this further. */
594 gimplify_and_add (mod
, pre_p
);
600 /* Return a formal temporary variable initialized with VAL. PRE_P is as
601 in gimplify_expr. Only use this function if:
603 1) The value of the unfactored expression represented by VAL will not
604 change between the initialization and use of the temporary, and
605 2) The temporary will not be otherwise modified.
607 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
608 and #2 means it is inappropriate for && temps.
610 For other cases, use get_initialized_tmp_var instead. */
613 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
615 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true);
618 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
619 are as in gimplify_expr. */
622 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
625 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
);
628 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
629 generate debug info for them; otherwise don't. */
632 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
639 gbind
*scope
= as_a
<gbind
*> (gs
);
641 temps
= nreverse (last
);
643 block
= gimple_bind_block (scope
);
644 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
645 if (!block
|| !debug_info
)
647 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
648 gimple_bind_set_vars (scope
, temps
);
652 /* We need to attach the nodes both to the BIND_EXPR and to its
653 associated BLOCK for debugging purposes. The key point here
654 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
655 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
656 if (BLOCK_VARS (block
))
657 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
660 gimple_bind_set_vars (scope
,
661 chainon (gimple_bind_vars (scope
), temps
));
662 BLOCK_VARS (block
) = temps
;
668 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
669 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
670 no such upper bound can be obtained. */
673 force_constant_size (tree var
)
675 /* The only attempt we make is by querying the maximum size of objects
676 of the variable's type. */
678 HOST_WIDE_INT max_size
;
680 gcc_assert (VAR_P (var
));
682 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
684 gcc_assert (max_size
>= 0);
687 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
689 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
692 /* Push the temporary variable TMP into the current binding. */
695 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
697 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
699 /* Later processing assumes that the object size is constant, which might
700 not be true at this point. Force the use of a constant upper bound in
702 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp
)))
703 force_constant_size (tmp
);
705 DECL_CONTEXT (tmp
) = fn
->decl
;
706 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
708 record_vars_into (tmp
, fn
->decl
);
711 /* Push the temporary variable TMP into the current binding. */
714 gimple_add_tmp_var (tree tmp
)
716 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
718 /* Later processing assumes that the object size is constant, which might
719 not be true at this point. Force the use of a constant upper bound in
721 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp
)))
722 force_constant_size (tmp
);
724 DECL_CONTEXT (tmp
) = current_function_decl
;
725 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
729 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
730 gimplify_ctxp
->temps
= tmp
;
732 /* Mark temporaries local within the nearest enclosing parallel. */
733 if (gimplify_omp_ctxp
)
735 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
737 && (ctx
->region_type
== ORT_WORKSHARE
738 || ctx
->region_type
== ORT_SIMD
739 || ctx
->region_type
== ORT_ACC
))
740 ctx
= ctx
->outer_context
;
742 omp_add_variable (ctx
, tmp
, GOVD_LOCAL
| GOVD_SEEN
);
751 /* This case is for nested functions. We need to expose the locals
753 body_seq
= gimple_body (current_function_decl
);
754 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
760 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
761 nodes that are referenced more than once in GENERIC functions. This is
762 necessary because gimplification (translation into GIMPLE) is performed
763 by modifying tree nodes in-place, so gimplication of a shared node in a
764 first context could generate an invalid GIMPLE form in a second context.
766 This is achieved with a simple mark/copy/unmark algorithm that walks the
767 GENERIC representation top-down, marks nodes with TREE_VISITED the first
768 time it encounters them, duplicates them if they already have TREE_VISITED
769 set, and finally removes the TREE_VISITED marks it has set.
771 The algorithm works only at the function level, i.e. it generates a GENERIC
772 representation of a function with no nodes shared within the function when
773 passed a GENERIC function (except for nodes that are allowed to be shared).
775 At the global level, it is also necessary to unshare tree nodes that are
776 referenced in more than one function, for the same aforementioned reason.
777 This requires some cooperation from the front-end. There are 2 strategies:
779 1. Manual unsharing. The front-end needs to call unshare_expr on every
780 expression that might end up being shared across functions.
782 2. Deep unsharing. This is an extension of regular unsharing. Instead
783 of calling unshare_expr on expressions that might be shared across
784 functions, the front-end pre-marks them with TREE_VISITED. This will
785 ensure that they are unshared on the first reference within functions
786 when the regular unsharing algorithm runs. The counterpart is that
787 this algorithm must look deeper than for manual unsharing, which is
788 specified by LANG_HOOKS_DEEP_UNSHARING.
790 If there are only few specific cases of node sharing across functions, it is
791 probably easier for a front-end to unshare the expressions manually. On the
792 contrary, if the expressions generated at the global level are as widespread
793 as expressions generated within functions, deep unsharing is very likely the
796 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
797 These nodes model computations that must be done once. If we were to
798 unshare something like SAVE_EXPR(i++), the gimplification process would
799 create wrong code. However, if DATA is non-null, it must hold a pointer
800 set that is used to unshare the subtrees of these nodes. */
803 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
806 enum tree_code code
= TREE_CODE (t
);
808 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
809 copy their subtrees if we can make sure to do it only once. */
810 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
812 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
818 /* Stop at types, decls, constants like copy_tree_r. */
819 else if (TREE_CODE_CLASS (code
) == tcc_type
820 || TREE_CODE_CLASS (code
) == tcc_declaration
821 || TREE_CODE_CLASS (code
) == tcc_constant
)
824 /* Cope with the statement expression extension. */
825 else if (code
== STATEMENT_LIST
)
828 /* Leave the bulk of the work to copy_tree_r itself. */
830 copy_tree_r (tp
, walk_subtrees
, NULL
);
835 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
836 If *TP has been visited already, then *TP is deeply copied by calling
837 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
840 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
843 enum tree_code code
= TREE_CODE (t
);
845 /* Skip types, decls, and constants. But we do want to look at their
846 types and the bounds of types. Mark them as visited so we properly
847 unmark their subtrees on the unmark pass. If we've already seen them,
848 don't look down further. */
849 if (TREE_CODE_CLASS (code
) == tcc_type
850 || TREE_CODE_CLASS (code
) == tcc_declaration
851 || TREE_CODE_CLASS (code
) == tcc_constant
)
853 if (TREE_VISITED (t
))
856 TREE_VISITED (t
) = 1;
859 /* If this node has been visited already, unshare it and don't look
861 else if (TREE_VISITED (t
))
863 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
867 /* Otherwise, mark the node as visited and keep looking. */
869 TREE_VISITED (t
) = 1;
874 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
875 copy_if_shared_r callback unmodified. */
878 copy_if_shared (tree
*tp
, void *data
)
880 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
883 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
884 any nested functions. */
887 unshare_body (tree fndecl
)
889 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
890 /* If the language requires deep unsharing, we need a pointer set to make
891 sure we don't repeatedly unshare subtrees of unshareable nodes. */
892 hash_set
<tree
> *visited
893 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
895 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
896 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
897 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
902 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
903 unshare_body (cgn
->decl
);
906 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
907 Subtrees are walked until the first unvisited node is encountered. */
910 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
914 /* If this node has been visited, unmark it and keep looking. */
915 if (TREE_VISITED (t
))
916 TREE_VISITED (t
) = 0;
918 /* Otherwise, don't look any deeper. */
925 /* Unmark the visited trees rooted at *TP. */
928 unmark_visited (tree
*tp
)
930 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
933 /* Likewise, but mark all trees as not visited. */
936 unvisit_body (tree fndecl
)
938 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
940 unmark_visited (&DECL_SAVED_TREE (fndecl
));
941 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
942 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
945 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
946 unvisit_body (cgn
->decl
);
949 /* Unconditionally make an unshared copy of EXPR. This is used when using
950 stored expressions which span multiple functions, such as BINFO_VTABLE,
951 as the normal unsharing process can't tell that they're shared. */
954 unshare_expr (tree expr
)
956 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
960 /* Worker for unshare_expr_without_location. */
963 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
966 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
972 /* Similar to unshare_expr but also prune all expression locations
976 unshare_expr_without_location (tree expr
)
978 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
980 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
984 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
985 contain statements and have a value. Assign its value to a temporary
986 and give it void_type_node. Return the temporary, or NULL_TREE if
987 WRAPPER was already void. */
990 voidify_wrapper_expr (tree wrapper
, tree temp
)
992 tree type
= TREE_TYPE (wrapper
);
993 if (type
&& !VOID_TYPE_P (type
))
997 /* Set p to point to the body of the wrapper. Loop until we find
998 something that isn't a wrapper. */
999 for (p
= &wrapper
; p
&& *p
; )
1001 switch (TREE_CODE (*p
))
1004 TREE_SIDE_EFFECTS (*p
) = 1;
1005 TREE_TYPE (*p
) = void_type_node
;
1006 /* For a BIND_EXPR, the body is operand 1. */
1007 p
= &BIND_EXPR_BODY (*p
);
1010 case CLEANUP_POINT_EXPR
:
1011 case TRY_FINALLY_EXPR
:
1012 case TRY_CATCH_EXPR
:
1013 TREE_SIDE_EFFECTS (*p
) = 1;
1014 TREE_TYPE (*p
) = void_type_node
;
1015 p
= &TREE_OPERAND (*p
, 0);
1018 case STATEMENT_LIST
:
1020 tree_stmt_iterator i
= tsi_last (*p
);
1021 TREE_SIDE_EFFECTS (*p
) = 1;
1022 TREE_TYPE (*p
) = void_type_node
;
1023 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1028 /* Advance to the last statement. Set all container types to
1030 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1032 TREE_SIDE_EFFECTS (*p
) = 1;
1033 TREE_TYPE (*p
) = void_type_node
;
1037 case TRANSACTION_EXPR
:
1038 TREE_SIDE_EFFECTS (*p
) = 1;
1039 TREE_TYPE (*p
) = void_type_node
;
1040 p
= &TRANSACTION_EXPR_BODY (*p
);
1044 /* Assume that any tree upon which voidify_wrapper_expr is
1045 directly called is a wrapper, and that its body is op0. */
1048 TREE_SIDE_EFFECTS (*p
) = 1;
1049 TREE_TYPE (*p
) = void_type_node
;
1050 p
= &TREE_OPERAND (*p
, 0);
1058 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1062 /* The wrapper is on the RHS of an assignment that we're pushing
1064 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1065 || TREE_CODE (temp
) == MODIFY_EXPR
);
1066 TREE_OPERAND (temp
, 1) = *p
;
1071 temp
= create_tmp_var (type
, "retval");
1072 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1081 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1082 a temporary through which they communicate. */
1085 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1089 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1090 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1091 gimple_call_set_lhs (*save
, tmp_var
);
1094 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1098 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1101 build_asan_poison_call_expr (tree decl
)
1103 /* Do not poison variables that have size equal to zero. */
1104 tree unit_size
= DECL_SIZE_UNIT (decl
);
1105 if (zerop (unit_size
))
1108 tree base
= build_fold_addr_expr (decl
);
1110 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1112 build_int_cst (integer_type_node
,
1117 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1118 on POISON flag, shadow memory of a DECL variable. The call will be
1119 put on location identified by IT iterator, where BEFORE flag drives
1120 position where the stmt will be put. */
1123 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1126 /* When within an OMP context, do not emit ASAN_MARK internal fns. */
1127 if (gimplify_omp_ctxp
)
1130 tree unit_size
= DECL_SIZE_UNIT (decl
);
1131 tree base
= build_fold_addr_expr (decl
);
1133 /* Do not poison variables that have size equal to zero. */
1134 if (zerop (unit_size
))
1137 /* It's necessary to have all stack variables aligned to ASAN granularity
1139 if (DECL_ALIGN_UNIT (decl
) <= ASAN_SHADOW_GRANULARITY
)
1140 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* ASAN_SHADOW_GRANULARITY
);
1142 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1145 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1146 build_int_cst (integer_type_node
, flags
),
1150 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1152 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1155 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1156 either poisons or unpoisons a DECL. Created statement is appended
1157 to SEQ_P gimple sequence. */
1160 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1162 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1163 bool before
= false;
1168 asan_poison_variable (decl
, poison
, &it
, before
);
1171 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1174 sort_by_decl_uid (const void *a
, const void *b
)
1176 const tree
*t1
= (const tree
*)a
;
1177 const tree
*t2
= (const tree
*)b
;
1179 int uid1
= DECL_UID (*t1
);
1180 int uid2
= DECL_UID (*t2
);
1184 else if (uid1
> uid2
)
1190 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1191 depending on POISON flag. Created statement is appended
1192 to SEQ_P gimple sequence. */
1195 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1197 unsigned c
= variables
->elements ();
1201 auto_vec
<tree
> sorted_variables (c
);
1203 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1204 it
!= variables
->end (); ++it
)
1205 sorted_variables
.safe_push (*it
);
1207 sorted_variables
.qsort (sort_by_decl_uid
);
1211 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1213 asan_poison_variable (var
, poison
, seq_p
);
1215 /* Add use_after_scope_memory attribute for the variable in order
1216 to prevent re-written into SSA. */
1217 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1218 DECL_ATTRIBUTES (var
)))
1219 DECL_ATTRIBUTES (var
)
1220 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1222 DECL_ATTRIBUTES (var
));
1226 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1228 static enum gimplify_status
1229 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1231 tree bind_expr
= *expr_p
;
1232 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1233 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1236 gimple_seq body
, cleanup
;
1238 location_t start_locus
= 0, end_locus
= 0;
1239 tree ret_clauses
= NULL
;
1241 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1243 /* Mark variables seen in this bind expr. */
1244 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1248 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1250 /* Mark variable as local. */
1251 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
)
1252 && (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1253 || splay_tree_lookup (ctx
->variables
,
1254 (splay_tree_key
) t
) == NULL
))
1256 if (ctx
->region_type
== ORT_SIMD
1257 && TREE_ADDRESSABLE (t
)
1258 && !TREE_STATIC (t
))
1259 omp_add_variable (ctx
, t
, GOVD_PRIVATE
| GOVD_SEEN
);
1261 omp_add_variable (ctx
, t
, GOVD_LOCAL
| GOVD_SEEN
);
1264 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1266 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1267 cfun
->has_local_explicit_reg_vars
= true;
1270 /* Preliminarily mark non-addressed complex variables as eligible
1271 for promotion to gimple registers. We'll transform their uses
1273 if ((TREE_CODE (TREE_TYPE (t
)) == COMPLEX_TYPE
1274 || TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
1275 && !TREE_THIS_VOLATILE (t
)
1276 && (VAR_P (t
) && !DECL_HARD_REGISTER (t
))
1277 && !needs_to_live_in_memory (t
))
1278 DECL_GIMPLE_REG_P (t
) = 1;
1281 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1282 BIND_EXPR_BLOCK (bind_expr
));
1283 gimple_push_bind_expr (bind_stmt
);
1285 gimplify_ctxp
->keep_stack
= false;
1286 gimplify_ctxp
->save_stack
= false;
1288 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1290 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1291 gimple_bind_set_body (bind_stmt
, body
);
1293 /* Source location wise, the cleanup code (stack_restore and clobbers)
1294 belongs to the end of the block, so propagate what we have. The
1295 stack_save operation belongs to the beginning of block, which we can
1296 infer from the bind_expr directly if the block has no explicit
1298 if (BIND_EXPR_BLOCK (bind_expr
))
1300 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1301 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1303 if (start_locus
== 0)
1304 start_locus
= EXPR_LOCATION (bind_expr
);
1309 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1310 the stack space allocated to the VLAs. */
1311 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1313 gcall
*stack_restore
;
1315 /* Save stack on entry and restore it on exit. Add a try_finally
1316 block to achieve this. */
1317 build_stack_save_restore (&stack_save
, &stack_restore
);
1319 gimple_set_location (stack_save
, start_locus
);
1320 gimple_set_location (stack_restore
, end_locus
);
1322 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1325 /* Add clobbers for all variables that go out of scope. */
1326 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1329 && !is_global_var (t
)
1330 && DECL_CONTEXT (t
) == current_function_decl
)
1332 if (!DECL_HARD_REGISTER (t
)
1333 && !TREE_THIS_VOLATILE (t
)
1334 && !DECL_HAS_VALUE_EXPR_P (t
)
1335 /* Only care for variables that have to be in memory. Others
1336 will be rewritten into SSA names, hence moved to the
1338 && !is_gimple_reg (t
)
1339 && flag_stack_reuse
!= SR_NONE
)
1341 tree clobber
= build_constructor (TREE_TYPE (t
), NULL
);
1342 gimple
*clobber_stmt
;
1343 TREE_THIS_VOLATILE (clobber
) = 1;
1344 clobber_stmt
= gimple_build_assign (t
, clobber
);
1345 gimple_set_location (clobber_stmt
, end_locus
);
1346 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1349 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1351 tree
*c
= oacc_declare_returns
->get (t
);
1355 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1359 oacc_declare_returns
->remove (t
);
1361 if (oacc_declare_returns
->elements () == 0)
1363 delete oacc_declare_returns
;
1364 oacc_declare_returns
= NULL
;
1370 if (asan_poisoned_variables
!= NULL
1371 && asan_poisoned_variables
->contains (t
))
1373 asan_poisoned_variables
->remove (t
);
1374 asan_poison_variable (t
, true, &cleanup
);
1377 if (gimplify_ctxp
->live_switch_vars
!= NULL
1378 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1379 gimplify_ctxp
->live_switch_vars
->remove (t
);
1385 gimple_stmt_iterator si
= gsi_start (cleanup
);
1387 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1389 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1395 gimple_seq new_body
;
1398 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1399 GIMPLE_TRY_FINALLY
);
1402 gimplify_seq_add_stmt (&new_body
, stack_save
);
1403 gimplify_seq_add_stmt (&new_body
, gs
);
1404 gimple_bind_set_body (bind_stmt
, new_body
);
1407 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1408 if (!gimplify_ctxp
->keep_stack
)
1409 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1410 gimplify_ctxp
->save_stack
= old_save_stack
;
1412 gimple_pop_bind_expr ();
1414 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1422 *expr_p
= NULL_TREE
;
1426 /* Maybe add early return predict statement to PRE_P sequence. */
1429 maybe_add_early_return_predict_stmt (gimple_seq
*pre_p
)
1431 /* If we are not in a conditional context, add PREDICT statement. */
1432 if (gimple_conditional_context ())
1434 gimple
*predict
= gimple_build_predict (PRED_TREE_EARLY_RETURN
,
1436 gimplify_seq_add_stmt (pre_p
, predict
);
1440 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1441 GIMPLE value, it is assigned to a new temporary and the statement is
1442 re-written to return the temporary.
1444 PRE_P points to the sequence where side effects that must happen before
1445 STMT should be stored. */
1447 static enum gimplify_status
1448 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1451 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1452 tree result_decl
, result
;
1454 if (ret_expr
== error_mark_node
)
1457 /* Implicit _Cilk_sync must be inserted right before any return statement
1458 if there is a _Cilk_spawn in the function. If the user has provided a
1459 _Cilk_sync, the optimizer should remove this duplicate one. */
1460 if (fn_contains_cilk_spawn_p (cfun
))
1462 tree impl_sync
= build0 (CILK_SYNC_STMT
, void_type_node
);
1463 gimplify_and_add (impl_sync
, pre_p
);
1467 || TREE_CODE (ret_expr
) == RESULT_DECL
1468 || ret_expr
== error_mark_node
)
1470 maybe_add_early_return_predict_stmt (pre_p
);
1471 greturn
*ret
= gimple_build_return (ret_expr
);
1472 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1473 gimplify_seq_add_stmt (pre_p
, ret
);
1477 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1478 result_decl
= NULL_TREE
;
1481 result_decl
= TREE_OPERAND (ret_expr
, 0);
1483 /* See through a return by reference. */
1484 if (TREE_CODE (result_decl
) == INDIRECT_REF
)
1485 result_decl
= TREE_OPERAND (result_decl
, 0);
1487 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1488 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1489 && TREE_CODE (result_decl
) == RESULT_DECL
);
1492 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1493 Recall that aggregate_value_p is FALSE for any aggregate type that is
1494 returned in registers. If we're returning values in registers, then
1495 we don't want to extend the lifetime of the RESULT_DECL, particularly
1496 across another call. In addition, for those aggregates for which
1497 hard_function_value generates a PARALLEL, we'll die during normal
1498 expansion of structure assignments; there's special code in expand_return
1499 to handle this case that does not exist in expand_expr. */
1502 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1504 if (TREE_CODE (DECL_SIZE (result_decl
)) != INTEGER_CST
)
1506 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1507 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1508 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1509 should be effectively allocated by the caller, i.e. all calls to
1510 this function must be subject to the Return Slot Optimization. */
1511 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1512 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1514 result
= result_decl
;
1516 else if (gimplify_ctxp
->return_temp
)
1517 result
= gimplify_ctxp
->return_temp
;
1520 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1522 /* ??? With complex control flow (usually involving abnormal edges),
1523 we can wind up warning about an uninitialized value for this. Due
1524 to how this variable is constructed and initialized, this is never
1525 true. Give up and never warn. */
1526 TREE_NO_WARNING (result
) = 1;
1528 gimplify_ctxp
->return_temp
= result
;
1531 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1532 Then gimplify the whole thing. */
1533 if (result
!= result_decl
)
1534 TREE_OPERAND (ret_expr
, 0) = result
;
1536 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1538 maybe_add_early_return_predict_stmt (pre_p
);
1539 ret
= gimple_build_return (result
);
1540 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1541 gimplify_seq_add_stmt (pre_p
, ret
);
1546 /* Gimplify a variable-length array DECL. */
1549 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1551 /* This is a variable-sized decl. Simplify its size and mark it
1552 for deferred expansion. */
1553 tree t
, addr
, ptr_type
;
1555 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1556 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1558 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1559 if (DECL_HAS_VALUE_EXPR_P (decl
))
1562 /* All occurrences of this decl in final gimplified code will be
1563 replaced by indirection. Setting DECL_VALUE_EXPR does two
1564 things: First, it lets the rest of the gimplifier know what
1565 replacement to use. Second, it lets the debug info know
1566 where to find the value. */
1567 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1568 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1569 DECL_IGNORED_P (addr
) = 0;
1570 t
= build_fold_indirect_ref (addr
);
1571 TREE_THIS_NOTRAP (t
) = 1;
1572 SET_DECL_VALUE_EXPR (decl
, t
);
1573 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1575 t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
1576 t
= build_call_expr (t
, 2, DECL_SIZE_UNIT (decl
),
1577 size_int (DECL_ALIGN (decl
)));
1578 /* The call has been built for a variable-sized object. */
1579 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1580 t
= fold_convert (ptr_type
, t
);
1581 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1583 gimplify_and_add (t
, seq_p
);
1586 /* A helper function to be called via walk_tree. Mark all labels under *TP
1587 as being forced. To be called for DECL_INITIAL of static variables. */
1590 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1594 if (TREE_CODE (*tp
) == LABEL_DECL
)
1596 FORCED_LABEL (*tp
) = 1;
1597 cfun
->has_forced_label_in_static
= 1;
1603 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1604 and initialization explicit. */
1606 static enum gimplify_status
1607 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
1609 tree stmt
= *stmt_p
;
1610 tree decl
= DECL_EXPR_DECL (stmt
);
1612 *stmt_p
= NULL_TREE
;
1614 if (TREE_TYPE (decl
) == error_mark_node
)
1617 if ((TREE_CODE (decl
) == TYPE_DECL
1619 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
1621 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
1622 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1623 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
1626 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1627 in case its size expressions contain problematic nodes like CALL_EXPR. */
1628 if (TREE_CODE (decl
) == TYPE_DECL
1629 && DECL_ORIGINAL_TYPE (decl
)
1630 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
1632 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
1633 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
1634 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
1637 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
1639 tree init
= DECL_INITIAL (decl
);
1640 bool is_vla
= false;
1642 if (TREE_CODE (DECL_SIZE_UNIT (decl
)) != INTEGER_CST
1643 || (!TREE_STATIC (decl
)
1644 && flag_stack_check
== GENERIC_STACK_CHECK
1645 && compare_tree_int (DECL_SIZE_UNIT (decl
),
1646 STACK_CHECK_MAX_VAR_SIZE
) > 0))
1648 gimplify_vla_decl (decl
, seq_p
);
1652 if (asan_poisoned_variables
1654 && TREE_ADDRESSABLE (decl
)
1655 && !TREE_STATIC (decl
)
1656 && !DECL_HAS_VALUE_EXPR_P (decl
)
1657 && dbg_cnt (asan_use_after_scope
))
1659 asan_poisoned_variables
->add (decl
);
1660 asan_poison_variable (decl
, false, seq_p
);
1661 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
1662 gimplify_ctxp
->live_switch_vars
->add (decl
);
1665 /* Some front ends do not explicitly declare all anonymous
1666 artificial variables. We compensate here by declaring the
1667 variables, though it would be better if the front ends would
1668 explicitly declare them. */
1669 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
1670 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
1671 gimple_add_tmp_var (decl
);
1673 if (init
&& init
!= error_mark_node
)
1675 if (!TREE_STATIC (decl
))
1677 DECL_INITIAL (decl
) = NULL_TREE
;
1678 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
1679 gimplify_and_add (init
, seq_p
);
1683 /* We must still examine initializers for static variables
1684 as they may contain a label address. */
1685 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
1692 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1693 and replacing the LOOP_EXPR with goto, but if the loop contains an
1694 EXIT_EXPR, we need to append a label for it to jump to. */
1696 static enum gimplify_status
1697 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1699 tree saved_label
= gimplify_ctxp
->exit_label
;
1700 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
1702 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
1704 gimplify_ctxp
->exit_label
= NULL_TREE
;
1706 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
1708 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
1710 if (gimplify_ctxp
->exit_label
)
1711 gimplify_seq_add_stmt (pre_p
,
1712 gimple_build_label (gimplify_ctxp
->exit_label
));
1714 gimplify_ctxp
->exit_label
= saved_label
;
1720 /* Gimplify a statement list onto a sequence. These may be created either
1721 by an enlightened front-end, or by shortcut_cond_expr. */
1723 static enum gimplify_status
1724 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
1726 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
1728 tree_stmt_iterator i
= tsi_start (*expr_p
);
1730 while (!tsi_end_p (i
))
1732 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
1745 /* Callback for walk_gimple_seq. */
1748 warn_switch_unreachable_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
1749 struct walk_stmt_info
*wi
)
1751 gimple
*stmt
= gsi_stmt (*gsi_p
);
1753 *handled_ops_p
= true;
1754 switch (gimple_code (stmt
))
1757 /* A compiler-generated cleanup or a user-written try block.
1758 If it's empty, don't dive into it--that would result in
1759 worse location info. */
1760 if (gimple_try_eval (stmt
) == NULL
)
1763 return integer_zero_node
;
1768 case GIMPLE_EH_FILTER
:
1769 case GIMPLE_TRANSACTION
:
1770 /* Walk the sub-statements. */
1771 *handled_ops_p
= false;
1774 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
1776 *handled_ops_p
= false;
1781 /* Save the first "real" statement (not a decl/lexical scope/...). */
1783 return integer_zero_node
;
1788 /* Possibly warn about unreachable statements between switch's controlling
1789 expression and the first case. SEQ is the body of a switch expression. */
1792 maybe_warn_switch_unreachable (gimple_seq seq
)
1794 if (!warn_switch_unreachable
1795 /* This warning doesn't play well with Fortran when optimizations
1797 || lang_GNU_Fortran ()
1801 struct walk_stmt_info wi
;
1802 memset (&wi
, 0, sizeof (wi
));
1803 walk_gimple_seq (seq
, warn_switch_unreachable_r
, NULL
, &wi
);
1804 gimple
*stmt
= (gimple
*) wi
.info
;
1806 if (stmt
&& gimple_code (stmt
) != GIMPLE_LABEL
)
1808 if (gimple_code (stmt
) == GIMPLE_GOTO
1809 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
1810 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
1811 /* Don't warn for compiler-generated gotos. These occur
1812 in Duff's devices, for example. */;
1814 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
1815 "statement will never be executed");
1820 /* A label entry that pairs label and a location. */
1827 /* Find LABEL in vector of label entries VEC. */
1829 static struct label_entry
*
1830 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
1833 struct label_entry
*l
;
1835 FOR_EACH_VEC_ELT (*vec
, i
, l
)
1836 if (l
->label
== label
)
1841 /* Return true if LABEL, a LABEL_DECL, represents a case label
1842 in a vector of labels CASES. */
1845 case_label_p (const vec
<tree
> *cases
, tree label
)
1850 FOR_EACH_VEC_ELT (*cases
, i
, l
)
1851 if (CASE_LABEL (l
) == label
)
1856 /* Find the last statement in a scope STMT. */
1859 last_stmt_in_scope (gimple
*stmt
)
1864 switch (gimple_code (stmt
))
1868 gbind
*bind
= as_a
<gbind
*> (stmt
);
1869 stmt
= gimple_seq_last_stmt (gimple_bind_body (bind
));
1870 return last_stmt_in_scope (stmt
);
1875 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
1876 stmt
= gimple_seq_last_stmt (gimple_try_eval (try_stmt
));
1877 gimple
*last_eval
= last_stmt_in_scope (stmt
);
1878 if (gimple_stmt_may_fallthru (last_eval
)
1879 && (last_eval
== NULL
1880 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
1881 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
1883 stmt
= gimple_seq_last_stmt (gimple_try_cleanup (try_stmt
));
1884 return last_stmt_in_scope (stmt
);
1895 /* Collect interesting labels in LABELS and return the statement preceding
1896 another case label, or a user-defined label. */
1899 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
1900 auto_vec
<struct label_entry
> *labels
)
1902 gimple
*prev
= NULL
;
1906 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
1907 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
1909 /* Nested scope. Only look at the last statement of
1910 the innermost scope. */
1911 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
1912 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
1916 /* It might be a label without a location. Use the
1917 location of the scope then. */
1918 if (!gimple_has_location (prev
))
1919 gimple_set_location (prev
, bind_loc
);
1925 /* Ifs are tricky. */
1926 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
1928 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
1929 tree false_lab
= gimple_cond_false_label (cond_stmt
);
1930 location_t if_loc
= gimple_location (cond_stmt
);
1933 if (i > 1) goto <D.2259>; else goto D;
1934 we can't do much with the else-branch. */
1935 if (!DECL_ARTIFICIAL (false_lab
))
1938 /* Go on until the false label, then one step back. */
1939 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
1941 gimple
*stmt
= gsi_stmt (*gsi_p
);
1942 if (gimple_code (stmt
) == GIMPLE_LABEL
1943 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
1947 /* Not found? Oops. */
1948 if (gsi_end_p (*gsi_p
))
1951 struct label_entry l
= { false_lab
, if_loc
};
1952 labels
->safe_push (l
);
1954 /* Go to the last statement of the then branch. */
1957 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1963 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
1964 && !gimple_has_location (gsi_stmt (*gsi_p
)))
1966 /* Look at the statement before, it might be
1967 attribute fallthrough, in which case don't warn. */
1969 bool fallthru_before_dest
1970 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
1972 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
1973 if (!fallthru_before_dest
)
1975 struct label_entry l
= { goto_dest
, if_loc
};
1976 labels
->safe_push (l
);
1979 /* And move back. */
1983 /* Remember the last statement. Skip labels that are of no interest
1985 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
1987 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
1988 if (find_label_entry (labels
, label
))
1989 prev
= gsi_stmt (*gsi_p
);
1991 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
1994 prev
= gsi_stmt (*gsi_p
);
1997 while (!gsi_end_p (*gsi_p
)
1998 /* Stop if we find a case or a user-defined label. */
1999 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
2000 || !gimple_has_location (gsi_stmt (*gsi_p
))));
2005 /* Return true if the switch fallthough warning should occur. LABEL is
2006 the label statement that we're falling through to. */
2009 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
2011 gimple_stmt_iterator gsi
= *gsi_p
;
2013 /* Don't warn if the label is marked with a "falls through" comment. */
2014 if (FALLTHROUGH_LABEL_P (label
))
2017 /* Don't warn for non-case labels followed by a statement:
2022 as these are likely intentional. */
2023 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
2026 while (!gsi_end_p (gsi
)
2027 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2028 && (l
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi
))))
2029 && !case_label_p (&gimplify_ctxp
->case_labels
, l
))
2031 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2035 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2036 immediately breaks. */
2039 /* Skip all immediately following labels. */
2040 while (!gsi_end_p (gsi
) && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
)
2043 /* { ... something; default:; } */
2045 /* { ... something; default: break; } or
2046 { ... something; default: goto L; } */
2047 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2048 /* { ... something; default: return; } */
2049 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
2055 /* Callback for walk_gimple_seq. */
2058 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2059 struct walk_stmt_info
*)
2061 gimple
*stmt
= gsi_stmt (*gsi_p
);
2063 *handled_ops_p
= true;
2064 switch (gimple_code (stmt
))
2069 case GIMPLE_EH_FILTER
:
2070 case GIMPLE_TRANSACTION
:
2071 /* Walk the sub-statements. */
2072 *handled_ops_p
= false;
2075 /* Find a sequence of form:
2082 and possibly warn. */
2085 /* Found a label. Skip all immediately following labels. */
2086 while (!gsi_end_p (*gsi_p
)
2087 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2090 /* There might be no more statements. */
2091 if (gsi_end_p (*gsi_p
))
2092 return integer_zero_node
;
2094 /* Vector of labels that fall through. */
2095 auto_vec
<struct label_entry
> labels
;
2096 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
);
2098 /* There might be no more statements. */
2099 if (gsi_end_p (*gsi_p
))
2100 return integer_zero_node
;
2102 gimple
*next
= gsi_stmt (*gsi_p
);
2104 /* If what follows is a label, then we may have a fallthrough. */
2105 if (gimple_code (next
) == GIMPLE_LABEL
2106 && gimple_has_location (next
)
2107 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2110 struct label_entry
*l
;
2111 bool warned_p
= false;
2112 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
2114 else if (gimple_code (prev
) == GIMPLE_LABEL
2115 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2116 && (l
= find_label_entry (&labels
, label
)))
2117 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2118 "this statement may fall through");
2119 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2120 /* Try to be clever and don't warn when the statement
2121 can't actually fall through. */
2122 && gimple_stmt_may_fallthru (prev
)
2123 && gimple_has_location (prev
))
2124 warned_p
= warning_at (gimple_location (prev
),
2125 OPT_Wimplicit_fallthrough_
,
2126 "this statement may fall through");
2128 inform (gimple_location (next
), "here");
2130 /* Mark this label as processed so as to prevent multiple
2131 warnings in nested switches. */
2132 FALLTHROUGH_LABEL_P (label
) = true;
2134 /* So that next warn_implicit_fallthrough_r will start looking for
2135 a new sequence starting with this label. */
2146 /* Warn when a switch case falls through. */
2149 maybe_warn_implicit_fallthrough (gimple_seq seq
)
2151 if (!warn_implicit_fallthrough
)
2154 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2157 || lang_GNU_OBJC ()))
2160 struct walk_stmt_info wi
;
2161 memset (&wi
, 0, sizeof (wi
));
2162 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
2165 /* Callback for walk_gimple_seq. */
2168 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2169 struct walk_stmt_info
*)
2171 gimple
*stmt
= gsi_stmt (*gsi_p
);
2173 *handled_ops_p
= true;
2174 switch (gimple_code (stmt
))
2179 case GIMPLE_EH_FILTER
:
2180 case GIMPLE_TRANSACTION
:
2181 /* Walk the sub-statements. */
2182 *handled_ops_p
= false;
2185 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2187 gsi_remove (gsi_p
, true);
2188 if (gsi_end_p (*gsi_p
))
2189 return integer_zero_node
;
2192 location_t loc
= gimple_location (stmt
);
2194 gimple_stmt_iterator gsi2
= *gsi_p
;
2195 stmt
= gsi_stmt (gsi2
);
2196 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2198 /* Go on until the artificial label. */
2199 tree goto_dest
= gimple_goto_dest (stmt
);
2200 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2202 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2203 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2208 /* Not found? Stop. */
2209 if (gsi_end_p (gsi2
))
2212 /* Look one past it. */
2216 /* We're looking for a case label or default label here. */
2217 while (!gsi_end_p (gsi2
))
2219 stmt
= gsi_stmt (gsi2
);
2220 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2222 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2223 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2230 /* Something other than a label. That's not expected. */
2235 warning_at (loc
, 0, "attribute %<fallthrough%> not preceding "
2236 "a case label or default label");
2245 /* Expand all FALLTHROUGH () calls in SEQ. */
2248 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2250 struct walk_stmt_info wi
;
2251 memset (&wi
, 0, sizeof (wi
));
2252 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2256 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2259 static enum gimplify_status
2260 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2262 tree switch_expr
= *expr_p
;
2263 gimple_seq switch_body_seq
= NULL
;
2264 enum gimplify_status ret
;
2265 tree index_type
= TREE_TYPE (switch_expr
);
2266 if (index_type
== NULL_TREE
)
2267 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
2269 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2271 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2274 if (SWITCH_BODY (switch_expr
))
2277 vec
<tree
> saved_labels
;
2278 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2279 tree default_case
= NULL_TREE
;
2280 gswitch
*switch_stmt
;
2282 /* If someone can be bothered to fill in the labels, they can
2283 be bothered to null out the body too. */
2284 gcc_assert (!SWITCH_LABELS (switch_expr
));
2286 /* Save old labels, get new ones from body, then restore the old
2287 labels. Save all the things from the switch body to append after. */
2288 saved_labels
= gimplify_ctxp
->case_labels
;
2289 gimplify_ctxp
->case_labels
.create (8);
2291 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2292 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2293 tree_code body_type
= TREE_CODE (SWITCH_BODY (switch_expr
));
2294 if (body_type
== BIND_EXPR
|| body_type
== STATEMENT_LIST
)
2295 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2297 gimplify_ctxp
->live_switch_vars
= NULL
;
2299 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2300 gimplify_ctxp
->in_switch_expr
= true;
2302 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2304 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2305 maybe_warn_switch_unreachable (switch_body_seq
);
2306 maybe_warn_implicit_fallthrough (switch_body_seq
);
2307 /* Only do this for the outermost GIMPLE_SWITCH. */
2308 if (!gimplify_ctxp
->in_switch_expr
)
2309 expand_FALLTHROUGH (&switch_body_seq
);
2311 labels
= gimplify_ctxp
->case_labels
;
2312 gimplify_ctxp
->case_labels
= saved_labels
;
2314 if (gimplify_ctxp
->live_switch_vars
)
2316 gcc_assert (gimplify_ctxp
->live_switch_vars
->elements () == 0);
2317 delete gimplify_ctxp
->live_switch_vars
;
2319 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2321 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2326 glabel
*new_default
;
2329 = build_case_label (NULL_TREE
, NULL_TREE
,
2330 create_artificial_label (UNKNOWN_LOCATION
));
2331 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2332 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2335 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2336 default_case
, labels
);
2337 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
2338 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
2342 gcc_assert (SWITCH_LABELS (switch_expr
));
2347 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2349 static enum gimplify_status
2350 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2352 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
2353 == current_function_decl
);
2355 tree label
= LABEL_EXPR_LABEL (*expr_p
);
2356 glabel
*label_stmt
= gimple_build_label (label
);
2357 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2358 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2360 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2361 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2363 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2364 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2370 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2372 static enum gimplify_status
2373 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2375 struct gimplify_ctx
*ctxp
;
2378 /* Invalid programs can play Duff's Device type games with, for example,
2379 #pragma omp parallel. At least in the C front end, we don't
2380 detect such invalid branches until after gimplification, in the
2381 diagnose_omp_blocks pass. */
2382 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
2383 if (ctxp
->case_labels
.exists ())
2386 label_stmt
= gimple_build_label (CASE_LABEL (*expr_p
));
2387 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2388 ctxp
->case_labels
.safe_push (*expr_p
);
2389 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2394 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2398 build_and_jump (tree
*label_p
)
2400 if (label_p
== NULL
)
2401 /* If there's nowhere to jump, just fall through. */
2404 if (*label_p
== NULL_TREE
)
2406 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
2410 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
2413 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2414 This also involves building a label to jump to and communicating it to
2415 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2417 static enum gimplify_status
2418 gimplify_exit_expr (tree
*expr_p
)
2420 tree cond
= TREE_OPERAND (*expr_p
, 0);
2423 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
2424 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
2430 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2431 different from its canonical type, wrap the whole thing inside a
2432 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2435 The canonical type of a COMPONENT_REF is the type of the field being
2436 referenced--unless the field is a bit-field which can be read directly
2437 in a smaller mode, in which case the canonical type is the
2438 sign-appropriate type corresponding to that mode. */
2441 canonicalize_component_ref (tree
*expr_p
)
2443 tree expr
= *expr_p
;
2446 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
2448 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
2449 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
2451 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
2453 /* One could argue that all the stuff below is not necessary for
2454 the non-bitfield case and declare it a FE error if type
2455 adjustment would be needed. */
2456 if (TREE_TYPE (expr
) != type
)
2458 #ifdef ENABLE_TYPES_CHECKING
2459 tree old_type
= TREE_TYPE (expr
);
2463 /* We need to preserve qualifiers and propagate them from
2465 type_quals
= TYPE_QUALS (type
)
2466 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
2467 if (TYPE_QUALS (type
) != type_quals
)
2468 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
2470 /* Set the type of the COMPONENT_REF to the underlying type. */
2471 TREE_TYPE (expr
) = type
;
2473 #ifdef ENABLE_TYPES_CHECKING
2474 /* It is now a FE error, if the conversion from the canonical
2475 type to the original expression type is not useless. */
2476 gcc_assert (useless_type_conversion_p (old_type
, type
));
2481 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2482 to foo, embed that change in the ADDR_EXPR by converting
2487 where L is the lower bound. For simplicity, only do this for constant
2489 The constraint is that the type of &array[L] is trivially convertible
2493 canonicalize_addr_expr (tree
*expr_p
)
2495 tree expr
= *expr_p
;
2496 tree addr_expr
= TREE_OPERAND (expr
, 0);
2497 tree datype
, ddatype
, pddatype
;
2499 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2500 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
2501 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
2504 /* The addr_expr type should be a pointer to an array. */
2505 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
2506 if (TREE_CODE (datype
) != ARRAY_TYPE
)
2509 /* The pointer to element type shall be trivially convertible to
2510 the expression pointer type. */
2511 ddatype
= TREE_TYPE (datype
);
2512 pddatype
= build_pointer_type (ddatype
);
2513 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
2517 /* The lower bound and element sizes must be constant. */
2518 if (!TYPE_SIZE_UNIT (ddatype
)
2519 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
2520 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
2521 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
2524 /* All checks succeeded. Build a new node to merge the cast. */
2525 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
2526 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
2527 NULL_TREE
, NULL_TREE
);
2528 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
2530 /* We can have stripped a required restrict qualifier above. */
2531 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
2532 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
2535 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2536 underneath as appropriate. */
2538 static enum gimplify_status
2539 gimplify_conversion (tree
*expr_p
)
2541 location_t loc
= EXPR_LOCATION (*expr_p
);
2542 gcc_assert (CONVERT_EXPR_P (*expr_p
));
2544 /* Then strip away all but the outermost conversion. */
2545 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
2547 /* And remove the outermost conversion if it's useless. */
2548 if (tree_ssa_useless_type_conversion (*expr_p
))
2549 *expr_p
= TREE_OPERAND (*expr_p
, 0);
2551 /* If we still have a conversion at the toplevel,
2552 then canonicalize some constructs. */
2553 if (CONVERT_EXPR_P (*expr_p
))
2555 tree sub
= TREE_OPERAND (*expr_p
, 0);
2557 /* If a NOP conversion is changing the type of a COMPONENT_REF
2558 expression, then canonicalize its type now in order to expose more
2559 redundant conversions. */
2560 if (TREE_CODE (sub
) == COMPONENT_REF
)
2561 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
2563 /* If a NOP conversion is changing a pointer to array of foo
2564 to a pointer to foo, embed that change in the ADDR_EXPR. */
2565 else if (TREE_CODE (sub
) == ADDR_EXPR
)
2566 canonicalize_addr_expr (expr_p
);
2569 /* If we have a conversion to a non-register type force the
2570 use of a VIEW_CONVERT_EXPR instead. */
2571 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
2572 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
2573 TREE_OPERAND (*expr_p
, 0));
2575 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2576 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
2577 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
2582 /* Nonlocal VLAs seen in the current function. */
2583 static hash_set
<tree
> *nonlocal_vlas
;
2585 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2586 static tree nonlocal_vla_vars
;
2588 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2589 DECL_VALUE_EXPR, and it's worth re-examining things. */
2591 static enum gimplify_status
2592 gimplify_var_or_parm_decl (tree
*expr_p
)
2594 tree decl
= *expr_p
;
2596 /* ??? If this is a local variable, and it has not been seen in any
2597 outer BIND_EXPR, then it's probably the result of a duplicate
2598 declaration, for which we've already issued an error. It would
2599 be really nice if the front end wouldn't leak these at all.
2600 Currently the only known culprit is C++ destructors, as seen
2601 in g++.old-deja/g++.jason/binding.C. */
2603 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
2604 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
2605 && decl_function_context (decl
) == current_function_decl
)
2607 gcc_assert (seen_error ());
2611 /* When within an OMP context, notice uses of variables. */
2612 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
2615 /* If the decl is an alias for another expression, substitute it now. */
2616 if (DECL_HAS_VALUE_EXPR_P (decl
))
2618 tree value_expr
= DECL_VALUE_EXPR (decl
);
2620 /* For referenced nonlocal VLAs add a decl for debugging purposes
2621 to the current function. */
2623 && TREE_CODE (DECL_SIZE_UNIT (decl
)) != INTEGER_CST
2624 && nonlocal_vlas
!= NULL
2625 && TREE_CODE (value_expr
) == INDIRECT_REF
2626 && TREE_CODE (TREE_OPERAND (value_expr
, 0)) == VAR_DECL
2627 && decl_function_context (decl
) != current_function_decl
)
2629 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
2631 && (ctx
->region_type
== ORT_WORKSHARE
2632 || ctx
->region_type
== ORT_SIMD
2633 || ctx
->region_type
== ORT_ACC
))
2634 ctx
= ctx
->outer_context
;
2635 if (!ctx
&& !nonlocal_vlas
->add (decl
))
2637 tree copy
= copy_node (decl
);
2639 lang_hooks
.dup_lang_specific_decl (copy
);
2640 SET_DECL_RTL (copy
, 0);
2641 TREE_USED (copy
) = 1;
2642 DECL_CHAIN (copy
) = nonlocal_vla_vars
;
2643 nonlocal_vla_vars
= copy
;
2644 SET_DECL_VALUE_EXPR (copy
, unshare_expr (value_expr
));
2645 DECL_HAS_VALUE_EXPR_P (copy
) = 1;
2649 *expr_p
= unshare_expr (value_expr
);
2656 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2659 recalculate_side_effects (tree t
)
2661 enum tree_code code
= TREE_CODE (t
);
2662 int len
= TREE_OPERAND_LENGTH (t
);
2665 switch (TREE_CODE_CLASS (code
))
2667 case tcc_expression
:
2673 case PREDECREMENT_EXPR
:
2674 case PREINCREMENT_EXPR
:
2675 case POSTDECREMENT_EXPR
:
2676 case POSTINCREMENT_EXPR
:
2677 /* All of these have side-effects, no matter what their
2686 case tcc_comparison
: /* a comparison expression */
2687 case tcc_unary
: /* a unary arithmetic expression */
2688 case tcc_binary
: /* a binary arithmetic expression */
2689 case tcc_reference
: /* a reference */
2690 case tcc_vl_exp
: /* a function call */
2691 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
2692 for (i
= 0; i
< len
; ++i
)
2694 tree op
= TREE_OPERAND (t
, i
);
2695 if (op
&& TREE_SIDE_EFFECTS (op
))
2696 TREE_SIDE_EFFECTS (t
) = 1;
2701 /* No side-effects. */
2709 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2713 : min_lval '[' val ']'
2715 | compound_lval '[' val ']'
2716 | compound_lval '.' ID
2718 This is not part of the original SIMPLE definition, which separates
2719 array and member references, but it seems reasonable to handle them
2720 together. Also, this way we don't run into problems with union
2721 aliasing; gcc requires that for accesses through a union to alias, the
2722 union reference must be explicit, which was not always the case when we
2723 were splitting up array and member refs.
2725 PRE_P points to the sequence where side effects that must happen before
2726 *EXPR_P should be stored.
2728 POST_P points to the sequence where side effects that must happen after
2729 *EXPR_P should be stored. */
2731 static enum gimplify_status
2732 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2733 fallback_t fallback
)
2736 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
2738 location_t loc
= EXPR_LOCATION (*expr_p
);
2739 tree expr
= *expr_p
;
2741 /* Create a stack of the subexpressions so later we can walk them in
2742 order from inner to outer. */
2743 auto_vec
<tree
, 10> expr_stack
;
2745 /* We can handle anything that get_inner_reference can deal with. */
2746 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
2749 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2750 if (TREE_CODE (*p
) == INDIRECT_REF
)
2751 *p
= fold_indirect_ref_loc (loc
, *p
);
2753 if (handled_component_p (*p
))
2755 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2756 additional COMPONENT_REFs. */
2757 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
2758 && gimplify_var_or_parm_decl (p
) == GS_OK
)
2763 expr_stack
.safe_push (*p
);
2766 gcc_assert (expr_stack
.length ());
2768 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2769 walked through and P points to the innermost expression.
2771 Java requires that we elaborated nodes in source order. That
2772 means we must gimplify the inner expression followed by each of
2773 the indices, in order. But we can't gimplify the inner
2774 expression until we deal with any variable bounds, sizes, or
2775 positions in order to deal with PLACEHOLDER_EXPRs.
2777 So we do this in three steps. First we deal with the annotations
2778 for any variables in the components, then we gimplify the base,
2779 then we gimplify any indices, from left to right. */
2780 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
2782 tree t
= expr_stack
[i
];
2784 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2786 /* Gimplify the low bound and element type size and put them into
2787 the ARRAY_REF. If these values are set, they have already been
2789 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2791 tree low
= unshare_expr (array_ref_low_bound (t
));
2792 if (!is_gimple_min_invariant (low
))
2794 TREE_OPERAND (t
, 2) = low
;
2795 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
2796 post_p
, is_gimple_reg
,
2798 ret
= MIN (ret
, tret
);
2803 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
2804 is_gimple_reg
, fb_rvalue
);
2805 ret
= MIN (ret
, tret
);
2808 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
2810 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
2811 tree elmt_size
= unshare_expr (array_ref_element_size (t
));
2812 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
2814 /* Divide the element size by the alignment of the element
2817 = size_binop_loc (loc
, EXACT_DIV_EXPR
, elmt_size
, factor
);
2819 if (!is_gimple_min_invariant (elmt_size
))
2821 TREE_OPERAND (t
, 3) = elmt_size
;
2822 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
2823 post_p
, is_gimple_reg
,
2825 ret
= MIN (ret
, tret
);
2830 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
2831 is_gimple_reg
, fb_rvalue
);
2832 ret
= MIN (ret
, tret
);
2835 else if (TREE_CODE (t
) == COMPONENT_REF
)
2837 /* Set the field offset into T and gimplify it. */
2838 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2840 tree offset
= unshare_expr (component_ref_field_offset (t
));
2841 tree field
= TREE_OPERAND (t
, 1);
2843 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
2845 /* Divide the offset by its alignment. */
2846 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
, offset
, factor
);
2848 if (!is_gimple_min_invariant (offset
))
2850 TREE_OPERAND (t
, 2) = offset
;
2851 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
2852 post_p
, is_gimple_reg
,
2854 ret
= MIN (ret
, tret
);
2859 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
2860 is_gimple_reg
, fb_rvalue
);
2861 ret
= MIN (ret
, tret
);
2866 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2867 so as to match the min_lval predicate. Failure to do so may result
2868 in the creation of large aggregate temporaries. */
2869 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
2870 fallback
| fb_lvalue
);
2871 ret
= MIN (ret
, tret
);
2873 /* And finally, the indices and operands of ARRAY_REF. During this
2874 loop we also remove any useless conversions. */
2875 for (; expr_stack
.length () > 0; )
2877 tree t
= expr_stack
.pop ();
2879 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2881 /* Gimplify the dimension. */
2882 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1)))
2884 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
2885 is_gimple_val
, fb_rvalue
);
2886 ret
= MIN (ret
, tret
);
2890 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
2892 /* The innermost expression P may have originally had
2893 TREE_SIDE_EFFECTS set which would have caused all the outer
2894 expressions in *EXPR_P leading to P to also have had
2895 TREE_SIDE_EFFECTS set. */
2896 recalculate_side_effects (t
);
2899 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2900 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
2902 canonicalize_component_ref (expr_p
);
2905 expr_stack
.release ();
2907 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
2912 /* Gimplify the self modifying expression pointed to by EXPR_P
2915 PRE_P points to the list where side effects that must happen before
2916 *EXPR_P should be stored.
2918 POST_P points to the list where side effects that must happen after
2919 *EXPR_P should be stored.
2921 WANT_VALUE is nonzero iff we want to use the value of this expression
2922 in another expression.
2924 ARITH_TYPE is the type the computation should be performed in. */
2926 enum gimplify_status
2927 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2928 bool want_value
, tree arith_type
)
2930 enum tree_code code
;
2931 tree lhs
, lvalue
, rhs
, t1
;
2932 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
2934 enum tree_code arith_code
;
2935 enum gimplify_status ret
;
2936 location_t loc
= EXPR_LOCATION (*expr_p
);
2938 code
= TREE_CODE (*expr_p
);
2940 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
2941 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
2943 /* Prefix or postfix? */
2944 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
2945 /* Faster to treat as prefix if result is not used. */
2946 postfix
= want_value
;
2950 /* For postfix, make sure the inner expression's post side effects
2951 are executed after side effects from this expression. */
2955 /* Add or subtract? */
2956 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
2957 arith_code
= PLUS_EXPR
;
2959 arith_code
= MINUS_EXPR
;
2961 /* Gimplify the LHS into a GIMPLE lvalue. */
2962 lvalue
= TREE_OPERAND (*expr_p
, 0);
2963 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
2964 if (ret
== GS_ERROR
)
2967 /* Extract the operands to the arithmetic operation. */
2969 rhs
= TREE_OPERAND (*expr_p
, 1);
2971 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2972 that as the result value and in the postqueue operation. */
2975 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
2976 if (ret
== GS_ERROR
)
2979 lhs
= get_initialized_tmp_var (lhs
, pre_p
, NULL
);
2982 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2983 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
2985 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
2986 if (arith_code
== MINUS_EXPR
)
2987 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
2988 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
2991 t1
= fold_convert (TREE_TYPE (*expr_p
),
2992 fold_build2 (arith_code
, arith_type
,
2993 fold_convert (arith_type
, lhs
),
2994 fold_convert (arith_type
, rhs
)));
2998 gimplify_assign (lvalue
, t1
, pre_p
);
2999 gimplify_seq_add_seq (orig_post_p
, post
);
3005 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
3010 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3013 maybe_with_size_expr (tree
*expr_p
)
3015 tree expr
= *expr_p
;
3016 tree type
= TREE_TYPE (expr
);
3019 /* If we've already wrapped this or the type is error_mark_node, we can't do
3021 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
3022 || type
== error_mark_node
)
3025 /* If the size isn't known or is a constant, we have nothing to do. */
3026 size
= TYPE_SIZE_UNIT (type
);
3027 if (!size
|| TREE_CODE (size
) == INTEGER_CST
)
3030 /* Otherwise, make a WITH_SIZE_EXPR. */
3031 size
= unshare_expr (size
);
3032 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
3033 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
3036 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3037 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3038 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3039 gimplified to an SSA name. */
3041 enum gimplify_status
3042 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
3045 bool (*test
) (tree
);
3048 /* In general, we allow lvalues for function arguments to avoid
3049 extra overhead of copying large aggregates out of even larger
3050 aggregates into temporaries only to copy the temporaries to
3051 the argument list. Make optimizers happy by pulling out to
3052 temporaries those types that fit in registers. */
3053 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3054 test
= is_gimple_val
, fb
= fb_rvalue
;
3057 test
= is_gimple_lvalue
, fb
= fb_either
;
3058 /* Also strip a TARGET_EXPR that would force an extra copy. */
3059 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3061 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
3063 && !VOID_TYPE_P (TREE_TYPE (init
)))
3068 /* If this is a variable sized type, we must remember the size. */
3069 maybe_with_size_expr (arg_p
);
3071 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3072 /* Make sure arguments have the same location as the function call
3074 protected_set_expr_location (*arg_p
, call_location
);
3076 /* There is a sequence point before a function call. Side effects in
3077 the argument list must occur before the actual call. So, when
3078 gimplifying arguments, force gimplify_expr to use an internal
3079 post queue which is then appended to the end of PRE_P. */
3080 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
3083 /* Don't fold inside offloading or taskreg regions: it can break code by
3084 adding decl references that weren't in the source. We'll do it during
3085 omplower pass instead. */
3088 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3090 struct gimplify_omp_ctx
*ctx
;
3091 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3092 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3094 return fold_stmt (gsi
);
3097 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3098 with the pointer to the proper cilk frame. */
3100 gimplify_cilk_detach (gimple_seq
*pre_p
)
3102 tree frame
= cfun
->cilk_frame_decl
;
3103 tree ptrf
= build1 (ADDR_EXPR
, cilk_frame_ptr_type_decl
,
3105 gcall
*detach
= gimple_build_call (cilk_detach_fndecl
, 1,
3107 gimplify_seq_add_stmt(pre_p
, detach
);
3110 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3111 WANT_VALUE is true if the result of the call is desired. */
3113 static enum gimplify_status
3114 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3116 tree fndecl
, parms
, p
, fnptrtype
;
3117 enum gimplify_status ret
;
3120 bool builtin_va_start_p
= false;
3121 location_t loc
= EXPR_LOCATION (*expr_p
);
3123 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3125 /* For reliable diagnostics during inlining, it is necessary that
3126 every call_expr be annotated with file and line. */
3127 if (! EXPR_HAS_LOCATION (*expr_p
))
3128 SET_EXPR_LOCATION (*expr_p
, input_location
);
3130 /* Gimplify internal functions created in the FEs. */
3131 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3136 nargs
= call_expr_nargs (*expr_p
);
3137 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3138 auto_vec
<tree
> vargs (nargs
);
3140 for (i
= 0; i
< nargs
; i
++)
3142 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3143 EXPR_LOCATION (*expr_p
));
3144 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3147 if (EXPR_CILK_SPAWN (*expr_p
))
3148 gimplify_cilk_detach (pre_p
);
3149 gimple
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3150 gimplify_seq_add_stmt (pre_p
, call
);
3154 /* This may be a call to a builtin function.
3156 Builtin function calls may be transformed into different
3157 (and more efficient) builtin function calls under certain
3158 circumstances. Unfortunately, gimplification can muck things
3159 up enough that the builtin expanders are not aware that certain
3160 transformations are still valid.
3162 So we attempt transformation/gimplification of the call before
3163 we gimplify the CALL_EXPR. At this time we do not manage to
3164 transform all calls in the same manner as the expanders do, but
3165 we do transform most of them. */
3166 fndecl
= get_callee_fndecl (*expr_p
);
3168 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3169 switch (DECL_FUNCTION_CODE (fndecl
))
3171 case BUILT_IN_ALLOCA
:
3172 case BUILT_IN_ALLOCA_WITH_ALIGN
:
3173 /* If the call has been built for a variable-sized object, then we
3174 want to restore the stack level when the enclosing BIND_EXPR is
3175 exited to reclaim the allocated space; otherwise, we precisely
3176 need to do the opposite and preserve the latest stack level. */
3177 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3178 gimplify_ctxp
->save_stack
= true;
3180 gimplify_ctxp
->keep_stack
= true;
3183 case BUILT_IN_VA_START
:
3185 builtin_va_start_p
= TRUE
;
3186 if (call_expr_nargs (*expr_p
) < 2)
3188 error ("too few arguments to function %<va_start%>");
3189 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3193 if (fold_builtin_next_arg (*expr_p
, true))
3195 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3204 if (fndecl
&& DECL_BUILT_IN (fndecl
))
3206 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3207 if (new_tree
&& new_tree
!= *expr_p
)
3209 /* There was a transformation of this call which computes the
3210 same value, but in a more efficient way. Return and try
3217 /* Remember the original function pointer type. */
3218 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
3220 /* There is a sequence point before the call, so any side effects in
3221 the calling expression must occur before the actual call. Force
3222 gimplify_expr to use an internal post queue. */
3223 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3224 is_gimple_call_addr
, fb_rvalue
);
3226 nargs
= call_expr_nargs (*expr_p
);
3228 /* Get argument types for verification. */
3229 fndecl
= get_callee_fndecl (*expr_p
);
3232 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3234 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3236 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3237 p
= DECL_ARGUMENTS (fndecl
);
3242 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3245 /* If the last argument is __builtin_va_arg_pack () and it is not
3246 passed as a named argument, decrease the number of CALL_EXPR
3247 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3250 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3252 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3253 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3256 && TREE_CODE (last_arg_fndecl
) == FUNCTION_DECL
3257 && DECL_BUILT_IN_CLASS (last_arg_fndecl
) == BUILT_IN_NORMAL
3258 && DECL_FUNCTION_CODE (last_arg_fndecl
) == BUILT_IN_VA_ARG_PACK
)
3260 tree call
= *expr_p
;
3263 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
3264 CALL_EXPR_FN (call
),
3265 nargs
, CALL_EXPR_ARGP (call
));
3267 /* Copy all CALL_EXPR flags, location and block, except
3268 CALL_EXPR_VA_ARG_PACK flag. */
3269 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
3270 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
3271 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
3272 = CALL_EXPR_RETURN_SLOT_OPT (call
);
3273 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
3274 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
3276 /* Set CALL_EXPR_VA_ARG_PACK. */
3277 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
3281 /* If the call returns twice then after building the CFG the call
3282 argument computations will no longer dominate the call because
3283 we add an abnormal incoming edge to the call. So do not use SSA
3285 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
3287 /* Gimplify the function arguments. */
3290 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
3291 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
3292 PUSH_ARGS_REVERSED
? i
-- : i
++)
3294 enum gimplify_status t
;
3296 /* Avoid gimplifying the second argument to va_start, which needs to
3297 be the plain PARM_DECL. */
3298 if ((i
!= 1) || !builtin_va_start_p
)
3300 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3301 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3309 /* Gimplify the static chain. */
3310 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
3312 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
3313 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
3316 enum gimplify_status t
;
3317 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
3318 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3324 /* Verify the function result. */
3325 if (want_value
&& fndecl
3326 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
3328 error_at (loc
, "using result of function returning %<void%>");
3332 /* Try this again in case gimplification exposed something. */
3333 if (ret
!= GS_ERROR
)
3335 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3337 if (new_tree
&& new_tree
!= *expr_p
)
3339 /* There was a transformation of this call which computes the
3340 same value, but in a more efficient way. Return and try
3348 *expr_p
= error_mark_node
;
3352 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3353 decl. This allows us to eliminate redundant or useless
3354 calls to "const" functions. */
3355 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
3357 int flags
= call_expr_flags (*expr_p
);
3358 if (flags
& (ECF_CONST
| ECF_PURE
)
3359 /* An infinite loop is considered a side effect. */
3360 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
3361 TREE_SIDE_EFFECTS (*expr_p
) = 0;
3364 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3365 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3366 form and delegate the creation of a GIMPLE_CALL to
3367 gimplify_modify_expr. This is always possible because when
3368 WANT_VALUE is true, the caller wants the result of this call into
3369 a temporary, which means that we will emit an INIT_EXPR in
3370 internal_get_tmp_var which will then be handled by
3371 gimplify_modify_expr. */
3374 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3375 have to do is replicate it as a GIMPLE_CALL tuple. */
3376 gimple_stmt_iterator gsi
;
3377 call
= gimple_build_call_from_tree (*expr_p
);
3378 gimple_call_set_fntype (call
, TREE_TYPE (fnptrtype
));
3379 notice_special_calls (call
);
3380 if (EXPR_CILK_SPAWN (*expr_p
))
3381 gimplify_cilk_detach (pre_p
);
3382 gimplify_seq_add_stmt (pre_p
, call
);
3383 gsi
= gsi_last (*pre_p
);
3384 maybe_fold_stmt (&gsi
);
3385 *expr_p
= NULL_TREE
;
3388 /* Remember the original function type. */
3389 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
3390 CALL_EXPR_FN (*expr_p
));
3395 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3396 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3398 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3399 condition is true or false, respectively. If null, we should generate
3400 our own to skip over the evaluation of this specific expression.
3402 LOCUS is the source location of the COND_EXPR.
3404 This function is the tree equivalent of do_jump.
3406 shortcut_cond_r should only be called by shortcut_cond_expr. */
3409 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
3412 tree local_label
= NULL_TREE
;
3413 tree t
, expr
= NULL
;
3415 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3416 retain the shortcut semantics. Just insert the gotos here;
3417 shortcut_cond_expr will append the real blocks later. */
3418 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3420 location_t new_locus
;
3422 /* Turn if (a && b) into
3424 if (a); else goto no;
3425 if (b) goto yes; else goto no;
3428 if (false_label_p
== NULL
)
3429 false_label_p
= &local_label
;
3431 /* Keep the original source location on the first 'if'. */
3432 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
3433 append_to_statement_list (t
, &expr
);
3435 /* Set the source location of the && on the second 'if'. */
3436 new_locus
= EXPR_HAS_LOCATION (pred
) ? EXPR_LOCATION (pred
) : locus
;
3437 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3439 append_to_statement_list (t
, &expr
);
3441 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3443 location_t new_locus
;
3445 /* Turn if (a || b) into
3448 if (b) goto yes; else goto no;
3451 if (true_label_p
== NULL
)
3452 true_label_p
= &local_label
;
3454 /* Keep the original source location on the first 'if'. */
3455 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
3456 append_to_statement_list (t
, &expr
);
3458 /* Set the source location of the || on the second 'if'. */
3459 new_locus
= EXPR_HAS_LOCATION (pred
) ? EXPR_LOCATION (pred
) : locus
;
3460 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3462 append_to_statement_list (t
, &expr
);
3464 else if (TREE_CODE (pred
) == COND_EXPR
3465 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
3466 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
3468 location_t new_locus
;
3470 /* As long as we're messing with gotos, turn if (a ? b : c) into
3472 if (b) goto yes; else goto no;
3474 if (c) goto yes; else goto no;
3476 Don't do this if one of the arms has void type, which can happen
3477 in C++ when the arm is throw. */
3479 /* Keep the original source location on the first 'if'. Set the source
3480 location of the ? on the second 'if'. */
3481 new_locus
= EXPR_HAS_LOCATION (pred
) ? EXPR_LOCATION (pred
) : locus
;
3482 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
3483 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
3484 false_label_p
, locus
),
3485 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
3486 false_label_p
, new_locus
));
3490 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
3491 build_and_jump (true_label_p
),
3492 build_and_jump (false_label_p
));
3493 SET_EXPR_LOCATION (expr
, locus
);
3498 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
3499 append_to_statement_list (t
, &expr
);
3505 /* Given a conditional expression EXPR with short-circuit boolean
3506 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3507 predicate apart into the equivalent sequence of conditionals. */
3510 shortcut_cond_expr (tree expr
)
3512 tree pred
= TREE_OPERAND (expr
, 0);
3513 tree then_
= TREE_OPERAND (expr
, 1);
3514 tree else_
= TREE_OPERAND (expr
, 2);
3515 tree true_label
, false_label
, end_label
, t
;
3517 tree
*false_label_p
;
3518 bool emit_end
, emit_false
, jump_over_else
;
3519 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3520 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3522 /* First do simple transformations. */
3525 /* If there is no 'else', turn
3528 if (a) if (b) then c. */
3529 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3531 /* Keep the original source location on the first 'if'. */
3532 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3533 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3534 /* Set the source location of the && on the second 'if'. */
3535 if (EXPR_HAS_LOCATION (pred
))
3536 SET_EXPR_LOCATION (expr
, EXPR_LOCATION (pred
));
3537 then_
= shortcut_cond_expr (expr
);
3538 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3539 pred
= TREE_OPERAND (pred
, 0);
3540 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
3541 SET_EXPR_LOCATION (expr
, locus
);
3547 /* If there is no 'then', turn
3550 if (a); else if (b); else d. */
3551 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3553 /* Keep the original source location on the first 'if'. */
3554 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3555 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3556 /* Set the source location of the || on the second 'if'. */
3557 if (EXPR_HAS_LOCATION (pred
))
3558 SET_EXPR_LOCATION (expr
, EXPR_LOCATION (pred
));
3559 else_
= shortcut_cond_expr (expr
);
3560 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3561 pred
= TREE_OPERAND (pred
, 0);
3562 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
3563 SET_EXPR_LOCATION (expr
, locus
);
3567 /* If we're done, great. */
3568 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
3569 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
3572 /* Otherwise we need to mess with gotos. Change
3575 if (a); else goto no;
3578 and recursively gimplify the condition. */
3580 true_label
= false_label
= end_label
= NULL_TREE
;
3582 /* If our arms just jump somewhere, hijack those labels so we don't
3583 generate jumps to jumps. */
3586 && TREE_CODE (then_
) == GOTO_EXPR
3587 && TREE_CODE (GOTO_DESTINATION (then_
)) == LABEL_DECL
)
3589 true_label
= GOTO_DESTINATION (then_
);
3595 && TREE_CODE (else_
) == GOTO_EXPR
3596 && TREE_CODE (GOTO_DESTINATION (else_
)) == LABEL_DECL
)
3598 false_label
= GOTO_DESTINATION (else_
);
3603 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3605 true_label_p
= &true_label
;
3607 true_label_p
= NULL
;
3609 /* The 'else' branch also needs a label if it contains interesting code. */
3610 if (false_label
|| else_se
)
3611 false_label_p
= &false_label
;
3613 false_label_p
= NULL
;
3615 /* If there was nothing else in our arms, just forward the label(s). */
3616 if (!then_se
&& !else_se
)
3617 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3618 EXPR_LOC_OR_LOC (expr
, input_location
));
3620 /* If our last subexpression already has a terminal label, reuse it. */
3622 t
= expr_last (else_
);
3624 t
= expr_last (then_
);
3627 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
3628 end_label
= LABEL_EXPR_LABEL (t
);
3630 /* If we don't care about jumping to the 'else' branch, jump to the end
3631 if the condition is false. */
3633 false_label_p
= &end_label
;
3635 /* We only want to emit these labels if we aren't hijacking them. */
3636 emit_end
= (end_label
== NULL_TREE
);
3637 emit_false
= (false_label
== NULL_TREE
);
3639 /* We only emit the jump over the else clause if we have to--if the
3640 then clause may fall through. Otherwise we can wind up with a
3641 useless jump and a useless label at the end of gimplified code,
3642 which will cause us to think that this conditional as a whole
3643 falls through even if it doesn't. If we then inline a function
3644 which ends with such a condition, that can cause us to issue an
3645 inappropriate warning about control reaching the end of a
3646 non-void function. */
3647 jump_over_else
= block_may_fallthru (then_
);
3649 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3650 EXPR_LOC_OR_LOC (expr
, input_location
));
3653 append_to_statement_list (pred
, &expr
);
3655 append_to_statement_list (then_
, &expr
);
3660 tree last
= expr_last (expr
);
3661 t
= build_and_jump (&end_label
);
3662 if (EXPR_HAS_LOCATION (last
))
3663 SET_EXPR_LOCATION (t
, EXPR_LOCATION (last
));
3664 append_to_statement_list (t
, &expr
);
3668 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
3669 append_to_statement_list (t
, &expr
);
3671 append_to_statement_list (else_
, &expr
);
3673 if (emit_end
&& end_label
)
3675 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
3676 append_to_statement_list (t
, &expr
);
3682 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3685 gimple_boolify (tree expr
)
3687 tree type
= TREE_TYPE (expr
);
3688 location_t loc
= EXPR_LOCATION (expr
);
3690 if (TREE_CODE (expr
) == NE_EXPR
3691 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
3692 && integer_zerop (TREE_OPERAND (expr
, 1)))
3694 tree call
= TREE_OPERAND (expr
, 0);
3695 tree fn
= get_callee_fndecl (call
);
3697 /* For __builtin_expect ((long) (x), y) recurse into x as well
3698 if x is truth_value_p. */
3700 && DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
3701 && DECL_FUNCTION_CODE (fn
) == BUILT_IN_EXPECT
3702 && call_expr_nargs (call
) == 2)
3704 tree arg
= CALL_EXPR_ARG (call
, 0);
3707 if (TREE_CODE (arg
) == NOP_EXPR
3708 && TREE_TYPE (arg
) == TREE_TYPE (call
))
3709 arg
= TREE_OPERAND (arg
, 0);
3710 if (truth_value_p (TREE_CODE (arg
)))
3712 arg
= gimple_boolify (arg
);
3713 CALL_EXPR_ARG (call
, 0)
3714 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
3720 switch (TREE_CODE (expr
))
3722 case TRUTH_AND_EXPR
:
3724 case TRUTH_XOR_EXPR
:
3725 case TRUTH_ANDIF_EXPR
:
3726 case TRUTH_ORIF_EXPR
:
3727 /* Also boolify the arguments of truth exprs. */
3728 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
3731 case TRUTH_NOT_EXPR
:
3732 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3734 /* These expressions always produce boolean results. */
3735 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3736 TREE_TYPE (expr
) = boolean_type_node
;
3740 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
3742 case annot_expr_ivdep_kind
:
3743 case annot_expr_no_vector_kind
:
3744 case annot_expr_vector_kind
:
3745 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3746 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3747 TREE_TYPE (expr
) = boolean_type_node
;
3754 if (COMPARISON_CLASS_P (expr
))
3756 /* There expressions always prduce boolean results. */
3757 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3758 TREE_TYPE (expr
) = boolean_type_node
;
3761 /* Other expressions that get here must have boolean values, but
3762 might need to be converted to the appropriate mode. */
3763 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
3765 return fold_convert_loc (loc
, boolean_type_node
, expr
);
3769 /* Given a conditional expression *EXPR_P without side effects, gimplify
3770 its operands. New statements are inserted to PRE_P. */
3772 static enum gimplify_status
3773 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
3775 tree expr
= *expr_p
, cond
;
3776 enum gimplify_status ret
, tret
;
3777 enum tree_code code
;
3779 cond
= gimple_boolify (COND_EXPR_COND (expr
));
3781 /* We need to handle && and || specially, as their gimplification
3782 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3783 code
= TREE_CODE (cond
);
3784 if (code
== TRUTH_ANDIF_EXPR
)
3785 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
3786 else if (code
== TRUTH_ORIF_EXPR
)
3787 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
3788 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_condexpr
, fb_rvalue
);
3789 COND_EXPR_COND (*expr_p
) = cond
;
3791 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
3792 is_gimple_val
, fb_rvalue
);
3793 ret
= MIN (ret
, tret
);
3794 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
3795 is_gimple_val
, fb_rvalue
);
3797 return MIN (ret
, tret
);
3800 /* Return true if evaluating EXPR could trap.
3801 EXPR is GENERIC, while tree_could_trap_p can be called
3805 generic_expr_could_trap_p (tree expr
)
3809 if (!expr
|| is_gimple_val (expr
))
3812 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
3815 n
= TREE_OPERAND_LENGTH (expr
);
3816 for (i
= 0; i
< n
; i
++)
3817 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
3823 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3832 The second form is used when *EXPR_P is of type void.
3834 PRE_P points to the list where side effects that must happen before
3835 *EXPR_P should be stored. */
3837 static enum gimplify_status
3838 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
3840 tree expr
= *expr_p
;
3841 tree type
= TREE_TYPE (expr
);
3842 location_t loc
= EXPR_LOCATION (expr
);
3843 tree tmp
, arm1
, arm2
;
3844 enum gimplify_status ret
;
3845 tree label_true
, label_false
, label_cont
;
3846 bool have_then_clause_p
, have_else_clause_p
;
3848 enum tree_code pred_code
;
3849 gimple_seq seq
= NULL
;
3851 /* If this COND_EXPR has a value, copy the values into a temporary within
3853 if (!VOID_TYPE_P (type
))
3855 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
3858 /* If either an rvalue is ok or we do not require an lvalue, create the
3859 temporary. But we cannot do that if the type is addressable. */
3860 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
3861 && !TREE_ADDRESSABLE (type
))
3863 if (gimplify_ctxp
->allow_rhs_cond_expr
3864 /* If either branch has side effects or could trap, it can't be
3865 evaluated unconditionally. */
3866 && !TREE_SIDE_EFFECTS (then_
)
3867 && !generic_expr_could_trap_p (then_
)
3868 && !TREE_SIDE_EFFECTS (else_
)
3869 && !generic_expr_could_trap_p (else_
))
3870 return gimplify_pure_cond_expr (expr_p
, pre_p
);
3872 tmp
= create_tmp_var (type
, "iftmp");
3876 /* Otherwise, only create and copy references to the values. */
3879 type
= build_pointer_type (type
);
3881 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
3882 then_
= build_fold_addr_expr_loc (loc
, then_
);
3884 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
3885 else_
= build_fold_addr_expr_loc (loc
, else_
);
3888 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
3890 tmp
= create_tmp_var (type
, "iftmp");
3891 result
= build_simple_mem_ref_loc (loc
, tmp
);
3894 /* Build the new then clause, `tmp = then_;'. But don't build the
3895 assignment if the value is void; in C++ it can be if it's a throw. */
3896 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
3897 TREE_OPERAND (expr
, 1) = build2 (MODIFY_EXPR
, type
, tmp
, then_
);
3899 /* Similarly, build the new else clause, `tmp = else_;'. */
3900 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
3901 TREE_OPERAND (expr
, 2) = build2 (MODIFY_EXPR
, type
, tmp
, else_
);
3903 TREE_TYPE (expr
) = void_type_node
;
3904 recalculate_side_effects (expr
);
3906 /* Move the COND_EXPR to the prequeue. */
3907 gimplify_stmt (&expr
, pre_p
);
3913 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3914 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
3915 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
3916 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
3918 /* Make sure the condition has BOOLEAN_TYPE. */
3919 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3921 /* Break apart && and || conditions. */
3922 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
3923 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
3925 expr
= shortcut_cond_expr (expr
);
3927 if (expr
!= *expr_p
)
3931 /* We can't rely on gimplify_expr to re-gimplify the expanded
3932 form properly, as cleanups might cause the target labels to be
3933 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3934 set up a conditional context. */
3935 gimple_push_condition ();
3936 gimplify_stmt (expr_p
, &seq
);
3937 gimple_pop_condition (pre_p
);
3938 gimple_seq_add_seq (pre_p
, seq
);
3944 /* Now do the normal gimplification. */
3946 /* Gimplify condition. */
3947 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
, is_gimple_condexpr
,
3949 if (ret
== GS_ERROR
)
3951 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
3953 gimple_push_condition ();
3955 have_then_clause_p
= have_else_clause_p
= false;
3956 if (TREE_OPERAND (expr
, 1) != NULL
3957 && TREE_CODE (TREE_OPERAND (expr
, 1)) == GOTO_EXPR
3958 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr
, 1))) == LABEL_DECL
3959 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr
, 1)))
3960 == current_function_decl
)
3961 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3962 have different locations, otherwise we end up with incorrect
3963 location information on the branches. */
3965 || !EXPR_HAS_LOCATION (expr
)
3966 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr
, 1))
3967 || EXPR_LOCATION (expr
) == EXPR_LOCATION (TREE_OPERAND (expr
, 1))))
3969 label_true
= GOTO_DESTINATION (TREE_OPERAND (expr
, 1));
3970 have_then_clause_p
= true;
3973 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
3974 if (TREE_OPERAND (expr
, 2) != NULL
3975 && TREE_CODE (TREE_OPERAND (expr
, 2)) == GOTO_EXPR
3976 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr
, 2))) == LABEL_DECL
3977 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr
, 2)))
3978 == current_function_decl
)
3979 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3980 have different locations, otherwise we end up with incorrect
3981 location information on the branches. */
3983 || !EXPR_HAS_LOCATION (expr
)
3984 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr
, 2))
3985 || EXPR_LOCATION (expr
) == EXPR_LOCATION (TREE_OPERAND (expr
, 2))))
3987 label_false
= GOTO_DESTINATION (TREE_OPERAND (expr
, 2));
3988 have_else_clause_p
= true;
3991 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
3993 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
3995 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
3997 gimple_set_no_warning (cond_stmt
, TREE_NO_WARNING (COND_EXPR_COND (expr
)));
3998 gimplify_seq_add_stmt (&seq
, cond_stmt
);
3999 gimple_stmt_iterator gsi
= gsi_last (seq
);
4000 maybe_fold_stmt (&gsi
);
4002 label_cont
= NULL_TREE
;
4003 if (!have_then_clause_p
)
4005 /* For if (...) {} else { code; } put label_true after
4007 if (TREE_OPERAND (expr
, 1) == NULL_TREE
4008 && !have_else_clause_p
4009 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
4010 label_cont
= label_true
;
4013 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
4014 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
4015 /* For if (...) { code; } else {} or
4016 if (...) { code; } else goto label; or
4017 if (...) { code; return; } else { ... }
4018 label_cont isn't needed. */
4019 if (!have_else_clause_p
4020 && TREE_OPERAND (expr
, 2) != NULL_TREE
4021 && gimple_seq_may_fallthru (seq
))
4024 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
4026 g
= gimple_build_goto (label_cont
);
4028 /* GIMPLE_COND's are very low level; they have embedded
4029 gotos. This particular embedded goto should not be marked
4030 with the location of the original COND_EXPR, as it would
4031 correspond to the COND_EXPR's condition, not the ELSE or the
4032 THEN arms. To avoid marking it with the wrong location, flag
4033 it as "no location". */
4034 gimple_set_do_not_emit_location (g
);
4036 gimplify_seq_add_stmt (&seq
, g
);
4040 if (!have_else_clause_p
)
4042 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
4043 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
4046 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
4048 gimple_pop_condition (pre_p
);
4049 gimple_seq_add_seq (pre_p
, seq
);
4051 if (ret
== GS_ERROR
)
4053 else if (have_then_clause_p
|| have_else_clause_p
)
4057 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4058 expr
= TREE_OPERAND (expr
, 0);
4059 gimplify_stmt (&expr
, pre_p
);
4066 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4067 to be marked addressable.
4069 We cannot rely on such an expression being directly markable if a temporary
4070 has been created by the gimplification. In this case, we create another
4071 temporary and initialize it with a copy, which will become a store after we
4072 mark it addressable. This can happen if the front-end passed us something
4073 that it could not mark addressable yet, like a Fortran pass-by-reference
4074 parameter (int) floatvar. */
4077 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
4079 while (handled_component_p (*expr_p
))
4080 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4081 if (is_gimple_reg (*expr_p
))
4083 /* Do not allow an SSA name as the temporary. */
4084 tree var
= get_initialized_tmp_var (*expr_p
, seq_p
, NULL
, false);
4085 DECL_GIMPLE_REG_P (var
) = 0;
4090 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4091 a call to __builtin_memcpy. */
4093 static enum gimplify_status
4094 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4097 tree t
, to
, to_ptr
, from
, from_ptr
;
4099 location_t loc
= EXPR_LOCATION (*expr_p
);
4101 to
= TREE_OPERAND (*expr_p
, 0);
4102 from
= TREE_OPERAND (*expr_p
, 1);
4104 /* Mark the RHS addressable. Beware that it may not be possible to do so
4105 directly if a temporary has been created by the gimplification. */
4106 prepare_gimple_addressable (&from
, seq_p
);
4108 mark_addressable (from
);
4109 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4110 gimplify_arg (&from_ptr
, seq_p
, loc
);
4112 mark_addressable (to
);
4113 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4114 gimplify_arg (&to_ptr
, seq_p
, loc
);
4116 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4118 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
4122 /* tmp = memcpy() */
4123 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4124 gimple_call_set_lhs (gs
, t
);
4125 gimplify_seq_add_stmt (seq_p
, gs
);
4127 *expr_p
= build_simple_mem_ref (t
);
4131 gimplify_seq_add_stmt (seq_p
, gs
);
4136 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4137 a call to __builtin_memset. In this case we know that the RHS is
4138 a CONSTRUCTOR with an empty element list. */
4140 static enum gimplify_status
4141 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4144 tree t
, from
, to
, to_ptr
;
4146 location_t loc
= EXPR_LOCATION (*expr_p
);
4148 /* Assert our assumptions, to abort instead of producing wrong code
4149 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4150 not be immediately exposed. */
4151 from
= TREE_OPERAND (*expr_p
, 1);
4152 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4153 from
= TREE_OPERAND (from
, 0);
4155 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4156 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4159 to
= TREE_OPERAND (*expr_p
, 0);
4161 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4162 gimplify_arg (&to_ptr
, seq_p
, loc
);
4163 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4165 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4169 /* tmp = memset() */
4170 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4171 gimple_call_set_lhs (gs
, t
);
4172 gimplify_seq_add_stmt (seq_p
, gs
);
4174 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
4178 gimplify_seq_add_stmt (seq_p
, gs
);
4183 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4184 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4185 assignment. Return non-null if we detect a potential overlap. */
4187 struct gimplify_init_ctor_preeval_data
4189 /* The base decl of the lhs object. May be NULL, in which case we
4190 have to assume the lhs is indirect. */
4193 /* The alias set of the lhs object. */
4194 alias_set_type lhs_alias_set
;
4198 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4200 struct gimplify_init_ctor_preeval_data
*data
4201 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4204 /* If we find the base object, obviously we have overlap. */
4205 if (data
->lhs_base_decl
== t
)
4208 /* If the constructor component is indirect, determine if we have a
4209 potential overlap with the lhs. The only bits of information we
4210 have to go on at this point are addressability and alias sets. */
4211 if ((INDIRECT_REF_P (t
)
4212 || TREE_CODE (t
) == MEM_REF
)
4213 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4214 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
4217 /* If the constructor component is a call, determine if it can hide a
4218 potential overlap with the lhs through an INDIRECT_REF like above.
4219 ??? Ugh - this is completely broken. In fact this whole analysis
4220 doesn't look conservative. */
4221 if (TREE_CODE (t
) == CALL_EXPR
)
4223 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
4225 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
4226 if (POINTER_TYPE_P (TREE_VALUE (type
))
4227 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4228 && alias_sets_conflict_p (data
->lhs_alias_set
,
4230 (TREE_TYPE (TREE_VALUE (type
)))))
4234 if (IS_TYPE_OR_DECL_P (t
))
4239 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4240 force values that overlap with the lhs (as described by *DATA)
4241 into temporaries. */
4244 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4245 struct gimplify_init_ctor_preeval_data
*data
)
4247 enum gimplify_status one
;
4249 /* If the value is constant, then there's nothing to pre-evaluate. */
4250 if (TREE_CONSTANT (*expr_p
))
4252 /* Ensure it does not have side effects, it might contain a reference to
4253 the object we're initializing. */
4254 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
4258 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4259 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
4262 /* Recurse for nested constructors. */
4263 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
4265 unsigned HOST_WIDE_INT ix
;
4266 constructor_elt
*ce
;
4267 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
4269 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
4270 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
4275 /* If this is a variable sized type, we must remember the size. */
4276 maybe_with_size_expr (expr_p
);
4278 /* Gimplify the constructor element to something appropriate for the rhs
4279 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4280 the gimplifier will consider this a store to memory. Doing this
4281 gimplification now means that we won't have to deal with complicated
4282 language-specific trees, nor trees like SAVE_EXPR that can induce
4283 exponential search behavior. */
4284 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
4285 if (one
== GS_ERROR
)
4291 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4292 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4293 always be true for all scalars, since is_gimple_mem_rhs insists on a
4294 temporary variable for them. */
4295 if (DECL_P (*expr_p
))
4298 /* If this is of variable size, we have no choice but to assume it doesn't
4299 overlap since we can't make a temporary for it. */
4300 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
4303 /* Otherwise, we must search for overlap ... */
4304 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
4307 /* ... and if found, force the value into a temporary. */
4308 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
4311 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4312 a RANGE_EXPR in a CONSTRUCTOR for an array.
4316 object[var] = value;
4323 We increment var _after_ the loop exit check because we might otherwise
4324 fail if upper == TYPE_MAX_VALUE (type for upper).
4326 Note that we never have to deal with SAVE_EXPRs here, because this has
4327 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4329 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
4330 gimple_seq
*, bool);
4333 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
4334 tree value
, tree array_elt_type
,
4335 gimple_seq
*pre_p
, bool cleared
)
4337 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
4338 tree var
, var_type
, cref
, tmp
;
4340 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
4341 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
4342 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
4344 /* Create and initialize the index variable. */
4345 var_type
= TREE_TYPE (upper
);
4346 var
= create_tmp_var (var_type
);
4347 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
4349 /* Add the loop entry label. */
4350 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
4352 /* Build the reference. */
4353 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4354 var
, NULL_TREE
, NULL_TREE
);
4356 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4357 the store. Otherwise just assign value to the reference. */
4359 if (TREE_CODE (value
) == CONSTRUCTOR
)
4360 /* NB we might have to call ourself recursively through
4361 gimplify_init_ctor_eval if the value is a constructor. */
4362 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4365 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
4367 /* We exit the loop when the index var is equal to the upper bound. */
4368 gimplify_seq_add_stmt (pre_p
,
4369 gimple_build_cond (EQ_EXPR
, var
, upper
,
4370 loop_exit_label
, fall_thru_label
));
4372 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
4374 /* Otherwise, increment the index var... */
4375 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
4376 fold_convert (var_type
, integer_one_node
));
4377 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
4379 /* ...and jump back to the loop entry. */
4380 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
4382 /* Add the loop exit label. */
4383 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
4386 /* Return true if FDECL is accessing a field that is zero sized. */
4389 zero_sized_field_decl (const_tree fdecl
)
4391 if (TREE_CODE (fdecl
) == FIELD_DECL
&& DECL_SIZE (fdecl
)
4392 && integer_zerop (DECL_SIZE (fdecl
)))
4397 /* Return true if TYPE is zero sized. */
4400 zero_sized_type (const_tree type
)
4402 if (AGGREGATE_TYPE_P (type
) && TYPE_SIZE (type
)
4403 && integer_zerop (TYPE_SIZE (type
)))
4408 /* A subroutine of gimplify_init_constructor. Generate individual
4409 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4410 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4411 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4415 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
4416 gimple_seq
*pre_p
, bool cleared
)
4418 tree array_elt_type
= NULL
;
4419 unsigned HOST_WIDE_INT ix
;
4420 tree purpose
, value
;
4422 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
4423 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
4425 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
4429 /* NULL values are created above for gimplification errors. */
4433 if (cleared
&& initializer_zerop (value
))
4436 /* ??? Here's to hoping the front end fills in all of the indices,
4437 so we don't have to figure out what's missing ourselves. */
4438 gcc_assert (purpose
);
4440 /* Skip zero-sized fields, unless value has side-effects. This can
4441 happen with calls to functions returning a zero-sized type, which
4442 we shouldn't discard. As a number of downstream passes don't
4443 expect sets of zero-sized fields, we rely on the gimplification of
4444 the MODIFY_EXPR we make below to drop the assignment statement. */
4445 if (! TREE_SIDE_EFFECTS (value
) && zero_sized_field_decl (purpose
))
4448 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4450 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4452 tree lower
= TREE_OPERAND (purpose
, 0);
4453 tree upper
= TREE_OPERAND (purpose
, 1);
4455 /* If the lower bound is equal to upper, just treat it as if
4456 upper was the index. */
4457 if (simple_cst_equal (lower
, upper
))
4461 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
4462 array_elt_type
, pre_p
, cleared
);
4469 /* Do not use bitsizetype for ARRAY_REF indices. */
4470 if (TYPE_DOMAIN (TREE_TYPE (object
)))
4472 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
4474 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4475 purpose
, NULL_TREE
, NULL_TREE
);
4479 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
4480 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
4481 unshare_expr (object
), purpose
, NULL_TREE
);
4484 if (TREE_CODE (value
) == CONSTRUCTOR
4485 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
4486 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4490 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
4491 gimplify_and_add (init
, pre_p
);
4497 /* Return the appropriate RHS predicate for this LHS. */
4500 rhs_predicate_for (tree lhs
)
4502 if (is_gimple_reg (lhs
))
4503 return is_gimple_reg_rhs_or_call
;
4505 return is_gimple_mem_rhs_or_call
;
4508 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4509 before the LHS has been gimplified. */
4511 static gimple_predicate
4512 initial_rhs_predicate_for (tree lhs
)
4514 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
4515 return is_gimple_reg_rhs_or_call
;
4517 return is_gimple_mem_rhs_or_call
;
4520 /* Gimplify a C99 compound literal expression. This just means adding
4521 the DECL_EXPR before the current statement and using its anonymous
4524 static enum gimplify_status
4525 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
4526 bool (*gimple_test_f
) (tree
),
4527 fallback_t fallback
)
4529 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
4530 tree decl
= DECL_EXPR_DECL (decl_s
);
4531 tree init
= DECL_INITIAL (decl
);
4532 /* Mark the decl as addressable if the compound literal
4533 expression is addressable now, otherwise it is marked too late
4534 after we gimplify the initialization expression. */
4535 if (TREE_ADDRESSABLE (*expr_p
))
4536 TREE_ADDRESSABLE (decl
) = 1;
4537 /* Otherwise, if we don't need an lvalue and have a literal directly
4538 substitute it. Check if it matches the gimple predicate, as
4539 otherwise we'd generate a new temporary, and we can as well just
4540 use the decl we already have. */
4541 else if (!TREE_ADDRESSABLE (decl
)
4543 && (fallback
& fb_lvalue
) == 0
4544 && gimple_test_f (init
))
4550 /* Preliminarily mark non-addressed complex variables as eligible
4551 for promotion to gimple registers. We'll transform their uses
4553 if ((TREE_CODE (TREE_TYPE (decl
)) == COMPLEX_TYPE
4554 || TREE_CODE (TREE_TYPE (decl
)) == VECTOR_TYPE
)
4555 && !TREE_THIS_VOLATILE (decl
)
4556 && !needs_to_live_in_memory (decl
))
4557 DECL_GIMPLE_REG_P (decl
) = 1;
4559 /* If the decl is not addressable, then it is being used in some
4560 expression or on the right hand side of a statement, and it can
4561 be put into a readonly data section. */
4562 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
4563 TREE_READONLY (decl
) = 1;
4565 /* This decl isn't mentioned in the enclosing block, so add it to the
4566 list of temps. FIXME it seems a bit of a kludge to say that
4567 anonymous artificial vars aren't pushed, but everything else is. */
4568 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
4569 gimple_add_tmp_var (decl
);
4571 gimplify_and_add (decl_s
, pre_p
);
4576 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4577 return a new CONSTRUCTOR if something changed. */
/* NOTE(review): extraction-garbled text; embedded line numbers jump
   (4578-4579, 4585, 4587, 4593, 4597, 4600, 4603, 4605-4606, 4608,
   4612 missing), so braces and some conditions are absent.  Code text
   is left byte-identical; only comments are added.
   Visible algorithm: walk each constructor element, recurse into
   nested CONSTRUCTORs, and replace a COMPOUND_LITERAL_EXPR element by
   its decl's CONSTRUCTOR initializer when neither the literal nor the
   decl is addressable.  The ctor is copied lazily (copy_node +
   vec_safe_copy) the first time an element actually changes, so the
   original ORIG_CTOR is never mutated.  */
4580 optimize_compound_literals_in_ctor (tree orig_ctor
)
4582 tree ctor
= orig_ctor
;
4583 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
4584 unsigned int idx
, num
= vec_safe_length (elts
);
4586 for (idx
= 0; idx
< num
; idx
++)
4588 tree value
= (*elts
)[idx
].value
;
4589 tree newval
= value
;
4590 if (TREE_CODE (value
) == CONSTRUCTOR
)
4591 newval
= optimize_compound_literals_in_ctor (value
);
4592 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
4594 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
4595 tree decl
= DECL_EXPR_DECL (decl_s
);
4596 tree init
= DECL_INITIAL (decl
);
4598 if (!TREE_ADDRESSABLE (value
)
4599 && !TREE_ADDRESSABLE (decl
)
4601 && TREE_CODE (init
) == CONSTRUCTOR
)
4602 newval
= optimize_compound_literals_in_ctor (init
);
/* Element unchanged: nothing to write back (the continue/skip falls in
   the missing lines after 4604).  */
4604 if (newval
== value
)
/* First change: lazily copy the ctor and its element vector so the
   caller's tree is not mutated.  */
4607 if (ctor
== orig_ctor
)
4609 ctor
= copy_node (orig_ctor
);
4610 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
4611 elts
= CONSTRUCTOR_ELTS (ctor
);
4613 (*elts
)[idx
].value
= newval
;
/* NOTE(review): the final "return ctor;" and closing brace (lines
   4614+) are missing from this excerpt.  */
4618 /* A subroutine of gimplify_modify_expr. Break out elements of a
4619 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4621 Note that we still need to clear any elements that don't have explicit
4622 initializers, so if not all elements are initialized we keep the
4623 original MODIFY_EXPR, we just remove all of the constructor elements.
4625 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4626 GS_ERROR if we would have to create a temporary when gimplifying
4627 this constructor. Otherwise, return GS_OK.
4629 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
/* NOTE(review): extraction-garbled text with many embedded line-number
   gaps (case labels, braces, and whole statements are missing, e.g.
   4654-4659, 4661-4662, 4673-4676, 4689, 4691, 4693, 4699, 4708,
   4710-4712, 4719, 4724, 4734, 4740-4743, most of the switch's case
   labels).  Code text left byte-identical; only comments are added.
   The function dispatches on TREE_CODE (type): an aggregate arm
   (RECORD/UNION/ARRAY -- only QUAL_UNION_TYPE's label is visible),
   then apparently a complex arm and a vector arm (their case labels
   are among the missing lines).  */
4631 static enum gimplify_status
4632 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4633 bool want_value
, bool notify_temp_creation
)
4635 tree object
, ctor
, type
;
4636 enum gimplify_status ret
;
4637 vec
<constructor_elt
, va_gc
> *elts
;
4639 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
/* In the real-gimplification mode, first gimplify the LHS to an lvalue.  */
4641 if (!notify_temp_creation
)
4643 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
4644 is_gimple_lvalue
, fb_lvalue
)
4645 if (ret
== GS_ERROR
)
4649 object
= TREE_OPERAND (*expr_p
, 0);
/* Fold compound-literal elements inside the RHS ctor first.  */
4650 ctor
= TREE_OPERAND (*expr_p
, 1)
4651 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
4652 type
= TREE_TYPE (ctor
);
4653 elts
= CONSTRUCTOR_ELTS (ctor
);
4656 switch (TREE_CODE (type
))
4660 case QUAL_UNION_TYPE
:
4663 struct gimplify_init_ctor_preeval_data preeval_data
;
4664 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
4665 bool cleared
, complete_p
, valid_const_initializer
;
4667 /* Aggregate types must lower constructors to initialization of
4668 individual elements. The exception is that a CONSTRUCTOR node
4669 with no elements indicates zero-initialization of the whole. */
4670 if (vec_safe_is_empty (elts
))
4672 if (notify_temp_creation
)
4677 /* Fetch information about the constructor to direct later processing.
4678 We might want to make static versions of it in various cases, and
4679 can only do so if it known to be a valid constant initializer. */
4680 valid_const_initializer
4681 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
4682 &num_ctor_elements
, &complete_p
);
4684 /* If a const aggregate variable is being initialized, then it
4685 should never be a lose to promote the variable to be static. */
4686 if (valid_const_initializer
4687 && num_nonzero_elements
> 1
4688 && TREE_READONLY (object
)
4690 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
)))
4692 if (notify_temp_creation
)
/* Promote the readonly object to a static with the ctor as its
   DECL_INITIAL; the assignment itself is then dropped below.  */
4694 DECL_INITIAL (object
) = ctor
;
4695 TREE_STATIC (object
) = 1;
4696 if (!DECL_NAME (object
))
4697 DECL_NAME (object
) = create_tmp_var_name ("C");
4698 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
4700 /* ??? C++ doesn't automatically append a .<number> to the
4701 assembler name, and even when it does, it looks at FE private
4702 data structures to figure out what that number should be,
4703 which are not set for this variable. I suppose this is
4704 important for local statics for inline functions, which aren't
4705 "local" in the object file sense. So in order to get a unique
4706 TU-local symbol, we must invoke the lhd version now. */
4707 lhd_set_decl_assembler_name (object
);
4709 *expr_p
= NULL_TREE
;
4713 /* If there are "lots" of initialized elements, even discounting
4714 those that are not address constants (and thus *must* be
4715 computed at runtime), then partition the constructor into
4716 constant and non-constant parts. Block copy the constant
4717 parts in, then generate code for the non-constant parts. */
4718 /* TODO. There's code in cp/typeck.c to do this. */
4720 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
4721 /* store_constructor will ignore the clearing of variable-sized
4722 objects. Initializers for such objects must explicitly set
4723 every field that needs to be set. */
4725 else if (!complete_p
&& !CONSTRUCTOR_NO_CLEARING (ctor
))
4726 /* If the constructor isn't complete, clear the whole object
4727 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4729 ??? This ought not to be needed. For any element not present
4730 in the initializer, we should simply set them to zero. Except
4731 we'd need to *find* the elements that are not present, and that
4732 requires trickery to avoid quadratic compile-time behavior in
4733 large cases or excessive memory use in small cases. */
4735 else if (num_ctor_elements
- num_nonzero_elements
4736 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
4737 && num_nonzero_elements
< num_ctor_elements
/ 4)
4738 /* If there are "lots" of zeros, it's more efficient to clear
4739 the memory and then set the nonzero elements. */
/* NOTE(review): the assignments to CLEARED in each branch above fall
   in the missing lines -- confirm against upstream gimplify.c.  */
4744 /* If there are "lots" of initialized elements, and all of them
4745 are valid address constants, then the entire initializer can
4746 be dropped to memory, and then memcpy'd out. Don't do this
4747 for sparse arrays, though, as it's more efficient to follow
4748 the standard CONSTRUCTOR behavior of memset followed by
4749 individual element initialization. Also don't do this for small
4750 all-zero initializers (which aren't big enough to merit
4751 clearing), and don't try to make bitwise copies of
4752 TREE_ADDRESSABLE types.
4754 We cannot apply such transformation when compiling chkp static
4755 initializer because creation of initializer image in the memory
4756 will require static initialization of bounds for it. It should
4757 result in another gimplification of similar initializer and we
4758 may fall into infinite loop. */
4759 if (valid_const_initializer
4760 && !(cleared
|| num_nonzero_elements
== 0)
4761 && !TREE_ADDRESSABLE (type
)
4762 && (!current_function_decl
4763 || !lookup_attribute ("chkp ctor",
4764 DECL_ATTRIBUTES (current_function_decl
))))
4766 HOST_WIDE_INT size
= int_size_in_bytes (type
);
4769 /* ??? We can still get unbounded array types, at least
4770 from the C++ front end. This seems wrong, but attempt
4771 to work around it for now. */
4774 size
= int_size_in_bytes (TREE_TYPE (object
));
4776 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
4779 /* Find the maximum alignment we can assume for the object. */
4780 /* ??? Make use of DECL_OFFSET_ALIGN. */
4781 if (DECL_P (object
))
4782 align
= DECL_ALIGN (object
);
4784 align
= TYPE_ALIGN (type
);
4786 /* Do a block move either if the size is so small as to make
4787 each individual move a sub-unit move on average, or if it
4788 is so large as to make individual moves inefficient. */
4790 && num_nonzero_elements
> 1
4791 && (size
< num_nonzero_elements
4792 || !can_move_by_pieces (size
, align
)))
4794 if (notify_temp_creation
)
/* Drop the whole ctor to a static constant and assign it in one
   block move (possibly through a VIEW_CONVERT_EXPR).  */
4797 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
4798 ctor
= tree_output_constant_def (ctor
);
4799 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
4800 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
4801 TREE_OPERAND (*expr_p
, 1) = ctor
;
4803 /* This is no longer an assignment of a CONSTRUCTOR, but
4804 we still may have processing to do on the LHS. So
4805 pretend we didn't do anything here to let that happen. */
4806 return GS_UNHANDLED
;
4810 /* If the target is volatile, we have non-zero elements and more than
4811 one field to assign, initialize the target from a temporary. */
4812 if (TREE_THIS_VOLATILE (object
)
4813 && !TREE_ADDRESSABLE (type
)
4814 && num_nonzero_elements
> 0
4815 && vec_safe_length (elts
) > 1)
4817 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
4818 TREE_OPERAND (*expr_p
, 0) = temp
;
4819 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
4821 build2 (MODIFY_EXPR
, void_type_node
,
4826 if (notify_temp_creation
)
4829 /* If there are nonzero elements and if needed, pre-evaluate to capture
4830 elements overlapping with the lhs into temporaries. We must do this
4831 before clearing to fetch the values before they are zeroed-out. */
4832 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
4834 preeval_data
.lhs_base_decl
= get_base_address (object
);
4835 if (!DECL_P (preeval_data
.lhs_base_decl
))
4836 preeval_data
.lhs_base_decl
= NULL
;
4837 preeval_data
.lhs_alias_set
= get_alias_set (object
);
4839 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
4840 pre_p
, post_p
, &preeval_data
);
4843 bool ctor_has_side_effects_p
4844 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
4848 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4849 Note that we still have to gimplify, in order to handle the
4850 case of variable sized types. Avoid shared tree structures. */
4851 CONSTRUCTOR_ELTS (ctor
) = NULL
;
4852 TREE_SIDE_EFFECTS (ctor
) = 0;
4853 object
= unshare_expr (object
);
4854 gimplify_stmt (expr_p
, pre_p
);
4857 /* If we have not block cleared the object, or if there are nonzero
4858 elements in the constructor, or if the constructor has side effects,
4859 add assignments to the individual scalar fields of the object. */
4861 || num_nonzero_elements
> 0
4862 || ctor_has_side_effects_p
)
4863 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
4865 *expr_p
= NULL_TREE
;
/* NOTE(review): presumably the COMPLEX_TYPE arm of the switch starts
   here -- its case label is among the missing lines (4866-4872).  */
4873 if (notify_temp_creation
)
4876 /* Extract the real and imaginary parts out of the ctor. */
4877 gcc_assert (elts
->length () == 2);
4878 r
= (*elts
)[0].value
;
4879 i
= (*elts
)[1].value
;
4880 if (r
== NULL
|| i
== NULL
)
4882 tree zero
= build_zero_cst (TREE_TYPE (type
));
4889 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4890 represent creation of a complex value. */
4891 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
4893 ctor
= build_complex (type
, r
, i
);
4894 TREE_OPERAND (*expr_p
, 1) = ctor
;
4898 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
4899 TREE_OPERAND (*expr_p
, 1) = ctor
;
4900 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
4903 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
/* NOTE(review): presumably the VECTOR_TYPE arm starts here -- its case
   label is among the missing lines (4904-4910).  */
4911 unsigned HOST_WIDE_INT ix
;
4912 constructor_elt
*ce
;
4914 if (notify_temp_creation
)
4917 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4918 if (TREE_CONSTANT (ctor
))
4920 bool constant_p
= true;
4923 /* Even when ctor is constant, it might contain non-*_CST
4924 elements, such as addresses or trapping values like
4925 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4926 in VECTOR_CST nodes. */
4927 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
4928 if (!CONSTANT_CLASS_P (value
))
4936 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
4940 TREE_CONSTANT (ctor
) = 0;
4943 /* Vector types use CONSTRUCTOR all the way through gimple
4944 compilation as a general initializer. */
4945 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
4947 enum gimplify_status tret
;
4948 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
4950 if (tret
== GS_ERROR
)
4952 else if (TREE_STATIC (ctor
)
4953 && !initializer_constant_valid_p (ce
->value
,
4954 TREE_TYPE (ce
->value
)))
4955 TREE_STATIC (ctor
) = 0;
4957 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
4958 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
4963 /* So how did we get a CONSTRUCTOR for a scalar type? */
4967 if (ret
== GS_ERROR
)
4969 /* If we have gimplified both sides of the initializer but have
4970 not emitted an assignment, do so now. */
4973 tree lhs
= TREE_OPERAND (*expr_p
, 0);
4974 tree rhs
= TREE_OPERAND (*expr_p
, 1);
/* Unshare the LHS when the caller will also consume it as the value of
   the whole expression.  */
4975 if (want_value
&& object
== lhs
)
4976 lhs
= unshare_expr (lhs
);
4977 gassign
*init
= gimple_build_assign (lhs
, rhs
);
4978 gimplify_seq_add_stmt (pre_p
, init
);
/* NOTE(review): the function's tail (lines 4979+, including its return)
   is missing from this excerpt.  */
4992 /* Given a pointer value OP0, return a simplified version of an
4993 indirection through OP0, or NULL_TREE if no simplification is
4994 possible. This may only be applied to a rhs of an expression.
4995 Note that the resulting type may be different from the type pointed
4996 to in the sense that it is still compatible from the langhooks
/* NOTE(review): the end of the comment, the return-type line, and the
   braces (original lines 4997-4999, 5001) are missing from this
   excerpt.  The visible body is a thin wrapper that simply delegates
   to gimple_fold_indirect_ref.  */
5000 gimple_fold_indirect_ref_rhs (tree t
)
5002 return gimple_fold_indirect_ref (t
);
5005 /* Subroutine of gimplify_modify_expr to do simplifications of
5006 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5007 something changes. */
/* NOTE(review): extraction-garbled text; the switch's case labels for
   the first several arms (VAR_DECL, INDIRECT_REF, TARGET_EXPR,
   COMPOUND_EXPR, CONSTRUCTOR, COND_EXPR, CALL_EXPR -- inferred from
   the visible comments, labels themselves missing), the enclosing
   loop, braces, and many statements fall in the missing embedded
   lines.  Code text left byte-identical; only comments added.  */
5009 static enum gimplify_status
5010 gimplify_modify_expr_rhs (tree
*expr_p
, tree
*from_p
, tree
*to_p
,
5011 gimple_seq
*pre_p
, gimple_seq
*post_p
,
5014 enum gimplify_status ret
= GS_UNHANDLED
;
5020 switch (TREE_CODE (*from_p
))
5023 /* If we're assigning from a read-only variable initialized with
5024 a constructor, do the direct assignment from the constructor,
5025 but only if neither source nor target are volatile since this
5026 latter assignment might end up being done on a per-field basis. */
5027 if (DECL_INITIAL (*from_p
)
5028 && TREE_READONLY (*from_p
)
5029 && !TREE_THIS_VOLATILE (*from_p
)
5030 && !TREE_THIS_VOLATILE (*to_p
)
5031 && TREE_CODE (DECL_INITIAL (*from_p
)) == CONSTRUCTOR
)
5033 tree old_from
= *from_p
;
5034 enum gimplify_status subret
;
5036 /* Move the constructor into the RHS. */
5037 *from_p
= unshare_expr (DECL_INITIAL (*from_p
));
5039 /* Let's see if gimplify_init_constructor will need to put
/* Probe-only call: passing notify_temp_creation semantics (the actual
   trailing arguments fall in the missing lines).  */
5041 subret
= gimplify_init_constructor (expr_p
, NULL
, NULL
,
5043 if (subret
== GS_ERROR
)
5045 /* If so, revert the change. */
5057 /* If we have code like
5061 where the type of "x" is a (possibly cv-qualified variant
5062 of "A"), treat the entire expression as identical to "x".
5063 This kind of code arises in C++ when an object is bound
5064 to a const reference, and if "x" is a TARGET_EXPR we want
5065 to take advantage of the optimization below. */
5066 bool volatile_p
= TREE_THIS_VOLATILE (*from_p
);
5067 tree t
= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p
, 0));
5070 if (TREE_THIS_VOLATILE (t
) != volatile_p
)
5073 t
= build_simple_mem_ref_loc (EXPR_LOCATION (*from_p
),
5074 build_fold_addr_expr (t
));
5075 if (REFERENCE_CLASS_P (t
))
5076 TREE_THIS_VOLATILE (t
) = volatile_p
;
5087 /* If we are initializing something from a TARGET_EXPR, strip the
5088 TARGET_EXPR and initialize it directly, if possible. This can't
5089 be done if the initializer is void, since that implies that the
5090 temporary is set in some non-trivial way.
5092 ??? What about code that pulls out the temp and uses it
5093 elsewhere? I think that such code never uses the TARGET_EXPR as
5094 an initializer. If I'm wrong, we'll die because the temp won't
5095 have any RTL. In that case, I guess we'll need to replace
5096 references somehow. */
5097 tree init
= TARGET_EXPR_INITIAL (*from_p
);
5100 && !VOID_TYPE_P (TREE_TYPE (init
)))
5110 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5112 gimplify_compound_expr (from_p
, pre_p
, true);
5118 /* If we already made some changes, let the front end have a
5119 crack at this before we break it down. */
5120 if (ret
!= GS_UNHANDLED
)
5122 /* If we're initializing from a CONSTRUCTOR, break this into
5123 individual MODIFY_EXPRs. */
5124 return gimplify_init_constructor (expr_p
, pre_p
, post_p
, want_value
,
5128 /* If we're assigning to a non-register type, push the assignment
5129 down into the branches. This is mandatory for ADDRESSABLE types,
5130 since we cannot generate temporaries for such, but it saves a
5131 copy in other cases as well. */
5132 if (!is_gimple_reg_type (TREE_TYPE (*from_p
)))
5134 /* This code should mirror the code in gimplify_cond_expr. */
5135 enum tree_code code
= TREE_CODE (*expr_p
);
5136 tree cond
= *from_p
;
5137 tree result
= *to_p
;
5139 ret
= gimplify_expr (&result
, pre_p
, post_p
,
5140 is_gimple_lvalue
, fb_lvalue
);
5141 if (ret
!= GS_ERROR
)
5144 /* If we are going to write RESULT more than once, clear
5145 TREE_READONLY flag, otherwise we might incorrectly promote
5146 the variable to static const and initialize it at compile
5147 time in one of the branches. */
5149 && TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
5150 && TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5151 TREE_READONLY (result
) = 0;
/* Rewrite each non-void arm of the COND_EXPR into an assignment of
   that arm to RESULT, then gimplify the whole (now void) COND_EXPR.  */
5152 if (TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
)
5153 TREE_OPERAND (cond
, 1)
5154 = build2 (code
, void_type_node
, result
,
5155 TREE_OPERAND (cond
, 1));
5156 if (TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5157 TREE_OPERAND (cond
, 2)
5158 = build2 (code
, void_type_node
, unshare_expr (result
),
5159 TREE_OPERAND (cond
, 2));
5161 TREE_TYPE (cond
) = void_type_node
;
5162 recalculate_side_effects (cond
);
5166 gimplify_and_add (cond
, pre_p
);
5167 *expr_p
= unshare_expr (result
);
5176 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5177 return slot so that we don't generate a temporary. */
5178 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p
)
5179 && aggregate_value_p (*from_p
, *from_p
))
/* Decision chain for whether *TO_P can serve as the return slot; each
   arm's "use_target = ..." assignments fall in the missing lines.  */
5183 if (!(rhs_predicate_for (*to_p
))(*from_p
))
5184 /* If we need a temporary, *to_p isn't accurate. */
5186 /* It's OK to use the return slot directly unless it's an NRV. */
5187 else if (TREE_CODE (*to_p
) == RESULT_DECL
5188 && DECL_NAME (*to_p
) == NULL_TREE
5189 && needs_to_live_in_memory (*to_p
))
5191 else if (is_gimple_reg_type (TREE_TYPE (*to_p
))
5192 || (DECL_P (*to_p
) && DECL_REGISTER (*to_p
)))
5193 /* Don't force regs into memory. */
5195 else if (TREE_CODE (*expr_p
) == INIT_EXPR
)
5196 /* It's OK to use the target directly if it's being
5199 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p
)))
5201 /* Always use the target and thus RSO for variable-sized types.
5202 GIMPLE cannot deal with a variable-sized assignment
5203 embedded in a call statement. */
5205 else if (TREE_CODE (*to_p
) != SSA_NAME
5206 && (!is_gimple_variable (*to_p
)
5207 || needs_to_live_in_memory (*to_p
)))
5208 /* Don't use the original target if it's already addressable;
5209 if its address escapes, and the called function uses the
5210 NRV optimization, a conforming program could see *to_p
5211 change before the called function returns; see c++/19317.
5212 When optimizing, the return_slot pass marks more functions
5213 as safe after we have escape info. */
5220 CALL_EXPR_RETURN_SLOT_OPT (*from_p
) = 1;
5221 mark_addressable (*to_p
);
5226 case WITH_SIZE_EXPR
:
5227 /* Likewise for calls that return an aggregate of non-constant size,
5228 since we would not be able to generate a temporary at all. */
5229 if (TREE_CODE (TREE_OPERAND (*from_p
, 0)) == CALL_EXPR
)
5231 *from_p
= TREE_OPERAND (*from_p
, 0);
5232 /* We don't change ret in this case because the
5233 WITH_SIZE_EXPR might have been added in
5234 gimplify_modify_expr, so returning GS_OK would lead to an
5240 /* If we're initializing from a container, push the initialization
5242 case CLEANUP_POINT_EXPR
:
5244 case STATEMENT_LIST
:
5246 tree wrap
= *from_p
;
5249 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_min_lval
,
5251 if (ret
!= GS_ERROR
)
/* voidify_wrapper_expr pushes the assignment inside the wrapper; it
   must hand back the same *EXPR_P.  */
5254 t
= voidify_wrapper_expr (wrap
, *expr_p
);
5255 gcc_assert (t
== *expr_p
);
5259 gimplify_and_add (wrap
, pre_p
);
5260 *expr_p
= unshare_expr (*to_p
);
5267 case COMPOUND_LITERAL_EXPR
:
5269 tree complit
= TREE_OPERAND (*expr_p
, 1);
5270 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (complit
);
5271 tree decl
= DECL_EXPR_DECL (decl_s
);
5272 tree init
= DECL_INITIAL (decl
);
5274 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5275 into struct T x = { 0, 1, 2 } if the address of the
5276 compound literal has never been taken. */
5277 if (!TREE_ADDRESSABLE (complit
)
5278 && !TREE_ADDRESSABLE (decl
)
5281 *expr_p
= copy_node (*expr_p
);
5282 TREE_OPERAND (*expr_p
, 1) = init
;
/* NOTE(review): the rest of this case, the default arm, the end of the
   enclosing loop, and the function's return (lines 5283+) are missing
   from this excerpt.  */
5297 /* Return true if T looks like a valid GIMPLE statement. */
/* NOTE(review): extraction-garbled predicate over TREE_CODE (t).  The
   return-type line, the switch header, most case labels, and all of
   the "return true/false" statements fall in the missing embedded
   lines (5298-5299, 5301, 5303-5306, 5309-5311, 5314-5318, etc.).
   Visible structure: NOP_EXPR must be the empty statement; some codes
   are valid only when void; control/EH/OpenMP/OpenACC statement codes
   are listed as always-valid.  Code text left byte-identical.  */
5300 is_gimple_stmt (tree t
)
5302 const enum tree_code code
= TREE_CODE (t
);
5307 /* The only valid NOP_EXPR is the empty statement. */
5308 return IS_EMPTY_STMT (t
);
5312 /* These are only valid if they're void. */
5313 return TREE_TYPE (t
) == NULL
|| VOID_TYPE_P (TREE_TYPE (t
));
5319 case CASE_LABEL_EXPR
:
5320 case TRY_CATCH_EXPR
:
5321 case TRY_FINALLY_EXPR
:
5322 case EH_FILTER_EXPR
:
5325 case STATEMENT_LIST
:
5329 case OACC_HOST_DATA
:
5332 case OACC_ENTER_DATA
:
5333 case OACC_EXIT_DATA
:
5339 case OMP_DISTRIBUTE
:
5350 case OMP_TARGET_DATA
:
5351 case OMP_TARGET_UPDATE
:
5352 case OMP_TARGET_ENTER_DATA
:
5353 case OMP_TARGET_EXIT_DATA
:
5356 /* These are always void. */
5362 /* These are valid regardless of their type. */
5371 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5372 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5373 DECL_GIMPLE_REG_P set.
5375 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5376 other, unmodified part of the complex object just before the total store.
5377 As a consequence, if the object is still uninitialized, an undefined value
5378 will be loaded into a register, which may result in a spurious exception
5379 if the register is floating-point and the value happens to be a signaling
5380 NaN for example. Then the fully-fledged complex operations lowering pass
5381 followed by a DCE pass are necessary in order to fix things up. */
/* NOTE(review): extraction-garbled; braces, the want_value parameter
   line tail, and the final return (lines 5385-5386, 5389, 5410+) are
   missing from this excerpt.  Code text left byte-identical.  */
5383 static enum gimplify_status
5384 gimplify_modify_expr_complex_part (tree
*expr_p
, gimple_seq
*pre_p
,
5387 enum tree_code code
, ocode
;
5388 tree lhs
, rhs
, new_rhs
, other
, realpart
, imagpart
;
/* Peel the REAL/IMAGPART_EXPR off the LHS; CODE records which part is
   being stored, LHS becomes the underlying complex variable.  */
5390 lhs
= TREE_OPERAND (*expr_p
, 0);
5391 rhs
= TREE_OPERAND (*expr_p
, 1);
5392 code
= TREE_CODE (lhs
);
5393 lhs
= TREE_OPERAND (lhs
, 0);
/* Load the other (unmodified) half into a formal temporary; suppress
   warnings on that artificial load.  */
5395 ocode
= code
== REALPART_EXPR
? IMAGPART_EXPR
: REALPART_EXPR
;
5396 other
= build1 (ocode
, TREE_TYPE (rhs
), lhs
);
5397 TREE_NO_WARNING (other
) = 1;
5398 other
= get_formal_tmp_var (other
, pre_p
);
5400 realpart
= code
== REALPART_EXPR
? rhs
: other
;
5401 imagpart
= code
== REALPART_EXPR
? other
: rhs
;
/* Build the full complex value: a COMPLEX_CST when both halves are
   constant, else a COMPLEX_EXPR, and store it in one assignment.  */
5403 if (TREE_CONSTANT (realpart
) && TREE_CONSTANT (imagpart
))
5404 new_rhs
= build_complex (TREE_TYPE (lhs
), realpart
, imagpart
);
5406 new_rhs
= build2 (COMPLEX_EXPR
, TREE_TYPE (lhs
), realpart
, imagpart
);
5408 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (lhs
, new_rhs
));
5409 *expr_p
= (want_value
) ? rhs
: NULL_TREE
;
5414 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5420 PRE_P points to the list where side effects that must happen before
5421 *EXPR_P should be stored.
5423 POST_P points to the list where side effects that must happen after
5424 *EXPR_P should be stored.
5426 WANT_VALUE is nonzero iff we want to use the value of this expression
5427 in another expression. */
/* NOTE(review): extraction-garbled; braces, several early returns, and
   many statements fall in the missing embedded lines (e.g. 5415-5419,
   5428, 5431-5432, 5436, 5439, 5442, 5446, 5449, 5453-5456, ...).
   Code text left byte-identical; only comments are added.  */
5429 static enum gimplify_status
5430 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5433 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
5434 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
5435 enum gimplify_status ret
= GS_UNHANDLED
;
5437 location_t loc
= EXPR_LOCATION (*expr_p
);
5438 gimple_stmt_iterator gsi
;
5440 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
5441 || TREE_CODE (*expr_p
) == INIT_EXPR
);
5443 /* Trying to simplify a clobber using normal logic doesn't work,
5444 so handle it here. */
5445 if (TREE_CLOBBER_P (*from_p
))
5447 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5448 if (ret
== GS_ERROR
)
5450 gcc_assert (!want_value
5451 && (VAR_P (*to_p
) || TREE_CODE (*to_p
) == MEM_REF
));
5452 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
5457 /* Insert pointer conversions required by the middle-end that are not
5458 required by the frontend. This fixes middle-end type checking for
5459 for example gcc.dg/redecl-6.c. */
5460 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
5462 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
5463 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
5464 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
5467 /* See if any simplifications can be done based on what the RHS is. */
5468 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5470 if (ret
!= GS_UNHANDLED
)
5473 /* For zero sized types only gimplify the left hand side and right hand
5474 side as statements and throw away the assignment. Do this after
5475 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5477 if (zero_sized_type (TREE_TYPE (*from_p
)) && !want_value
)
5479 gimplify_stmt (from_p
, pre_p
);
5480 gimplify_stmt (to_p
, pre_p
);
5481 *expr_p
= NULL_TREE
;
5485 /* If the value being copied is of variable width, compute the length
5486 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5487 before gimplifying any of the operands so that we can resolve any
5488 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5489 the size of the expression to be copied, not of the destination, so
5490 that is what we must do here. */
5491 maybe_with_size_expr (from_p
);
5493 /* As a special case, we have to temporarily allow for assignments
5494 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5495 a toplevel statement, when gimplifying the GENERIC expression
5496 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5497 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5499 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5500 prevent gimplify_expr from trying to create a new temporary for
5501 foo's LHS, we tell it that it should only gimplify until it
5502 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5503 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5504 and all we need to do here is set 'a' to be its LHS. */
5506 /* Gimplify the RHS first for C++17 and bug 71104. */
5507 gimple_predicate initial_pred
= initial_rhs_predicate_for (*to_p
);
5508 ret
= gimplify_expr (from_p
, pre_p
, post_p
, initial_pred
, fb_rvalue
);
5509 if (ret
== GS_ERROR
)
5512 /* Then gimplify the LHS. */
5513 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5514 twice we have to make sure to gimplify into non-SSA as otherwise
5515 the abnormal edge added later will make those defs not dominate
5517 ??? Technically this applies only to the registers used in the
5518 resulting non-register *TO_P. */
5519 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
5521 && TREE_CODE (*from_p
) == CALL_EXPR
5522 && call_expr_flags (*from_p
) & ECF_RETURNS_TWICE
)
5523 gimplify_ctxp
->into_ssa
= false;
5524 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5525 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
5526 if (ret
== GS_ERROR
)
5529 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5530 guess for the predicate was wrong. */
5531 gimple_predicate final_pred
= rhs_predicate_for (*to_p
);
5532 if (final_pred
!= initial_pred
)
5534 ret
= gimplify_expr (from_p
, pre_p
, post_p
, final_pred
, fb_rvalue
);
5535 if (ret
== GS_ERROR
)
5539 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5540 size as argument to the call. */
5541 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5543 tree call
= TREE_OPERAND (*from_p
, 0);
5544 tree vlasize
= TREE_OPERAND (*from_p
, 1);
5546 if (TREE_CODE (call
) == CALL_EXPR
5547 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
5549 int nargs
= call_expr_nargs (call
);
5550 tree type
= TREE_TYPE (call
);
5551 tree ap
= CALL_EXPR_ARG (call
, 0);
5552 tree tag
= CALL_EXPR_ARG (call
, 1);
5553 tree aptag
= CALL_EXPR_ARG (call
, 2);
/* Rebuild the IFN_VA_ARG call with VLASIZE appended; the remaining
   build arguments fall in the missing lines 5555-5557.  */
5554 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
5558 TREE_OPERAND (*from_p
, 0) = newcall
;
5562 /* Now see if the above changed *from_p to something we handle specially. */
5563 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5565 if (ret
!= GS_UNHANDLED
)
5568 /* If we've got a variable sized assignment between two lvalues (i.e. does
5569 not involve a call), then we can make things a bit more straightforward
5570 by converting the assignment to memcpy or memset. */
5571 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5573 tree from
= TREE_OPERAND (*from_p
, 0);
5574 tree size
= TREE_OPERAND (*from_p
, 1);
5576 if (TREE_CODE (from
) == CONSTRUCTOR
)
5577 return gimplify_modify_expr_to_memset (expr_p
, size
, want_value
, pre_p
);
5579 if (is_gimple_addressable (from
))
5582 return gimplify_modify_expr_to_memcpy (expr_p
, size
, want_value
,
5587 /* Transform partial stores to non-addressable complex variables into
5588 total stores. This allows us to use real instead of virtual operands
5589 for these variables, which improves optimization. */
5590 if ((TREE_CODE (*to_p
) == REALPART_EXPR
5591 || TREE_CODE (*to_p
) == IMAGPART_EXPR
)
5592 && is_gimple_reg (TREE_OPERAND (*to_p
, 0)))
5593 return gimplify_modify_expr_complex_part (expr_p
, pre_p
, want_value
);
5595 /* Try to alleviate the effects of the gimplification creating artificial
5596 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5597 make sure not to create DECL_DEBUG_EXPR links across functions. */
5598 if (!gimplify_ctxp
->into_ssa
5600 && DECL_IGNORED_P (*from_p
)
5602 && !DECL_IGNORED_P (*to_p
)
5603 && decl_function_context (*to_p
) == current_function_decl
5604 && decl_function_context (*from_p
) == current_function_decl
)
/* Give the artificial temporary the destination's name for debug
   purposes and link it via a debug expr.  */
5606 if (!DECL_NAME (*from_p
) && DECL_NAME (*to_p
))
5608 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p
)));
5609 DECL_HAS_DEBUG_EXPR_P (*from_p
) = 1;
5610 SET_DECL_DEBUG_EXPR (*from_p
, *to_p
);
5613 if (want_value
&& TREE_THIS_VOLATILE (*to_p
))
5614 *from_p
= get_initialized_tmp_var (*from_p
, pre_p
, post_p
);
5616 if (TREE_CODE (*from_p
) == CALL_EXPR
)
5618 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5619 instead of a GIMPLE_ASSIGN. */
5621 if (CALL_EXPR_FN (*from_p
) == NULL_TREE
)
5623 /* Gimplify internal functions created in the FEs. */
5624 int nargs
= call_expr_nargs (*from_p
), i
;
5625 enum internal_fn ifn
= CALL_EXPR_IFN (*from_p
);
5626 auto_vec
<tree
> vargs (nargs
);
5628 for (i
= 0; i
< nargs
; i
++)
5630 gimplify_arg (&CALL_EXPR_ARG (*from_p
, i
), pre_p
,
5631 EXPR_LOCATION (*from_p
));
5632 vargs
.quick_push (CALL_EXPR_ARG (*from_p
, i
));
5634 call_stmt
= gimple_build_call_internal_vec (ifn
, vargs
);
5635 gimple_set_location (call_stmt
, EXPR_LOCATION (*expr_p
));
/* Regular (non-internal) call: recover the function pointer type,
   strip the address-taking wrapper, and build the GIMPLE_CALL.  */
5639 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*from_p
));
5640 CALL_EXPR_FN (*from_p
) = TREE_OPERAND (CALL_EXPR_FN (*from_p
), 0);
5641 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p
));
5642 tree fndecl
= get_callee_fndecl (*from_p
);
5644 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
5645 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
5646 && call_expr_nargs (*from_p
) == 3)
5647 call_stmt
= gimple_build_call_internal (IFN_BUILTIN_EXPECT
, 3,
5648 CALL_EXPR_ARG (*from_p
, 0),
5649 CALL_EXPR_ARG (*from_p
, 1),
5650 CALL_EXPR_ARG (*from_p
, 2));
5653 call_stmt
= gimple_build_call_from_tree (*from_p
);
5654 gimple_call_set_fntype (call_stmt
, TREE_TYPE (fnptrtype
));
5657 notice_special_calls (call_stmt
);
5658 if (!gimple_call_noreturn_p (call_stmt
) || !should_remove_lhs_p (*to_p
))
5659 gimple_call_set_lhs (call_stmt
, *to_p
);
5660 else if (TREE_CODE (*to_p
) == SSA_NAME
)
5661 /* The above is somewhat premature, avoid ICEing later for a
5662 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5663 ??? This doesn't make it a default-def. */
5664 SSA_NAME_DEF_STMT (*to_p
) = gimple_build_nop ();
5666 if (EXPR_CILK_SPAWN (*from_p
))
5667 gimplify_cilk_detach (pre_p
);
/* Non-call RHS: emit a plain GIMPLE_ASSIGN.  */
5672 assign
= gimple_build_assign (*to_p
, *from_p
);
5673 gimple_set_location (assign
, EXPR_LOCATION (*expr_p
));
5674 if (COMPARISON_CLASS_P (*from_p
))
5675 gimple_set_no_warning (assign
, TREE_NO_WARNING (*from_p
));
5678 if (gimplify_ctxp
->into_ssa
&& is_gimple_reg (*to_p
))
5680 /* We should have got an SSA name from the start. */
5681 gcc_assert (TREE_CODE (*to_p
) == SSA_NAME
5682 || ! gimple_in_ssa_p (cfun
));
5685 gimplify_seq_add_stmt (pre_p
, assign
);
5686 gsi
= gsi_last (*pre_p
);
5687 maybe_fold_stmt (&gsi
);
/* Result of the whole expression when WANT_VALUE: reload through a
   volatile LHS must re-read *FROM_P, otherwise reuse the (unshared)
   LHS.  The function's final return (5692+) is among the missing
   lines.  */
5691 *expr_p
= TREE_THIS_VOLATILE (*to_p
) ? *from_p
: unshare_expr (*to_p
);
5700 /* Gimplify a comparison between two variable-sized objects. Do this
5701 with a call to BUILT_IN_MEMCMP. */
5703 static enum gimplify_status
5704 gimplify_variable_sized_compare (tree
*expr_p
)
5706 location_t loc
= EXPR_LOCATION (*expr_p
);
5707 tree op0
= TREE_OPERAND (*expr_p
, 0);
5708 tree op1
= TREE_OPERAND (*expr_p
, 1);
5709 tree t
, arg
, dest
, src
, expr
;
5711 arg
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
5712 arg
= unshare_expr (arg
);
5713 arg
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg
, op0
);
5714 src
= build_fold_addr_expr_loc (loc
, op1
);
5715 dest
= build_fold_addr_expr_loc (loc
, op0
);
5716 t
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
5717 t
= build_call_expr_loc (loc
, t
, 3, dest
, src
, arg
);
5720 = build2 (TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), t
, integer_zero_node
);
5721 SET_EXPR_LOCATION (expr
, loc
);
5727 /* Gimplify a comparison between two aggregate objects of integral scalar
5728 mode as a comparison between the bitwise equivalent scalar values. */
5730 static enum gimplify_status
5731 gimplify_scalar_mode_aggregate_compare (tree
*expr_p
)
5733 location_t loc
= EXPR_LOCATION (*expr_p
);
5734 tree op0
= TREE_OPERAND (*expr_p
, 0);
5735 tree op1
= TREE_OPERAND (*expr_p
, 1);
5737 tree type
= TREE_TYPE (op0
);
5738 tree scalar_type
= lang_hooks
.types
.type_for_mode (TYPE_MODE (type
), 1);
5740 op0
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op0
);
5741 op1
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op1
);
5744 = fold_build2_loc (loc
, TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), op0
, op1
);
5749 /* Gimplify an expression sequence. This function gimplifies each
5750 expression and rewrites the original expression with the last
5751 expression of the sequence in GIMPLE form.
5753 PRE_P points to the list where the side effects for all the
5754 expressions in the sequence will be emitted.
5756 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5758 static enum gimplify_status
5759 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
5765 tree
*sub_p
= &TREE_OPERAND (t
, 0);
5767 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
5768 gimplify_compound_expr (sub_p
, pre_p
, false);
5770 gimplify_stmt (sub_p
, pre_p
);
5772 t
= TREE_OPERAND (t
, 1);
5774 while (TREE_CODE (t
) == COMPOUND_EXPR
);
5781 gimplify_stmt (expr_p
, pre_p
);
5786 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5787 gimplify. After gimplification, EXPR_P will point to a new temporary
5788 that holds the original value of the SAVE_EXPR node.
5790 PRE_P points to the list where side effects that must happen before
5791 *EXPR_P should be stored. */
5793 static enum gimplify_status
5794 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
5796 enum gimplify_status ret
= GS_ALL_DONE
;
5799 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
5800 val
= TREE_OPERAND (*expr_p
, 0);
5802 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5803 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
5805 gcc_assert (TREE_TYPE (val
) != void_type_node
);
5806 /* The temporary may not be an SSA name as later abnormal and EH
5807 control flow may invalidate use/def domination. */
5808 val
= get_initialized_tmp_var (val
, pre_p
, post_p
, false);
5810 TREE_OPERAND (*expr_p
, 0) = val
;
5811 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
5819 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5826 PRE_P points to the list where side effects that must happen before
5827 *EXPR_P should be stored.
5829 POST_P points to the list where side effects that must happen after
5830 *EXPR_P should be stored. */
5832 static enum gimplify_status
5833 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
5835 tree expr
= *expr_p
;
5836 tree op0
= TREE_OPERAND (expr
, 0);
5837 enum gimplify_status ret
;
5838 location_t loc
= EXPR_LOCATION (*expr_p
);
5840 switch (TREE_CODE (op0
))
5844 /* Check if we are dealing with an expression of the form '&*ptr'.
5845 While the front end folds away '&*ptr' into 'ptr', these
5846 expressions may be generated internally by the compiler (e.g.,
5847 builtins like __builtin_va_end). */
5848 /* Caution: the silent array decomposition semantics we allow for
5849 ADDR_EXPR means we can't always discard the pair. */
5850 /* Gimplification of the ADDR_EXPR operand may drop
5851 cv-qualification conversions, so make sure we add them if
5854 tree op00
= TREE_OPERAND (op0
, 0);
5855 tree t_expr
= TREE_TYPE (expr
);
5856 tree t_op00
= TREE_TYPE (op00
);
5858 if (!useless_type_conversion_p (t_expr
, t_op00
))
5859 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
5865 case VIEW_CONVERT_EXPR
:
5866 /* Take the address of our operand and then convert it to the type of
5869 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5870 all clear. The impact of this transformation is even less clear. */
5872 /* If the operand is a useless conversion, look through it. Doing so
5873 guarantees that the ADDR_EXPR and its operand will remain of the
5875 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
5876 op0
= TREE_OPERAND (op0
, 0);
5878 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
5879 build_fold_addr_expr_loc (loc
,
5880 TREE_OPERAND (op0
, 0)));
5885 if (integer_zerop (TREE_OPERAND (op0
, 1)))
5886 goto do_indirect_ref
;
5891 /* If we see a call to a declared builtin or see its address
5892 being taken (we can unify those cases here) then we can mark
5893 the builtin for implicit generation by GCC. */
5894 if (TREE_CODE (op0
) == FUNCTION_DECL
5895 && DECL_BUILT_IN_CLASS (op0
) == BUILT_IN_NORMAL
5896 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
5897 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
5899 /* We use fb_either here because the C frontend sometimes takes
5900 the address of a call that returns a struct; see
5901 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5902 the implied temporary explicit. */
5904 /* Make the operand addressable. */
5905 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
5906 is_gimple_addressable
, fb_either
);
5907 if (ret
== GS_ERROR
)
5910 /* Then mark it. Beware that it may not be possible to do so directly
5911 if a temporary has been created by the gimplification. */
5912 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
5914 op0
= TREE_OPERAND (expr
, 0);
5916 /* For various reasons, the gimplification of the expression
5917 may have made a new INDIRECT_REF. */
5918 if (TREE_CODE (op0
) == INDIRECT_REF
)
5919 goto do_indirect_ref
;
5921 mark_addressable (TREE_OPERAND (expr
, 0));
5923 /* The FEs may end up building ADDR_EXPRs early on a decl with
5924 an incomplete type. Re-build ADDR_EXPRs in canonical form
5926 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
5927 *expr_p
= build_fold_addr_expr (op0
);
5929 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5930 recompute_tree_invariant_for_addr_expr (*expr_p
);
5932 /* If we re-built the ADDR_EXPR add a conversion to the original type
5934 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
5935 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
5943 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5944 value; output operands should be a gimple lvalue. */
5946 static enum gimplify_status
5947 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
5951 const char **oconstraints
;
5954 const char *constraint
;
5955 bool allows_mem
, allows_reg
, is_inout
;
5956 enum gimplify_status ret
, tret
;
5958 vec
<tree
, va_gc
> *inputs
;
5959 vec
<tree
, va_gc
> *outputs
;
5960 vec
<tree
, va_gc
> *clobbers
;
5961 vec
<tree
, va_gc
> *labels
;
5965 noutputs
= list_length (ASM_OUTPUTS (expr
));
5966 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
5974 link_next
= NULL_TREE
;
5975 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
5978 size_t constraint_len
;
5980 link_next
= TREE_CHAIN (link
);
5984 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
5985 constraint_len
= strlen (constraint
);
5986 if (constraint_len
== 0)
5989 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
5990 &allows_mem
, &allows_reg
, &is_inout
);
5997 if (!allows_reg
&& allows_mem
)
5998 mark_addressable (TREE_VALUE (link
));
6000 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6001 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6002 fb_lvalue
| fb_mayfail
);
6003 if (tret
== GS_ERROR
)
6005 error ("invalid lvalue in asm output %d", i
);
6009 /* If the constraint does not allow memory make sure we gimplify
6010 it to a register if it is not already but its base is. This
6011 happens for complex and vector components. */
6014 tree op
= TREE_VALUE (link
);
6015 if (! is_gimple_val (op
)
6016 && is_gimple_reg_type (TREE_TYPE (op
))
6017 && is_gimple_reg (get_base_address (op
)))
6019 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6023 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6024 tem
, unshare_expr (op
));
6025 gimplify_and_add (ass
, pre_p
);
6027 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6028 gimplify_and_add (ass
, post_p
);
6030 TREE_VALUE (link
) = tem
;
6035 vec_safe_push (outputs
, link
);
6036 TREE_CHAIN (link
) = NULL_TREE
;
6040 /* An input/output operand. To give the optimizers more
6041 flexibility, split it into separate input and output
6044 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6047 /* Turn the in/out constraint into an output constraint. */
6048 char *p
= xstrdup (constraint
);
6050 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
6052 /* And add a matching input constraint. */
6055 sprintf (buf
, "%u", i
);
6057 /* If there are multiple alternatives in the constraint,
6058 handle each of them individually. Those that allow register
6059 will be replaced with operand number, the others will stay
6061 if (strchr (p
, ',') != NULL
)
6063 size_t len
= 0, buflen
= strlen (buf
);
6064 char *beg
, *end
, *str
, *dst
;
6068 end
= strchr (beg
, ',');
6070 end
= strchr (beg
, '\0');
6071 if ((size_t) (end
- beg
) < buflen
)
6074 len
+= end
- beg
+ 1;
6081 str
= (char *) alloca (len
);
6082 for (beg
= p
+ 1, dst
= str
;;)
6085 bool mem_p
, reg_p
, inout_p
;
6087 end
= strchr (beg
, ',');
6092 parse_output_constraint (&tem
, i
, 0, 0,
6093 &mem_p
, ®_p
, &inout_p
);
6098 memcpy (dst
, buf
, buflen
);
6107 memcpy (dst
, beg
, len
);
6116 input
= build_string (dst
- str
, str
);
6119 input
= build_string (strlen (buf
), buf
);
6122 input
= build_string (constraint_len
- 1, constraint
+ 1);
6126 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
6127 unshare_expr (TREE_VALUE (link
)));
6128 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
6132 link_next
= NULL_TREE
;
6133 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
6135 link_next
= TREE_CHAIN (link
);
6136 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6137 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
6138 oconstraints
, &allows_mem
, &allows_reg
);
6140 /* If we can't make copies, we can only accept memory. */
6141 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link
))))
6147 error ("impossible constraint in %<asm%>");
6148 error ("non-memory input %d must stay in memory", i
);
6153 /* If the operand is a memory input, it should be an lvalue. */
6154 if (!allows_reg
&& allows_mem
)
6156 tree inputv
= TREE_VALUE (link
);
6157 STRIP_NOPS (inputv
);
6158 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
6159 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
6160 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
6161 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
6162 || TREE_CODE (inputv
) == MODIFY_EXPR
)
6163 TREE_VALUE (link
) = error_mark_node
;
6164 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6165 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
6166 if (tret
!= GS_ERROR
)
6168 /* Unlike output operands, memory inputs are not guaranteed
6169 to be lvalues by the FE, and while the expressions are
6170 marked addressable there, if it is e.g. a statement
6171 expression, temporaries in it might not end up being
6172 addressable. They might be already used in the IL and thus
6173 it is too late to make them addressable now though. */
6174 tree x
= TREE_VALUE (link
);
6175 while (handled_component_p (x
))
6176 x
= TREE_OPERAND (x
, 0);
6177 if (TREE_CODE (x
) == MEM_REF
6178 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
6179 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
6181 || TREE_CODE (x
) == PARM_DECL
6182 || TREE_CODE (x
) == RESULT_DECL
)
6183 && !TREE_ADDRESSABLE (x
)
6184 && is_gimple_reg (x
))
6186 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
6188 "memory input %d is not directly addressable",
6190 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
6193 mark_addressable (TREE_VALUE (link
));
6194 if (tret
== GS_ERROR
)
6196 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
6197 "memory input %d is not directly addressable", i
);
6203 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6204 is_gimple_asm_val
, fb_rvalue
);
6205 if (tret
== GS_ERROR
)
6209 TREE_CHAIN (link
) = NULL_TREE
;
6210 vec_safe_push (inputs
, link
);
6213 link_next
= NULL_TREE
;
6214 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
6216 link_next
= TREE_CHAIN (link
);
6217 TREE_CHAIN (link
) = NULL_TREE
;
6218 vec_safe_push (clobbers
, link
);
6221 link_next
= NULL_TREE
;
6222 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
6224 link_next
= TREE_CHAIN (link
);
6225 TREE_CHAIN (link
) = NULL_TREE
;
6226 vec_safe_push (labels
, link
);
6229 /* Do not add ASMs with errors to the gimple IL stream. */
6230 if (ret
!= GS_ERROR
)
6232 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
6233 inputs
, outputs
, clobbers
, labels
);
6235 gimple_asm_set_volatile (stmt
, ASM_VOLATILE_P (expr
) || noutputs
== 0);
6236 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
6238 gimplify_seq_add_stmt (pre_p
, stmt
);
6244 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6245 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6246 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6247 return to this function.
6249 FIXME should we complexify the prequeue handling instead? Or use flags
6250 for all the cleanups and let the optimizer tighten them up? The current
6251 code seems pretty fragile; it will break on a cleanup within any
6252 non-conditional nesting. But any such nesting would be broken, anyway;
6253 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6254 and continues out of it. We can do that at the RTL level, though, so
6255 having an optimizer to tighten up try/finally regions would be a Good
6258 static enum gimplify_status
6259 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
6261 gimple_stmt_iterator iter
;
6262 gimple_seq body_sequence
= NULL
;
6264 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
6266 /* We only care about the number of conditions between the innermost
6267 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6268 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6269 int old_conds
= gimplify_ctxp
->conditions
;
6270 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
6271 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
6272 gimplify_ctxp
->conditions
= 0;
6273 gimplify_ctxp
->conditional_cleanups
= NULL
;
6274 gimplify_ctxp
->in_cleanup_point_expr
= true;
6276 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
6278 gimplify_ctxp
->conditions
= old_conds
;
6279 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
6280 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
6282 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
6284 gimple
*wce
= gsi_stmt (iter
);
6286 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
6288 if (gsi_one_before_end_p (iter
))
6290 /* Note that gsi_insert_seq_before and gsi_remove do not
6291 scan operands, unlike some other sequence mutators. */
6292 if (!gimple_wce_cleanup_eh_only (wce
))
6293 gsi_insert_seq_before_without_update (&iter
,
6294 gimple_wce_cleanup (wce
),
6296 gsi_remove (&iter
, true);
6303 enum gimple_try_flags kind
;
6305 if (gimple_wce_cleanup_eh_only (wce
))
6306 kind
= GIMPLE_TRY_CATCH
;
6308 kind
= GIMPLE_TRY_FINALLY
;
6309 seq
= gsi_split_seq_after (iter
);
6311 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
6312 /* Do not use gsi_replace here, as it may scan operands.
6313 We want to do a simple structural modification only. */
6314 gsi_set_stmt (&iter
, gtry
);
6315 iter
= gsi_start (gtry
->eval
);
6322 gimplify_seq_add_seq (pre_p
, body_sequence
);
6335 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6336 is the cleanup action required. EH_ONLY is true if the cleanup should
6337 only be executed if an exception is thrown, not on normal exit.
6338 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6339 only valid for clobbers. */
6342 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
,
6343 bool force_uncond
= false)
6346 gimple_seq cleanup_stmts
= NULL
;
6348 /* Errors can result in improperly nested cleanups. Which results in
6349 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6353 if (gimple_conditional_context ())
6355 /* If we're in a conditional context, this is more complex. We only
6356 want to run the cleanup if we actually ran the initialization that
6357 necessitates it, but we want to run it after the end of the
6358 conditional context. So we wrap the try/finally around the
6359 condition and use a flag to determine whether or not to actually
6360 run the destructor. Thus
6364 becomes (approximately)
6368 if (test) { A::A(temp); flag = 1; val = f(temp); }
6371 if (flag) A::~A(temp);
6377 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6378 wce
= gimple_build_wce (cleanup_stmts
);
6379 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6383 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
6384 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
6385 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
6387 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
6388 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6389 wce
= gimple_build_wce (cleanup_stmts
);
6391 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
6392 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6393 gimplify_seq_add_stmt (pre_p
, ftrue
);
6395 /* Because of this manipulation, and the EH edges that jump
6396 threading cannot redirect, the temporary (VAR) will appear
6397 to be used uninitialized. Don't warn. */
6398 TREE_NO_WARNING (var
) = 1;
6403 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6404 wce
= gimple_build_wce (cleanup_stmts
);
6405 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
6406 gimplify_seq_add_stmt (pre_p
, wce
);
6410 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6412 static enum gimplify_status
6413 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6415 tree targ
= *expr_p
;
6416 tree temp
= TARGET_EXPR_SLOT (targ
);
6417 tree init
= TARGET_EXPR_INITIAL (targ
);
6418 enum gimplify_status ret
;
6420 bool unpoison_empty_seq
= false;
6421 gimple_stmt_iterator unpoison_it
;
6425 tree cleanup
= NULL_TREE
;
6427 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6428 to the temps list. Handle also variable length TARGET_EXPRs. */
6429 if (TREE_CODE (DECL_SIZE (temp
)) != INTEGER_CST
)
6431 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
6432 gimplify_type_sizes (TREE_TYPE (temp
), pre_p
);
6433 gimplify_vla_decl (temp
, pre_p
);
6437 /* Save location where we need to place unpoisoning. It's possible
6438 that a variable will be converted to needs_to_live_in_memory. */
6439 unpoison_it
= gsi_last (*pre_p
);
6440 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
6442 gimple_add_tmp_var (temp
);
6445 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6446 expression is supposed to initialize the slot. */
6447 if (VOID_TYPE_P (TREE_TYPE (init
)))
6448 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6451 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
6453 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6455 ggc_free (init_expr
);
6457 if (ret
== GS_ERROR
)
6459 /* PR c++/28266 Make sure this is expanded only once. */
6460 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6464 gimplify_and_add (init
, pre_p
);
6466 /* If needed, push the cleanup for the temp. */
6467 if (TARGET_EXPR_CLEANUP (targ
))
6469 if (CLEANUP_EH_ONLY (targ
))
6470 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
6471 CLEANUP_EH_ONLY (targ
), pre_p
);
6473 cleanup
= TARGET_EXPR_CLEANUP (targ
);
6476 /* Add a clobber for the temporary going out of scope, like
6477 gimplify_bind_expr. */
6478 if (gimplify_ctxp
->in_cleanup_point_expr
6479 && needs_to_live_in_memory (temp
))
6481 if (flag_stack_reuse
== SR_ALL
)
6483 tree clobber
= build_constructor (TREE_TYPE (temp
),
6485 TREE_THIS_VOLATILE (clobber
) = true;
6486 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
6487 gimple_push_cleanup (temp
, clobber
, false, pre_p
, true);
6489 if (asan_poisoned_variables
&& dbg_cnt (asan_use_after_scope
))
6491 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
6494 if (unpoison_empty_seq
)
6495 unpoison_it
= gsi_start (*pre_p
);
6497 asan_poison_variable (temp
, false, &unpoison_it
,
6498 unpoison_empty_seq
);
6499 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
6504 gimple_push_cleanup (temp
, cleanup
, false, pre_p
);
6506 /* Only expand this once. */
6507 TREE_OPERAND (targ
, 3) = init
;
6508 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6511 /* We should have expanded this before. */
6512 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
6518 /* Gimplification of expression trees. */
6520 /* Gimplify an expression which appears at statement context. The
6521 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6522 NULL, a new sequence is allocated.
6524 Return true if we actually added a statement to the queue. */
6527 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
6529 gimple_seq_node last
;
6531 last
= gimple_seq_last (*seq_p
);
6532 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
6533 return last
!= gimple_seq_last (*seq_p
);
6536 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6537 to CTX. If entries already exist, force them to be some flavor of private.
6538 If there is no enclosing parallel, do nothing. */
6541 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
6545 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
6550 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6553 if (n
->value
& GOVD_SHARED
)
6554 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
6555 else if (n
->value
& GOVD_MAP
)
6556 n
->value
|= GOVD_MAP_TO_ONLY
;
6560 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
6562 if (ctx
->target_map_scalars_firstprivate
)
6563 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6565 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
6567 else if (ctx
->region_type
!= ORT_WORKSHARE
6568 && ctx
->region_type
!= ORT_SIMD
6569 && ctx
->region_type
!= ORT_ACC
6570 && !(ctx
->region_type
& ORT_TARGET_DATA
))
6571 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6573 ctx
= ctx
->outer_context
;
6578 /* Similarly for each of the type sizes of TYPE. */
6581 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
6583 if (type
== NULL
|| type
== error_mark_node
)
6585 type
= TYPE_MAIN_VARIANT (type
);
6587 if (ctx
->privatized_types
->add (type
))
6590 switch (TREE_CODE (type
))
6596 case FIXED_POINT_TYPE
:
6597 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
6598 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
6602 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6603 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
6608 case QUAL_UNION_TYPE
:
6611 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
6612 if (TREE_CODE (field
) == FIELD_DECL
)
6614 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
6615 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
6621 case REFERENCE_TYPE
:
6622 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6629 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
6630 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
6631 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
6634 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6637 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
6640 unsigned int nflags
;
6643 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
6646 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6647 there are constructors involved somewhere. Exception is a shared clause,
6648 there is nothing privatized in that case. */
6649 if ((flags
& GOVD_SHARED
) == 0
6650 && (TREE_ADDRESSABLE (TREE_TYPE (decl
))
6651 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
))))
6654 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6655 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
6657 /* We shouldn't be re-adding the decl with the same data
6659 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
6660 nflags
= n
->value
| flags
;
6661 /* The only combination of data sharing classes we should see is
6662 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6663 reduction variables to be used in data sharing clauses. */
6664 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
6665 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
6666 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
6667 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
6672 /* When adding a variable-sized variable, we have to handle all sorts
6673 of additional bits of data: the pointer replacement variable, and
6674 the parameters of the type. */
6675 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
6677 /* Add the pointer replacement variable as PRIVATE if the variable
6678 replacement is private, else FIRSTPRIVATE since we'll need the
6679 address of the original variable either for SHARED, or for the
6680 copy into or out of the context. */
6681 if (!(flags
& GOVD_LOCAL
))
6683 if (flags
& GOVD_MAP
)
6684 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
6685 else if (flags
& GOVD_PRIVATE
)
6686 nflags
= GOVD_PRIVATE
;
6687 else if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
6688 && (flags
& GOVD_FIRSTPRIVATE
))
6689 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
6691 nflags
= GOVD_FIRSTPRIVATE
;
6692 nflags
|= flags
& GOVD_SEEN
;
6693 t
= DECL_VALUE_EXPR (decl
);
6694 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
6695 t
= TREE_OPERAND (t
, 0);
6696 gcc_assert (DECL_P (t
));
6697 omp_add_variable (ctx
, t
, nflags
);
6700 /* Add all of the variable and type parameters (which should have
6701 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6702 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
6703 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
6704 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
6706 /* The variable-sized variable itself is never SHARED, only some form
6707 of PRIVATE. The sharing would take place via the pointer variable
6708 which we remapped above. */
6709 if (flags
& GOVD_SHARED
)
6710 flags
= GOVD_SHARED
| GOVD_DEBUG_PRIVATE
6711 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
6713 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6714 alloca statement we generate for the variable, so make sure it
6715 is available. This isn't automatically needed for the SHARED
6716 case, since we won't be allocating local storage then.
6717 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6718 in this case omp_notice_variable will be called later
6719 on when it is gimplified. */
6720 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
6721 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
6722 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
6724 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
6725 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
6727 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
6729 /* Similar to the direct variable sized case above, we'll need the
6730 size of references being privatized. */
6731 if ((flags
& GOVD_SHARED
) == 0)
6733 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
6735 omp_notice_variable (ctx
, t
, true);
6742 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
6744 /* For reductions clauses in OpenACC loop directives, by default create a
6745 copy clause on the enclosing parallel construct for carrying back the
6747 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
6749 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
6752 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
6755 /* Ignore local variables and explicitly declared clauses. */
6756 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
6758 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
6760 /* According to the OpenACC spec, such a reduction variable
6761 should already have a copy map on a kernels construct,
6762 verify that here. */
6763 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
6764 && (n
->value
& GOVD_MAP
));
6766 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
6768 /* Remove firstprivate and make it a copy map. */
6769 n
->value
&= ~GOVD_FIRSTPRIVATE
;
6770 n
->value
|= GOVD_MAP
;
6773 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
6775 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
6776 GOVD_MAP
| GOVD_SEEN
);
6779 outer_ctx
= outer_ctx
->outer_context
;
/* Notice a threadprivate variable DECL used in OMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.

   Always returns false: a threadprivate variable is never remapped,
   so the caller should still gimplify it into its DECL_VALUE_EXPR.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  /* Threadprivate variables may not be used inside any enclosing target
     region; diagnose each such region exactly once by recording the decl
     in that region's variable table after the first error.  */
  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    error ("threadprivate variable %qE used in target region",
		   DECL_NAME (decl));
	    error_at (octx->location, "enclosing target region");
	    /* Insert with value 0 so the error is not repeated.  */
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	if (decl2)
	  /* Suppress any future diagnostics on DECL2 in this region.  */
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  /* The untied-task diagnostic only applies to untied tasks.  */
  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      error_at (ctx->location, "enclosing task");
      /* Record the decl so the error is emitted only once per task.  */
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
6826 /* Return true if global var DECL is device resident. */
6829 device_resident_p (tree decl
)
6831 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
6836 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
6838 tree c
= TREE_VALUE (t
);
6839 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
6846 /* Return true if DECL has an ACC DECLARE attribute. */
6849 is_oacc_declared (tree decl
)
6851 tree t
= TREE_CODE (decl
) == MEM_REF
? TREE_OPERAND (decl
, 0) : decl
;
6852 tree declared
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t
));
6853 return declared
!= NULL_TREE
;
/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not quite like it.

   CTX is the innermost OMP context, IN_CODE is true when DECL appears
   in real code (not just in a clause), and FLAGS carries the GOVD_*
   bits accumulated so far.  Returns FLAGS augmented with the
   data-sharing class chosen by the effective default clause.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  /* A language-predetermined sharing (e.g. C++ const) overrides the
     region's default(...) clause.  */
  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;
	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();
	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	error_at (ctx->location, "enclosing %qs", rtype);
      }
      /* FALLTHRU: after diagnosing, recover by treating it as shared.  */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  omp_notice_variable (octx, decl, in_code);
	  /* Walk outward: the first context that gives DECL a non-shared
	     data-sharing class makes it firstprivate in the task; a
	     parallel/teams region with no such class makes it shared.  */
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      /* Target regions without a data-sharing class for DECL are
		 transparent for this decision.  */
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      /* No outer context decided: locals and parameters of the current
	 function default to firstprivate, everything else to shared.  */
      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
/* Determine outer default flags for DECL mentioned in an OACC region
   but not declared in an enclosing clause.

   CTX must be an OpenACC kernels or parallel context.  Returns FLAGS
   augmented with the implicit mapping (GOVD_MAP and friends) or
   firstprivate bits the OpenACC spec assigns by default.  */

static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  const char *rkind;
  bool on_device = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  /* For by-reference privatization (e.g. Fortran), classify by the
     referenced type, not the reference itself.  */
  if (lang_hooks.decls.omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  /* Globals that are 'declare device_resident' are already on the
     device; only copy data to them, never back.  */
  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl))
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      rkind = "kernels";

      if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'copy'.  */
	flags |= GOVD_MAP | GOVD_MAP_FORCE;

      break;

    case ORT_ACC_PARALLEL:
      rkind = "parallel";

      if (on_device || declared)
	flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'firstprivate'.  */
	flags |= GOVD_FIRSTPRIVATE;

      break;

    default:
      gcc_unreachable ();
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
      inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
    }
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      /* Emulated TLS: the real thread-local object hides behind the
	 DECL_VALUE_EXPR.  */
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}

      /* Inside an OpenACC 'routine' function, globals need a matching
	 'declare' directive; diagnose uses that lack one.  */
      if (gimplify_omp_ctxp->outer_context == NULL
	  && VAR_P (decl)
	  && oacc_get_fn_attrib (current_function_decl))
	{
	  location_t loc = DECL_SOURCE_LOCATION (decl);

	  if (lookup_attribute ("omp declare target link",
				DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE with %<link%> clause used in %<routine%> function",
			DECL_NAME (decl));
	      return false;
	    }
	  else if (!lookup_attribute ("omp declare target",
				      DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE requires a %<declare%> directive for use "
			"in a %<routine%> function", DECL_NAME (decl));
	      return false;
	    }
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  unsigned nflags = flags;
	  /* OpenMP 4.5 target defaults: pointers may become zero-length
	     array sections, scalars may become firstprivate.  */
	  if (ctx->target_map_pointers_as_0len_arrays
	      || ctx->target_map_scalars_firstprivate)
	    {
	      bool is_declare_target = false;
	      bool is_scalar = false;
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  struct gimplify_omp_ctx *octx;
		  /* A 'declare target' global is one that no enclosing
		     context gives a non-shared data-sharing class.  */
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target && ctx->target_map_scalars_firstprivate)
		is_scalar = lang_hooks.decls.omp_scalar_p (decl);
	      if (is_declare_target)
		;
	      else if (ctx->target_map_pointers_as_0len_arrays
		       && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			       && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				  == POINTER_TYPE)))
		nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
	      else if (is_scalar)
		nflags |= GOVD_FIRSTPRIVATE;
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
			error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      if (octx->region_type == ORT_ACC_DATA
			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
			nflags |= GOVD_MAP_0LEN_ARRAY;
		      goto found_outer;
		    }
		}
	    }

	  {
	    tree type = TREE_TYPE (decl);

	    if (nflags == flags
		&& gimplify_omp_ctxp->target_firstprivatize_array_bases
		&& lang_hooks.decls.omp_privatize_by_reference (decl))
	      type = TREE_TYPE (type);
	    if (nflags == flags
		&& !lang_hooks.types.omp_mappable_type (type))
	      {
		error ("%qD referenced in target region does not have "
		       "a mappable type", decl);
		nflags |= GOVD_MAP | GOVD_EXPLICIT;
	      }
	    else if (nflags == flags)
	      {
		if ((ctx->region_type & ORT_ACC) != 0)
		  nflags = oacc_default_clause (ctx, decl, flags);
		else
		  nflags |= GOVD_MAP;
	      }
	  }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      /* These region types simply forward to the enclosing context
	 without recording anything here.  */
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* First real use of a decl seen so far only in clauses: make sure the
     hidden objects describing its variable size are marked seen too.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  splay_tree_node n2;
	  tree t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  n2->value |= GOVD_SEEN;
	}
      else if (lang_hooks.decls.omp_privatize_by_reference (decl)
	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
		   != INTEGER_CST))
	{
	  splay_tree_node n2;
	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  if (n2)
	    omp_notice_variable (ctx, t, true);
	}
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.

   SIMD encodes the looping construct: 0 for a non-simd loop, 1 for
   'omp simd' proper, 2 for a combined construct ('for simd' etc.) —
   it selects which explicit clauses are invalid for the iteration
   variable.  Returns true if CTX (or the combined parallel it is
   nested directly in) is the context that should privatize DECL.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      /* Error recovery: force the predetermined sharing.  */
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  /* Diagnose explicit clauses that conflict with the
	     predetermined sharing of the iteration variable.  */
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
	    error ("iteration variable %qE should not be lastprivate",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
	    error ("iteration variable %qE should not be private",
		   DECL_NAME (decl));
	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE is predetermined linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* Only look through contexts that do not own data-sharing.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.

   COPYPRIVATE tweaks the conservative answer for by-reference decls
   when no enclosing context records DECL (see the comment below).  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  /* No enclosing context mentioned DECL.  Globals are shared.  */
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too,
	     when checking for copyprivate, assume they might be
	     private, otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (lang_hooks.decls.omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      /* Target regions without a data-sharing class for DECL are
	 transparent; keep walking outward.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	continue;

      if (n != NULL)
	{
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  /* Anything recorded without GOVD_SHARED counts as private.  */
	  return (n->value & GOVD_SHARED) == 0;
	}
    }
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_SIMD
	 || ctx->region_type == ORT_ACC);

  return false;
}
7388 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7391 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
7395 /* If this node has been visited, unmark it and keep looking. */
7396 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
7399 if (IS_TYPE_OR_DECL_P (t
))
7404 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7405 and previous omp contexts. */
7408 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
7409 enum omp_region_type region_type
,
7410 enum tree_code code
)
7412 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
7414 hash_map
<tree
, tree
> *struct_map_to_clause
= NULL
;
7415 tree
*prev_list_p
= NULL
;
7417 ctx
= new_omp_context (region_type
);
7418 outer_ctx
= ctx
->outer_context
;
7419 if (code
== OMP_TARGET
)
7421 if (!lang_GNU_Fortran ())
7422 ctx
->target_map_pointers_as_0len_arrays
= true;
7423 ctx
->target_map_scalars_firstprivate
= true;
7425 if (!lang_GNU_Fortran ())
7429 case OMP_TARGET_DATA
:
7430 case OMP_TARGET_ENTER_DATA
:
7431 case OMP_TARGET_EXIT_DATA
:
7433 case OACC_HOST_DATA
:
7434 ctx
->target_firstprivatize_array_bases
= true;
7439 while ((c
= *list_p
) != NULL
)
7441 bool remove
= false;
7442 bool notice_outer
= true;
7443 const char *check_non_private
= NULL
;
7447 switch (OMP_CLAUSE_CODE (c
))
7449 case OMP_CLAUSE_PRIVATE
:
7450 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
7451 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
7453 flags
|= GOVD_PRIVATE_OUTER_REF
;
7454 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
7457 notice_outer
= false;
7459 case OMP_CLAUSE_SHARED
:
7460 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
7462 case OMP_CLAUSE_FIRSTPRIVATE
:
7463 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
7464 check_non_private
= "firstprivate";
7466 case OMP_CLAUSE_LASTPRIVATE
:
7467 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
7468 check_non_private
= "lastprivate";
7469 decl
= OMP_CLAUSE_DECL (c
);
7470 if (error_operand_p (decl
))
7473 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
7474 || outer_ctx
->region_type
== ORT_COMBINED_TEAMS
)
7475 && splay_tree_lookup (outer_ctx
->variables
,
7476 (splay_tree_key
) decl
) == NULL
)
7478 omp_add_variable (outer_ctx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
7479 if (outer_ctx
->outer_context
)
7480 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
7483 && (outer_ctx
->region_type
& ORT_TASK
) != 0
7484 && outer_ctx
->combined_loop
7485 && splay_tree_lookup (outer_ctx
->variables
,
7486 (splay_tree_key
) decl
) == NULL
)
7488 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
7489 if (outer_ctx
->outer_context
)
7490 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
7493 && (outer_ctx
->region_type
== ORT_WORKSHARE
7494 || outer_ctx
->region_type
== ORT_ACC
)
7495 && outer_ctx
->combined_loop
7496 && splay_tree_lookup (outer_ctx
->variables
,
7497 (splay_tree_key
) decl
) == NULL
7498 && !omp_check_private (outer_ctx
, decl
, false))
7500 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
7501 if (outer_ctx
->outer_context
7502 && (outer_ctx
->outer_context
->region_type
7503 == ORT_COMBINED_PARALLEL
)
7504 && splay_tree_lookup (outer_ctx
->outer_context
->variables
,
7505 (splay_tree_key
) decl
) == NULL
)
7507 struct gimplify_omp_ctx
*octx
= outer_ctx
->outer_context
;
7508 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
7509 if (octx
->outer_context
)
7511 octx
= octx
->outer_context
;
7512 if (octx
->region_type
== ORT_WORKSHARE
7513 && octx
->combined_loop
7514 && splay_tree_lookup (octx
->variables
,
7515 (splay_tree_key
) decl
) == NULL
7516 && !omp_check_private (octx
, decl
, false))
7518 omp_add_variable (octx
, decl
,
7519 GOVD_LASTPRIVATE
| GOVD_SEEN
);
7520 octx
= octx
->outer_context
;
7522 && octx
->region_type
== ORT_COMBINED_TEAMS
7523 && (splay_tree_lookup (octx
->variables
,
7524 (splay_tree_key
) decl
)
7527 omp_add_variable (octx
, decl
,
7528 GOVD_SHARED
| GOVD_SEEN
);
7529 octx
= octx
->outer_context
;
7533 omp_notice_variable (octx
, decl
, true);
7536 else if (outer_ctx
->outer_context
)
7537 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
7540 case OMP_CLAUSE_REDUCTION
:
7541 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
7542 /* OpenACC permits reductions on private variables. */
7543 if (!(region_type
& ORT_ACC
))
7544 check_non_private
= "reduction";
7545 decl
= OMP_CLAUSE_DECL (c
);
7546 if (TREE_CODE (decl
) == MEM_REF
)
7548 tree type
= TREE_TYPE (decl
);
7549 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
7550 NULL
, is_gimple_val
, fb_rvalue
, false)
7556 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
7559 omp_firstprivatize_variable (ctx
, v
);
7560 omp_notice_variable (ctx
, v
, true);
7562 decl
= TREE_OPERAND (decl
, 0);
7563 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
7565 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
7566 NULL
, is_gimple_val
, fb_rvalue
, false)
7572 v
= TREE_OPERAND (decl
, 1);
7575 omp_firstprivatize_variable (ctx
, v
);
7576 omp_notice_variable (ctx
, v
, true);
7578 decl
= TREE_OPERAND (decl
, 0);
7580 if (TREE_CODE (decl
) == ADDR_EXPR
7581 || TREE_CODE (decl
) == INDIRECT_REF
)
7582 decl
= TREE_OPERAND (decl
, 0);
7585 case OMP_CLAUSE_LINEAR
:
7586 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
7587 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
7594 if (code
== OMP_SIMD
7595 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
7597 struct gimplify_omp_ctx
*octx
= outer_ctx
;
7599 && octx
->region_type
== ORT_WORKSHARE
7600 && octx
->combined_loop
7601 && !octx
->distribute
)
7603 if (octx
->outer_context
7604 && (octx
->outer_context
->region_type
7605 == ORT_COMBINED_PARALLEL
))
7606 octx
= octx
->outer_context
->outer_context
;
7608 octx
= octx
->outer_context
;
7611 && octx
->region_type
== ORT_WORKSHARE
7612 && octx
->combined_loop
7613 && octx
->distribute
)
7615 error_at (OMP_CLAUSE_LOCATION (c
),
7616 "%<linear%> clause for variable other than "
7617 "loop iterator specified on construct "
7618 "combined with %<distribute%>");
7623 /* For combined #pragma omp parallel for simd, need to put
7624 lastprivate and perhaps firstprivate too on the
7625 parallel. Similarly for #pragma omp for simd. */
7626 struct gimplify_omp_ctx
*octx
= outer_ctx
;
7630 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
7631 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
7633 decl
= OMP_CLAUSE_DECL (c
);
7634 if (error_operand_p (decl
))
7640 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
7641 flags
|= GOVD_FIRSTPRIVATE
;
7642 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
7643 flags
|= GOVD_LASTPRIVATE
;
7645 && octx
->region_type
== ORT_WORKSHARE
7646 && octx
->combined_loop
)
7648 if (octx
->outer_context
7649 && (octx
->outer_context
->region_type
7650 == ORT_COMBINED_PARALLEL
))
7651 octx
= octx
->outer_context
;
7652 else if (omp_check_private (octx
, decl
, false))
7656 && (octx
->region_type
& ORT_TASK
) != 0
7657 && octx
->combined_loop
)
7660 && octx
->region_type
== ORT_COMBINED_PARALLEL
7661 && ctx
->region_type
== ORT_WORKSHARE
7662 && octx
== outer_ctx
)
7663 flags
= GOVD_SEEN
| GOVD_SHARED
;
7665 && octx
->region_type
== ORT_COMBINED_TEAMS
)
7666 flags
= GOVD_SEEN
| GOVD_SHARED
;
7668 && octx
->region_type
== ORT_COMBINED_TARGET
)
7670 flags
&= ~GOVD_LASTPRIVATE
;
7671 if (flags
== GOVD_SEEN
)
7677 = splay_tree_lookup (octx
->variables
,
7678 (splay_tree_key
) decl
);
7679 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7684 omp_add_variable (octx
, decl
, flags
);
7685 if (octx
->outer_context
== NULL
)
7687 octx
= octx
->outer_context
;
7692 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
7693 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
7694 omp_notice_variable (octx
, decl
, true);
7696 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
7697 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
7698 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
7700 notice_outer
= false;
7701 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
7705 case OMP_CLAUSE_MAP
:
7706 decl
= OMP_CLAUSE_DECL (c
);
7707 if (error_operand_p (decl
))
7714 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
7717 case OMP_TARGET_DATA
:
7718 case OMP_TARGET_ENTER_DATA
:
7719 case OMP_TARGET_EXIT_DATA
:
7720 case OACC_ENTER_DATA
:
7721 case OACC_EXIT_DATA
:
7722 case OACC_HOST_DATA
:
7723 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7724 || (OMP_CLAUSE_MAP_KIND (c
)
7725 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7726 /* For target {,enter ,exit }data only the array slice is
7727 mapped, but not the pointer to it. */
7735 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
7737 struct gimplify_omp_ctx
*octx
;
7738 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
7740 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
7743 = splay_tree_lookup (octx
->variables
,
7744 (splay_tree_key
) decl
);
7746 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
7747 "declared in enclosing %<host_data%> region",
7751 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
7752 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
7753 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
7754 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
7755 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
7760 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
7761 || (OMP_CLAUSE_MAP_KIND (c
)
7762 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7763 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
7766 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
7768 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
7769 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
7774 if (TREE_CODE (d
) == ARRAY_REF
)
7776 while (TREE_CODE (d
) == ARRAY_REF
)
7777 d
= TREE_OPERAND (d
, 0);
7778 if (TREE_CODE (d
) == COMPONENT_REF
7779 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
7782 pd
= &OMP_CLAUSE_DECL (c
);
7784 && TREE_CODE (decl
) == INDIRECT_REF
7785 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
7786 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
7789 pd
= &TREE_OPERAND (decl
, 0);
7790 decl
= TREE_OPERAND (decl
, 0);
7792 if (TREE_CODE (decl
) == COMPONENT_REF
)
7794 while (TREE_CODE (decl
) == COMPONENT_REF
)
7795 decl
= TREE_OPERAND (decl
, 0);
7796 if (TREE_CODE (decl
) == INDIRECT_REF
7797 && DECL_P (TREE_OPERAND (decl
, 0))
7798 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
7800 decl
= TREE_OPERAND (decl
, 0);
7802 if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
)
7810 if (error_operand_p (decl
))
7816 tree stype
= TREE_TYPE (decl
);
7817 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
7818 stype
= TREE_TYPE (stype
);
7819 if (TYPE_SIZE_UNIT (stype
) == NULL
7820 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
7822 error_at (OMP_CLAUSE_LOCATION (c
),
7823 "mapping field %qE of variable length "
7824 "structure", OMP_CLAUSE_DECL (c
));
7829 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
7831 /* Error recovery. */
7832 if (prev_list_p
== NULL
)
7837 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
7839 tree ch
= OMP_CLAUSE_CHAIN (*prev_list_p
);
7840 if (ch
== NULL_TREE
|| OMP_CLAUSE_CHAIN (ch
) != c
)
7849 HOST_WIDE_INT bitsize
, bitpos
;
7851 int unsignedp
, reversep
, volatilep
= 0;
7852 tree base
= OMP_CLAUSE_DECL (c
);
7853 while (TREE_CODE (base
) == ARRAY_REF
)
7854 base
= TREE_OPERAND (base
, 0);
7855 if (TREE_CODE (base
) == INDIRECT_REF
)
7856 base
= TREE_OPERAND (base
, 0);
7857 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7858 &mode
, &unsignedp
, &reversep
,
7860 tree orig_base
= base
;
7861 if ((TREE_CODE (base
) == INDIRECT_REF
7862 || (TREE_CODE (base
) == MEM_REF
7863 && integer_zerop (TREE_OPERAND (base
, 1))))
7864 && DECL_P (TREE_OPERAND (base
, 0))
7865 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
7867 base
= TREE_OPERAND (base
, 0);
7868 gcc_assert (base
== decl
7869 && (offset
== NULL_TREE
7870 || TREE_CODE (offset
) == INTEGER_CST
));
7873 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7874 bool ptr
= (OMP_CLAUSE_MAP_KIND (c
)
7875 == GOMP_MAP_ALWAYS_POINTER
);
7876 if (n
== NULL
|| (n
->value
& GOVD_MAP
) == 0)
7878 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7880 OMP_CLAUSE_SET_MAP_KIND (l
, GOMP_MAP_STRUCT
);
7881 if (orig_base
!= base
)
7882 OMP_CLAUSE_DECL (l
) = unshare_expr (orig_base
);
7884 OMP_CLAUSE_DECL (l
) = decl
;
7885 OMP_CLAUSE_SIZE (l
) = size_int (1);
7886 if (struct_map_to_clause
== NULL
)
7887 struct_map_to_clause
= new hash_map
<tree
, tree
>;
7888 struct_map_to_clause
->put (decl
, l
);
7891 enum gomp_map_kind mkind
7892 = code
== OMP_TARGET_EXIT_DATA
7893 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
7894 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7896 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
7897 OMP_CLAUSE_DECL (c2
)
7898 = unshare_expr (OMP_CLAUSE_DECL (c
));
7899 OMP_CLAUSE_CHAIN (c2
) = *prev_list_p
;
7900 OMP_CLAUSE_SIZE (c2
)
7901 = TYPE_SIZE_UNIT (ptr_type_node
);
7902 OMP_CLAUSE_CHAIN (l
) = c2
;
7903 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
7905 tree c4
= OMP_CLAUSE_CHAIN (*prev_list_p
);
7907 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7909 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
7910 OMP_CLAUSE_DECL (c3
)
7911 = unshare_expr (OMP_CLAUSE_DECL (c4
));
7912 OMP_CLAUSE_SIZE (c3
)
7913 = TYPE_SIZE_UNIT (ptr_type_node
);
7914 OMP_CLAUSE_CHAIN (c3
) = *prev_list_p
;
7915 OMP_CLAUSE_CHAIN (c2
) = c3
;
7922 OMP_CLAUSE_CHAIN (l
) = c
;
7924 list_p
= &OMP_CLAUSE_CHAIN (l
);
7926 if (orig_base
!= base
&& code
== OMP_TARGET
)
7928 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
7930 enum gomp_map_kind mkind
7931 = GOMP_MAP_FIRSTPRIVATE_REFERENCE
;
7932 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
7933 OMP_CLAUSE_DECL (c2
) = decl
;
7934 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
7935 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
7936 OMP_CLAUSE_CHAIN (l
) = c2
;
7938 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
7939 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) || ptr
)
7945 tree
*osc
= struct_map_to_clause
->get (decl
);
7946 tree
*sc
= NULL
, *scp
= NULL
;
7947 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) || ptr
)
7948 n
->value
|= GOVD_SEEN
;
7951 o1
= wi::to_offset (offset
);
7955 o1
= o1
+ bitpos
/ BITS_PER_UNIT
;
7956 sc
= &OMP_CLAUSE_CHAIN (*osc
);
7958 && (OMP_CLAUSE_MAP_KIND (*sc
)
7959 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
7960 sc
= &OMP_CLAUSE_CHAIN (*sc
);
7961 for (; *sc
!= c
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
7962 if (ptr
&& sc
== prev_list_p
)
7964 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
7966 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
7968 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
7974 HOST_WIDE_INT bitsize2
, bitpos2
;
7975 base
= OMP_CLAUSE_DECL (*sc
);
7976 if (TREE_CODE (base
) == ARRAY_REF
)
7978 while (TREE_CODE (base
) == ARRAY_REF
)
7979 base
= TREE_OPERAND (base
, 0);
7980 if (TREE_CODE (base
) != COMPONENT_REF
7981 || (TREE_CODE (TREE_TYPE (base
))
7985 else if (TREE_CODE (base
) == INDIRECT_REF
7986 && (TREE_CODE (TREE_OPERAND (base
, 0))
7988 && (TREE_CODE (TREE_TYPE
7989 (TREE_OPERAND (base
, 0)))
7991 base
= TREE_OPERAND (base
, 0);
7992 base
= get_inner_reference (base
, &bitsize2
,
7995 &reversep
, &volatilep
);
7996 if ((TREE_CODE (base
) == INDIRECT_REF
7997 || (TREE_CODE (base
) == MEM_REF
7998 && integer_zerop (TREE_OPERAND (base
,
8000 && DECL_P (TREE_OPERAND (base
, 0))
8001 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
,
8004 base
= TREE_OPERAND (base
, 0);
8009 gcc_assert (offset
== NULL_TREE
8010 || TREE_CODE (offset
) == INTEGER_CST
);
8011 tree d1
= OMP_CLAUSE_DECL (*sc
);
8012 tree d2
= OMP_CLAUSE_DECL (c
);
8013 while (TREE_CODE (d1
) == ARRAY_REF
)
8014 d1
= TREE_OPERAND (d1
, 0);
8015 while (TREE_CODE (d2
) == ARRAY_REF
)
8016 d2
= TREE_OPERAND (d2
, 0);
8017 if (TREE_CODE (d1
) == INDIRECT_REF
)
8018 d1
= TREE_OPERAND (d1
, 0);
8019 if (TREE_CODE (d2
) == INDIRECT_REF
)
8020 d2
= TREE_OPERAND (d2
, 0);
8021 while (TREE_CODE (d1
) == COMPONENT_REF
)
8022 if (TREE_CODE (d2
) == COMPONENT_REF
8023 && TREE_OPERAND (d1
, 1)
8024 == TREE_OPERAND (d2
, 1))
8026 d1
= TREE_OPERAND (d1
, 0);
8027 d2
= TREE_OPERAND (d2
, 0);
8033 error_at (OMP_CLAUSE_LOCATION (c
),
8034 "%qE appears more than once in map "
8035 "clauses", OMP_CLAUSE_DECL (c
));
8040 o2
= wi::to_offset (offset2
);
8044 o2
= o2
+ bitpos2
/ BITS_PER_UNIT
;
8045 if (wi::ltu_p (o1
, o2
)
8046 || (wi::eq_p (o1
, o2
) && bitpos
< bitpos2
))
8056 OMP_CLAUSE_SIZE (*osc
)
8057 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
),
8061 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8063 tree cl
= NULL_TREE
;
8064 enum gomp_map_kind mkind
8065 = code
== OMP_TARGET_EXIT_DATA
8066 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
8067 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8068 OMP_CLAUSE_DECL (c2
)
8069 = unshare_expr (OMP_CLAUSE_DECL (c
));
8070 OMP_CLAUSE_CHAIN (c2
) = scp
? *scp
: *prev_list_p
;
8071 OMP_CLAUSE_SIZE (c2
)
8072 = TYPE_SIZE_UNIT (ptr_type_node
);
8073 cl
= scp
? *prev_list_p
: c2
;
8074 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
8076 tree c4
= OMP_CLAUSE_CHAIN (*prev_list_p
);
8078 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8080 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
8081 OMP_CLAUSE_DECL (c3
)
8082 = unshare_expr (OMP_CLAUSE_DECL (c4
));
8083 OMP_CLAUSE_SIZE (c3
)
8084 = TYPE_SIZE_UNIT (ptr_type_node
);
8085 OMP_CLAUSE_CHAIN (c3
) = *prev_list_p
;
8087 OMP_CLAUSE_CHAIN (c2
) = c3
;
8093 if (sc
== prev_list_p
)
8100 *prev_list_p
= OMP_CLAUSE_CHAIN (c
);
8101 list_p
= prev_list_p
;
8103 OMP_CLAUSE_CHAIN (c
) = *sc
;
8110 *list_p
= OMP_CLAUSE_CHAIN (c
);
8111 OMP_CLAUSE_CHAIN (c
) = *sc
;
8118 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_POINTER
8119 && OMP_CLAUSE_CHAIN (c
)
8120 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
)) == OMP_CLAUSE_MAP
8121 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
8122 == GOMP_MAP_ALWAYS_POINTER
))
8123 prev_list_p
= list_p
;
8126 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
8127 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
8128 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
8129 flags
|= GOVD_MAP_ALWAYS_TO
;
8132 case OMP_CLAUSE_DEPEND
:
8133 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8135 tree deps
= OMP_CLAUSE_DECL (c
);
8136 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
8138 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
8139 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
8140 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
8141 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
8142 deps
= TREE_CHAIN (deps
);
8146 else if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
8148 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8150 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8151 NULL
, is_gimple_val
, fb_rvalue
);
8152 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8154 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8159 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8160 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8161 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8169 case OMP_CLAUSE_FROM
:
8170 case OMP_CLAUSE__CACHE_
:
8171 decl
= OMP_CLAUSE_DECL (c
);
8172 if (error_operand_p (decl
))
8177 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
8178 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
8179 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8180 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
8181 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8188 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
8189 NULL
, is_gimple_lvalue
, fb_lvalue
)
8199 case OMP_CLAUSE_USE_DEVICE_PTR
:
8200 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8202 case OMP_CLAUSE_IS_DEVICE_PTR
:
8203 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8207 decl
= OMP_CLAUSE_DECL (c
);
8209 if (error_operand_p (decl
))
8214 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
8216 tree t
= omp_member_access_dummy_var (decl
);
8219 tree v
= DECL_VALUE_EXPR (decl
);
8220 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
8222 omp_notice_variable (outer_ctx
, t
, true);
8225 if (code
== OACC_DATA
8226 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
8227 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
8228 flags
|= GOVD_MAP_0LEN_ARRAY
;
8229 omp_add_variable (ctx
, decl
, flags
);
8230 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
8231 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
8233 omp_add_variable (ctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
8234 GOVD_LOCAL
| GOVD_SEEN
);
8235 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
8236 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
8238 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
8240 omp_add_variable (ctx
,
8241 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
8242 GOVD_LOCAL
| GOVD_SEEN
);
8243 gimplify_omp_ctxp
= ctx
;
8244 push_gimplify_context ();
8246 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
8247 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
8249 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
8250 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
8251 pop_gimplify_context
8252 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
8253 push_gimplify_context ();
8254 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
8255 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
8256 pop_gimplify_context
8257 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
8258 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
8259 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
8261 gimplify_omp_ctxp
= outer_ctx
;
8263 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
8264 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
8266 gimplify_omp_ctxp
= ctx
;
8267 push_gimplify_context ();
8268 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
8270 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
8272 TREE_SIDE_EFFECTS (bind
) = 1;
8273 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
8274 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
8276 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
8277 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
8278 pop_gimplify_context
8279 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
8280 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
8282 gimplify_omp_ctxp
= outer_ctx
;
8284 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
8285 && OMP_CLAUSE_LINEAR_STMT (c
))
8287 gimplify_omp_ctxp
= ctx
;
8288 push_gimplify_context ();
8289 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
8291 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
8293 TREE_SIDE_EFFECTS (bind
) = 1;
8294 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
8295 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
8297 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
8298 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
8299 pop_gimplify_context
8300 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
8301 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
8303 gimplify_omp_ctxp
= outer_ctx
;
8309 case OMP_CLAUSE_COPYIN
:
8310 case OMP_CLAUSE_COPYPRIVATE
:
8311 decl
= OMP_CLAUSE_DECL (c
);
8312 if (error_operand_p (decl
))
8317 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
8319 && !omp_check_private (ctx
, decl
, true))
8322 if (is_global_var (decl
))
8324 if (DECL_THREAD_LOCAL_P (decl
))
8326 else if (DECL_HAS_VALUE_EXPR_P (decl
))
8328 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
8332 && DECL_THREAD_LOCAL_P (value
))
8337 error_at (OMP_CLAUSE_LOCATION (c
),
8338 "copyprivate variable %qE is not threadprivate"
8339 " or private in outer context", DECL_NAME (decl
));
8343 omp_notice_variable (outer_ctx
, decl
, true);
8344 if (check_non_private
8345 && region_type
== ORT_WORKSHARE
8346 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
8347 || decl
== OMP_CLAUSE_DECL (c
)
8348 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
8349 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
8351 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
8352 == POINTER_PLUS_EXPR
8353 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8354 (OMP_CLAUSE_DECL (c
), 0), 0))
8356 && omp_check_private (ctx
, decl
, false))
8358 error ("%s variable %qE is private in outer context",
8359 check_non_private
, DECL_NAME (decl
));
8365 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
8366 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
8369 for (int i
= 0; i
< 2; i
++)
8370 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
8372 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
8373 case OMP_TASK
: p
[i
] = "task"; break;
8374 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
8375 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
8376 case OMP_TARGET
: p
[i
] = "target"; break;
8377 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
8378 case OMP_TARGET_ENTER_DATA
:
8379 p
[i
] = "target enter data"; break;
8380 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
8381 default: gcc_unreachable ();
8383 error_at (OMP_CLAUSE_LOCATION (c
),
8384 "expected %qs %<if%> clause modifier rather than %qs",
8390 case OMP_CLAUSE_FINAL
:
8391 OMP_CLAUSE_OPERAND (c
, 0)
8392 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
8395 case OMP_CLAUSE_SCHEDULE
:
8396 case OMP_CLAUSE_NUM_THREADS
:
8397 case OMP_CLAUSE_NUM_TEAMS
:
8398 case OMP_CLAUSE_THREAD_LIMIT
:
8399 case OMP_CLAUSE_DIST_SCHEDULE
:
8400 case OMP_CLAUSE_DEVICE
:
8401 case OMP_CLAUSE_PRIORITY
:
8402 case OMP_CLAUSE_GRAINSIZE
:
8403 case OMP_CLAUSE_NUM_TASKS
:
8404 case OMP_CLAUSE_HINT
:
8405 case OMP_CLAUSE__CILK_FOR_COUNT_
:
8406 case OMP_CLAUSE_ASYNC
:
8407 case OMP_CLAUSE_WAIT
:
8408 case OMP_CLAUSE_NUM_GANGS
:
8409 case OMP_CLAUSE_NUM_WORKERS
:
8410 case OMP_CLAUSE_VECTOR_LENGTH
:
8411 case OMP_CLAUSE_WORKER
:
8412 case OMP_CLAUSE_VECTOR
:
8413 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
8414 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8418 case OMP_CLAUSE_GANG
:
8419 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
8420 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8422 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
8423 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8427 case OMP_CLAUSE_NOWAIT
:
8428 case OMP_CLAUSE_ORDERED
:
8429 case OMP_CLAUSE_UNTIED
:
8430 case OMP_CLAUSE_COLLAPSE
:
8431 case OMP_CLAUSE_TILE
:
8432 case OMP_CLAUSE_AUTO
:
8433 case OMP_CLAUSE_SEQ
:
8434 case OMP_CLAUSE_INDEPENDENT
:
8435 case OMP_CLAUSE_MERGEABLE
:
8436 case OMP_CLAUSE_PROC_BIND
:
8437 case OMP_CLAUSE_SAFELEN
:
8438 case OMP_CLAUSE_SIMDLEN
:
8439 case OMP_CLAUSE_NOGROUP
:
8440 case OMP_CLAUSE_THREADS
:
8441 case OMP_CLAUSE_SIMD
:
8444 case OMP_CLAUSE_DEFAULTMAP
:
8445 ctx
->target_map_scalars_firstprivate
= false;
8448 case OMP_CLAUSE_ALIGNED
:
8449 decl
= OMP_CLAUSE_DECL (c
);
8450 if (error_operand_p (decl
))
8455 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
8456 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8461 if (!is_global_var (decl
)
8462 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
8463 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
8466 case OMP_CLAUSE_DEFAULT
:
8467 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
8474 if (code
== OACC_DATA
8475 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
8476 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
8479 *list_p
= OMP_CLAUSE_CHAIN (c
);
8481 list_p
= &OMP_CLAUSE_CHAIN (c
);
8484 gimplify_omp_ctxp
= ctx
;
8485 if (struct_map_to_clause
)
8486 delete struct_map_to_clause
;
8489 /* Return true if DECL is a candidate for shared to firstprivate
8490 optimization. We only consider non-addressable scalars, not
8491 too big, and not references. */
8494 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
8496 if (TREE_ADDRESSABLE (decl
))
8498 tree type
= TREE_TYPE (decl
);
8499 if (!is_gimple_reg_type (type
)
8500 || TREE_CODE (type
) == REFERENCE_TYPE
8501 || TREE_ADDRESSABLE (type
))
8503 /* Don't optimize too large decls, as each thread/task will have
8505 HOST_WIDE_INT len
= int_size_in_bytes (type
);
8506 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
8508 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
8513 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8514 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8515 GOVD_WRITTEN in outer contexts. */
8518 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
8520 for (; ctx
; ctx
= ctx
->outer_context
)
8522 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
8523 (splay_tree_key
) decl
);
8526 else if (n
->value
& GOVD_SHARED
)
8528 n
->value
|= GOVD_WRITTEN
;
8531 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
8536 /* Helper callback for walk_gimple_seq to discover possible stores
8537 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8538 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8542 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
8544 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
8553 if (handled_component_p (op
))
8554 op
= TREE_OPERAND (op
, 0);
8555 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
8556 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
8557 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
8562 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
8565 omp_mark_stores (gimplify_omp_ctxp
, op
);
8569 /* Helper callback for walk_gimple_seq to discover possible stores
8570 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8571 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8575 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
8576 bool *handled_ops_p
,
8577 struct walk_stmt_info
*wi
)
8579 gimple
*stmt
= gsi_stmt (*gsi_p
);
8580 switch (gimple_code (stmt
))
8582 /* Don't recurse on OpenMP constructs for which
8583 gimplify_adjust_omp_clauses already handled the bodies,
8584 except handle gimple_omp_for_pre_body. */
8585 case GIMPLE_OMP_FOR
:
8586 *handled_ops_p
= true;
8587 if (gimple_omp_for_pre_body (stmt
))
8588 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
8589 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
8591 case GIMPLE_OMP_PARALLEL
:
8592 case GIMPLE_OMP_TASK
:
8593 case GIMPLE_OMP_SECTIONS
:
8594 case GIMPLE_OMP_SINGLE
:
8595 case GIMPLE_OMP_TARGET
:
8596 case GIMPLE_OMP_TEAMS
:
8597 case GIMPLE_OMP_CRITICAL
:
8598 *handled_ops_p
= true;
8606 struct gimplify_adjust_omp_clauses_data
8612 /* For all variables that were not actually used within the context,
8613 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8616 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
8618 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
8620 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
8621 tree decl
= (tree
) n
->key
;
8622 unsigned flags
= n
->value
;
8623 enum omp_clause_code code
;
8627 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
8629 if ((flags
& GOVD_SEEN
) == 0)
8631 if (flags
& GOVD_DEBUG_PRIVATE
)
8633 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_SHARED
);
8634 private_debug
= true;
8636 else if (flags
& GOVD_MAP
)
8637 private_debug
= false;
8640 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
8641 !!(flags
& GOVD_SHARED
));
8643 code
= OMP_CLAUSE_PRIVATE
;
8644 else if (flags
& GOVD_MAP
)
8646 code
= OMP_CLAUSE_MAP
;
8647 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
8648 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
8650 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
8654 else if (flags
& GOVD_SHARED
)
8656 if (is_global_var (decl
))
8658 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
8662 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8663 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
8664 | GOVD_PRIVATE
| GOVD_REDUCTION
8665 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
8667 ctx
= ctx
->outer_context
;
8672 code
= OMP_CLAUSE_SHARED
;
8674 else if (flags
& GOVD_PRIVATE
)
8675 code
= OMP_CLAUSE_PRIVATE
;
8676 else if (flags
& GOVD_FIRSTPRIVATE
)
8678 code
= OMP_CLAUSE_FIRSTPRIVATE
;
8679 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
8680 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
8681 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
8683 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8684 "%<target%> construct", decl
);
8688 else if (flags
& GOVD_LASTPRIVATE
)
8689 code
= OMP_CLAUSE_LASTPRIVATE
;
8690 else if (flags
& GOVD_ALIGNED
)
8695 if (((flags
& GOVD_LASTPRIVATE
)
8696 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
8697 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8698 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
8700 tree chain
= *list_p
;
8701 clause
= build_omp_clause (input_location
, code
);
8702 OMP_CLAUSE_DECL (clause
) = decl
;
8703 OMP_CLAUSE_CHAIN (clause
) = chain
;
8705 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
8706 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
8707 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
8708 else if (code
== OMP_CLAUSE_SHARED
8709 && (flags
& GOVD_WRITTEN
) == 0
8710 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8711 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
8712 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
8713 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
8714 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
8716 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
8717 OMP_CLAUSE_DECL (nc
) = decl
;
8718 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
8719 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
8720 OMP_CLAUSE_DECL (clause
)
8721 = build_simple_mem_ref_loc (input_location
, decl
);
8722 OMP_CLAUSE_DECL (clause
)
8723 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
8724 build_int_cst (build_pointer_type (char_type_node
), 0));
8725 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
8726 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
8727 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
8728 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
8729 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
8730 OMP_CLAUSE_CHAIN (nc
) = chain
;
8731 OMP_CLAUSE_CHAIN (clause
) = nc
;
8732 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8733 gimplify_omp_ctxp
= ctx
->outer_context
;
8734 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
8735 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
8736 gimplify_omp_ctxp
= ctx
;
8738 else if (code
== OMP_CLAUSE_MAP
)
8741 /* Not all combinations of these GOVD_MAP flags are actually valid. */
8742 switch (flags
& (GOVD_MAP_TO_ONLY
8744 | GOVD_MAP_FORCE_PRESENT
))
8747 kind
= GOMP_MAP_TOFROM
;
8749 case GOVD_MAP_FORCE
:
8750 kind
= GOMP_MAP_TOFROM
| GOMP_MAP_FLAG_FORCE
;
8752 case GOVD_MAP_TO_ONLY
:
8755 case GOVD_MAP_TO_ONLY
| GOVD_MAP_FORCE
:
8756 kind
= GOMP_MAP_TO
| GOMP_MAP_FLAG_FORCE
;
8758 case GOVD_MAP_FORCE_PRESENT
:
8759 kind
= GOMP_MAP_FORCE_PRESENT
;
8764 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
8765 if (DECL_SIZE (decl
)
8766 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
8768 tree decl2
= DECL_VALUE_EXPR (decl
);
8769 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
8770 decl2
= TREE_OPERAND (decl2
, 0);
8771 gcc_assert (DECL_P (decl2
));
8772 tree mem
= build_simple_mem_ref (decl2
);
8773 OMP_CLAUSE_DECL (clause
) = mem
;
8774 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8775 if (gimplify_omp_ctxp
->outer_context
)
8777 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
8778 omp_notice_variable (ctx
, decl2
, true);
8779 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
8781 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
8783 OMP_CLAUSE_DECL (nc
) = decl
;
8784 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
8785 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
8786 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
8788 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
8789 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
8790 OMP_CLAUSE_CHAIN (clause
) = nc
;
8792 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
8793 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
8795 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
8796 OMP_CLAUSE_SIZE (clause
)
8797 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
8798 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8799 gimplify_omp_ctxp
= ctx
->outer_context
;
8800 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
8801 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
8802 gimplify_omp_ctxp
= ctx
;
8803 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
8805 OMP_CLAUSE_DECL (nc
) = decl
;
8806 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
8807 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
8808 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
8809 OMP_CLAUSE_CHAIN (clause
) = nc
;
8812 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
8814 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
8816 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
8817 OMP_CLAUSE_DECL (nc
) = decl
;
8818 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
8819 OMP_CLAUSE_CHAIN (nc
) = chain
;
8820 OMP_CLAUSE_CHAIN (clause
) = nc
;
8821 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8822 gimplify_omp_ctxp
= ctx
->outer_context
;
8823 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
8824 gimplify_omp_ctxp
= ctx
;
8827 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8828 gimplify_omp_ctxp
= ctx
->outer_context
;
8829 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
);
8830 if (gimplify_omp_ctxp
)
8831 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
8832 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
8833 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
8834 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
8836 gimplify_omp_ctxp
= ctx
;
8841 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
8842 enum tree_code code
)
8844 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8849 struct gimplify_omp_ctx
*octx
;
8850 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
8851 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
8855 struct walk_stmt_info wi
;
8856 memset (&wi
, 0, sizeof (wi
));
8857 walk_gimple_seq (body
, omp_find_stores_stmt
,
8858 omp_find_stores_op
, &wi
);
8861 while ((c
= *list_p
) != NULL
)
8864 bool remove
= false;
8866 switch (OMP_CLAUSE_CODE (c
))
8868 case OMP_CLAUSE_FIRSTPRIVATE
:
8869 if ((ctx
->region_type
& ORT_TARGET
)
8870 && (ctx
->region_type
& ORT_ACC
) == 0
8871 && TYPE_ATOMIC (strip_array_types
8872 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
8874 error_at (OMP_CLAUSE_LOCATION (c
),
8875 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8876 "%<target%> construct", OMP_CLAUSE_DECL (c
));
8881 case OMP_CLAUSE_PRIVATE
:
8882 case OMP_CLAUSE_SHARED
:
8883 case OMP_CLAUSE_LINEAR
:
8884 decl
= OMP_CLAUSE_DECL (c
);
8885 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8886 remove
= !(n
->value
& GOVD_SEEN
);
8889 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
8890 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
8891 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
8893 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
8894 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
8896 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
8897 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
8899 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8900 && (n
->value
& GOVD_WRITTEN
) == 0
8902 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8903 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
8904 else if (DECL_P (decl
)
8905 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
8906 && (n
->value
& GOVD_WRITTEN
) != 1)
8907 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
8908 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
8909 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8910 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
8914 case OMP_CLAUSE_LASTPRIVATE
:
8915 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8916 accurately reflect the presence of a FIRSTPRIVATE clause. */
8917 decl
= OMP_CLAUSE_DECL (c
);
8918 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8919 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
8920 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
8921 if (code
== OMP_DISTRIBUTE
8922 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
8925 error_at (OMP_CLAUSE_LOCATION (c
),
8926 "same variable used in %<firstprivate%> and "
8927 "%<lastprivate%> clauses on %<distribute%> "
8931 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
8933 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
8934 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
8937 case OMP_CLAUSE_ALIGNED
:
8938 decl
= OMP_CLAUSE_DECL (c
);
8939 if (!is_global_var (decl
))
8941 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8942 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
8943 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
8945 struct gimplify_omp_ctx
*octx
;
8947 && (n
->value
& (GOVD_DATA_SHARE_CLASS
8948 & ~GOVD_FIRSTPRIVATE
)))
8951 for (octx
= ctx
->outer_context
; octx
;
8952 octx
= octx
->outer_context
)
8954 n
= splay_tree_lookup (octx
->variables
,
8955 (splay_tree_key
) decl
);
8958 if (n
->value
& GOVD_LOCAL
)
8960 /* We have to avoid assigning a shared variable
8961 to itself when trying to add
8962 __builtin_assume_aligned. */
8963 if (n
->value
& GOVD_SHARED
)
8971 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
8973 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
8974 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
8979 case OMP_CLAUSE_MAP
:
8980 if (code
== OMP_TARGET_EXIT_DATA
8981 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
8986 decl
= OMP_CLAUSE_DECL (c
);
8987 /* Data clauses associated with acc parallel reductions must be
8988 compatible with present_or_copy. Warn and adjust the clause
8989 if that is not the case. */
8990 if (ctx
->region_type
== ORT_ACC_PARALLEL
)
8992 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
8996 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
8998 if (n
&& (n
->value
& GOVD_REDUCTION
))
9000 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
9002 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
9003 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
9004 && kind
!= GOMP_MAP_FORCE_PRESENT
9005 && kind
!= GOMP_MAP_POINTER
)
9007 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
9008 "incompatible data clause with reduction "
9009 "on %qE; promoting to present_or_copy",
9011 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
9017 if ((ctx
->region_type
& ORT_TARGET
) != 0
9018 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
9020 if (TREE_CODE (decl
) == INDIRECT_REF
9021 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
9022 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9024 decl
= TREE_OPERAND (decl
, 0);
9025 if (TREE_CODE (decl
) == COMPONENT_REF
)
9027 while (TREE_CODE (decl
) == COMPONENT_REF
)
9028 decl
= TREE_OPERAND (decl
, 0);
9031 n
= splay_tree_lookup (ctx
->variables
,
9032 (splay_tree_key
) decl
);
9033 if (!(n
->value
& GOVD_SEEN
))
9040 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9041 if ((ctx
->region_type
& ORT_TARGET
) != 0
9042 && !(n
->value
& GOVD_SEEN
)
9043 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
9044 && (!is_global_var (decl
)
9045 || !lookup_attribute ("omp declare target link",
9046 DECL_ATTRIBUTES (decl
))))
9049 /* For struct element mapping, if struct is never referenced
9050 in target block and none of the mapping has always modifier,
9051 remove all the struct element mappings, which immediately
9052 follow the GOMP_MAP_STRUCT map clause. */
9053 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
9055 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
9057 OMP_CLAUSE_CHAIN (c
)
9058 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
9061 else if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
9062 && code
== OMP_TARGET_EXIT_DATA
)
9064 else if (DECL_SIZE (decl
)
9065 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
9066 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
9067 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
9068 && (OMP_CLAUSE_MAP_KIND (c
)
9069 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9071 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9072 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9074 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
9076 tree decl2
= DECL_VALUE_EXPR (decl
);
9077 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
9078 decl2
= TREE_OPERAND (decl2
, 0);
9079 gcc_assert (DECL_P (decl2
));
9080 tree mem
= build_simple_mem_ref (decl2
);
9081 OMP_CLAUSE_DECL (c
) = mem
;
9082 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9083 if (ctx
->outer_context
)
9085 omp_notice_variable (ctx
->outer_context
, decl2
, true);
9086 omp_notice_variable (ctx
->outer_context
,
9087 OMP_CLAUSE_SIZE (c
), true);
9089 if (((ctx
->region_type
& ORT_TARGET
) != 0
9090 || !ctx
->target_firstprivatize_array_bases
)
9091 && ((n
->value
& GOVD_SEEN
) == 0
9092 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
9094 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9096 OMP_CLAUSE_DECL (nc
) = decl
;
9097 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
9098 if (ctx
->target_firstprivatize_array_bases
)
9099 OMP_CLAUSE_SET_MAP_KIND (nc
,
9100 GOMP_MAP_FIRSTPRIVATE_POINTER
);
9102 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
9103 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
9104 OMP_CLAUSE_CHAIN (c
) = nc
;
9110 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
9111 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
9112 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
9113 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
9119 case OMP_CLAUSE_FROM
:
9120 case OMP_CLAUSE__CACHE_
:
9121 decl
= OMP_CLAUSE_DECL (c
);
9124 if (DECL_SIZE (decl
)
9125 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
9127 tree decl2
= DECL_VALUE_EXPR (decl
);
9128 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
9129 decl2
= TREE_OPERAND (decl2
, 0);
9130 gcc_assert (DECL_P (decl2
));
9131 tree mem
= build_simple_mem_ref (decl2
);
9132 OMP_CLAUSE_DECL (c
) = mem
;
9133 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9134 if (ctx
->outer_context
)
9136 omp_notice_variable (ctx
->outer_context
, decl2
, true);
9137 omp_notice_variable (ctx
->outer_context
,
9138 OMP_CLAUSE_SIZE (c
), true);
9141 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
9142 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
9145 case OMP_CLAUSE_REDUCTION
:
9146 decl
= OMP_CLAUSE_DECL (c
);
9147 /* OpenACC reductions need a present_or_copy data clause.
9148 Add one if necessary. Error is the reduction is private. */
9149 if (ctx
->region_type
== ORT_ACC_PARALLEL
)
9151 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9152 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
9153 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
9154 "reduction on %qE", DECL_NAME (decl
));
9155 else if ((n
->value
& GOVD_MAP
) == 0)
9157 tree next
= OMP_CLAUSE_CHAIN (c
);
9158 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
9159 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
9160 OMP_CLAUSE_DECL (nc
) = decl
;
9161 OMP_CLAUSE_CHAIN (c
) = nc
;
9162 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
9165 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
9166 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
9168 nc
= OMP_CLAUSE_CHAIN (nc
);
9170 OMP_CLAUSE_CHAIN (nc
) = next
;
9171 n
->value
|= GOVD_MAP
;
9175 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9176 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
9178 case OMP_CLAUSE_COPYIN
:
9179 case OMP_CLAUSE_COPYPRIVATE
:
9181 case OMP_CLAUSE_NUM_THREADS
:
9182 case OMP_CLAUSE_NUM_TEAMS
:
9183 case OMP_CLAUSE_THREAD_LIMIT
:
9184 case OMP_CLAUSE_DIST_SCHEDULE
:
9185 case OMP_CLAUSE_DEVICE
:
9186 case OMP_CLAUSE_SCHEDULE
:
9187 case OMP_CLAUSE_NOWAIT
:
9188 case OMP_CLAUSE_ORDERED
:
9189 case OMP_CLAUSE_DEFAULT
:
9190 case OMP_CLAUSE_UNTIED
:
9191 case OMP_CLAUSE_COLLAPSE
:
9192 case OMP_CLAUSE_FINAL
:
9193 case OMP_CLAUSE_MERGEABLE
:
9194 case OMP_CLAUSE_PROC_BIND
:
9195 case OMP_CLAUSE_SAFELEN
:
9196 case OMP_CLAUSE_SIMDLEN
:
9197 case OMP_CLAUSE_DEPEND
:
9198 case OMP_CLAUSE_PRIORITY
:
9199 case OMP_CLAUSE_GRAINSIZE
:
9200 case OMP_CLAUSE_NUM_TASKS
:
9201 case OMP_CLAUSE_NOGROUP
:
9202 case OMP_CLAUSE_THREADS
:
9203 case OMP_CLAUSE_SIMD
:
9204 case OMP_CLAUSE_HINT
:
9205 case OMP_CLAUSE_DEFAULTMAP
:
9206 case OMP_CLAUSE_USE_DEVICE_PTR
:
9207 case OMP_CLAUSE_IS_DEVICE_PTR
:
9208 case OMP_CLAUSE__CILK_FOR_COUNT_
:
9209 case OMP_CLAUSE_ASYNC
:
9210 case OMP_CLAUSE_WAIT
:
9211 case OMP_CLAUSE_INDEPENDENT
:
9212 case OMP_CLAUSE_NUM_GANGS
:
9213 case OMP_CLAUSE_NUM_WORKERS
:
9214 case OMP_CLAUSE_VECTOR_LENGTH
:
9215 case OMP_CLAUSE_GANG
:
9216 case OMP_CLAUSE_WORKER
:
9217 case OMP_CLAUSE_VECTOR
:
9218 case OMP_CLAUSE_AUTO
:
9219 case OMP_CLAUSE_SEQ
:
9220 case OMP_CLAUSE_TILE
:
9228 *list_p
= OMP_CLAUSE_CHAIN (c
);
9230 list_p
= &OMP_CLAUSE_CHAIN (c
);
9233 /* Add in any implicit data sharing. */
9234 struct gimplify_adjust_omp_clauses_data data
;
9235 data
.list_p
= list_p
;
9237 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
9239 gimplify_omp_ctxp
= ctx
->outer_context
;
9240 delete_omp_context (ctx
);
9243 /* Gimplify OACC_CACHE. */
9246 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
9248 tree expr
= *expr_p
;
9250 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
9252 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
9255 /* TODO: Do something sensible with this information. */
9257 *expr_p
= NULL_TREE
;
9260 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9261 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9262 kind. The entry kind will replace the one in CLAUSE, while the exit
9263 kind will be used in a new omp_clause and returned to the caller. */
9266 gimplify_oacc_declare_1 (tree clause
)
9268 HOST_WIDE_INT kind
, new_op
;
9272 kind
= OMP_CLAUSE_MAP_KIND (clause
);
9276 case GOMP_MAP_ALLOC
:
9277 case GOMP_MAP_FORCE_ALLOC
:
9278 case GOMP_MAP_FORCE_TO
:
9279 new_op
= GOMP_MAP_DELETE
;
9283 case GOMP_MAP_FORCE_FROM
:
9284 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
9285 new_op
= GOMP_MAP_FORCE_FROM
;
9289 case GOMP_MAP_FORCE_TOFROM
:
9290 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_TO
);
9291 new_op
= GOMP_MAP_FORCE_FROM
;
9296 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
9297 new_op
= GOMP_MAP_FROM
;
9301 case GOMP_MAP_TOFROM
:
9302 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
9303 new_op
= GOMP_MAP_FROM
;
9307 case GOMP_MAP_DEVICE_RESIDENT
:
9308 case GOMP_MAP_FORCE_DEVICEPTR
:
9309 case GOMP_MAP_FORCE_PRESENT
:
9311 case GOMP_MAP_POINTER
:
9322 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
9323 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
9324 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
9330 /* Gimplify OACC_DECLARE. */
9333 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
9335 tree expr
= *expr_p
;
9337 tree clauses
, t
, decl
;
9339 clauses
= OACC_DECLARE_CLAUSES (expr
);
9341 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
9342 gimplify_adjust_omp_clauses (pre_p
, NULL
, &clauses
, OACC_DECLARE
);
9344 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
9346 decl
= OMP_CLAUSE_DECL (t
);
9348 if (TREE_CODE (decl
) == MEM_REF
)
9349 decl
= TREE_OPERAND (decl
, 0);
9351 if (VAR_P (decl
) && !is_oacc_declared (decl
))
9353 tree attr
= get_identifier ("oacc declare target");
9354 DECL_ATTRIBUTES (decl
) = tree_cons (attr
, NULL_TREE
,
9355 DECL_ATTRIBUTES (decl
));
9359 && !is_global_var (decl
)
9360 && DECL_CONTEXT (decl
) == current_function_decl
)
9362 tree c
= gimplify_oacc_declare_1 (t
);
9365 if (oacc_declare_returns
== NULL
)
9366 oacc_declare_returns
= new hash_map
<tree
, tree
>;
9368 oacc_declare_returns
->put (decl
, c
);
9372 if (gimplify_omp_ctxp
)
9373 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
9376 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
9379 gimplify_seq_add_stmt (pre_p
, stmt
);
9381 *expr_p
= NULL_TREE
;
9384 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9385 gimplification of the body, as well as scanning the body for used
9386 variables. We need to do this scan now, because variable-sized
9387 decls will be decomposed during gimplification. */
9390 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
9392 tree expr
= *expr_p
;
9394 gimple_seq body
= NULL
;
9396 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
9397 OMP_PARALLEL_COMBINED (expr
)
9398 ? ORT_COMBINED_PARALLEL
9399 : ORT_PARALLEL
, OMP_PARALLEL
);
9401 push_gimplify_context ();
9403 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
9404 if (gimple_code (g
) == GIMPLE_BIND
)
9405 pop_gimplify_context (g
);
9407 pop_gimplify_context (NULL
);
9409 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
9412 g
= gimple_build_omp_parallel (body
,
9413 OMP_PARALLEL_CLAUSES (expr
),
9414 NULL_TREE
, NULL_TREE
);
9415 if (OMP_PARALLEL_COMBINED (expr
))
9416 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
9417 gimplify_seq_add_stmt (pre_p
, g
);
9418 *expr_p
= NULL_TREE
;
9421 /* Gimplify the contents of an OMP_TASK statement. This involves
9422 gimplification of the body, as well as scanning the body for used
9423 variables. We need to do this scan now, because variable-sized
9424 decls will be decomposed during gimplification. */
9427 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
9429 tree expr
= *expr_p
;
9431 gimple_seq body
= NULL
;
9433 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
9434 omp_find_clause (OMP_TASK_CLAUSES (expr
),
9436 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
9438 push_gimplify_context ();
9440 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
9441 if (gimple_code (g
) == GIMPLE_BIND
)
9442 pop_gimplify_context (g
);
9444 pop_gimplify_context (NULL
);
9446 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
9449 g
= gimple_build_omp_task (body
,
9450 OMP_TASK_CLAUSES (expr
),
9451 NULL_TREE
, NULL_TREE
,
9452 NULL_TREE
, NULL_TREE
, NULL_TREE
);
9453 gimplify_seq_add_stmt (pre_p
, g
);
9454 *expr_p
= NULL_TREE
;
9457 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9458 with non-NULL OMP_FOR_INIT. */
9461 find_combined_omp_for (tree
*tp
, int *walk_subtrees
, void *)
9464 switch (TREE_CODE (*tp
))
9470 if (OMP_FOR_INIT (*tp
) != NULL_TREE
)
9474 case STATEMENT_LIST
:
9484 /* Gimplify the gross structure of an OMP_FOR statement. */
9486 static enum gimplify_status
9487 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
9489 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
9490 enum gimplify_status ret
= GS_ALL_DONE
;
9491 enum gimplify_status tret
;
9493 gimple_seq for_body
, for_pre_body
;
9495 bitmap has_decl_expr
= NULL
;
9496 enum omp_region_type ort
= ORT_WORKSHARE
;
9498 orig_for_stmt
= for_stmt
= *expr_p
;
9500 switch (TREE_CODE (for_stmt
))
9504 case OMP_DISTRIBUTE
:
9510 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
9511 ort
= ORT_UNTIED_TASK
;
9523 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9524 clause for the IV. */
9525 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
9527 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
9528 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
9529 decl
= TREE_OPERAND (t
, 0);
9530 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
9531 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
9532 && OMP_CLAUSE_DECL (c
) == decl
)
9534 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
9539 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
9541 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
9542 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
9543 find_combined_omp_for
, NULL
, NULL
);
9544 if (inner_for_stmt
== NULL_TREE
)
9546 gcc_assert (seen_error ());
9547 *expr_p
= NULL_TREE
;
9552 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
9553 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
9554 TREE_CODE (for_stmt
));
9556 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
9557 gimplify_omp_ctxp
->distribute
= true;
9559 /* Handle OMP_FOR_INIT. */
9560 for_pre_body
= NULL
;
9561 if (ort
== ORT_SIMD
&& OMP_FOR_PRE_BODY (for_stmt
))
9563 has_decl_expr
= BITMAP_ALLOC (NULL
);
9564 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
9565 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
9568 t
= OMP_FOR_PRE_BODY (for_stmt
);
9569 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
9571 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
9573 tree_stmt_iterator si
;
9574 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
9578 if (TREE_CODE (t
) == DECL_EXPR
9579 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
9580 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
9584 if (OMP_FOR_PRE_BODY (for_stmt
))
9586 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
9587 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
9590 struct gimplify_omp_ctx ctx
;
9591 memset (&ctx
, 0, sizeof (ctx
));
9592 ctx
.region_type
= ORT_NONE
;
9593 gimplify_omp_ctxp
= &ctx
;
9594 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
9595 gimplify_omp_ctxp
= NULL
;
9598 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
9600 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
9601 for_stmt
= inner_for_stmt
;
9603 /* For taskloop, need to gimplify the start, end and step before the
9604 taskloop, outside of the taskloop omp context. */
9605 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
9607 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
9609 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
9610 if (!is_gimple_constant (TREE_OPERAND (t
, 1)))
9613 = get_initialized_tmp_var (TREE_OPERAND (t
, 1),
9614 pre_p
, NULL
, false);
9615 tree c
= build_omp_clause (input_location
,
9616 OMP_CLAUSE_FIRSTPRIVATE
);
9617 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (t
, 1);
9618 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
9619 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
9622 /* Handle OMP_FOR_COND. */
9623 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
9624 if (!is_gimple_constant (TREE_OPERAND (t
, 1)))
9627 = get_initialized_tmp_var (TREE_OPERAND (t
, 1),
9628 gimple_seq_empty_p (for_pre_body
)
9629 ? pre_p
: &for_pre_body
, NULL
,
9631 tree c
= build_omp_clause (input_location
,
9632 OMP_CLAUSE_FIRSTPRIVATE
);
9633 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (t
, 1);
9634 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
9635 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
9638 /* Handle OMP_FOR_INCR. */
9639 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
9640 if (TREE_CODE (t
) == MODIFY_EXPR
)
9642 decl
= TREE_OPERAND (t
, 0);
9643 t
= TREE_OPERAND (t
, 1);
9644 tree
*tp
= &TREE_OPERAND (t
, 1);
9645 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
9646 tp
= &TREE_OPERAND (t
, 0);
9648 if (!is_gimple_constant (*tp
))
9650 gimple_seq
*seq
= gimple_seq_empty_p (for_pre_body
)
9651 ? pre_p
: &for_pre_body
;
9652 *tp
= get_initialized_tmp_var (*tp
, seq
, NULL
, false);
9653 tree c
= build_omp_clause (input_location
,
9654 OMP_CLAUSE_FIRSTPRIVATE
);
9655 OMP_CLAUSE_DECL (c
) = *tp
;
9656 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
9657 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
9662 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
9666 if (orig_for_stmt
!= for_stmt
)
9667 gimplify_omp_ctxp
->combined_loop
= true;
9670 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
9671 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
9672 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
9673 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
9675 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
9676 bool is_doacross
= false;
9677 if (c
&& OMP_CLAUSE_ORDERED_EXPR (c
))
9680 gimplify_omp_ctxp
->loop_iter_var
.create (TREE_VEC_LENGTH
9681 (OMP_FOR_INIT (for_stmt
))
9684 int collapse
= 1, tile
= 0;
9685 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
9687 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
9688 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
9690 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
9691 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
9693 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
9694 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
9695 decl
= TREE_OPERAND (t
, 0);
9696 gcc_assert (DECL_P (decl
));
9697 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
9698 || POINTER_TYPE_P (TREE_TYPE (decl
)));
9701 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
9702 gimplify_omp_ctxp
->loop_iter_var
.quick_push
9703 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
));
9705 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
9706 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
9709 /* Make sure the iteration variable is private. */
9711 tree c2
= NULL_TREE
;
9712 if (orig_for_stmt
!= for_stmt
)
9713 /* Do this only on innermost construct for combined ones. */;
9714 else if (ort
== ORT_SIMD
)
9716 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
9717 (splay_tree_key
) decl
);
9718 omp_is_private (gimplify_omp_ctxp
, decl
,
9719 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
9721 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
9722 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
9723 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
9725 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
9726 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
9727 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
9729 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
9731 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
9732 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
9734 struct gimplify_omp_ctx
*outer
9735 = gimplify_omp_ctxp
->outer_context
;
9736 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
9738 if (outer
->region_type
== ORT_WORKSHARE
9739 && outer
->combined_loop
)
9741 n
= splay_tree_lookup (outer
->variables
,
9742 (splay_tree_key
)decl
);
9743 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
9745 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
9746 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
9750 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
9752 && octx
->region_type
== ORT_COMBINED_PARALLEL
9753 && octx
->outer_context
9754 && (octx
->outer_context
->region_type
9756 && octx
->outer_context
->combined_loop
)
9758 octx
= octx
->outer_context
;
9759 n
= splay_tree_lookup (octx
->variables
,
9760 (splay_tree_key
)decl
);
9761 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
9763 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
9764 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
9771 OMP_CLAUSE_DECL (c
) = decl
;
9772 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
9773 OMP_FOR_CLAUSES (for_stmt
) = c
;
9774 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
9775 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
9777 if (outer
->region_type
== ORT_WORKSHARE
9778 && outer
->combined_loop
)
9780 if (outer
->outer_context
9781 && (outer
->outer_context
->region_type
9782 == ORT_COMBINED_PARALLEL
))
9783 outer
= outer
->outer_context
;
9784 else if (omp_check_private (outer
, decl
, false))
9787 else if (((outer
->region_type
& ORT_TASK
) != 0)
9788 && outer
->combined_loop
9789 && !omp_check_private (gimplify_omp_ctxp
,
9792 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
9794 omp_notice_variable (outer
, decl
, true);
9799 n
= splay_tree_lookup (outer
->variables
,
9800 (splay_tree_key
)decl
);
9801 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9803 omp_add_variable (outer
, decl
,
9804 GOVD_LASTPRIVATE
| GOVD_SEEN
);
9805 if (outer
->region_type
== ORT_COMBINED_PARALLEL
9806 && outer
->outer_context
9807 && (outer
->outer_context
->region_type
9809 && outer
->outer_context
->combined_loop
)
9811 outer
= outer
->outer_context
;
9812 n
= splay_tree_lookup (outer
->variables
,
9813 (splay_tree_key
)decl
);
9814 if (omp_check_private (outer
, decl
, false))
9817 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
9819 omp_add_variable (outer
, decl
,
9825 if (outer
&& outer
->outer_context
9826 && (outer
->outer_context
->region_type
9827 == ORT_COMBINED_TEAMS
))
9829 outer
= outer
->outer_context
;
9830 n
= splay_tree_lookup (outer
->variables
,
9831 (splay_tree_key
)decl
);
9833 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9834 omp_add_variable (outer
, decl
,
9835 GOVD_SHARED
| GOVD_SEEN
);
9839 if (outer
&& outer
->outer_context
)
9840 omp_notice_variable (outer
->outer_context
, decl
,
9850 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
9851 struct gimplify_omp_ctx
*outer
9852 = gimplify_omp_ctxp
->outer_context
;
9853 if (outer
&& lastprivate
)
9855 if (outer
->region_type
== ORT_WORKSHARE
9856 && outer
->combined_loop
)
9858 n
= splay_tree_lookup (outer
->variables
,
9859 (splay_tree_key
)decl
);
9860 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
9862 lastprivate
= false;
9865 else if (outer
->outer_context
9866 && (outer
->outer_context
->region_type
9867 == ORT_COMBINED_PARALLEL
))
9868 outer
= outer
->outer_context
;
9869 else if (omp_check_private (outer
, decl
, false))
9872 else if (((outer
->region_type
& ORT_TASK
) != 0)
9873 && outer
->combined_loop
9874 && !omp_check_private (gimplify_omp_ctxp
,
9877 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
9879 omp_notice_variable (outer
, decl
, true);
9884 n
= splay_tree_lookup (outer
->variables
,
9885 (splay_tree_key
)decl
);
9886 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9888 omp_add_variable (outer
, decl
,
9889 GOVD_LASTPRIVATE
| GOVD_SEEN
);
9890 if (outer
->region_type
== ORT_COMBINED_PARALLEL
9891 && outer
->outer_context
9892 && (outer
->outer_context
->region_type
9894 && outer
->outer_context
->combined_loop
)
9896 outer
= outer
->outer_context
;
9897 n
= splay_tree_lookup (outer
->variables
,
9898 (splay_tree_key
)decl
);
9899 if (omp_check_private (outer
, decl
, false))
9902 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
9904 omp_add_variable (outer
, decl
,
9910 if (outer
&& outer
->outer_context
9911 && (outer
->outer_context
->region_type
9912 == ORT_COMBINED_TEAMS
))
9914 outer
= outer
->outer_context
;
9915 n
= splay_tree_lookup (outer
->variables
,
9916 (splay_tree_key
)decl
);
9918 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9919 omp_add_variable (outer
, decl
,
9920 GOVD_SHARED
| GOVD_SEEN
);
9924 if (outer
&& outer
->outer_context
)
9925 omp_notice_variable (outer
->outer_context
, decl
,
9931 c
= build_omp_clause (input_location
,
9932 lastprivate
? OMP_CLAUSE_LASTPRIVATE
9933 : OMP_CLAUSE_PRIVATE
);
9934 OMP_CLAUSE_DECL (c
) = decl
;
9935 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
9936 OMP_FOR_CLAUSES (for_stmt
) = c
;
9937 omp_add_variable (gimplify_omp_ctxp
, decl
,
9938 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
9939 | GOVD_EXPLICIT
| GOVD_SEEN
);
9943 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
9944 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
9946 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
9948 /* If DECL is not a gimple register, create a temporary variable to act
9949 as an iteration counter. This is valid, since DECL cannot be
9950 modified in the body of the loop. Similarly for any iteration vars
9951 in simd with collapse > 1 where the iterator vars must be
9953 if (orig_for_stmt
!= for_stmt
)
9955 else if (!is_gimple_reg (decl
)
9957 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1))
9959 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9960 /* Make sure omp_add_variable is not called on it prematurely.
9961 We call it ourselves a few lines later. */
9962 gimplify_omp_ctxp
= NULL
;
9963 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
9964 gimplify_omp_ctxp
= ctx
;
9965 TREE_OPERAND (t
, 0) = var
;
9967 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
9970 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
9972 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
9973 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
9974 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
9975 OMP_CLAUSE_DECL (c2
) = var
;
9976 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
9977 OMP_FOR_CLAUSES (for_stmt
) = c2
;
9978 omp_add_variable (gimplify_omp_ctxp
, var
,
9979 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
9987 omp_add_variable (gimplify_omp_ctxp
, var
,
9988 GOVD_PRIVATE
| GOVD_SEEN
);
9993 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
9994 is_gimple_val
, fb_rvalue
, false);
9995 ret
= MIN (ret
, tret
);
9996 if (ret
== GS_ERROR
)
9999 /* Handle OMP_FOR_COND. */
10000 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
10001 gcc_assert (COMPARISON_CLASS_P (t
));
10002 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
10004 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
10005 is_gimple_val
, fb_rvalue
, false);
10006 ret
= MIN (ret
, tret
);
10008 /* Handle OMP_FOR_INCR. */
10009 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
10010 switch (TREE_CODE (t
))
10012 case PREINCREMENT_EXPR
:
10013 case POSTINCREMENT_EXPR
:
10015 tree decl
= TREE_OPERAND (t
, 0);
10016 /* c_omp_for_incr_canonicalize_ptr() should have been
10017 called to massage things appropriately. */
10018 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
10020 if (orig_for_stmt
!= for_stmt
)
10022 t
= build_int_cst (TREE_TYPE (decl
), 1);
10024 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
10025 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
10026 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
10027 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
10031 case PREDECREMENT_EXPR
:
10032 case POSTDECREMENT_EXPR
:
10033 /* c_omp_for_incr_canonicalize_ptr() should have been
10034 called to massage things appropriately. */
10035 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
10036 if (orig_for_stmt
!= for_stmt
)
10038 t
= build_int_cst (TREE_TYPE (decl
), -1);
10040 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
10041 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
10042 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
10043 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
10047 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
10048 TREE_OPERAND (t
, 0) = var
;
10050 t
= TREE_OPERAND (t
, 1);
10051 switch (TREE_CODE (t
))
10054 if (TREE_OPERAND (t
, 1) == decl
)
10056 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
10057 TREE_OPERAND (t
, 0) = var
;
10063 case POINTER_PLUS_EXPR
:
10064 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
10065 TREE_OPERAND (t
, 0) = var
;
10068 gcc_unreachable ();
10071 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
10072 is_gimple_val
, fb_rvalue
, false);
10073 ret
= MIN (ret
, tret
);
10076 tree step
= TREE_OPERAND (t
, 1);
10077 tree stept
= TREE_TYPE (decl
);
10078 if (POINTER_TYPE_P (stept
))
10080 step
= fold_convert (stept
, step
);
10081 if (TREE_CODE (t
) == MINUS_EXPR
)
10082 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
10083 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
10084 if (step
!= TREE_OPERAND (t
, 1))
10086 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
10087 &for_pre_body
, NULL
,
10088 is_gimple_val
, fb_rvalue
, false);
10089 ret
= MIN (ret
, tret
);
10095 gcc_unreachable ();
10101 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
10104 if ((var
!= decl
|| collapse
> 1 || tile
) && orig_for_stmt
== for_stmt
)
10106 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10107 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
10108 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
10109 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10110 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
10111 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
10112 && OMP_CLAUSE_DECL (c
) == decl
)
10114 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
10118 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
10119 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
10120 gcc_assert (TREE_OPERAND (t
, 0) == var
);
10121 t
= TREE_OPERAND (t
, 1);
10122 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
10123 || TREE_CODE (t
) == MINUS_EXPR
10124 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
10125 gcc_assert (TREE_OPERAND (t
, 0) == var
);
10126 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
10127 is_doacross
? var
: decl
,
10128 TREE_OPERAND (t
, 1));
10131 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
10132 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
10134 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
10135 gimplify_assign (decl
, t
, seq
);
10140 BITMAP_FREE (has_decl_expr
);
10142 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10144 push_gimplify_context ();
10145 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
10147 OMP_FOR_BODY (orig_for_stmt
)
10148 = build3 (BIND_EXPR
, void_type_node
, NULL
,
10149 OMP_FOR_BODY (orig_for_stmt
), NULL
);
10150 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
10154 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
10157 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10159 if (gimple_code (g
) == GIMPLE_BIND
)
10160 pop_gimplify_context (g
);
10162 pop_gimplify_context (NULL
);
10165 if (orig_for_stmt
!= for_stmt
)
10166 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
10168 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
10169 decl
= TREE_OPERAND (t
, 0);
10170 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10171 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10172 gimplify_omp_ctxp
= ctx
->outer_context
;
10173 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
10174 gimplify_omp_ctxp
= ctx
;
10175 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
10176 TREE_OPERAND (t
, 0) = var
;
10177 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
10178 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
10179 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
10182 gimplify_adjust_omp_clauses (pre_p
, for_body
,
10183 &OMP_FOR_CLAUSES (orig_for_stmt
),
10184 TREE_CODE (orig_for_stmt
));
10187 switch (TREE_CODE (orig_for_stmt
))
10189 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
10190 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
10191 case CILK_SIMD
: kind
= GF_OMP_FOR_KIND_CILKSIMD
; break;
10192 case CILK_FOR
: kind
= GF_OMP_FOR_KIND_CILKFOR
; break;
10193 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
10194 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
10195 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
10197 gcc_unreachable ();
10199 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
10200 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
10202 if (orig_for_stmt
!= for_stmt
)
10203 gimple_omp_for_set_combined_p (gfor
, true);
10204 if (gimplify_omp_ctxp
10205 && (gimplify_omp_ctxp
->combined_loop
10206 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
10207 && gimplify_omp_ctxp
->outer_context
10208 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
10210 gimple_omp_for_set_combined_into_p (gfor
, true);
10211 if (gimplify_omp_ctxp
->combined_loop
)
10212 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
10214 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
10217 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
10219 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
10220 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
10221 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
10222 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
10223 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
10224 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
10225 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
10226 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
10229 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10230 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10231 The outer taskloop stands for computing the number of iterations,
10232 counts for collapsed loops and holding taskloop specific clauses.
10233 The task construct stands for the effect of data sharing on the
10234 explicit task it creates and the inner taskloop stands for expansion
10235 of the static loop inside of the explicit task construct. */
10236 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10238 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
10239 tree task_clauses
= NULL_TREE
;
10240 tree c
= *gfor_clauses_ptr
;
10241 tree
*gtask_clauses_ptr
= &task_clauses
;
10242 tree outer_for_clauses
= NULL_TREE
;
10243 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
10244 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
10245 switch (OMP_CLAUSE_CODE (c
))
10247 /* These clauses are allowed on task, move them there. */
10248 case OMP_CLAUSE_SHARED
:
10249 case OMP_CLAUSE_FIRSTPRIVATE
:
10250 case OMP_CLAUSE_DEFAULT
:
10251 case OMP_CLAUSE_IF
:
10252 case OMP_CLAUSE_UNTIED
:
10253 case OMP_CLAUSE_FINAL
:
10254 case OMP_CLAUSE_MERGEABLE
:
10255 case OMP_CLAUSE_PRIORITY
:
10256 *gtask_clauses_ptr
= c
;
10257 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10259 case OMP_CLAUSE_PRIVATE
:
10260 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
10262 /* We want private on outer for and firstprivate
10265 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10266 OMP_CLAUSE_FIRSTPRIVATE
);
10267 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
10268 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
);
10269 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
10270 *gforo_clauses_ptr
= c
;
10271 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10275 *gtask_clauses_ptr
= c
;
10276 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10279 /* These clauses go into outer taskloop clauses. */
10280 case OMP_CLAUSE_GRAINSIZE
:
10281 case OMP_CLAUSE_NUM_TASKS
:
10282 case OMP_CLAUSE_NOGROUP
:
10283 *gforo_clauses_ptr
= c
;
10284 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10286 /* Taskloop clause we duplicate on both taskloops. */
10287 case OMP_CLAUSE_COLLAPSE
:
10288 *gfor_clauses_ptr
= c
;
10289 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10290 *gforo_clauses_ptr
= copy_node (c
);
10291 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
10293 /* For lastprivate, keep the clause on inner taskloop, and add
10294 a shared clause on task. If the same decl is also firstprivate,
10295 add also firstprivate clause on the inner taskloop. */
10296 case OMP_CLAUSE_LASTPRIVATE
:
10297 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c
))
10299 /* For taskloop C++ lastprivate IVs, we want:
10300 1) private on outer taskloop
10301 2) firstprivate and shared on task
10302 3) lastprivate on inner taskloop */
10304 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10305 OMP_CLAUSE_FIRSTPRIVATE
);
10306 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
10307 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
);
10308 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
10309 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
10310 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10311 OMP_CLAUSE_PRIVATE
);
10312 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
10313 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
10314 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
10315 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
10317 *gfor_clauses_ptr
= c
;
10318 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
10320 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
10321 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
10322 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
10323 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
10325 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
10328 gcc_unreachable ();
10330 *gfor_clauses_ptr
= NULL_TREE
;
10331 *gtask_clauses_ptr
= NULL_TREE
;
10332 *gforo_clauses_ptr
= NULL_TREE
;
10333 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
10334 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
10335 NULL_TREE
, NULL_TREE
, NULL_TREE
);
10336 gimple_omp_task_set_taskloop_p (g
, true);
10337 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
10339 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
10340 gimple_omp_for_collapse (gfor
),
10341 gimple_omp_for_pre_body (gfor
));
10342 gimple_omp_for_set_pre_body (gfor
, NULL
);
10343 gimple_omp_for_set_combined_p (gforo
, true);
10344 gimple_omp_for_set_combined_into_p (gfor
, true);
10345 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
10347 tree type
= TREE_TYPE (gimple_omp_for_index (gfor
, i
));
10348 tree v
= create_tmp_var (type
);
10349 gimple_omp_for_set_index (gforo
, i
, v
);
10350 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
10351 gimple_omp_for_set_initial (gforo
, i
, t
);
10352 gimple_omp_for_set_cond (gforo
, i
,
10353 gimple_omp_for_cond (gfor
, i
));
10354 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
10355 gimple_omp_for_set_final (gforo
, i
, t
);
10356 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
10357 gcc_assert (TREE_OPERAND (t
, 0) == gimple_omp_for_index (gfor
, i
));
10358 TREE_OPERAND (t
, 0) = v
;
10359 gimple_omp_for_set_incr (gforo
, i
, t
);
10360 t
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
10361 OMP_CLAUSE_DECL (t
) = v
;
10362 OMP_CLAUSE_CHAIN (t
) = gimple_omp_for_clauses (gforo
);
10363 gimple_omp_for_set_clauses (gforo
, t
);
10365 gimplify_seq_add_stmt (pre_p
, gforo
);
10368 gimplify_seq_add_stmt (pre_p
, gfor
);
10369 if (ret
!= GS_ALL_DONE
)
10371 *expr_p
= NULL_TREE
;
10372 return GS_ALL_DONE
;
10375 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10376 of OMP_TARGET's body. */
10379 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
10381 *walk_subtrees
= 0;
10382 switch (TREE_CODE (*tp
))
10387 case STATEMENT_LIST
:
10388 *walk_subtrees
= 1;
10396 /* Helper function of optimize_target_teams, determine if the expression
10397 can be computed safely before the target construct on the host. */
10400 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
10406 *walk_subtrees
= 0;
10409 switch (TREE_CODE (*tp
))
10414 *walk_subtrees
= 0;
10415 if (error_operand_p (*tp
)
10416 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
10417 || DECL_HAS_VALUE_EXPR_P (*tp
)
10418 || DECL_THREAD_LOCAL_P (*tp
)
10419 || TREE_SIDE_EFFECTS (*tp
)
10420 || TREE_THIS_VOLATILE (*tp
))
10422 if (is_global_var (*tp
)
10423 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
10424 || lookup_attribute ("omp declare target link",
10425 DECL_ATTRIBUTES (*tp
))))
10428 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
10429 && !is_global_var (*tp
)
10430 && decl_function_context (*tp
) == current_function_decl
)
10432 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
10433 (splay_tree_key
) *tp
);
10436 if (gimplify_omp_ctxp
->target_map_scalars_firstprivate
)
10440 else if (n
->value
& GOVD_LOCAL
)
10442 else if (n
->value
& GOVD_FIRSTPRIVATE
)
10444 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
10445 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
10449 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
10453 if (TARGET_EXPR_INITIAL (*tp
)
10454 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
10456 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
10457 walk_subtrees
, NULL
);
10458 /* Allow some reasonable subset of integral arithmetics. */
10462 case TRUNC_DIV_EXPR
:
10463 case CEIL_DIV_EXPR
:
10464 case FLOOR_DIV_EXPR
:
10465 case ROUND_DIV_EXPR
:
10466 case TRUNC_MOD_EXPR
:
10467 case CEIL_MOD_EXPR
:
10468 case FLOOR_MOD_EXPR
:
10469 case ROUND_MOD_EXPR
:
10471 case EXACT_DIV_EXPR
:
10482 case NON_LVALUE_EXPR
:
10484 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
10487 /* And disallow anything else, except for comparisons. */
10489 if (COMPARISON_CLASS_P (*tp
))
10495 /* Try to determine if the num_teams and/or thread_limit expressions
10496 can have their values determined already before entering the
10498 INTEGER_CSTs trivially are,
10499 integral decls that are firstprivate (explicitly or implicitly)
10500 or explicitly map(always, to:) or map(always, tofrom:) on the target
10501 region too, and expressions involving simple arithmetics on those
10502 too, function calls are not ok, dereferencing something neither etc.
10503 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10504 EXPR based on what we find:
10505 0 stands for clause not specified at all, use implementation default
10506 -1 stands for value that can't be determined easily before entering
10507 the target construct.
10508 If teams construct is not present at all, use 1 for num_teams
10509 and 0 for thread_limit (only one team is involved, and the thread
10510 limit is implementation defined. */
10513 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
10515 tree body
= OMP_BODY (target
);
10516 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
10517 tree num_teams
= integer_zero_node
;
10518 tree thread_limit
= integer_zero_node
;
10519 location_t num_teams_loc
= EXPR_LOCATION (target
);
10520 location_t thread_limit_loc
= EXPR_LOCATION (target
);
10522 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
10524 if (teams
== NULL_TREE
)
10525 num_teams
= integer_one_node
;
10527 for (c
= OMP_TEAMS_CLAUSES (teams
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10529 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
10532 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
10534 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
10537 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
10541 expr
= OMP_CLAUSE_OPERAND (c
, 0);
10542 if (TREE_CODE (expr
) == INTEGER_CST
)
10547 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
10549 *p
= integer_minus_one_node
;
10553 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
10554 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
10557 gimplify_omp_ctxp
= target_ctx
;
10558 *p
= integer_minus_one_node
;
10561 gimplify_omp_ctxp
= target_ctx
;
10562 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
10563 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
10565 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
10566 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
10567 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
10568 OMP_TARGET_CLAUSES (target
) = c
;
10569 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
10570 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
10571 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
10572 OMP_TARGET_CLAUSES (target
) = c
;
10575 /* Gimplify the gross structure of several OMP constructs. */
10578 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
10580 tree expr
= *expr_p
;
10582 gimple_seq body
= NULL
;
10583 enum omp_region_type ort
;
10585 switch (TREE_CODE (expr
))
10589 ort
= ORT_WORKSHARE
;
10592 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
10595 ort
= ORT_ACC_KERNELS
;
10597 case OACC_PARALLEL
:
10598 ort
= ORT_ACC_PARALLEL
;
10601 ort
= ORT_ACC_DATA
;
10603 case OMP_TARGET_DATA
:
10604 ort
= ORT_TARGET_DATA
;
10607 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
10609 case OACC_HOST_DATA
:
10610 ort
= ORT_ACC_HOST_DATA
;
10613 gcc_unreachable ();
10615 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
10617 if (TREE_CODE (expr
) == OMP_TARGET
)
10618 optimize_target_teams (expr
, pre_p
);
10619 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0)
10621 push_gimplify_context ();
10622 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
10623 if (gimple_code (g
) == GIMPLE_BIND
)
10624 pop_gimplify_context (g
);
10626 pop_gimplify_context (NULL
);
10627 if ((ort
& ORT_TARGET_DATA
) != 0)
10629 enum built_in_function end_ix
;
10630 switch (TREE_CODE (expr
))
10633 case OACC_HOST_DATA
:
10634 end_ix
= BUILT_IN_GOACC_DATA_END
;
10636 case OMP_TARGET_DATA
:
10637 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
10640 gcc_unreachable ();
10642 tree fn
= builtin_decl_explicit (end_ix
);
10643 g
= gimple_build_call (fn
, 0);
10644 gimple_seq cleanup
= NULL
;
10645 gimple_seq_add_stmt (&cleanup
, g
);
10646 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
10648 gimple_seq_add_stmt (&body
, g
);
10652 gimplify_and_add (OMP_BODY (expr
), &body
);
10653 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
10656 switch (TREE_CODE (expr
))
10659 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
10660 OMP_CLAUSES (expr
));
10663 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
10664 OMP_CLAUSES (expr
));
10666 case OACC_HOST_DATA
:
10667 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
10668 OMP_CLAUSES (expr
));
10670 case OACC_PARALLEL
:
10671 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
10672 OMP_CLAUSES (expr
));
10675 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
10678 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
10681 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
10682 OMP_CLAUSES (expr
));
10684 case OMP_TARGET_DATA
:
10685 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
10686 OMP_CLAUSES (expr
));
10689 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
10692 gcc_unreachable ();
10695 gimplify_seq_add_stmt (pre_p
, stmt
);
10696 *expr_p
= NULL_TREE
;
10699 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10700 target update constructs. */
10703 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
10705 tree expr
= *expr_p
;
10708 enum omp_region_type ort
= ORT_WORKSHARE
;
10710 switch (TREE_CODE (expr
))
10712 case OACC_ENTER_DATA
:
10713 case OACC_EXIT_DATA
:
10714 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
;
10718 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
10721 case OMP_TARGET_UPDATE
:
10722 kind
= GF_OMP_TARGET_KIND_UPDATE
;
10724 case OMP_TARGET_ENTER_DATA
:
10725 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
10727 case OMP_TARGET_EXIT_DATA
:
10728 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
10731 gcc_unreachable ();
10733 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
10734 ort
, TREE_CODE (expr
));
10735 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
10737 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
10739 gimplify_seq_add_stmt (pre_p
, stmt
);
10740 *expr_p
= NULL_TREE
;
10743 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10744 stabilized the lhs of the atomic operation as *ADDR. Return true if
10745 EXPR is this stabilized form. */
10748 goa_lhs_expr_p (tree expr
, tree addr
)
10750 /* Also include casts to other type variants. The C front end is fond
10751 of adding these for e.g. volatile variables. This is like
10752 STRIP_TYPE_NOPS but includes the main variant lookup. */
10753 STRIP_USELESS_TYPE_CONVERSION (expr
);
10755 if (TREE_CODE (expr
) == INDIRECT_REF
)
10757 expr
= TREE_OPERAND (expr
, 0);
10758 while (expr
!= addr
10759 && (CONVERT_EXPR_P (expr
)
10760 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
10761 && TREE_CODE (expr
) == TREE_CODE (addr
)
10762 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
10764 expr
= TREE_OPERAND (expr
, 0);
10765 addr
= TREE_OPERAND (addr
, 0);
10769 return (TREE_CODE (addr
) == ADDR_EXPR
10770 && TREE_CODE (expr
) == ADDR_EXPR
10771 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
10773 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
10778 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10779 expression does not involve the lhs, evaluate it into a temporary.
10780 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10781 or -1 if an error was encountered. */
10784 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
10787 tree expr
= *expr_p
;
10790 if (goa_lhs_expr_p (expr
, lhs_addr
))
10795 if (is_gimple_val (expr
))
10799 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
10802 case tcc_comparison
:
10803 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
10807 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
10810 case tcc_expression
:
10811 switch (TREE_CODE (expr
))
10813 case TRUTH_ANDIF_EXPR
:
10814 case TRUTH_ORIF_EXPR
:
10815 case TRUTH_AND_EXPR
:
10816 case TRUTH_OR_EXPR
:
10817 case TRUTH_XOR_EXPR
:
10818 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
10819 lhs_addr
, lhs_var
);
10821 case TRUTH_NOT_EXPR
:
10822 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
10823 lhs_addr
, lhs_var
);
10825 case COMPOUND_EXPR
:
10826 /* Break out any preevaluations from cp_build_modify_expr. */
10827 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
10828 expr
= TREE_OPERAND (expr
, 1))
10829 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
10831 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
);
10842 enum gimplify_status gs
;
10843 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
10844 if (gs
!= GS_ALL_DONE
)
10851 /* Gimplify an OMP_ATOMIC statement. */
10853 static enum gimplify_status
10854 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
10856 tree addr
= TREE_OPERAND (*expr_p
, 0);
10857 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
10858 ? NULL
: TREE_OPERAND (*expr_p
, 1);
10859 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
10861 gomp_atomic_load
*loadstmt
;
10862 gomp_atomic_store
*storestmt
;
10864 tmp_load
= create_tmp_reg (type
);
10865 if (rhs
&& goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
) < 0)
10868 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
10872 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
);
10873 gimplify_seq_add_stmt (pre_p
, loadstmt
);
10874 if (rhs
&& gimplify_expr (&rhs
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
10878 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
10880 storestmt
= gimple_build_omp_atomic_store (rhs
);
10881 gimplify_seq_add_stmt (pre_p
, storestmt
);
10882 if (OMP_ATOMIC_SEQ_CST (*expr_p
))
10884 gimple_omp_atomic_set_seq_cst (loadstmt
);
10885 gimple_omp_atomic_set_seq_cst (storestmt
);
10887 switch (TREE_CODE (*expr_p
))
10889 case OMP_ATOMIC_READ
:
10890 case OMP_ATOMIC_CAPTURE_OLD
:
10891 *expr_p
= tmp_load
;
10892 gimple_omp_atomic_set_need_value (loadstmt
);
10894 case OMP_ATOMIC_CAPTURE_NEW
:
10896 gimple_omp_atomic_set_need_value (storestmt
);
10903 return GS_ALL_DONE
;
10906 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10907 body, and adding some EH bits. */
10909 static enum gimplify_status
10910 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
10912 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
10914 gtransaction
*trans_stmt
;
10915 gimple_seq body
= NULL
;
10918 /* Wrap the transaction body in a BIND_EXPR so we have a context
10919 where to put decls for OMP. */
10920 if (TREE_CODE (tbody
) != BIND_EXPR
)
10922 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
10923 TREE_SIDE_EFFECTS (bind
) = 1;
10924 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
10925 TRANSACTION_EXPR_BODY (expr
) = bind
;
10928 push_gimplify_context ();
10929 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
10931 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
10932 pop_gimplify_context (body_stmt
);
10934 trans_stmt
= gimple_build_transaction (body
);
10935 if (TRANSACTION_EXPR_OUTER (expr
))
10936 subcode
= GTMA_IS_OUTER
;
10937 else if (TRANSACTION_EXPR_RELAXED (expr
))
10938 subcode
= GTMA_IS_RELAXED
;
10939 gimple_transaction_set_subcode (trans_stmt
, subcode
);
10941 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
10949 *expr_p
= NULL_TREE
;
10950 return GS_ALL_DONE
;
10953 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10954 is the OMP_BODY of the original EXPR (which has already been
10955 gimplified so it's not present in the EXPR).
10957 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10960 gimplify_omp_ordered (tree expr
, gimple_seq body
)
10965 tree source_c
= NULL_TREE
;
10966 tree sink_c
= NULL_TREE
;
10968 if (gimplify_omp_ctxp
)
10970 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10971 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
10972 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ()
10973 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
10974 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
))
10976 error_at (OMP_CLAUSE_LOCATION (c
),
10977 "%<ordered%> construct with %<depend%> clause must be "
10978 "closely nested inside a loop with %<ordered%> clause "
10979 "with a parameter");
10982 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
10983 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
10986 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
10987 decls
&& TREE_CODE (decls
) == TREE_LIST
;
10988 decls
= TREE_CHAIN (decls
), ++i
)
10989 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
10991 else if (TREE_VALUE (decls
)
10992 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
10994 error_at (OMP_CLAUSE_LOCATION (c
),
10995 "variable %qE is not an iteration "
10996 "of outermost loop %d, expected %qE",
10997 TREE_VALUE (decls
), i
+ 1,
10998 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
11004 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
11005 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
11007 error_at (OMP_CLAUSE_LOCATION (c
),
11008 "number of variables in %<depend(sink)%> "
11009 "clause does not match number of "
11010 "iteration variables");
11015 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
11016 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
11020 error_at (OMP_CLAUSE_LOCATION (c
),
11021 "more than one %<depend(source)%> clause on an "
11022 "%<ordered%> construct");
11029 if (source_c
&& sink_c
)
11031 error_at (OMP_CLAUSE_LOCATION (source_c
),
11032 "%<depend(source)%> clause specified together with "
11033 "%<depend(sink:)%> clauses on the same construct");
11038 return gimple_build_nop ();
11039 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
11042 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11043 expression produces a value to be used as an operand inside a GIMPLE
11044 statement, the value will be stored back in *EXPR_P. This value will
11045 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11046 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11047 emitted in PRE_P and POST_P.
11049 Additionally, this process may overwrite parts of the input
11050 expression during gimplification. Ideally, it should be
11051 possible to do non-destructive gimplification.
11053 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11054 the expression needs to evaluate to a value to be used as
11055 an operand in a GIMPLE statement, this value will be stored in
11056 *EXPR_P on exit. This happens when the caller specifies one
11057 of fb_lvalue or fb_rvalue fallback flags.
11059 PRE_P will contain the sequence of GIMPLE statements corresponding
11060 to the evaluation of EXPR and all the side-effects that must
11061 be executed before the main expression. On exit, the last
11062 statement of PRE_P is the core statement being gimplified. For
11063 instance, when gimplifying 'if (++a)' the last statement in
11064 PRE_P will be 'if (t.1)' where t.1 is the result of
11065 pre-incrementing 'a'.
11067 POST_P will contain the sequence of GIMPLE statements corresponding
11068 to the evaluation of all the side-effects that must be executed
11069 after the main expression. If this is NULL, the post
11070 side-effects are stored at the end of PRE_P.
11072 The reason why the output is split in two is to handle post
11073 side-effects explicitly. In some cases, an expression may have
11074 inner and outer post side-effects which need to be emitted in
11075 an order different from the one given by the recursive
11076 traversal. For instance, for the expression (*p--)++ the post
11077 side-effects of '--' must actually occur *after* the post
11078 side-effects of '++'. However, gimplification will first visit
11079 the inner expression, so if a separate POST sequence was not
11080 used, the resulting sequence would be:
11087 However, the post-decrement operation in line #2 must not be
11088 evaluated until after the store to *p at line #4, so the
11089 correct sequence should be:
11096 So, by specifying a separate post queue, it is possible
11097 to emit the post side-effects in the correct order.
11098 If POST_P is NULL, an internal queue will be used. Before
11099 returning to the caller, the sequence POST_P is appended to
11100 the main output sequence PRE_P.
11102 GIMPLE_TEST_F points to a function that takes a tree T and
11103 returns nonzero if T is in the GIMPLE form requested by the
11104 caller. The GIMPLE predicates are in gimple.c.
11106 FALLBACK tells the function what sort of a temporary we want if
11107 gimplification cannot produce an expression that complies with
11110 fb_none means that no temporary should be generated
11111 fb_rvalue means that an rvalue is OK to generate
11112 fb_lvalue means that an lvalue is OK to generate
11113 fb_either means that either is OK, but an lvalue is preferable.
11114 fb_mayfail means that gimplification may fail (in which case
11115 GS_ERROR will be returned)
11117 The return value is either GS_ERROR or GS_ALL_DONE, since this
11118 function iterates until EXPR is completely gimplified or an error
11121 enum gimplify_status
11122 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
11123 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
11126 gimple_seq internal_pre
= NULL
;
11127 gimple_seq internal_post
= NULL
;
11130 location_t saved_location
;
11131 enum gimplify_status ret
;
11132 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
11135 save_expr
= *expr_p
;
11136 if (save_expr
== NULL_TREE
)
11137 return GS_ALL_DONE
;
11139 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11140 is_statement
= gimple_test_f
== is_gimple_stmt
;
11142 gcc_assert (pre_p
);
11144 /* Consistency checks. */
11145 if (gimple_test_f
== is_gimple_reg
)
11146 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
11147 else if (gimple_test_f
== is_gimple_val
11148 || gimple_test_f
== is_gimple_call_addr
11149 || gimple_test_f
== is_gimple_condexpr
11150 || gimple_test_f
== is_gimple_mem_rhs
11151 || gimple_test_f
== is_gimple_mem_rhs_or_call
11152 || gimple_test_f
== is_gimple_reg_rhs
11153 || gimple_test_f
== is_gimple_reg_rhs_or_call
11154 || gimple_test_f
== is_gimple_asm_val
11155 || gimple_test_f
== is_gimple_mem_ref_addr
)
11156 gcc_assert (fallback
& fb_rvalue
);
11157 else if (gimple_test_f
== is_gimple_min_lval
11158 || gimple_test_f
== is_gimple_lvalue
)
11159 gcc_assert (fallback
& fb_lvalue
);
11160 else if (gimple_test_f
== is_gimple_addressable
)
11161 gcc_assert (fallback
& fb_either
);
11162 else if (gimple_test_f
== is_gimple_stmt
)
11163 gcc_assert (fallback
== fb_none
);
11166 /* We should have recognized the GIMPLE_TEST_F predicate to
11167 know what kind of fallback to use in case a temporary is
11168 needed to hold the value or address of *EXPR_P. */
11169 gcc_unreachable ();
11172 /* We used to check the predicate here and return immediately if it
11173 succeeds. This is wrong; the design is for gimplification to be
11174 idempotent, and for the predicates to only test for valid forms, not
11175 whether they are fully simplified. */
11177 pre_p
= &internal_pre
;
11179 if (post_p
== NULL
)
11180 post_p
= &internal_post
;
11182 /* Remember the last statements added to PRE_P and POST_P. Every
11183 new statement added by the gimplification helpers needs to be
11184 annotated with location information. To centralize the
11185 responsibility, we remember the last statement that had been
11186 added to both queues before gimplifying *EXPR_P. If
11187 gimplification produces new statements in PRE_P and POST_P, those
11188 statements will be annotated with the same location information
11190 pre_last_gsi
= gsi_last (*pre_p
);
11191 post_last_gsi
= gsi_last (*post_p
);
11193 saved_location
= input_location
;
11194 if (save_expr
!= error_mark_node
11195 && EXPR_HAS_LOCATION (*expr_p
))
11196 input_location
= EXPR_LOCATION (*expr_p
);
11198 /* Loop over the specific gimplifiers until the toplevel node
11199 remains the same. */
11202 /* Strip away as many useless type conversions as possible
11203 at the toplevel. */
11204 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
11206 /* Remember the expr. */
11207 save_expr
= *expr_p
;
11209 /* Die, die, die, my darling. */
11210 if (save_expr
== error_mark_node
11211 || (TREE_TYPE (save_expr
)
11212 && TREE_TYPE (save_expr
) == error_mark_node
))
11218 /* Do any language-specific gimplification. */
11219 ret
= ((enum gimplify_status
)
11220 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
11223 if (*expr_p
== NULL_TREE
)
11225 if (*expr_p
!= save_expr
)
11228 else if (ret
!= GS_UNHANDLED
)
11231 /* Make sure that all the cases set 'ret' appropriately. */
11232 ret
= GS_UNHANDLED
;
11233 switch (TREE_CODE (*expr_p
))
11235 /* First deal with the special cases. */
11237 case POSTINCREMENT_EXPR
:
11238 case POSTDECREMENT_EXPR
:
11239 case PREINCREMENT_EXPR
:
11240 case PREDECREMENT_EXPR
:
11241 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
11242 fallback
!= fb_none
,
11243 TREE_TYPE (*expr_p
));
11246 case VIEW_CONVERT_EXPR
:
11247 if (is_gimple_reg_type (TREE_TYPE (*expr_p
))
11248 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
11250 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11251 post_p
, is_gimple_val
, fb_rvalue
);
11252 recalculate_side_effects (*expr_p
);
11258 case ARRAY_RANGE_REF
:
11259 case REALPART_EXPR
:
11260 case IMAGPART_EXPR
:
11261 case COMPONENT_REF
:
11262 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
11263 fallback
? fallback
: fb_rvalue
);
11267 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
11269 /* C99 code may assign to an array in a structure value of a
11270 conditional expression, and this has undefined behavior
11271 only on execution, so create a temporary if an lvalue is
11273 if (fallback
== fb_lvalue
)
11275 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
11276 mark_addressable (*expr_p
);
11282 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
11284 /* C99 code may assign to an array in a structure returned
11285 from a function, and this has undefined behavior only on
11286 execution, so create a temporary if an lvalue is
11288 if (fallback
== fb_lvalue
)
11290 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
11291 mark_addressable (*expr_p
);
11297 gcc_unreachable ();
11299 case COMPOUND_EXPR
:
11300 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
11303 case COMPOUND_LITERAL_EXPR
:
11304 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
11305 gimple_test_f
, fallback
);
11310 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
11311 fallback
!= fb_none
);
11314 case TRUTH_ANDIF_EXPR
:
11315 case TRUTH_ORIF_EXPR
:
11317 /* Preserve the original type of the expression and the
11318 source location of the outer expression. */
11319 tree org_type
= TREE_TYPE (*expr_p
);
11320 *expr_p
= gimple_boolify (*expr_p
);
11321 *expr_p
= build3_loc (input_location
, COND_EXPR
,
11325 org_type
, boolean_true_node
),
11328 org_type
, boolean_false_node
));
11333 case TRUTH_NOT_EXPR
:
11335 tree type
= TREE_TYPE (*expr_p
);
11336 /* The parsers are careful to generate TRUTH_NOT_EXPR
11337 only with operands that are always zero or one.
11338 We do not fold here but handle the only interesting case
11339 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11340 *expr_p
= gimple_boolify (*expr_p
);
11341 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
11342 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
11343 TREE_TYPE (*expr_p
),
11344 TREE_OPERAND (*expr_p
, 0));
11346 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
11347 TREE_TYPE (*expr_p
),
11348 TREE_OPERAND (*expr_p
, 0),
11349 build_int_cst (TREE_TYPE (*expr_p
), 1));
11350 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
11351 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
11357 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
11360 case ANNOTATE_EXPR
:
11362 tree cond
= TREE_OPERAND (*expr_p
, 0);
11363 tree kind
= TREE_OPERAND (*expr_p
, 1);
11364 tree type
= TREE_TYPE (cond
);
11365 if (!INTEGRAL_TYPE_P (type
))
11371 tree tmp
= create_tmp_var (type
);
11372 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
11374 = gimple_build_call_internal (IFN_ANNOTATE
, 2, cond
, kind
);
11375 gimple_call_set_lhs (call
, tmp
);
11376 gimplify_seq_add_stmt (pre_p
, call
);
11383 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
11387 if (IS_EMPTY_STMT (*expr_p
))
11393 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
11394 || fallback
== fb_none
)
11396 /* Just strip a conversion to void (or in void context) and
11398 *expr_p
= TREE_OPERAND (*expr_p
, 0);
11403 ret
= gimplify_conversion (expr_p
);
11404 if (ret
== GS_ERROR
)
11406 if (*expr_p
!= save_expr
)
11410 case FIX_TRUNC_EXPR
:
11411 /* unary_expr: ... | '(' cast ')' val | ... */
11412 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
11413 is_gimple_val
, fb_rvalue
);
11414 recalculate_side_effects (*expr_p
);
11419 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
11420 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
11421 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
11423 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
11424 if (*expr_p
!= save_expr
)
11430 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
11431 is_gimple_reg
, fb_rvalue
);
11432 if (ret
== GS_ERROR
)
11435 recalculate_side_effects (*expr_p
);
11436 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
11437 TREE_TYPE (*expr_p
),
11438 TREE_OPERAND (*expr_p
, 0),
11439 build_int_cst (saved_ptr_type
, 0));
11440 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
11441 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
11446 /* We arrive here through the various re-gimplifcation paths. */
11448 /* First try re-folding the whole thing. */
11449 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
11450 TREE_OPERAND (*expr_p
, 0),
11451 TREE_OPERAND (*expr_p
, 1));
11454 REF_REVERSE_STORAGE_ORDER (tmp
)
11455 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
11457 recalculate_side_effects (*expr_p
);
11461 /* Avoid re-gimplifying the address operand if it is already
11462 in suitable form. Re-gimplifying would mark the address
11463 operand addressable. Always gimplify when not in SSA form
11464 as we still may have to gimplify decls with value-exprs. */
11465 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
11466 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
11468 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
11469 is_gimple_mem_ref_addr
, fb_rvalue
);
11470 if (ret
== GS_ERROR
)
11473 recalculate_side_effects (*expr_p
);
11477 /* Constants need not be gimplified. */
11484 /* Drop the overflow flag on constants, we do not want
11485 that in the GIMPLE IL. */
11486 if (TREE_OVERFLOW_P (*expr_p
))
11487 *expr_p
= drop_tree_overflow (*expr_p
);
11492 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11493 CONST_DECL node. Otherwise the decl is replaceable by its
11495 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11496 if (fallback
& fb_lvalue
)
11500 *expr_p
= DECL_INITIAL (*expr_p
);
11506 ret
= gimplify_decl_expr (expr_p
, pre_p
);
11510 ret
= gimplify_bind_expr (expr_p
, pre_p
);
11514 ret
= gimplify_loop_expr (expr_p
, pre_p
);
11518 ret
= gimplify_switch_expr (expr_p
, pre_p
);
11522 ret
= gimplify_exit_expr (expr_p
);
11526 /* If the target is not LABEL, then it is a computed jump
11527 and the target needs to be gimplified. */
11528 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
11530 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
11531 NULL
, is_gimple_val
, fb_rvalue
);
11532 if (ret
== GS_ERROR
)
11535 gimplify_seq_add_stmt (pre_p
,
11536 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
11541 gimplify_seq_add_stmt (pre_p
,
11542 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
11543 PREDICT_EXPR_OUTCOME (*expr_p
)));
11548 ret
= gimplify_label_expr (expr_p
, pre_p
);
11549 label
= LABEL_EXPR_LABEL (*expr_p
);
11550 gcc_assert (decl_function_context (label
) == current_function_decl
);
11552 /* If the label is used in a goto statement, or address of the label
11553 is taken, we need to unpoison all variables that were seen so far.
11554 Doing so would prevent us from reporting a false positives. */
11555 if (asan_poisoned_variables
11556 && asan_used_labels
!= NULL
11557 && asan_used_labels
->contains (label
))
11558 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
11561 case CASE_LABEL_EXPR
:
11562 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
11564 if (gimplify_ctxp
->live_switch_vars
)
11565 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
11570 ret
= gimplify_return_expr (*expr_p
, pre_p
);
11574 /* Don't reduce this in place; let gimplify_init_constructor work its
11575 magic. Buf if we're just elaborating this for side effects, just
11576 gimplify any element that has side-effects. */
11577 if (fallback
== fb_none
)
11579 unsigned HOST_WIDE_INT ix
;
11581 tree temp
= NULL_TREE
;
11582 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
11583 if (TREE_SIDE_EFFECTS (val
))
11584 append_to_statement_list (val
, &temp
);
11587 ret
= temp
? GS_OK
: GS_ALL_DONE
;
11589 /* C99 code may assign to an array in a constructed
11590 structure or union, and this has undefined behavior only
11591 on execution, so create a temporary if an lvalue is
11593 else if (fallback
== fb_lvalue
)
11595 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
11596 mark_addressable (*expr_p
);
11603 /* The following are special cases that are not handled by the
11604 original GIMPLE grammar. */
11606 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11609 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
11612 case BIT_FIELD_REF
:
11613 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11614 post_p
, is_gimple_lvalue
, fb_either
);
11615 recalculate_side_effects (*expr_p
);
11618 case TARGET_MEM_REF
:
11620 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
11622 if (TMR_BASE (*expr_p
))
11623 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
11624 post_p
, is_gimple_mem_ref_addr
, fb_either
);
11625 if (TMR_INDEX (*expr_p
))
11626 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
11627 post_p
, is_gimple_val
, fb_rvalue
);
11628 if (TMR_INDEX2 (*expr_p
))
11629 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
11630 post_p
, is_gimple_val
, fb_rvalue
);
11631 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11632 ret
= MIN (r0
, r1
);
11636 case NON_LVALUE_EXPR
:
11637 /* This should have been stripped above. */
11638 gcc_unreachable ();
11641 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
11644 case TRY_FINALLY_EXPR
:
11645 case TRY_CATCH_EXPR
:
11647 gimple_seq eval
, cleanup
;
11650 /* Calls to destructors are generated automatically in FINALLY/CATCH
11651 block. They should have location as UNKNOWN_LOCATION. However,
11652 gimplify_call_expr will reset these call stmts to input_location
11653 if it finds stmt's location is unknown. To prevent resetting for
11654 destructors, we set the input_location to unknown.
11655 Note that this only affects the destructor calls in FINALLY/CATCH
11656 block, and will automatically reset to its original value by the
11657 end of gimplify_expr. */
11658 input_location
= UNKNOWN_LOCATION
;
11659 eval
= cleanup
= NULL
;
11660 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
11661 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
11662 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11663 if (gimple_seq_empty_p (cleanup
))
11665 gimple_seq_add_seq (pre_p
, eval
);
11669 try_
= gimple_build_try (eval
, cleanup
,
11670 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
11671 ? GIMPLE_TRY_FINALLY
11672 : GIMPLE_TRY_CATCH
);
11673 if (EXPR_HAS_LOCATION (save_expr
))
11674 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
11675 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
11676 gimple_set_location (try_
, saved_location
);
11677 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
11678 gimple_try_set_catch_is_cleanup (try_
,
11679 TRY_CATCH_IS_CLEANUP (*expr_p
));
11680 gimplify_seq_add_stmt (pre_p
, try_
);
11685 case CLEANUP_POINT_EXPR
:
11686 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
11690 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
11696 gimple_seq handler
= NULL
;
11697 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
11698 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
11699 gimplify_seq_add_stmt (pre_p
, c
);
11704 case EH_FILTER_EXPR
:
11707 gimple_seq failure
= NULL
;
11709 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
11710 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
11711 gimple_set_no_warning (ehf
, TREE_NO_WARNING (*expr_p
));
11712 gimplify_seq_add_stmt (pre_p
, ehf
);
11719 enum gimplify_status r0
, r1
;
11720 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
11721 post_p
, is_gimple_val
, fb_rvalue
);
11722 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
11723 post_p
, is_gimple_val
, fb_rvalue
);
11724 TREE_SIDE_EFFECTS (*expr_p
) = 0;
11725 ret
= MIN (r0
, r1
);
11730 /* We get here when taking the address of a label. We mark
11731 the label as "forced"; meaning it can never be removed and
11732 it is a potential target for any computed goto. */
11733 FORCED_LABEL (*expr_p
) = 1;
11737 case STATEMENT_LIST
:
11738 ret
= gimplify_statement_list (expr_p
, pre_p
);
11741 case WITH_SIZE_EXPR
:
11743 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11744 post_p
== &internal_post
? NULL
: post_p
,
11745 gimple_test_f
, fallback
);
11746 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
11747 is_gimple_val
, fb_rvalue
);
11754 ret
= gimplify_var_or_parm_decl (expr_p
);
11758 /* When within an OMP context, notice uses of variables. */
11759 if (gimplify_omp_ctxp
)
11760 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
11765 /* Allow callbacks into the gimplifier during optimization. */
11770 gimplify_omp_parallel (expr_p
, pre_p
);
11775 gimplify_omp_task (expr_p
, pre_p
);
11783 case OMP_DISTRIBUTE
:
11786 ret
= gimplify_omp_for (expr_p
, pre_p
);
11790 gimplify_oacc_cache (expr_p
, pre_p
);
11795 gimplify_oacc_declare (expr_p
, pre_p
);
11799 case OACC_HOST_DATA
:
11802 case OACC_PARALLEL
:
11806 case OMP_TARGET_DATA
:
11808 gimplify_omp_workshare (expr_p
, pre_p
);
11812 case OACC_ENTER_DATA
:
11813 case OACC_EXIT_DATA
:
11815 case OMP_TARGET_UPDATE
:
11816 case OMP_TARGET_ENTER_DATA
:
11817 case OMP_TARGET_EXIT_DATA
:
11818 gimplify_omp_target_update (expr_p
, pre_p
);
11824 case OMP_TASKGROUP
:
11828 gimple_seq body
= NULL
;
11831 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
11832 switch (TREE_CODE (*expr_p
))
11835 g
= gimple_build_omp_section (body
);
11838 g
= gimple_build_omp_master (body
);
11840 case OMP_TASKGROUP
:
11842 gimple_seq cleanup
= NULL
;
11844 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
11845 g
= gimple_build_call (fn
, 0);
11846 gimple_seq_add_stmt (&cleanup
, g
);
11847 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
11849 gimple_seq_add_stmt (&body
, g
);
11850 g
= gimple_build_omp_taskgroup (body
);
11854 g
= gimplify_omp_ordered (*expr_p
, body
);
11857 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
11858 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
11859 gimplify_adjust_omp_clauses (pre_p
, body
,
11860 &OMP_CRITICAL_CLAUSES (*expr_p
),
11862 g
= gimple_build_omp_critical (body
,
11863 OMP_CRITICAL_NAME (*expr_p
),
11864 OMP_CRITICAL_CLAUSES (*expr_p
));
11867 gcc_unreachable ();
11869 gimplify_seq_add_stmt (pre_p
, g
);
11875 case OMP_ATOMIC_READ
:
11876 case OMP_ATOMIC_CAPTURE_OLD
:
11877 case OMP_ATOMIC_CAPTURE_NEW
:
11878 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
11881 case TRANSACTION_EXPR
:
11882 ret
= gimplify_transaction (expr_p
, pre_p
);
11885 case TRUTH_AND_EXPR
:
11886 case TRUTH_OR_EXPR
:
11887 case TRUTH_XOR_EXPR
:
11889 tree orig_type
= TREE_TYPE (*expr_p
);
11890 tree new_type
, xop0
, xop1
;
11891 *expr_p
= gimple_boolify (*expr_p
);
11892 new_type
= TREE_TYPE (*expr_p
);
11893 if (!useless_type_conversion_p (orig_type
, new_type
))
11895 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
11900 /* Boolified binary truth expressions are semantically equivalent
11901 to bitwise binary expressions. Canonicalize them to the
11902 bitwise variant. */
11903 switch (TREE_CODE (*expr_p
))
11905 case TRUTH_AND_EXPR
:
11906 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
11908 case TRUTH_OR_EXPR
:
11909 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
11911 case TRUTH_XOR_EXPR
:
11912 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
11917 /* Now make sure that operands have compatible type to
11918 expression's new_type. */
11919 xop0
= TREE_OPERAND (*expr_p
, 0);
11920 xop1
= TREE_OPERAND (*expr_p
, 1);
11921 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
11922 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
11925 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
11926 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
11929 /* Continue classified as tcc_binary. */
11933 case VEC_COND_EXPR
:
11935 enum gimplify_status r0
, r1
, r2
;
11937 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11938 post_p
, is_gimple_condexpr
, fb_rvalue
);
11939 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
11940 post_p
, is_gimple_val
, fb_rvalue
);
11941 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
11942 post_p
, is_gimple_val
, fb_rvalue
);
11944 ret
= MIN (MIN (r0
, r1
), r2
);
11945 recalculate_side_effects (*expr_p
);
11950 case VEC_PERM_EXPR
:
11951 /* Classified as tcc_expression. */
11954 case BIT_INSERT_EXPR
:
11955 /* Argument 3 is a constant. */
11958 case POINTER_PLUS_EXPR
:
11960 enum gimplify_status r0
, r1
;
11961 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
11962 post_p
, is_gimple_val
, fb_rvalue
);
11963 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
11964 post_p
, is_gimple_val
, fb_rvalue
);
11965 recalculate_side_effects (*expr_p
);
11966 ret
= MIN (r0
, r1
);
11970 case CILK_SYNC_STMT
:
11972 if (!fn_contains_cilk_spawn_p (cfun
))
11974 error_at (EXPR_LOCATION (*expr_p
),
11975 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11980 gimplify_cilk_sync (expr_p
, pre_p
);
11987 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
11989 case tcc_comparison
:
11990 /* Handle comparison of objects of non scalar mode aggregates
11991 with a call to memcmp. It would be nice to only have to do
11992 this for variable-sized objects, but then we'd have to allow
11993 the same nest of reference nodes we allow for MODIFY_EXPR and
11994 that's too complex.
11996 Compare scalar mode aggregates as scalar mode values. Using
11997 memcmp for them would be very inefficient at best, and is
11998 plain wrong if bitfields are involved. */
12000 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
12002 /* Vector comparisons need no boolification. */
12003 if (TREE_CODE (type
) == VECTOR_TYPE
)
12005 else if (!AGGREGATE_TYPE_P (type
))
12007 tree org_type
= TREE_TYPE (*expr_p
);
12008 *expr_p
= gimple_boolify (*expr_p
);
12009 if (!useless_type_conversion_p (org_type
,
12010 TREE_TYPE (*expr_p
)))
12012 *expr_p
= fold_convert_loc (input_location
,
12013 org_type
, *expr_p
);
12019 else if (TYPE_MODE (type
) != BLKmode
)
12020 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
12022 ret
= gimplify_variable_sized_compare (expr_p
);
12027 /* If *EXPR_P does not need to be special-cased, handle it
12028 according to its class. */
12030 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
12031 post_p
, is_gimple_val
, fb_rvalue
);
12037 enum gimplify_status r0
, r1
;
12039 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
12040 post_p
, is_gimple_val
, fb_rvalue
);
12041 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
12042 post_p
, is_gimple_val
, fb_rvalue
);
12044 ret
= MIN (r0
, r1
);
12050 enum gimplify_status r0
, r1
, r2
;
12052 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
12053 post_p
, is_gimple_val
, fb_rvalue
);
12054 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
12055 post_p
, is_gimple_val
, fb_rvalue
);
12056 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
12057 post_p
, is_gimple_val
, fb_rvalue
);
12059 ret
= MIN (MIN (r0
, r1
), r2
);
12063 case tcc_declaration
:
12066 goto dont_recalculate
;
12069 gcc_unreachable ();
12072 recalculate_side_effects (*expr_p
);
12078 gcc_assert (*expr_p
|| ret
!= GS_OK
);
12080 while (ret
== GS_OK
);
12082 /* If we encountered an error_mark somewhere nested inside, either
12083 stub out the statement or propagate the error back out. */
12084 if (ret
== GS_ERROR
)
12091 /* This was only valid as a return value from the langhook, which
12092 we handled. Make sure it doesn't escape from any other context. */
12093 gcc_assert (ret
!= GS_UNHANDLED
);
12095 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
12097 /* We aren't looking for a value, and we don't have a valid
12098 statement. If it doesn't have side-effects, throw it away.
12099 We can also get here with code such as "*&&L;", where L is
12100 a LABEL_DECL that is marked as FORCED_LABEL. */
12101 if (TREE_CODE (*expr_p
) == LABEL_DECL
12102 || !TREE_SIDE_EFFECTS (*expr_p
))
12104 else if (!TREE_THIS_VOLATILE (*expr_p
))
12106 /* This is probably a _REF that contains something nested that
12107 has side effects. Recurse through the operands to find it. */
12108 enum tree_code code
= TREE_CODE (*expr_p
);
12112 case COMPONENT_REF
:
12113 case REALPART_EXPR
:
12114 case IMAGPART_EXPR
:
12115 case VIEW_CONVERT_EXPR
:
12116 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
12117 gimple_test_f
, fallback
);
12121 case ARRAY_RANGE_REF
:
12122 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
12123 gimple_test_f
, fallback
);
12124 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
12125 gimple_test_f
, fallback
);
12129 /* Anything else with side-effects must be converted to
12130 a valid statement before we get here. */
12131 gcc_unreachable ();
12136 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
12137 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
)
12139 /* Historically, the compiler has treated a bare reference
12140 to a non-BLKmode volatile lvalue as forcing a load. */
12141 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
12143 /* Normally, we do not want to create a temporary for a
12144 TREE_ADDRESSABLE type because such a type should not be
12145 copied by bitwise-assignment. However, we make an
12146 exception here, as all we are doing here is ensuring that
12147 we read the bytes that make up the type. We use
12148 create_tmp_var_raw because create_tmp_var will abort when
12149 given a TREE_ADDRESSABLE type. */
12150 tree tmp
= create_tmp_var_raw (type
, "vol");
12151 gimple_add_tmp_var (tmp
);
12152 gimplify_assign (tmp
, *expr_p
, pre_p
);
12156 /* We can't do anything useful with a volatile reference to
12157 an incomplete type, so just throw it away. Likewise for
12158 a BLKmode type, since any implicit inner load should
12159 already have been turned into an explicit one by the
12160 gimplification process. */
12164 /* If we are gimplifying at the statement level, we're done. Tack
12165 everything together and return. */
12166 if (fallback
== fb_none
|| is_statement
)
12168 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12169 it out for GC to reclaim it. */
12170 *expr_p
= NULL_TREE
;
12172 if (!gimple_seq_empty_p (internal_pre
)
12173 || !gimple_seq_empty_p (internal_post
))
12175 gimplify_seq_add_seq (&internal_pre
, internal_post
);
12176 gimplify_seq_add_seq (pre_p
, internal_pre
);
12179 /* The result of gimplifying *EXPR_P is going to be the last few
12180 statements in *PRE_P and *POST_P. Add location information
12181 to all the statements that were added by the gimplification
12183 if (!gimple_seq_empty_p (*pre_p
))
12184 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
12186 if (!gimple_seq_empty_p (*post_p
))
12187 annotate_all_with_location_after (*post_p
, post_last_gsi
,
12193 #ifdef ENABLE_GIMPLE_CHECKING
12196 enum tree_code code
= TREE_CODE (*expr_p
);
12197 /* These expressions should already be in gimple IR form. */
12198 gcc_assert (code
!= MODIFY_EXPR
12199 && code
!= ASM_EXPR
12200 && code
!= BIND_EXPR
12201 && code
!= CATCH_EXPR
12202 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
12203 && code
!= EH_FILTER_EXPR
12204 && code
!= GOTO_EXPR
12205 && code
!= LABEL_EXPR
12206 && code
!= LOOP_EXPR
12207 && code
!= SWITCH_EXPR
12208 && code
!= TRY_FINALLY_EXPR
12209 && code
!= OACC_PARALLEL
12210 && code
!= OACC_KERNELS
12211 && code
!= OACC_DATA
12212 && code
!= OACC_HOST_DATA
12213 && code
!= OACC_DECLARE
12214 && code
!= OACC_UPDATE
12215 && code
!= OACC_ENTER_DATA
12216 && code
!= OACC_EXIT_DATA
12217 && code
!= OACC_CACHE
12218 && code
!= OMP_CRITICAL
12220 && code
!= OACC_LOOP
12221 && code
!= OMP_MASTER
12222 && code
!= OMP_TASKGROUP
12223 && code
!= OMP_ORDERED
12224 && code
!= OMP_PARALLEL
12225 && code
!= OMP_SECTIONS
12226 && code
!= OMP_SECTION
12227 && code
!= OMP_SINGLE
);
12231 /* Otherwise we're gimplifying a subexpression, so the resulting
12232 value is interesting. If it's a valid operand that matches
12233 GIMPLE_TEST_F, we're done. Unless we are handling some
12234 post-effects internally; if that's the case, we need to copy into
12235 a temporary before adding the post-effects to POST_P. */
12236 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
12239 /* Otherwise, we need to create a new temporary for the gimplified
12242 /* We can't return an lvalue if we have an internal postqueue. The
12243 object the lvalue refers to would (probably) be modified by the
12244 postqueue; we need to copy the value out first, which means an
12246 if ((fallback
& fb_lvalue
)
12247 && gimple_seq_empty_p (internal_post
)
12248 && is_gimple_addressable (*expr_p
))
12250 /* An lvalue will do. Take the address of the expression, store it
12251 in a temporary, and replace the expression with an INDIRECT_REF of
12253 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
12254 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
12255 *expr_p
= build_simple_mem_ref (tmp
);
12257 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
12259 /* An rvalue will do. Assign the gimplified expression into a
12260 new temporary TMP and replace the original expression with
12261 TMP. First, make sure that the expression has a type so that
12262 it can be assigned into a temporary. */
12263 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
12264 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
12268 #ifdef ENABLE_GIMPLE_CHECKING
12269 if (!(fallback
& fb_mayfail
))
12271 fprintf (stderr
, "gimplification failed:\n");
12272 print_generic_expr (stderr
, *expr_p
);
12273 debug_tree (*expr_p
);
12274 internal_error ("gimplification failed");
12277 gcc_assert (fallback
& fb_mayfail
);
12279 /* If this is an asm statement, and the user asked for the
12280 impossible, don't die. Fail and let gimplify_asm_expr
12286 /* Make sure the temporary matches our predicate. */
12287 gcc_assert ((*gimple_test_f
) (*expr_p
));
12289 if (!gimple_seq_empty_p (internal_post
))
12291 annotate_all_with_location (internal_post
, input_location
);
12292 gimplify_seq_add_seq (pre_p
, internal_post
);
12296 input_location
= saved_location
;
12300 /* Like gimplify_expr but make sure the gimplified result is not itself
12301 a SSA name (but a decl if it were). Temporaries required by
12302 evaluating *EXPR_P may be still SSA names. */
12304 static enum gimplify_status
12305 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
12306 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
12309 bool was_ssa_name_p
= TREE_CODE (*expr_p
) == SSA_NAME
;
12310 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
12311 gimple_test_f
, fallback
);
12313 && TREE_CODE (*expr_p
) == SSA_NAME
)
12315 tree name
= *expr_p
;
12316 if (was_ssa_name_p
)
12317 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
12320 /* Avoid the extra copy if possible. */
12321 *expr_p
= create_tmp_reg (TREE_TYPE (name
));
12322 gimple_set_lhs (SSA_NAME_DEF_STMT (name
), *expr_p
);
12323 release_ssa_name (name
);
12329 /* Look through TYPE for variable-sized objects and gimplify each such
12330 size that we find. Add to LIST_P any statements generated. */
12333 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
12337 if (type
== NULL
|| type
== error_mark_node
)
12340 /* We first do the main variant, then copy into any other variants. */
12341 type
= TYPE_MAIN_VARIANT (type
);
12343 /* Avoid infinite recursion. */
12344 if (TYPE_SIZES_GIMPLIFIED (type
))
12347 TYPE_SIZES_GIMPLIFIED (type
) = 1;
12349 switch (TREE_CODE (type
))
12352 case ENUMERAL_TYPE
:
12355 case FIXED_POINT_TYPE
:
12356 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
12357 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
12359 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
12361 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
12362 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
12367 /* These types may not have declarations, so handle them here. */
12368 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
12369 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
12370 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12371 with assigned stack slots, for -O1+ -g they should be tracked
12373 if (!(TYPE_NAME (type
)
12374 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
12375 && DECL_IGNORED_P (TYPE_NAME (type
)))
12376 && TYPE_DOMAIN (type
)
12377 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
12379 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
12380 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
12381 DECL_IGNORED_P (t
) = 0;
12382 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
12383 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
12384 DECL_IGNORED_P (t
) = 0;
12390 case QUAL_UNION_TYPE
:
12391 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
12392 if (TREE_CODE (field
) == FIELD_DECL
)
12394 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
12395 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
12396 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
12397 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
12402 case REFERENCE_TYPE
:
12403 /* We used to recurse on the pointed-to type here, which turned out to
12404 be incorrect because its definition might refer to variables not
12405 yet initialized at this point if a forward declaration is involved.
12407 It was actually useful for anonymous pointed-to types to ensure
12408 that the sizes evaluation dominates every possible later use of the
12409 values. Restricting to such types here would be safe since there
12410 is no possible forward declaration around, but would introduce an
12411 undesirable middle-end semantic to anonymity. We then defer to
12412 front-ends the responsibility of ensuring that the sizes are
12413 evaluated both early and late enough, e.g. by attaching artificial
12414 type declarations to the tree. */
12421 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
12422 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
12424 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
12426 TYPE_SIZE (t
) = TYPE_SIZE (type
);
12427 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
12428 TYPE_SIZES_GIMPLIFIED (t
) = 1;
12432 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12433 a size or position, has had all of its SAVE_EXPRs evaluated.
12434 We add any required statements to *STMT_P. */
12437 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
12439 tree expr
= *expr_p
;
12441 /* We don't do anything if the value isn't there, is constant, or contains
12442 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12443 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12444 will want to replace it with a new variable, but that will cause problems
12445 if this type is from outside the function. It's OK to have that here. */
12446 if (is_gimple_sizepos (expr
))
12449 *expr_p
= unshare_expr (expr
);
12451 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12452 if the def vanishes. */
12453 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
12456 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12457 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12458 is true, also gimplify the parameters. */
12461 gimplify_body (tree fndecl
, bool do_parms
)
12463 location_t saved_location
= input_location
;
12464 gimple_seq parm_stmts
, seq
;
12465 gimple
*outer_stmt
;
12467 struct cgraph_node
*cgn
;
12469 timevar_push (TV_TREE_GIMPLIFY
);
12471 init_tree_ssa (cfun
);
12473 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
12475 default_rtl_profile ();
12477 gcc_assert (gimplify_ctxp
== NULL
);
12478 push_gimplify_context (true);
12480 if (flag_openacc
|| flag_openmp
)
12482 gcc_assert (gimplify_omp_ctxp
== NULL
);
12483 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
12484 gimplify_omp_ctxp
= new_omp_context (ORT_TARGET
);
12487 /* Unshare most shared trees in the body and in that of any nested functions.
12488 It would seem we don't have to do this for nested functions because
12489 they are supposed to be output and then the outer function gimplified
12490 first, but the g++ front end doesn't always do it that way. */
12491 unshare_body (fndecl
);
12492 unvisit_body (fndecl
);
12494 cgn
= cgraph_node::get (fndecl
);
12495 if (cgn
&& cgn
->origin
)
12496 nonlocal_vlas
= new hash_set
<tree
>;
12498 /* Make sure input_location isn't set to something weird. */
12499 input_location
= DECL_SOURCE_LOCATION (fndecl
);
12501 /* Resolve callee-copies. This has to be done before processing
12502 the body so that DECL_VALUE_EXPR gets processed correctly. */
12503 parm_stmts
= do_parms
? gimplify_parameters () : NULL
;
12505 /* Gimplify the function's body. */
12507 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
12508 outer_stmt
= gimple_seq_first_stmt (seq
);
12511 outer_stmt
= gimple_build_nop ();
12512 gimplify_seq_add_stmt (&seq
, outer_stmt
);
12515 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
12516 not the case, wrap everything in a GIMPLE_BIND to make it so. */
12517 if (gimple_code (outer_stmt
) == GIMPLE_BIND
12518 && gimple_seq_first (seq
) == gimple_seq_last (seq
))
12519 outer_bind
= as_a
<gbind
*> (outer_stmt
);
12521 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
12523 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
12525 /* If we had callee-copies statements, insert them at the beginning
12526 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
12527 if (!gimple_seq_empty_p (parm_stmts
))
12531 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
12532 gimple_bind_set_body (outer_bind
, parm_stmts
);
12534 for (parm
= DECL_ARGUMENTS (current_function_decl
);
12535 parm
; parm
= DECL_CHAIN (parm
))
12536 if (DECL_HAS_VALUE_EXPR_P (parm
))
12538 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
12539 DECL_IGNORED_P (parm
) = 0;
12545 if (nonlocal_vla_vars
)
12547 /* tree-nested.c may later on call declare_vars (..., true);
12548 which relies on BLOCK_VARS chain to be the tail of the
12549 gimple_bind_vars chain. Ensure we don't violate that
12551 if (gimple_bind_block (outer_bind
)
12552 == DECL_INITIAL (current_function_decl
))
12553 declare_vars (nonlocal_vla_vars
, outer_bind
, true);
12555 BLOCK_VARS (DECL_INITIAL (current_function_decl
))
12556 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl
)),
12557 nonlocal_vla_vars
);
12558 nonlocal_vla_vars
= NULL_TREE
;
12560 delete nonlocal_vlas
;
12561 nonlocal_vlas
= NULL
;
12564 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
12565 && gimplify_omp_ctxp
)
12567 delete_omp_context (gimplify_omp_ctxp
);
12568 gimplify_omp_ctxp
= NULL
;
12571 pop_gimplify_context (outer_bind
);
12572 gcc_assert (gimplify_ctxp
== NULL
);
12574 if (flag_checking
&& !seen_error ())
12575 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
12577 timevar_pop (TV_TREE_GIMPLIFY
);
12578 input_location
= saved_location
;
12583 typedef char *char_p
; /* For DEF_VEC_P. */
12585 /* Return whether we should exclude FNDECL from instrumentation. */
12588 flag_instrument_functions_exclude_p (tree fndecl
)
12592 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
12593 if (v
&& v
->length () > 0)
12599 name
= lang_hooks
.decl_printable_name (fndecl
, 0);
12600 FOR_EACH_VEC_ELT (*v
, i
, s
)
12601 if (strstr (name
, s
) != NULL
)
12605 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
12606 if (v
&& v
->length () > 0)
12612 name
= DECL_SOURCE_FILE (fndecl
);
12613 FOR_EACH_VEC_ELT (*v
, i
, s
)
12614 if (strstr (name
, s
) != NULL
)
12621 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12622 node for the function we want to gimplify.
12624 Return the sequence of GIMPLE statements corresponding to the body
12628 gimplify_function_tree (tree fndecl
)
12634 gcc_assert (!gimple_body (fndecl
));
12636 if (DECL_STRUCT_FUNCTION (fndecl
))
12637 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
12639 push_struct_function (fndecl
);
12641 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12643 cfun
->curr_properties
|= PROP_gimple_lva
;
12645 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
12647 /* Preliminarily mark non-addressed complex variables as eligible
12648 for promotion to gimple registers. We'll transform their uses
12649 as we find them. */
12650 if ((TREE_CODE (TREE_TYPE (parm
)) == COMPLEX_TYPE
12651 || TREE_CODE (TREE_TYPE (parm
)) == VECTOR_TYPE
)
12652 && !TREE_THIS_VOLATILE (parm
)
12653 && !needs_to_live_in_memory (parm
))
12654 DECL_GIMPLE_REG_P (parm
) = 1;
12657 ret
= DECL_RESULT (fndecl
);
12658 if ((TREE_CODE (TREE_TYPE (ret
)) == COMPLEX_TYPE
12659 || TREE_CODE (TREE_TYPE (ret
)) == VECTOR_TYPE
)
12660 && !needs_to_live_in_memory (ret
))
12661 DECL_GIMPLE_REG_P (ret
) = 1;
12663 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS
))
12664 asan_poisoned_variables
= new hash_set
<tree
> ();
12665 bind
= gimplify_body (fndecl
, true);
12666 if (asan_poisoned_variables
)
12668 delete asan_poisoned_variables
;
12669 asan_poisoned_variables
= NULL
;
12672 /* The tree body of the function is no longer needed, replace it
12673 with the new GIMPLE body. */
12675 gimple_seq_add_stmt (&seq
, bind
);
12676 gimple_set_body (fndecl
, seq
);
12678 /* If we're instrumenting function entry/exit, then prepend the call to
12679 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
12680 catch the exit hook. */
12681 /* ??? Add some way to ignore exceptions for this TFE. */
12682 if (flag_instrument_function_entry_exit
12683 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
12684 /* Do not instrument extern inline functions. */
12685 && !(DECL_DECLARED_INLINE_P (fndecl
)
12686 && DECL_EXTERNAL (fndecl
)
12687 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
12688 && !flag_instrument_functions_exclude_p (fndecl
))
12693 gimple_seq cleanup
= NULL
, body
= NULL
;
12697 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
12698 call
= gimple_build_call (x
, 1, integer_zero_node
);
12699 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
12700 gimple_call_set_lhs (call
, tmp_var
);
12701 gimplify_seq_add_stmt (&cleanup
, call
);
12702 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT
);
12703 call
= gimple_build_call (x
, 2,
12704 build_fold_addr_expr (current_function_decl
),
12706 gimplify_seq_add_stmt (&cleanup
, call
);
12707 tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
12709 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
12710 call
= gimple_build_call (x
, 1, integer_zero_node
);
12711 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
12712 gimple_call_set_lhs (call
, tmp_var
);
12713 gimplify_seq_add_stmt (&body
, call
);
12714 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER
);
12715 call
= gimple_build_call (x
, 2,
12716 build_fold_addr_expr (current_function_decl
),
12718 gimplify_seq_add_stmt (&body
, call
);
12719 gimplify_seq_add_stmt (&body
, tf
);
12720 new_bind
= gimple_build_bind (NULL
, body
, NULL
);
12722 /* Replace the current function body with the body
12723 wrapped in the try/finally TF. */
12725 gimple_seq_add_stmt (&seq
, new_bind
);
12726 gimple_set_body (fndecl
, seq
);
12730 if (sanitize_flags_p (SANITIZE_THREAD
))
12732 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
12733 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
12734 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
12735 /* Replace the current function body with the body
12736 wrapped in the try/finally TF. */
12738 gimple_seq_add_stmt (&seq
, new_bind
);
12739 gimple_set_body (fndecl
, seq
);
12742 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
12743 cfun
->curr_properties
|= PROP_gimple_any
;
12747 dump_function (TDI_gimple
, fndecl
);
12750 /* Return a dummy expression of type TYPE in order to keep going after an
12754 dummy_object (tree type
)
12756 tree t
= build_int_cst (build_pointer_type (type
), 0);
12757 return build2 (MEM_REF
, type
, t
, t
);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR to lower; side effects needed before it
   are appended to PRE_P.  POST_P is unused.  On success the expression
   is replaced by an IFN_VA_ARG internal call (expanded later) and
   GS_ALL_DONE is returned; an invalid va_list yields GS_ERROR.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  /* Let the target map the declared type to its canonical va_list
     form; NULL_TREE means the type is not a va_list at all.  */
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      source_location xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      /* Emit the follow-up hint only once per compilation.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* Encode the result type and the va_list's own type as dummy null
     pointers; IFN_VA_ARG expansion recovers both types from them.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_ALL_DONE;
}
12840 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12842 DST/SRC are the destination and source respectively. You can pass
12843 ungimplified trees in DST or SRC, in which case they will be
12844 converted to a gimple operand if necessary.
12846 This function returns the newly created GIMPLE_ASSIGN tuple. */
12849 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
12851 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
12852 gimplify_and_add (t
, seq_p
);
12854 return gimple_seq_last_stmt (*seq_p
);
/* Hash an entry P of the gimplify temporary table by hashing the tree
   it wraps.  Must be consistent with gimplify_hasher::equal, which
   compares entries with operand_equal_p.  */

hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  /* NOTE(review): the extraction dropped this line; reconstructed as
     reading the entry's tree value — confirm the field name in elt_t.  */
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}
12865 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
12869 enum tree_code code
= TREE_CODE (t1
);
12871 if (TREE_CODE (t2
) != code
12872 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
12875 if (!operand_equal_p (t1
, t2
, 0))
12878 /* Only allow them to compare equal if they also hash equal; otherwise
12879 results are nondeterminate, and we fail bootstrap comparison. */
12880 gcc_checking_assert (hash (p1
) == hash (p2
));