1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2019 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
40 #include "fold-const.h"
45 #include "gimple-fold.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
56 #include "omp-general.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "stringpool.h"
68 #include "omp-offload.h"
71 /* Hash set of poisoned variables in a bind expr. */
72 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
74 enum gimplify_omp_var_data
77 GOVD_EXPLICIT
= 0x000002,
78 GOVD_SHARED
= 0x000004,
79 GOVD_PRIVATE
= 0x000008,
80 GOVD_FIRSTPRIVATE
= 0x000010,
81 GOVD_LASTPRIVATE
= 0x000020,
82 GOVD_REDUCTION
= 0x000040,
85 GOVD_DEBUG_PRIVATE
= 0x000200,
86 GOVD_PRIVATE_OUTER_REF
= 0x000400,
87 GOVD_LINEAR
= 0x000800,
88 GOVD_ALIGNED
= 0x001000,
90 /* Flag for GOVD_MAP: don't copy back. */
91 GOVD_MAP_TO_ONLY
= 0x002000,
93 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
94 GOVD_LINEAR_LASTPRIVATE_NO_OUTER
= 0x004000,
96 GOVD_MAP_0LEN_ARRAY
= 0x008000,
98 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
99 GOVD_MAP_ALWAYS_TO
= 0x010000,
101 /* Flag for shared vars that are or might be stored to in the region. */
102 GOVD_WRITTEN
= 0x020000,
104 /* Flag for GOVD_MAP, if it is a forced mapping. */
105 GOVD_MAP_FORCE
= 0x040000,
107 /* Flag for GOVD_MAP: must be present already. */
108 GOVD_MAP_FORCE_PRESENT
= 0x080000,
110 /* Flag for GOVD_MAP: only allocate. */
111 GOVD_MAP_ALLOC_ONLY
= 0x100000,
113 /* Flag for GOVD_MAP: only copy back. */
114 GOVD_MAP_FROM_ONLY
= 0x200000,
116 GOVD_NONTEMPORAL
= 0x400000,
118 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
119 GOVD_LASTPRIVATE_CONDITIONAL
= 0x800000,
121 GOVD_CONDTEMP
= 0x1000000,
123 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
124 GOVD_REDUCTION_INSCAN
= 0x2000000,
126 GOVD_DATA_SHARE_CLASS
= (GOVD_SHARED
| GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
127 | GOVD_LASTPRIVATE
| GOVD_REDUCTION
| GOVD_LINEAR
134 ORT_WORKSHARE
= 0x00,
135 ORT_TASKGROUP
= 0x01,
139 ORT_COMBINED_PARALLEL
= ORT_PARALLEL
| 1,
142 ORT_UNTIED_TASK
= ORT_TASK
| 1,
143 ORT_TASKLOOP
= ORT_TASK
| 2,
144 ORT_UNTIED_TASKLOOP
= ORT_UNTIED_TASK
| 2,
147 ORT_COMBINED_TEAMS
= ORT_TEAMS
| 1,
148 ORT_HOST_TEAMS
= ORT_TEAMS
| 2,
149 ORT_COMBINED_HOST_TEAMS
= ORT_COMBINED_TEAMS
| 2,
152 ORT_TARGET_DATA
= 0x40,
154 /* Data region with offloading. */
156 ORT_COMBINED_TARGET
= ORT_TARGET
| 1,
158 /* OpenACC variants. */
159 ORT_ACC
= 0x100, /* A generic OpenACC region. */
160 ORT_ACC_DATA
= ORT_ACC
| ORT_TARGET_DATA
, /* Data construct. */
161 ORT_ACC_PARALLEL
= ORT_ACC
| ORT_TARGET
, /* Parallel construct */
162 ORT_ACC_KERNELS
= ORT_ACC
| ORT_TARGET
| 2, /* Kernels construct. */
163 ORT_ACC_HOST_DATA
= ORT_ACC
| ORT_TARGET_DATA
| 2, /* Host data. */
165 /* Dummy OpenMP region, used to disable expansion of
166 DECL_VALUE_EXPRs in taskloop pre body. */
170 /* Gimplify hashtable helper. */
172 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
174 static inline hashval_t
hash (const elt_t
*);
175 static inline bool equal (const elt_t
*, const elt_t
*);
180 struct gimplify_ctx
*prev_context
;
182 vec
<gbind
*> bind_expr_stack
;
184 gimple_seq conditional_cleanups
;
188 vec
<tree
> case_labels
;
189 hash_set
<tree
> *live_switch_vars
;
190 /* The formal temporary table. Should this be persistent? */
191 hash_table
<gimplify_hasher
> *temp_htab
;
194 unsigned into_ssa
: 1;
195 unsigned allow_rhs_cond_expr
: 1;
196 unsigned in_cleanup_point_expr
: 1;
197 unsigned keep_stack
: 1;
198 unsigned save_stack
: 1;
199 unsigned in_switch_expr
: 1;
202 enum gimplify_defaultmap_kind
210 struct gimplify_omp_ctx
212 struct gimplify_omp_ctx
*outer_context
;
213 splay_tree variables
;
214 hash_set
<tree
> *privatized_types
;
216 /* Iteration variables in an OMP_FOR. */
217 vec
<tree
> loop_iter_var
;
219 enum omp_clause_default_kind default_kind
;
220 enum omp_region_type region_type
;
223 bool target_firstprivatize_array_bases
;
227 static struct gimplify_ctx
*gimplify_ctxp
;
228 static struct gimplify_omp_ctx
*gimplify_omp_ctxp
;
230 /* Forward declaration. */
231 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
232 static hash_map
<tree
, tree
> *oacc_declare_returns
;
233 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
234 bool (*) (tree
), fallback_t
, bool);
236 /* Shorter alias name for the above function for use in gimplify.c
240 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
242 gimple_seq_add_stmt_without_update (seq_p
, gs
);
245 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
246 NULL, a new sequence is allocated. This function is
247 similar to gimple_seq_add_seq, but does not scan the operands.
248 During gimplification, we need to manipulate statement sequences
249 before the def/use vectors have been constructed. */
252 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
254 gimple_stmt_iterator si
;
259 si
= gsi_last (*dst_p
);
260 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
264 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
265 and popping gimplify contexts. */
267 static struct gimplify_ctx
*ctx_pool
= NULL
;
269 /* Return a gimplify context struct from the pool. */
271 static inline struct gimplify_ctx
*
274 struct gimplify_ctx
* c
= ctx_pool
;
277 ctx_pool
= c
->prev_context
;
279 c
= XNEW (struct gimplify_ctx
);
281 memset (c
, '\0', sizeof (*c
));
285 /* Put gimplify context C back into the pool. */
288 ctx_free (struct gimplify_ctx
*c
)
290 c
->prev_context
= ctx_pool
;
294 /* Free allocated ctx stack memory. */
297 free_gimplify_stack (void)
299 struct gimplify_ctx
*c
;
301 while ((c
= ctx_pool
))
303 ctx_pool
= c
->prev_context
;
309 /* Set up a context for the gimplifier. */
312 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
314 struct gimplify_ctx
*c
= ctx_alloc ();
316 c
->prev_context
= gimplify_ctxp
;
318 gimplify_ctxp
->into_ssa
= in_ssa
;
319 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
322 /* Tear down a context for the gimplifier. If BODY is non-null, then
323 put the temporaries into the outer BIND_EXPR. Otherwise, put them
326 BODY is not a sequence, but the first tuple in a sequence. */
329 pop_gimplify_context (gimple
*body
)
331 struct gimplify_ctx
*c
= gimplify_ctxp
;
334 && (!c
->bind_expr_stack
.exists ()
335 || c
->bind_expr_stack
.is_empty ()));
336 c
->bind_expr_stack
.release ();
337 gimplify_ctxp
= c
->prev_context
;
340 declare_vars (c
->temps
, body
, false);
342 record_vars (c
->temps
);
349 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
352 gimple_push_bind_expr (gbind
*bind_stmt
)
354 gimplify_ctxp
->bind_expr_stack
.reserve (8);
355 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
358 /* Pop the first element off the stack of bindings. */
361 gimple_pop_bind_expr (void)
363 gimplify_ctxp
->bind_expr_stack
.pop ();
366 /* Return the first element of the stack of bindings. */
369 gimple_current_bind_expr (void)
371 return gimplify_ctxp
->bind_expr_stack
.last ();
374 /* Return the stack of bindings created during gimplification. */
377 gimple_bind_expr_stack (void)
379 return gimplify_ctxp
->bind_expr_stack
;
382 /* Return true iff there is a COND_EXPR between us and the innermost
383 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
386 gimple_conditional_context (void)
388 return gimplify_ctxp
->conditions
> 0;
391 /* Note that we've entered a COND_EXPR. */
394 gimple_push_condition (void)
396 #ifdef ENABLE_GIMPLE_CHECKING
397 if (gimplify_ctxp
->conditions
== 0)
398 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
400 ++(gimplify_ctxp
->conditions
);
403 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
404 now, add any conditional cleanups we've seen to the prequeue. */
407 gimple_pop_condition (gimple_seq
*pre_p
)
409 int conds
= --(gimplify_ctxp
->conditions
);
411 gcc_assert (conds
>= 0);
414 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
415 gimplify_ctxp
->conditional_cleanups
= NULL
;
419 /* A stable comparison routine for use with splay trees and DECLs. */
422 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
427 return DECL_UID (a
) - DECL_UID (b
);
430 /* Create a new omp construct that deals with variable remapping. */
432 static struct gimplify_omp_ctx
*
433 new_omp_context (enum omp_region_type region_type
)
435 struct gimplify_omp_ctx
*c
;
437 c
= XCNEW (struct gimplify_omp_ctx
);
438 c
->outer_context
= gimplify_omp_ctxp
;
439 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
440 c
->privatized_types
= new hash_set
<tree
>;
441 c
->location
= input_location
;
442 c
->region_type
= region_type
;
443 if ((region_type
& ORT_TASK
) == 0)
444 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
446 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
447 c
->defaultmap
[GDMK_SCALAR
] = GOVD_MAP
;
448 c
->defaultmap
[GDMK_AGGREGATE
] = GOVD_MAP
;
449 c
->defaultmap
[GDMK_ALLOCATABLE
] = GOVD_MAP
;
450 c
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
;
455 /* Destroy an omp construct that deals with variable remapping. */
458 delete_omp_context (struct gimplify_omp_ctx
*c
)
460 splay_tree_delete (c
->variables
);
461 delete c
->privatized_types
;
462 c
->loop_iter_var
.release ();
466 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
467 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
469 /* Both gimplify the statement T and append it to *SEQ_P. This function
470 behaves exactly as gimplify_stmt, but you don't have to pass T as a
474 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
476 gimplify_stmt (&t
, seq_p
);
479 /* Gimplify statement T into sequence *SEQ_P, and return the first
480 tuple in the sequence of generated tuples for this statement.
481 Return NULL if gimplifying T produced no tuples. */
484 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
486 gimple_stmt_iterator last
= gsi_last (*seq_p
);
488 gimplify_and_add (t
, seq_p
);
490 if (!gsi_end_p (last
))
493 return gsi_stmt (last
);
496 return gimple_seq_first_stmt (*seq_p
);
499 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
500 LHS, or for a call argument. */
503 is_gimple_mem_rhs (tree t
)
505 /* If we're dealing with a renamable type, either source or dest must be
506 a renamed variable. */
507 if (is_gimple_reg_type (TREE_TYPE (t
)))
508 return is_gimple_val (t
);
510 return is_gimple_val (t
) || is_gimple_lvalue (t
);
513 /* Return true if T is a CALL_EXPR or an expression that can be
514 assigned to a temporary. Note that this predicate should only be
515 used during gimplification. See the rationale for this in
516 gimplify_modify_expr. */
519 is_gimple_reg_rhs_or_call (tree t
)
521 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
522 || TREE_CODE (t
) == CALL_EXPR
);
525 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
526 this predicate should only be used during gimplification. See the
527 rationale for this in gimplify_modify_expr. */
530 is_gimple_mem_rhs_or_call (tree t
)
532 /* If we're dealing with a renamable type, either source or dest must be
533 a renamed variable. */
534 if (is_gimple_reg_type (TREE_TYPE (t
)))
535 return is_gimple_val (t
);
537 return (is_gimple_val (t
)
538 || is_gimple_lvalue (t
)
539 || TREE_CLOBBER_P (t
)
540 || TREE_CODE (t
) == CALL_EXPR
);
543 /* Create a temporary with a name derived from VAL. Subroutine of
544 lookup_tmp_var; nobody else should call this function. */
547 create_tmp_from_val (tree val
)
549 /* Drop all qualifiers and address-space information from the value type. */
550 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
551 tree var
= create_tmp_var (type
, get_name (val
));
552 if (TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
553 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
)
554 DECL_GIMPLE_REG_P (var
) = 1;
558 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
559 an existing expression temporary. */
562 lookup_tmp_var (tree val
, bool is_formal
)
566 /* If not optimizing, never really reuse a temporary. local-alloc
567 won't allocate any variable that is used in more than one basic
568 block, which means it will go into memory, causing much extra
569 work in reload and final and poorer code generation, outweighing
570 the extra memory allocation here. */
571 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
572 ret
= create_tmp_from_val (val
);
579 if (!gimplify_ctxp
->temp_htab
)
580 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
581 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
584 elt_p
= XNEW (elt_t
);
586 elt_p
->temp
= ret
= create_tmp_from_val (val
);
599 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
602 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
603 bool is_formal
, bool allow_ssa
)
607 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
608 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
609 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
613 && gimplify_ctxp
->into_ssa
614 && is_gimple_reg_type (TREE_TYPE (val
)))
616 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
617 if (! gimple_in_ssa_p (cfun
))
619 const char *name
= get_name (val
);
621 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
625 t
= lookup_tmp_var (val
, is_formal
);
627 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
629 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
631 /* gimplify_modify_expr might want to reduce this further. */
632 gimplify_and_add (mod
, pre_p
);
638 /* Return a formal temporary variable initialized with VAL. PRE_P is as
639 in gimplify_expr. Only use this function if:
641 1) The value of the unfactored expression represented by VAL will not
642 change between the initialization and use of the temporary, and
643 2) The temporary will not be otherwise modified.
645 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
646 and #2 means it is inappropriate for && temps.
648 For other cases, use get_initialized_tmp_var instead. */
651 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
653 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true);
656 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
657 are as in gimplify_expr. */
660 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
663 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
);
666 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
667 generate debug info for them; otherwise don't. */
670 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
677 gbind
*scope
= as_a
<gbind
*> (gs
);
679 temps
= nreverse (last
);
681 block
= gimple_bind_block (scope
);
682 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
683 if (!block
|| !debug_info
)
685 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
686 gimple_bind_set_vars (scope
, temps
);
690 /* We need to attach the nodes both to the BIND_EXPR and to its
691 associated BLOCK for debugging purposes. The key point here
692 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
693 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
694 if (BLOCK_VARS (block
))
695 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
698 gimple_bind_set_vars (scope
,
699 chainon (gimple_bind_vars (scope
), temps
));
700 BLOCK_VARS (block
) = temps
;
706 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
707 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
708 no such upper bound can be obtained. */
711 force_constant_size (tree var
)
713 /* The only attempt we make is by querying the maximum size of objects
714 of the variable's type. */
716 HOST_WIDE_INT max_size
;
718 gcc_assert (VAR_P (var
));
720 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
722 gcc_assert (max_size
>= 0);
725 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
727 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
730 /* Push the temporary variable TMP into the current binding. */
733 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
735 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
737 /* Later processing assumes that the object size is constant, which might
738 not be true at this point. Force the use of a constant upper bound in
740 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
741 force_constant_size (tmp
);
743 DECL_CONTEXT (tmp
) = fn
->decl
;
744 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
746 record_vars_into (tmp
, fn
->decl
);
749 /* Push the temporary variable TMP into the current binding. */
752 gimple_add_tmp_var (tree tmp
)
754 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
756 /* Later processing assumes that the object size is constant, which might
757 not be true at this point. Force the use of a constant upper bound in
759 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
760 force_constant_size (tmp
);
762 DECL_CONTEXT (tmp
) = current_function_decl
;
763 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
767 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
768 gimplify_ctxp
->temps
= tmp
;
770 /* Mark temporaries local within the nearest enclosing parallel. */
771 if (gimplify_omp_ctxp
)
773 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
775 && (ctx
->region_type
== ORT_WORKSHARE
776 || ctx
->region_type
== ORT_TASKGROUP
777 || ctx
->region_type
== ORT_SIMD
778 || ctx
->region_type
== ORT_ACC
))
779 ctx
= ctx
->outer_context
;
781 omp_add_variable (ctx
, tmp
, GOVD_LOCAL
| GOVD_SEEN
);
790 /* This case is for nested functions. We need to expose the locals
792 body_seq
= gimple_body (current_function_decl
);
793 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
799 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
800 nodes that are referenced more than once in GENERIC functions. This is
801 necessary because gimplification (translation into GIMPLE) is performed
802 by modifying tree nodes in-place, so gimplication of a shared node in a
803 first context could generate an invalid GIMPLE form in a second context.
805 This is achieved with a simple mark/copy/unmark algorithm that walks the
806 GENERIC representation top-down, marks nodes with TREE_VISITED the first
807 time it encounters them, duplicates them if they already have TREE_VISITED
808 set, and finally removes the TREE_VISITED marks it has set.
810 The algorithm works only at the function level, i.e. it generates a GENERIC
811 representation of a function with no nodes shared within the function when
812 passed a GENERIC function (except for nodes that are allowed to be shared).
814 At the global level, it is also necessary to unshare tree nodes that are
815 referenced in more than one function, for the same aforementioned reason.
816 This requires some cooperation from the front-end. There are 2 strategies:
818 1. Manual unsharing. The front-end needs to call unshare_expr on every
819 expression that might end up being shared across functions.
821 2. Deep unsharing. This is an extension of regular unsharing. Instead
822 of calling unshare_expr on expressions that might be shared across
823 functions, the front-end pre-marks them with TREE_VISITED. This will
824 ensure that they are unshared on the first reference within functions
825 when the regular unsharing algorithm runs. The counterpart is that
826 this algorithm must look deeper than for manual unsharing, which is
827 specified by LANG_HOOKS_DEEP_UNSHARING.
829 If there are only few specific cases of node sharing across functions, it is
830 probably easier for a front-end to unshare the expressions manually. On the
831 contrary, if the expressions generated at the global level are as widespread
832 as expressions generated within functions, deep unsharing is very likely the
835 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
836 These nodes model computations that must be done once. If we were to
837 unshare something like SAVE_EXPR(i++), the gimplification process would
838 create wrong code. However, if DATA is non-null, it must hold a pointer
839 set that is used to unshare the subtrees of these nodes. */
842 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
845 enum tree_code code
= TREE_CODE (t
);
847 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
848 copy their subtrees if we can make sure to do it only once. */
849 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
851 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
857 /* Stop at types, decls, constants like copy_tree_r. */
858 else if (TREE_CODE_CLASS (code
) == tcc_type
859 || TREE_CODE_CLASS (code
) == tcc_declaration
860 || TREE_CODE_CLASS (code
) == tcc_constant
)
863 /* Cope with the statement expression extension. */
864 else if (code
== STATEMENT_LIST
)
867 /* Leave the bulk of the work to copy_tree_r itself. */
869 copy_tree_r (tp
, walk_subtrees
, NULL
);
874 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
875 If *TP has been visited already, then *TP is deeply copied by calling
876 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
879 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
882 enum tree_code code
= TREE_CODE (t
);
884 /* Skip types, decls, and constants. But we do want to look at their
885 types and the bounds of types. Mark them as visited so we properly
886 unmark their subtrees on the unmark pass. If we've already seen them,
887 don't look down further. */
888 if (TREE_CODE_CLASS (code
) == tcc_type
889 || TREE_CODE_CLASS (code
) == tcc_declaration
890 || TREE_CODE_CLASS (code
) == tcc_constant
)
892 if (TREE_VISITED (t
))
895 TREE_VISITED (t
) = 1;
898 /* If this node has been visited already, unshare it and don't look
900 else if (TREE_VISITED (t
))
902 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
906 /* Otherwise, mark the node as visited and keep looking. */
908 TREE_VISITED (t
) = 1;
913 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
914 copy_if_shared_r callback unmodified. */
917 copy_if_shared (tree
*tp
, void *data
)
919 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
922 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
923 any nested functions. */
926 unshare_body (tree fndecl
)
928 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
929 /* If the language requires deep unsharing, we need a pointer set to make
930 sure we don't repeatedly unshare subtrees of unshareable nodes. */
931 hash_set
<tree
> *visited
932 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
934 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
935 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
936 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
941 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
942 unshare_body (cgn
->decl
);
945 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
946 Subtrees are walked until the first unvisited node is encountered. */
949 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
953 /* If this node has been visited, unmark it and keep looking. */
954 if (TREE_VISITED (t
))
955 TREE_VISITED (t
) = 0;
957 /* Otherwise, don't look any deeper. */
964 /* Unmark the visited trees rooted at *TP. */
967 unmark_visited (tree
*tp
)
969 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
972 /* Likewise, but mark all trees as not visited. */
975 unvisit_body (tree fndecl
)
977 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
979 unmark_visited (&DECL_SAVED_TREE (fndecl
));
980 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
981 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
984 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
985 unvisit_body (cgn
->decl
);
988 /* Unconditionally make an unshared copy of EXPR. This is used when using
989 stored expressions which span multiple functions, such as BINFO_VTABLE,
990 as the normal unsharing process can't tell that they're shared. */
993 unshare_expr (tree expr
)
995 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
999 /* Worker for unshare_expr_without_location. */
1002 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
1005 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
1011 /* Similar to unshare_expr but also prune all expression locations
1015 unshare_expr_without_location (tree expr
)
1017 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1019 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
1023 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1024 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1025 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1026 EXPR is the location of the EXPR. */
1029 rexpr_location (tree expr
, location_t or_else
= UNKNOWN_LOCATION
)
1034 if (EXPR_HAS_LOCATION (expr
))
1035 return EXPR_LOCATION (expr
);
1037 if (TREE_CODE (expr
) != STATEMENT_LIST
)
1040 tree_stmt_iterator i
= tsi_start (expr
);
1043 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
1049 if (!found
|| !tsi_one_before_end_p (i
))
1052 return rexpr_location (tsi_stmt (i
), or_else
);
1055 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1056 rexpr_location for the potential recursion. */
1059 rexpr_has_location (tree expr
)
1061 return rexpr_location (expr
) != UNKNOWN_LOCATION
;
1065 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1066 contain statements and have a value. Assign its value to a temporary
1067 and give it void_type_node. Return the temporary, or NULL_TREE if
1068 WRAPPER was already void. */
1071 voidify_wrapper_expr (tree wrapper
, tree temp
)
1073 tree type
= TREE_TYPE (wrapper
);
1074 if (type
&& !VOID_TYPE_P (type
))
1078 /* Set p to point to the body of the wrapper. Loop until we find
1079 something that isn't a wrapper. */
1080 for (p
= &wrapper
; p
&& *p
; )
1082 switch (TREE_CODE (*p
))
1085 TREE_SIDE_EFFECTS (*p
) = 1;
1086 TREE_TYPE (*p
) = void_type_node
;
1087 /* For a BIND_EXPR, the body is operand 1. */
1088 p
= &BIND_EXPR_BODY (*p
);
1091 case CLEANUP_POINT_EXPR
:
1092 case TRY_FINALLY_EXPR
:
1093 case TRY_CATCH_EXPR
:
1094 TREE_SIDE_EFFECTS (*p
) = 1;
1095 TREE_TYPE (*p
) = void_type_node
;
1096 p
= &TREE_OPERAND (*p
, 0);
1099 case STATEMENT_LIST
:
1101 tree_stmt_iterator i
= tsi_last (*p
);
1102 TREE_SIDE_EFFECTS (*p
) = 1;
1103 TREE_TYPE (*p
) = void_type_node
;
1104 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1109 /* Advance to the last statement. Set all container types to
1111 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1113 TREE_SIDE_EFFECTS (*p
) = 1;
1114 TREE_TYPE (*p
) = void_type_node
;
1118 case TRANSACTION_EXPR
:
1119 TREE_SIDE_EFFECTS (*p
) = 1;
1120 TREE_TYPE (*p
) = void_type_node
;
1121 p
= &TRANSACTION_EXPR_BODY (*p
);
1125 /* Assume that any tree upon which voidify_wrapper_expr is
1126 directly called is a wrapper, and that its body is op0. */
1129 TREE_SIDE_EFFECTS (*p
) = 1;
1130 TREE_TYPE (*p
) = void_type_node
;
1131 p
= &TREE_OPERAND (*p
, 0);
1139 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1143 /* The wrapper is on the RHS of an assignment that we're pushing
1145 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1146 || TREE_CODE (temp
) == MODIFY_EXPR
);
1147 TREE_OPERAND (temp
, 1) = *p
;
1152 temp
= create_tmp_var (type
, "retval");
1153 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1162 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1163 a temporary through which they communicate. */
1166 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1170 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1171 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1172 gimple_call_set_lhs (*save
, tmp_var
);
1175 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1179 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1182 build_asan_poison_call_expr (tree decl
)
1184 /* Do not poison variables that have size equal to zero. */
1185 tree unit_size
= DECL_SIZE_UNIT (decl
);
1186 if (zerop (unit_size
))
1189 tree base
= build_fold_addr_expr (decl
);
1191 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1193 build_int_cst (integer_type_node
,
1198 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1199 on POISON flag, shadow memory of a DECL variable. The call will be
1200 put on location identified by IT iterator, where BEFORE flag drives
1201 position where the stmt will be put. */
1204 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1207 tree unit_size
= DECL_SIZE_UNIT (decl
);
1208 tree base
= build_fold_addr_expr (decl
);
1210 /* Do not poison variables that have size equal to zero. */
1211 if (zerop (unit_size
))
1214 /* It's necessary to have all stack variables aligned to ASAN granularity
1216 if (DECL_ALIGN_UNIT (decl
) <= ASAN_SHADOW_GRANULARITY
)
1217 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* ASAN_SHADOW_GRANULARITY
);
1219 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1222 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1223 build_int_cst (integer_type_node
, flags
),
1227 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1229 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1232 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1233 either poisons or unpoisons a DECL. Created statement is appended
1234 to SEQ_P gimple sequence. */
1237 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1239 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1240 bool before
= false;
1245 asan_poison_variable (decl
, poison
, &it
, before
);
1248 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1251 sort_by_decl_uid (const void *a
, const void *b
)
1253 const tree
*t1
= (const tree
*)a
;
1254 const tree
*t2
= (const tree
*)b
;
1256 int uid1
= DECL_UID (*t1
);
1257 int uid2
= DECL_UID (*t2
);
1261 else if (uid1
> uid2
)
1267 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1268 depending on POISON flag. Created statement is appended
1269 to SEQ_P gimple sequence. */
1272 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1274 unsigned c
= variables
->elements ();
1278 auto_vec
<tree
> sorted_variables (c
);
1280 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1281 it
!= variables
->end (); ++it
)
1282 sorted_variables
.safe_push (*it
);
1284 sorted_variables
.qsort (sort_by_decl_uid
);
1288 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1290 asan_poison_variable (var
, poison
, seq_p
);
1292 /* Add use_after_scope_memory attribute for the variable in order
1293 to prevent re-written into SSA. */
1294 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1295 DECL_ATTRIBUTES (var
)))
1296 DECL_ATTRIBUTES (var
)
1297 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1299 DECL_ATTRIBUTES (var
));
1303 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1305 static enum gimplify_status
1306 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1308 tree bind_expr
= *expr_p
;
1309 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1310 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1313 gimple_seq body
, cleanup
;
1315 location_t start_locus
= 0, end_locus
= 0;
1316 tree ret_clauses
= NULL
;
1318 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1320 /* Mark variables seen in this bind expr. */
1321 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1325 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1327 /* Mark variable as local. */
1328 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
))
1330 if (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1331 || splay_tree_lookup (ctx
->variables
,
1332 (splay_tree_key
) t
) == NULL
)
1334 if (ctx
->region_type
== ORT_SIMD
1335 && TREE_ADDRESSABLE (t
)
1336 && !TREE_STATIC (t
))
1337 omp_add_variable (ctx
, t
, GOVD_PRIVATE
| GOVD_SEEN
);
1339 omp_add_variable (ctx
, t
, GOVD_LOCAL
| GOVD_SEEN
);
1341 /* Static locals inside of target construct or offloaded
1342 routines need to be "omp declare target". */
1343 if (TREE_STATIC (t
))
1344 for (; ctx
; ctx
= ctx
->outer_context
)
1345 if ((ctx
->region_type
& ORT_TARGET
) != 0)
1347 if (!lookup_attribute ("omp declare target",
1348 DECL_ATTRIBUTES (t
)))
1350 tree id
= get_identifier ("omp declare target");
1352 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (t
));
1353 varpool_node
*node
= varpool_node::get (t
);
1356 node
->offloadable
= 1;
1357 if (ENABLE_OFFLOADING
&& !DECL_EXTERNAL (t
))
1359 g
->have_offload
= true;
1361 vec_safe_push (offload_vars
, t
);
1369 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1371 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1372 cfun
->has_local_explicit_reg_vars
= true;
1375 /* Preliminarily mark non-addressed complex variables as eligible
1376 for promotion to gimple registers. We'll transform their uses
1378 if ((TREE_CODE (TREE_TYPE (t
)) == COMPLEX_TYPE
1379 || TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
1380 && !TREE_THIS_VOLATILE (t
)
1381 && (VAR_P (t
) && !DECL_HARD_REGISTER (t
))
1382 && !needs_to_live_in_memory (t
))
1383 DECL_GIMPLE_REG_P (t
) = 1;
1386 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1387 BIND_EXPR_BLOCK (bind_expr
));
1388 gimple_push_bind_expr (bind_stmt
);
1390 gimplify_ctxp
->keep_stack
= false;
1391 gimplify_ctxp
->save_stack
= false;
1393 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1395 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1396 gimple_bind_set_body (bind_stmt
, body
);
1398 /* Source location wise, the cleanup code (stack_restore and clobbers)
1399 belongs to the end of the block, so propagate what we have. The
1400 stack_save operation belongs to the beginning of block, which we can
1401 infer from the bind_expr directly if the block has no explicit
1403 if (BIND_EXPR_BLOCK (bind_expr
))
1405 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1406 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1408 if (start_locus
== 0)
1409 start_locus
= EXPR_LOCATION (bind_expr
);
1414 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1415 the stack space allocated to the VLAs. */
1416 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1418 gcall
*stack_restore
;
1420 /* Save stack on entry and restore it on exit. Add a try_finally
1421 block to achieve this. */
1422 build_stack_save_restore (&stack_save
, &stack_restore
);
1424 gimple_set_location (stack_save
, start_locus
);
1425 gimple_set_location (stack_restore
, end_locus
);
1427 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1430 /* Add clobbers for all variables that go out of scope. */
1431 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1434 && !is_global_var (t
)
1435 && DECL_CONTEXT (t
) == current_function_decl
)
1437 if (!DECL_HARD_REGISTER (t
)
1438 && !TREE_THIS_VOLATILE (t
)
1439 && !DECL_HAS_VALUE_EXPR_P (t
)
1440 /* Only care for variables that have to be in memory. Others
1441 will be rewritten into SSA names, hence moved to the
1443 && !is_gimple_reg (t
)
1444 && flag_stack_reuse
!= SR_NONE
)
1446 tree clobber
= build_clobber (TREE_TYPE (t
));
1447 gimple
*clobber_stmt
;
1448 clobber_stmt
= gimple_build_assign (t
, clobber
);
1449 gimple_set_location (clobber_stmt
, end_locus
);
1450 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1453 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1455 tree
*c
= oacc_declare_returns
->get (t
);
1459 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1463 oacc_declare_returns
->remove (t
);
1465 if (oacc_declare_returns
->is_empty ())
1467 delete oacc_declare_returns
;
1468 oacc_declare_returns
= NULL
;
1474 if (asan_poisoned_variables
!= NULL
1475 && asan_poisoned_variables
->contains (t
))
1477 asan_poisoned_variables
->remove (t
);
1478 asan_poison_variable (t
, true, &cleanup
);
1481 if (gimplify_ctxp
->live_switch_vars
!= NULL
1482 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1483 gimplify_ctxp
->live_switch_vars
->remove (t
);
1489 gimple_stmt_iterator si
= gsi_start (cleanup
);
1491 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1493 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1499 gimple_seq new_body
;
1502 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1503 GIMPLE_TRY_FINALLY
);
1506 gimplify_seq_add_stmt (&new_body
, stack_save
);
1507 gimplify_seq_add_stmt (&new_body
, gs
);
1508 gimple_bind_set_body (bind_stmt
, new_body
);
1511 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1512 if (!gimplify_ctxp
->keep_stack
)
1513 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1514 gimplify_ctxp
->save_stack
= old_save_stack
;
1516 gimple_pop_bind_expr ();
1518 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1526 *expr_p
= NULL_TREE
;
1530 /* Maybe add early return predict statement to PRE_P sequence. */
1533 maybe_add_early_return_predict_stmt (gimple_seq
*pre_p
)
1535 /* If we are not in a conditional context, add PREDICT statement. */
1536 if (gimple_conditional_context ())
1538 gimple
*predict
= gimple_build_predict (PRED_TREE_EARLY_RETURN
,
1540 gimplify_seq_add_stmt (pre_p
, predict
);
1544 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1545 GIMPLE value, it is assigned to a new temporary and the statement is
1546 re-written to return the temporary.
1548 PRE_P points to the sequence where side effects that must happen before
1549 STMT should be stored. */
1551 static enum gimplify_status
1552 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1555 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1556 tree result_decl
, result
;
1558 if (ret_expr
== error_mark_node
)
1562 || TREE_CODE (ret_expr
) == RESULT_DECL
)
1564 maybe_add_early_return_predict_stmt (pre_p
);
1565 greturn
*ret
= gimple_build_return (ret_expr
);
1566 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1567 gimplify_seq_add_stmt (pre_p
, ret
);
1571 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1572 result_decl
= NULL_TREE
;
1575 result_decl
= TREE_OPERAND (ret_expr
, 0);
1577 /* See through a return by reference. */
1578 if (TREE_CODE (result_decl
) == INDIRECT_REF
)
1579 result_decl
= TREE_OPERAND (result_decl
, 0);
1581 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1582 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1583 && TREE_CODE (result_decl
) == RESULT_DECL
);
1586 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1587 Recall that aggregate_value_p is FALSE for any aggregate type that is
1588 returned in registers. If we're returning values in registers, then
1589 we don't want to extend the lifetime of the RESULT_DECL, particularly
1590 across another call. In addition, for those aggregates for which
1591 hard_function_value generates a PARALLEL, we'll die during normal
1592 expansion of structure assignments; there's special code in expand_return
1593 to handle this case that does not exist in expand_expr. */
1596 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1598 if (TREE_CODE (DECL_SIZE (result_decl
)) != INTEGER_CST
)
1600 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1601 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1602 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1603 should be effectively allocated by the caller, i.e. all calls to
1604 this function must be subject to the Return Slot Optimization. */
1605 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1606 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1608 result
= result_decl
;
1610 else if (gimplify_ctxp
->return_temp
)
1611 result
= gimplify_ctxp
->return_temp
;
1614 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1616 /* ??? With complex control flow (usually involving abnormal edges),
1617 we can wind up warning about an uninitialized value for this. Due
1618 to how this variable is constructed and initialized, this is never
1619 true. Give up and never warn. */
1620 TREE_NO_WARNING (result
) = 1;
1622 gimplify_ctxp
->return_temp
= result
;
1625 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1626 Then gimplify the whole thing. */
1627 if (result
!= result_decl
)
1628 TREE_OPERAND (ret_expr
, 0) = result
;
1630 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1632 maybe_add_early_return_predict_stmt (pre_p
);
1633 ret
= gimple_build_return (result
);
1634 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1635 gimplify_seq_add_stmt (pre_p
, ret
);
1640 /* Gimplify a variable-length array DECL. */
1643 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1645 /* This is a variable-sized decl. Simplify its size and mark it
1646 for deferred expansion. */
1647 tree t
, addr
, ptr_type
;
1649 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1650 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1652 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1653 if (DECL_HAS_VALUE_EXPR_P (decl
))
1656 /* All occurrences of this decl in final gimplified code will be
1657 replaced by indirection. Setting DECL_VALUE_EXPR does two
1658 things: First, it lets the rest of the gimplifier know what
1659 replacement to use. Second, it lets the debug info know
1660 where to find the value. */
1661 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1662 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1663 DECL_IGNORED_P (addr
) = 0;
1664 t
= build_fold_indirect_ref (addr
);
1665 TREE_THIS_NOTRAP (t
) = 1;
1666 SET_DECL_VALUE_EXPR (decl
, t
);
1667 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1669 t
= build_alloca_call_expr (DECL_SIZE_UNIT (decl
), DECL_ALIGN (decl
),
1670 max_int_size_in_bytes (TREE_TYPE (decl
)));
1671 /* The call has been built for a variable-sized object. */
1672 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1673 t
= fold_convert (ptr_type
, t
);
1674 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1676 gimplify_and_add (t
, seq_p
);
1679 /* A helper function to be called via walk_tree. Mark all labels under *TP
1680 as being forced. To be called for DECL_INITIAL of static variables. */
1683 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1687 if (TREE_CODE (*tp
) == LABEL_DECL
)
1689 FORCED_LABEL (*tp
) = 1;
1690 cfun
->has_forced_label_in_static
= 1;
1696 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1697 and initialization explicit. */
1699 static enum gimplify_status
1700 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
1702 tree stmt
= *stmt_p
;
1703 tree decl
= DECL_EXPR_DECL (stmt
);
1705 *stmt_p
= NULL_TREE
;
1707 if (TREE_TYPE (decl
) == error_mark_node
)
1710 if ((TREE_CODE (decl
) == TYPE_DECL
1712 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
1714 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
1715 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1716 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
1719 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1720 in case its size expressions contain problematic nodes like CALL_EXPR. */
1721 if (TREE_CODE (decl
) == TYPE_DECL
1722 && DECL_ORIGINAL_TYPE (decl
)
1723 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
1725 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
1726 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
1727 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
1730 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
1732 tree init
= DECL_INITIAL (decl
);
1733 bool is_vla
= false;
1735 if (TREE_CODE (DECL_SIZE_UNIT (decl
)) != INTEGER_CST
1736 || (!TREE_STATIC (decl
)
1737 && flag_stack_check
== GENERIC_STACK_CHECK
1738 && compare_tree_int (DECL_SIZE_UNIT (decl
),
1739 STACK_CHECK_MAX_VAR_SIZE
) > 0))
1741 gimplify_vla_decl (decl
, seq_p
);
1745 if (asan_poisoned_variables
1747 && TREE_ADDRESSABLE (decl
)
1748 && !TREE_STATIC (decl
)
1749 && !DECL_HAS_VALUE_EXPR_P (decl
)
1750 && DECL_ALIGN (decl
) <= MAX_SUPPORTED_STACK_ALIGNMENT
1751 && dbg_cnt (asan_use_after_scope
)
1752 && !gimplify_omp_ctxp
)
1754 asan_poisoned_variables
->add (decl
);
1755 asan_poison_variable (decl
, false, seq_p
);
1756 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
1757 gimplify_ctxp
->live_switch_vars
->add (decl
);
1760 /* Some front ends do not explicitly declare all anonymous
1761 artificial variables. We compensate here by declaring the
1762 variables, though it would be better if the front ends would
1763 explicitly declare them. */
1764 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
1765 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
1766 gimple_add_tmp_var (decl
);
1768 if (init
&& init
!= error_mark_node
)
1770 if (!TREE_STATIC (decl
))
1772 DECL_INITIAL (decl
) = NULL_TREE
;
1773 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
1774 gimplify_and_add (init
, seq_p
);
1778 /* We must still examine initializers for static variables
1779 as they may contain a label address. */
1780 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
1787 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1788 and replacing the LOOP_EXPR with goto, but if the loop contains an
1789 EXIT_EXPR, we need to append a label for it to jump to. */
1791 static enum gimplify_status
1792 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1794 tree saved_label
= gimplify_ctxp
->exit_label
;
1795 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
1797 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
1799 gimplify_ctxp
->exit_label
= NULL_TREE
;
1801 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
1803 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
1805 if (gimplify_ctxp
->exit_label
)
1806 gimplify_seq_add_stmt (pre_p
,
1807 gimple_build_label (gimplify_ctxp
->exit_label
));
1809 gimplify_ctxp
->exit_label
= saved_label
;
1815 /* Gimplify a statement list onto a sequence. These may be created either
1816 by an enlightened front-end, or by shortcut_cond_expr. */
1818 static enum gimplify_status
1819 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
1821 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
1823 tree_stmt_iterator i
= tsi_start (*expr_p
);
1825 while (!tsi_end_p (i
))
1827 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
1840 /* Callback for walk_gimple_seq. */
1843 warn_switch_unreachable_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
1844 struct walk_stmt_info
*wi
)
1846 gimple
*stmt
= gsi_stmt (*gsi_p
);
1848 *handled_ops_p
= true;
1849 switch (gimple_code (stmt
))
1852 /* A compiler-generated cleanup or a user-written try block.
1853 If it's empty, don't dive into it--that would result in
1854 worse location info. */
1855 if (gimple_try_eval (stmt
) == NULL
)
1858 return integer_zero_node
;
1863 case GIMPLE_EH_FILTER
:
1864 case GIMPLE_TRANSACTION
:
1865 /* Walk the sub-statements. */
1866 *handled_ops_p
= false;
1870 /* Ignore these. We may generate them before declarations that
1871 are never executed. If there's something to warn about,
1872 there will be non-debug stmts too, and we'll catch those. */
1876 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
1878 *handled_ops_p
= false;
1883 /* Save the first "real" statement (not a decl/lexical scope/...). */
1885 return integer_zero_node
;
1890 /* Possibly warn about unreachable statements between switch's controlling
1891 expression and the first case. SEQ is the body of a switch expression. */
1894 maybe_warn_switch_unreachable (gimple_seq seq
)
1896 if (!warn_switch_unreachable
1897 /* This warning doesn't play well with Fortran when optimizations
1899 || lang_GNU_Fortran ()
1903 struct walk_stmt_info wi
;
1904 memset (&wi
, 0, sizeof (wi
));
1905 walk_gimple_seq (seq
, warn_switch_unreachable_r
, NULL
, &wi
);
1906 gimple
*stmt
= (gimple
*) wi
.info
;
1908 if (stmt
&& gimple_code (stmt
) != GIMPLE_LABEL
)
1910 if (gimple_code (stmt
) == GIMPLE_GOTO
1911 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
1912 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
1913 /* Don't warn for compiler-generated gotos. These occur
1914 in Duff's devices, for example. */;
1916 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
1917 "statement will never be executed");
1922 /* A label entry that pairs label and a location. */
1929 /* Find LABEL in vector of label entries VEC. */
1931 static struct label_entry
*
1932 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
1935 struct label_entry
*l
;
1937 FOR_EACH_VEC_ELT (*vec
, i
, l
)
1938 if (l
->label
== label
)
1943 /* Return true if LABEL, a LABEL_DECL, represents a case label
1944 in a vector of labels CASES. */
1947 case_label_p (const vec
<tree
> *cases
, tree label
)
1952 FOR_EACH_VEC_ELT (*cases
, i
, l
)
1953 if (CASE_LABEL (l
) == label
)
1958 /* Find the last nondebug statement in a scope STMT. */
1961 last_stmt_in_scope (gimple
*stmt
)
1966 switch (gimple_code (stmt
))
1970 gbind
*bind
= as_a
<gbind
*> (stmt
);
1971 stmt
= gimple_seq_last_nondebug_stmt (gimple_bind_body (bind
));
1972 return last_stmt_in_scope (stmt
);
1977 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
1978 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt
));
1979 gimple
*last_eval
= last_stmt_in_scope (stmt
);
1980 if (gimple_stmt_may_fallthru (last_eval
)
1981 && (last_eval
== NULL
1982 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
1983 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
1985 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt
));
1986 return last_stmt_in_scope (stmt
);
2000 /* Collect interesting labels in LABELS and return the statement preceding
2001 another case label, or a user-defined label. Store a location useful
2002 to give warnings at *PREVLOC (usually the location of the returned
2003 statement or of its surrounding scope). */
2006 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
2007 auto_vec
<struct label_entry
> *labels
,
2008 location_t
*prevloc
)
2010 gimple
*prev
= NULL
;
2012 *prevloc
= UNKNOWN_LOCATION
;
2015 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
)
2017 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2018 which starts on a GIMPLE_SWITCH and ends with a break label.
2019 Handle that as a single statement that can fall through. */
2020 gbind
*bind
= as_a
<gbind
*> (gsi_stmt (*gsi_p
));
2021 gimple
*first
= gimple_seq_first_stmt (gimple_bind_body (bind
));
2022 gimple
*last
= gimple_seq_last_stmt (gimple_bind_body (bind
));
2024 && gimple_code (first
) == GIMPLE_SWITCH
2025 && gimple_code (last
) == GIMPLE_LABEL
)
2027 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2028 if (SWITCH_BREAK_LABEL_P (label
))
2036 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
2037 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
2039 /* Nested scope. Only look at the last statement of
2040 the innermost scope. */
2041 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
2042 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
2046 /* It might be a label without a location. Use the
2047 location of the scope then. */
2048 if (!gimple_has_location (prev
))
2049 *prevloc
= bind_loc
;
2055 /* Ifs are tricky. */
2056 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
2058 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
2059 tree false_lab
= gimple_cond_false_label (cond_stmt
);
2060 location_t if_loc
= gimple_location (cond_stmt
);
2063 if (i > 1) goto <D.2259>; else goto D;
2064 we can't do much with the else-branch. */
2065 if (!DECL_ARTIFICIAL (false_lab
))
2068 /* Go on until the false label, then one step back. */
2069 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
2071 gimple
*stmt
= gsi_stmt (*gsi_p
);
2072 if (gimple_code (stmt
) == GIMPLE_LABEL
2073 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
2077 /* Not found? Oops. */
2078 if (gsi_end_p (*gsi_p
))
2081 struct label_entry l
= { false_lab
, if_loc
};
2082 labels
->safe_push (l
);
2084 /* Go to the last statement of the then branch. */
2087 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2093 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
2094 && !gimple_has_location (gsi_stmt (*gsi_p
)))
2096 /* Look at the statement before, it might be
2097 attribute fallthrough, in which case don't warn. */
2099 bool fallthru_before_dest
2100 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
2102 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
2103 if (!fallthru_before_dest
)
2105 struct label_entry l
= { goto_dest
, if_loc
};
2106 labels
->safe_push (l
);
2109 /* And move back. */
2113 /* Remember the last statement. Skip labels that are of no interest
2115 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2117 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
2118 if (find_label_entry (labels
, label
))
2119 prev
= gsi_stmt (*gsi_p
);
2121 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
2123 else if (!is_gimple_debug (gsi_stmt (*gsi_p
)))
2124 prev
= gsi_stmt (*gsi_p
);
2127 while (!gsi_end_p (*gsi_p
)
2128 /* Stop if we find a case or a user-defined label. */
2129 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
2130 || !gimple_has_location (gsi_stmt (*gsi_p
))));
2132 if (prev
&& gimple_has_location (prev
))
2133 *prevloc
= gimple_location (prev
);
2137 /* Return true if the switch fallthough warning should occur. LABEL is
2138 the label statement that we're falling through to. */
2141 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
2143 gimple_stmt_iterator gsi
= *gsi_p
;
2145 /* Don't warn if the label is marked with a "falls through" comment. */
2146 if (FALLTHROUGH_LABEL_P (label
))
2149 /* Don't warn for non-case labels followed by a statement:
2154 as these are likely intentional. */
2155 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
2158 while (!gsi_end_p (gsi
)
2159 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2160 && (l
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi
))))
2161 && !case_label_p (&gimplify_ctxp
->case_labels
, l
))
2162 gsi_next_nondebug (&gsi
);
2163 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2167 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2168 immediately breaks. */
2171 /* Skip all immediately following labels. */
2172 while (!gsi_end_p (gsi
)
2173 && (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2174 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_PREDICT
))
2175 gsi_next_nondebug (&gsi
);
2177 /* { ... something; default:; } */
2179 /* { ... something; default: break; } or
2180 { ... something; default: goto L; } */
2181 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2182 /* { ... something; default: return; } */
2183 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
2189 /* Callback for walk_gimple_seq. */
2192 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2193 struct walk_stmt_info
*)
2195 gimple
*stmt
= gsi_stmt (*gsi_p
);
2197 *handled_ops_p
= true;
2198 switch (gimple_code (stmt
))
2203 case GIMPLE_EH_FILTER
:
2204 case GIMPLE_TRANSACTION
:
2205 /* Walk the sub-statements. */
2206 *handled_ops_p
= false;
2209 /* Find a sequence of form:
2216 and possibly warn. */
2219 /* Found a label. Skip all immediately following labels. */
2220 while (!gsi_end_p (*gsi_p
)
2221 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2222 gsi_next_nondebug (gsi_p
);
2224 /* There might be no more statements. */
2225 if (gsi_end_p (*gsi_p
))
2226 return integer_zero_node
;
2228 /* Vector of labels that fall through. */
2229 auto_vec
<struct label_entry
> labels
;
2231 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
, &prevloc
);
2233 /* There might be no more statements. */
2234 if (gsi_end_p (*gsi_p
))
2235 return integer_zero_node
;
2237 gimple
*next
= gsi_stmt (*gsi_p
);
2239 /* If what follows is a label, then we may have a fallthrough. */
2240 if (gimple_code (next
) == GIMPLE_LABEL
2241 && gimple_has_location (next
)
2242 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2245 struct label_entry
*l
;
2246 bool warned_p
= false;
2247 auto_diagnostic_group d
;
2248 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
2250 else if (gimple_code (prev
) == GIMPLE_LABEL
2251 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2252 && (l
= find_label_entry (&labels
, label
)))
2253 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2254 "this statement may fall through");
2255 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2256 /* Try to be clever and don't warn when the statement
2257 can't actually fall through. */
2258 && gimple_stmt_may_fallthru (prev
)
2259 && prevloc
!= UNKNOWN_LOCATION
)
2260 warned_p
= warning_at (prevloc
,
2261 OPT_Wimplicit_fallthrough_
,
2262 "this statement may fall through");
2264 inform (gimple_location (next
), "here");
2266 /* Mark this label as processed so as to prevent multiple
2267 warnings in nested switches. */
2268 FALLTHROUGH_LABEL_P (label
) = true;
2270 /* So that next warn_implicit_fallthrough_r will start looking for
2271 a new sequence starting with this label. */
2282 /* Warn when a switch case falls through. */
2285 maybe_warn_implicit_fallthrough (gimple_seq seq
)
2287 if (!warn_implicit_fallthrough
)
2290 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2293 || lang_GNU_OBJC ()))
2296 struct walk_stmt_info wi
;
2297 memset (&wi
, 0, sizeof (wi
));
2298 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
2301 /* Callback for walk_gimple_seq. */
2304 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2305 struct walk_stmt_info
*wi
)
2307 gimple
*stmt
= gsi_stmt (*gsi_p
);
2309 *handled_ops_p
= true;
2310 switch (gimple_code (stmt
))
2315 case GIMPLE_EH_FILTER
:
2316 case GIMPLE_TRANSACTION
:
2317 /* Walk the sub-statements. */
2318 *handled_ops_p
= false;
2321 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2323 gsi_remove (gsi_p
, true);
2324 if (gsi_end_p (*gsi_p
))
2326 *static_cast<location_t
*>(wi
->info
) = gimple_location (stmt
);
2327 return integer_zero_node
;
2331 location_t loc
= gimple_location (stmt
);
2333 gimple_stmt_iterator gsi2
= *gsi_p
;
2334 stmt
= gsi_stmt (gsi2
);
2335 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2337 /* Go on until the artificial label. */
2338 tree goto_dest
= gimple_goto_dest (stmt
);
2339 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2341 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2342 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2347 /* Not found? Stop. */
2348 if (gsi_end_p (gsi2
))
2351 /* Look one past it. */
2355 /* We're looking for a case label or default label here. */
2356 while (!gsi_end_p (gsi2
))
2358 stmt
= gsi_stmt (gsi2
);
2359 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2361 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2362 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2368 else if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2370 else if (!is_gimple_debug (stmt
))
2371 /* Anything else is not expected. */
2376 warning_at (loc
, 0, "attribute %<fallthrough%> not preceding "
2377 "a case label or default label");
2386 /* Expand all FALLTHROUGH () calls in SEQ. */
2389 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2391 struct walk_stmt_info wi
;
2393 memset (&wi
, 0, sizeof (wi
));
2394 wi
.info
= (void *) &loc
;
2395 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2396 if (wi
.callback_result
== integer_zero_node
)
2397 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2398 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2399 warning_at (loc
, 0, "attribute %<fallthrough%> not preceding "
2400 "a case label or default label");
2404 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2407 static enum gimplify_status
2408 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2410 tree switch_expr
= *expr_p
;
2411 gimple_seq switch_body_seq
= NULL
;
2412 enum gimplify_status ret
;
2413 tree index_type
= TREE_TYPE (switch_expr
);
2414 if (index_type
== NULL_TREE
)
2415 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
2417 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2419 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2422 if (SWITCH_BODY (switch_expr
))
2425 vec
<tree
> saved_labels
;
2426 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2427 tree default_case
= NULL_TREE
;
2428 gswitch
*switch_stmt
;
2430 /* Save old labels, get new ones from body, then restore the old
2431 labels. Save all the things from the switch body to append after. */
2432 saved_labels
= gimplify_ctxp
->case_labels
;
2433 gimplify_ctxp
->case_labels
.create (8);
2435 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2436 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2437 tree_code body_type
= TREE_CODE (SWITCH_BODY (switch_expr
));
2438 if (body_type
== BIND_EXPR
|| body_type
== STATEMENT_LIST
)
2439 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2441 gimplify_ctxp
->live_switch_vars
= NULL
;
2443 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2444 gimplify_ctxp
->in_switch_expr
= true;
2446 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2448 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2449 maybe_warn_switch_unreachable (switch_body_seq
);
2450 maybe_warn_implicit_fallthrough (switch_body_seq
);
2451 /* Only do this for the outermost GIMPLE_SWITCH. */
2452 if (!gimplify_ctxp
->in_switch_expr
)
2453 expand_FALLTHROUGH (&switch_body_seq
);
2455 labels
= gimplify_ctxp
->case_labels
;
2456 gimplify_ctxp
->case_labels
= saved_labels
;
2458 if (gimplify_ctxp
->live_switch_vars
)
2460 gcc_assert (gimplify_ctxp
->live_switch_vars
->is_empty ());
2461 delete gimplify_ctxp
->live_switch_vars
;
2463 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2465 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2468 bool add_bind
= false;
2471 glabel
*new_default
;
2474 = build_case_label (NULL_TREE
, NULL_TREE
,
2475 create_artificial_label (UNKNOWN_LOCATION
));
2476 if (old_in_switch_expr
)
2478 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case
)) = 1;
2481 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2482 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2484 else if (old_in_switch_expr
)
2486 gimple
*last
= gimple_seq_last_stmt (switch_body_seq
);
2487 if (last
&& gimple_code (last
) == GIMPLE_LABEL
)
2489 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2490 if (SWITCH_BREAK_LABEL_P (label
))
2495 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2496 default_case
, labels
);
2497 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2498 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2499 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2500 so that we can easily find the start and end of the switch
2504 gimple_seq bind_body
= NULL
;
2505 gimplify_seq_add_stmt (&bind_body
, switch_stmt
);
2506 gimple_seq_add_seq (&bind_body
, switch_body_seq
);
2507 gbind
*bind
= gimple_build_bind (NULL_TREE
, bind_body
, NULL_TREE
);
2508 gimple_set_location (bind
, EXPR_LOCATION (switch_expr
));
2509 gimplify_seq_add_stmt (pre_p
, bind
);
2513 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
2514 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
2524 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2526 static enum gimplify_status
2527 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2529 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
2530 == current_function_decl
);
2532 tree label
= LABEL_EXPR_LABEL (*expr_p
);
2533 glabel
*label_stmt
= gimple_build_label (label
);
2534 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2535 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2537 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2538 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2540 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2541 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2547 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2549 static enum gimplify_status
2550 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2552 struct gimplify_ctx
*ctxp
;
2555 /* Invalid programs can play Duff's Device type games with, for example,
2556 #pragma omp parallel. At least in the C front end, we don't
2557 detect such invalid branches until after gimplification, in the
2558 diagnose_omp_blocks pass. */
2559 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
2560 if (ctxp
->case_labels
.exists ())
2563 tree label
= CASE_LABEL (*expr_p
);
2564 label_stmt
= gimple_build_label (label
);
2565 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2566 ctxp
->case_labels
.safe_push (*expr_p
);
2567 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2569 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2570 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2572 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2573 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2579 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2583 build_and_jump (tree
*label_p
)
2585 if (label_p
== NULL
)
2586 /* If there's nowhere to jump, just fall through. */
2589 if (*label_p
== NULL_TREE
)
2591 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
2595 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
2598 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2599 This also involves building a label to jump to and communicating it to
2600 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2602 static enum gimplify_status
2603 gimplify_exit_expr (tree
*expr_p
)
2605 tree cond
= TREE_OPERAND (*expr_p
, 0);
2608 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
2609 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
2615 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2616 different from its canonical type, wrap the whole thing inside a
2617 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2620 The canonical type of a COMPONENT_REF is the type of the field being
2621 referenced--unless the field is a bit-field which can be read directly
2622 in a smaller mode, in which case the canonical type is the
2623 sign-appropriate type corresponding to that mode. */
2626 canonicalize_component_ref (tree
*expr_p
)
2628 tree expr
= *expr_p
;
2631 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
2633 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
2634 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
2636 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
2638 /* One could argue that all the stuff below is not necessary for
2639 the non-bitfield case and declare it a FE error if type
2640 adjustment would be needed. */
2641 if (TREE_TYPE (expr
) != type
)
2643 #ifdef ENABLE_TYPES_CHECKING
2644 tree old_type
= TREE_TYPE (expr
);
2648 /* We need to preserve qualifiers and propagate them from
2650 type_quals
= TYPE_QUALS (type
)
2651 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
2652 if (TYPE_QUALS (type
) != type_quals
)
2653 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
2655 /* Set the type of the COMPONENT_REF to the underlying type. */
2656 TREE_TYPE (expr
) = type
;
2658 #ifdef ENABLE_TYPES_CHECKING
2659 /* It is now a FE error, if the conversion from the canonical
2660 type to the original expression type is not useless. */
2661 gcc_assert (useless_type_conversion_p (old_type
, type
));
2666 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2667 to foo, embed that change in the ADDR_EXPR by converting
2672 where L is the lower bound. For simplicity, only do this for constant
2674 The constraint is that the type of &array[L] is trivially convertible
2678 canonicalize_addr_expr (tree
*expr_p
)
2680 tree expr
= *expr_p
;
2681 tree addr_expr
= TREE_OPERAND (expr
, 0);
2682 tree datype
, ddatype
, pddatype
;
2684 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2685 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
2686 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
2689 /* The addr_expr type should be a pointer to an array. */
2690 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
2691 if (TREE_CODE (datype
) != ARRAY_TYPE
)
2694 /* The pointer to element type shall be trivially convertible to
2695 the expression pointer type. */
2696 ddatype
= TREE_TYPE (datype
);
2697 pddatype
= build_pointer_type (ddatype
);
2698 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
2702 /* The lower bound and element sizes must be constant. */
2703 if (!TYPE_SIZE_UNIT (ddatype
)
2704 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
2705 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
2706 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
2709 /* All checks succeeded. Build a new node to merge the cast. */
2710 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
2711 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
2712 NULL_TREE
, NULL_TREE
);
2713 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
2715 /* We can have stripped a required restrict qualifier above. */
2716 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
2717 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
2720 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2721 underneath as appropriate. */
2723 static enum gimplify_status
2724 gimplify_conversion (tree
*expr_p
)
2726 location_t loc
= EXPR_LOCATION (*expr_p
);
2727 gcc_assert (CONVERT_EXPR_P (*expr_p
));
2729 /* Then strip away all but the outermost conversion. */
2730 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
2732 /* And remove the outermost conversion if it's useless. */
2733 if (tree_ssa_useless_type_conversion (*expr_p
))
2734 *expr_p
= TREE_OPERAND (*expr_p
, 0);
2736 /* If we still have a conversion at the toplevel,
2737 then canonicalize some constructs. */
2738 if (CONVERT_EXPR_P (*expr_p
))
2740 tree sub
= TREE_OPERAND (*expr_p
, 0);
2742 /* If a NOP conversion is changing the type of a COMPONENT_REF
2743 expression, then canonicalize its type now in order to expose more
2744 redundant conversions. */
2745 if (TREE_CODE (sub
) == COMPONENT_REF
)
2746 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
2748 /* If a NOP conversion is changing a pointer to array of foo
2749 to a pointer to foo, embed that change in the ADDR_EXPR. */
2750 else if (TREE_CODE (sub
) == ADDR_EXPR
)
2751 canonicalize_addr_expr (expr_p
);
2754 /* If we have a conversion to a non-register type force the
2755 use of a VIEW_CONVERT_EXPR instead. */
2756 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
2757 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
2758 TREE_OPERAND (*expr_p
, 0));
2760 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2761 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
2762 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
2767 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2768 DECL_VALUE_EXPR, and it's worth re-examining things. */
2770 static enum gimplify_status
2771 gimplify_var_or_parm_decl (tree
*expr_p
)
2773 tree decl
= *expr_p
;
2775 /* ??? If this is a local variable, and it has not been seen in any
2776 outer BIND_EXPR, then it's probably the result of a duplicate
2777 declaration, for which we've already issued an error. It would
2778 be really nice if the front end wouldn't leak these at all.
2779 Currently the only known culprit is C++ destructors, as seen
2780 in g++.old-deja/g++.jason/binding.C. */
2782 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
2783 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
2784 && decl_function_context (decl
) == current_function_decl
)
2786 gcc_assert (seen_error ());
2790 /* When within an OMP context, notice uses of variables. */
2791 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
2794 /* If the decl is an alias for another expression, substitute it now. */
2795 if (DECL_HAS_VALUE_EXPR_P (decl
))
2797 *expr_p
= unshare_expr (DECL_VALUE_EXPR (decl
));
2804 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2807 recalculate_side_effects (tree t
)
2809 enum tree_code code
= TREE_CODE (t
);
2810 int len
= TREE_OPERAND_LENGTH (t
);
2813 switch (TREE_CODE_CLASS (code
))
2815 case tcc_expression
:
2821 case PREDECREMENT_EXPR
:
2822 case PREINCREMENT_EXPR
:
2823 case POSTDECREMENT_EXPR
:
2824 case POSTINCREMENT_EXPR
:
2825 /* All of these have side-effects, no matter what their
2834 case tcc_comparison
: /* a comparison expression */
2835 case tcc_unary
: /* a unary arithmetic expression */
2836 case tcc_binary
: /* a binary arithmetic expression */
2837 case tcc_reference
: /* a reference */
2838 case tcc_vl_exp
: /* a function call */
2839 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
2840 for (i
= 0; i
< len
; ++i
)
2842 tree op
= TREE_OPERAND (t
, i
);
2843 if (op
&& TREE_SIDE_EFFECTS (op
))
2844 TREE_SIDE_EFFECTS (t
) = 1;
2849 /* No side-effects. */
2857 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2861 : min_lval '[' val ']'
2863 | compound_lval '[' val ']'
2864 | compound_lval '.' ID
2866 This is not part of the original SIMPLE definition, which separates
2867 array and member references, but it seems reasonable to handle them
2868 together. Also, this way we don't run into problems with union
2869 aliasing; gcc requires that for accesses through a union to alias, the
2870 union reference must be explicit, which was not always the case when we
2871 were splitting up array and member refs.
2873 PRE_P points to the sequence where side effects that must happen before
2874 *EXPR_P should be stored.
2876 POST_P points to the sequence where side effects that must happen after
2877 *EXPR_P should be stored. */
2879 static enum gimplify_status
2880 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2881 fallback_t fallback
)
2884 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
2886 location_t loc
= EXPR_LOCATION (*expr_p
);
2887 tree expr
= *expr_p
;
2889 /* Create a stack of the subexpressions so later we can walk them in
2890 order from inner to outer. */
2891 auto_vec
<tree
, 10> expr_stack
;
2893 /* We can handle anything that get_inner_reference can deal with. */
2894 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
2897 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2898 if (TREE_CODE (*p
) == INDIRECT_REF
)
2899 *p
= fold_indirect_ref_loc (loc
, *p
);
2901 if (handled_component_p (*p
))
2903 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2904 additional COMPONENT_REFs. */
2905 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
2906 && gimplify_var_or_parm_decl (p
) == GS_OK
)
2911 expr_stack
.safe_push (*p
);
2914 gcc_assert (expr_stack
.length ());
2916 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2917 walked through and P points to the innermost expression.
2919 Java requires that we elaborated nodes in source order. That
2920 means we must gimplify the inner expression followed by each of
2921 the indices, in order. But we can't gimplify the inner
2922 expression until we deal with any variable bounds, sizes, or
2923 positions in order to deal with PLACEHOLDER_EXPRs.
2925 So we do this in three steps. First we deal with the annotations
2926 for any variables in the components, then we gimplify the base,
2927 then we gimplify any indices, from left to right. */
2928 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
2930 tree t
= expr_stack
[i
];
2932 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2934 /* Gimplify the low bound and element type size and put them into
2935 the ARRAY_REF. If these values are set, they have already been
2937 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2939 tree low
= unshare_expr (array_ref_low_bound (t
));
2940 if (!is_gimple_min_invariant (low
))
2942 TREE_OPERAND (t
, 2) = low
;
2943 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
2944 post_p
, is_gimple_reg
,
2946 ret
= MIN (ret
, tret
);
2951 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
2952 is_gimple_reg
, fb_rvalue
);
2953 ret
= MIN (ret
, tret
);
2956 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
2958 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
2959 tree elmt_size
= unshare_expr (array_ref_element_size (t
));
2960 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
2962 /* Divide the element size by the alignment of the element
2965 = size_binop_loc (loc
, EXACT_DIV_EXPR
, elmt_size
, factor
);
2967 if (!is_gimple_min_invariant (elmt_size
))
2969 TREE_OPERAND (t
, 3) = elmt_size
;
2970 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
2971 post_p
, is_gimple_reg
,
2973 ret
= MIN (ret
, tret
);
2978 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
2979 is_gimple_reg
, fb_rvalue
);
2980 ret
= MIN (ret
, tret
);
2983 else if (TREE_CODE (t
) == COMPONENT_REF
)
2985 /* Set the field offset into T and gimplify it. */
2986 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2988 tree offset
= unshare_expr (component_ref_field_offset (t
));
2989 tree field
= TREE_OPERAND (t
, 1);
2991 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
2993 /* Divide the offset by its alignment. */
2994 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
, offset
, factor
);
2996 if (!is_gimple_min_invariant (offset
))
2998 TREE_OPERAND (t
, 2) = offset
;
2999 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
3000 post_p
, is_gimple_reg
,
3002 ret
= MIN (ret
, tret
);
3007 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3008 is_gimple_reg
, fb_rvalue
);
3009 ret
= MIN (ret
, tret
);
3014 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3015 so as to match the min_lval predicate. Failure to do so may result
3016 in the creation of large aggregate temporaries. */
3017 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
3018 fallback
| fb_lvalue
);
3019 ret
= MIN (ret
, tret
);
3021 /* And finally, the indices and operands of ARRAY_REF. During this
3022 loop we also remove any useless conversions. */
3023 for (; expr_stack
.length () > 0; )
3025 tree t
= expr_stack
.pop ();
3027 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3029 /* Gimplify the dimension. */
3030 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1)))
3032 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
3033 is_gimple_val
, fb_rvalue
);
3034 ret
= MIN (ret
, tret
);
3038 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
3040 /* The innermost expression P may have originally had
3041 TREE_SIDE_EFFECTS set which would have caused all the outer
3042 expressions in *EXPR_P leading to P to also have had
3043 TREE_SIDE_EFFECTS set. */
3044 recalculate_side_effects (t
);
3047 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3048 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
3050 canonicalize_component_ref (expr_p
);
3053 expr_stack
.release ();
3055 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
3060 /* Gimplify the self modifying expression pointed to by EXPR_P
3063 PRE_P points to the list where side effects that must happen before
3064 *EXPR_P should be stored.
3066 POST_P points to the list where side effects that must happen after
3067 *EXPR_P should be stored.
3069 WANT_VALUE is nonzero iff we want to use the value of this expression
3070 in another expression.
3072 ARITH_TYPE is the type the computation should be performed in. */
3074 enum gimplify_status
3075 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3076 bool want_value
, tree arith_type
)
3078 enum tree_code code
;
3079 tree lhs
, lvalue
, rhs
, t1
;
3080 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
3082 enum tree_code arith_code
;
3083 enum gimplify_status ret
;
3084 location_t loc
= EXPR_LOCATION (*expr_p
);
3086 code
= TREE_CODE (*expr_p
);
3088 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
3089 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
3091 /* Prefix or postfix? */
3092 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
3093 /* Faster to treat as prefix if result is not used. */
3094 postfix
= want_value
;
3098 /* For postfix, make sure the inner expression's post side effects
3099 are executed after side effects from this expression. */
3103 /* Add or subtract? */
3104 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
3105 arith_code
= PLUS_EXPR
;
3107 arith_code
= MINUS_EXPR
;
3109 /* Gimplify the LHS into a GIMPLE lvalue. */
3110 lvalue
= TREE_OPERAND (*expr_p
, 0);
3111 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
3112 if (ret
== GS_ERROR
)
3115 /* Extract the operands to the arithmetic operation. */
3117 rhs
= TREE_OPERAND (*expr_p
, 1);
3119 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3120 that as the result value and in the postqueue operation. */
3123 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
3124 if (ret
== GS_ERROR
)
3127 lhs
= get_initialized_tmp_var (lhs
, pre_p
, NULL
);
3130 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3131 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
3133 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
3134 if (arith_code
== MINUS_EXPR
)
3135 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
3136 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
3139 t1
= fold_convert (TREE_TYPE (*expr_p
),
3140 fold_build2 (arith_code
, arith_type
,
3141 fold_convert (arith_type
, lhs
),
3142 fold_convert (arith_type
, rhs
)));
3146 gimplify_assign (lvalue
, t1
, pre_p
);
3147 gimplify_seq_add_seq (orig_post_p
, post
);
3153 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
3158 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3161 maybe_with_size_expr (tree
*expr_p
)
3163 tree expr
= *expr_p
;
3164 tree type
= TREE_TYPE (expr
);
3167 /* If we've already wrapped this or the type is error_mark_node, we can't do
3169 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
3170 || type
== error_mark_node
)
3173 /* If the size isn't known or is a constant, we have nothing to do. */
3174 size
= TYPE_SIZE_UNIT (type
);
3175 if (!size
|| poly_int_tree_p (size
))
3178 /* Otherwise, make a WITH_SIZE_EXPR. */
3179 size
= unshare_expr (size
);
3180 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
3181 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
3184 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3185 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3186 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3187 gimplified to an SSA name. */
3189 enum gimplify_status
3190 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
3193 bool (*test
) (tree
);
3196 /* In general, we allow lvalues for function arguments to avoid
3197 extra overhead of copying large aggregates out of even larger
3198 aggregates into temporaries only to copy the temporaries to
3199 the argument list. Make optimizers happy by pulling out to
3200 temporaries those types that fit in registers. */
3201 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3202 test
= is_gimple_val
, fb
= fb_rvalue
;
3205 test
= is_gimple_lvalue
, fb
= fb_either
;
3206 /* Also strip a TARGET_EXPR that would force an extra copy. */
3207 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3209 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
3211 && !VOID_TYPE_P (TREE_TYPE (init
)))
3216 /* If this is a variable sized type, we must remember the size. */
3217 maybe_with_size_expr (arg_p
);
3219 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3220 /* Make sure arguments have the same location as the function call
3222 protected_set_expr_location (*arg_p
, call_location
);
3224 /* There is a sequence point before a function call. Side effects in
3225 the argument list must occur before the actual call. So, when
3226 gimplifying arguments, force gimplify_expr to use an internal
3227 post queue which is then appended to the end of PRE_P. */
3228 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
3231 /* Don't fold inside offloading or taskreg regions: it can break code by
3232 adding decl references that weren't in the source. We'll do it during
3233 omplower pass instead. */
3236 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3238 struct gimplify_omp_ctx
*ctx
;
3239 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3240 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3242 else if ((ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
3244 /* Delay folding of builtins until the IL is in consistent state
3245 so the diagnostic machinery can do a better job. */
3246 if (gimple_call_builtin_p (gsi_stmt (*gsi
)))
3248 return fold_stmt (gsi
);
3251 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3252 WANT_VALUE is true if the result of the call is desired. */
3254 static enum gimplify_status
3255 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3257 tree fndecl
, parms
, p
, fnptrtype
;
3258 enum gimplify_status ret
;
3261 bool builtin_va_start_p
= false;
3262 location_t loc
= EXPR_LOCATION (*expr_p
);
3264 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3266 /* For reliable diagnostics during inlining, it is necessary that
3267 every call_expr be annotated with file and line. */
3268 if (! EXPR_HAS_LOCATION (*expr_p
))
3269 SET_EXPR_LOCATION (*expr_p
, input_location
);
3271 /* Gimplify internal functions created in the FEs. */
3272 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3277 nargs
= call_expr_nargs (*expr_p
);
3278 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3279 auto_vec
<tree
> vargs (nargs
);
3281 for (i
= 0; i
< nargs
; i
++)
3283 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3284 EXPR_LOCATION (*expr_p
));
3285 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3288 gcall
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3289 gimple_call_set_nothrow (call
, TREE_NOTHROW (*expr_p
));
3290 gimplify_seq_add_stmt (pre_p
, call
);
3294 /* This may be a call to a builtin function.
3296 Builtin function calls may be transformed into different
3297 (and more efficient) builtin function calls under certain
3298 circumstances. Unfortunately, gimplification can muck things
3299 up enough that the builtin expanders are not aware that certain
3300 transformations are still valid.
3302 So we attempt transformation/gimplification of the call before
3303 we gimplify the CALL_EXPR. At this time we do not manage to
3304 transform all calls in the same manner as the expanders do, but
3305 we do transform most of them. */
3306 fndecl
= get_callee_fndecl (*expr_p
);
3307 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3308 switch (DECL_FUNCTION_CODE (fndecl
))
3310 CASE_BUILT_IN_ALLOCA
:
3311 /* If the call has been built for a variable-sized object, then we
3312 want to restore the stack level when the enclosing BIND_EXPR is
3313 exited to reclaim the allocated space; otherwise, we precisely
3314 need to do the opposite and preserve the latest stack level. */
3315 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3316 gimplify_ctxp
->save_stack
= true;
3318 gimplify_ctxp
->keep_stack
= true;
3321 case BUILT_IN_VA_START
:
3323 builtin_va_start_p
= TRUE
;
3324 if (call_expr_nargs (*expr_p
) < 2)
3326 error ("too few arguments to function %<va_start%>");
3327 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3331 if (fold_builtin_next_arg (*expr_p
, true))
3333 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3339 case BUILT_IN_EH_RETURN
:
3340 cfun
->calls_eh_return
= true;
3346 if (fndecl
&& fndecl_built_in_p (fndecl
))
3348 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3349 if (new_tree
&& new_tree
!= *expr_p
)
3351 /* There was a transformation of this call which computes the
3352 same value, but in a more efficient way. Return and try
3359 /* Remember the original function pointer type. */
3360 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
3362 /* There is a sequence point before the call, so any side effects in
3363 the calling expression must occur before the actual call. Force
3364 gimplify_expr to use an internal post queue. */
3365 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3366 is_gimple_call_addr
, fb_rvalue
);
3368 nargs
= call_expr_nargs (*expr_p
);
3370 /* Get argument types for verification. */
3371 fndecl
= get_callee_fndecl (*expr_p
);
3374 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3376 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3378 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3379 p
= DECL_ARGUMENTS (fndecl
);
3384 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3387 /* If the last argument is __builtin_va_arg_pack () and it is not
3388 passed as a named argument, decrease the number of CALL_EXPR
3389 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3392 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3394 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3395 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3398 && fndecl_built_in_p (last_arg_fndecl
, BUILT_IN_VA_ARG_PACK
))
3400 tree call
= *expr_p
;
3403 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
3404 CALL_EXPR_FN (call
),
3405 nargs
, CALL_EXPR_ARGP (call
));
3407 /* Copy all CALL_EXPR flags, location and block, except
3408 CALL_EXPR_VA_ARG_PACK flag. */
3409 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
3410 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
3411 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
3412 = CALL_EXPR_RETURN_SLOT_OPT (call
);
3413 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
3414 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
3416 /* Set CALL_EXPR_VA_ARG_PACK. */
3417 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
3421 /* If the call returns twice then after building the CFG the call
3422 argument computations will no longer dominate the call because
3423 we add an abnormal incoming edge to the call. So do not use SSA
3425 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
3427 /* Gimplify the function arguments. */
3430 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
3431 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
3432 PUSH_ARGS_REVERSED
? i
-- : i
++)
3434 enum gimplify_status t
;
3436 /* Avoid gimplifying the second argument to va_start, which needs to
3437 be the plain PARM_DECL. */
3438 if ((i
!= 1) || !builtin_va_start_p
)
3440 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3441 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3449 /* Gimplify the static chain. */
3450 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
3452 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
3453 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
3456 enum gimplify_status t
;
3457 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
3458 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3464 /* Verify the function result. */
3465 if (want_value
&& fndecl
3466 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
3468 error_at (loc
, "using result of function returning %<void%>");
3472 /* Try this again in case gimplification exposed something. */
3473 if (ret
!= GS_ERROR
)
3475 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3477 if (new_tree
&& new_tree
!= *expr_p
)
3479 /* There was a transformation of this call which computes the
3480 same value, but in a more efficient way. Return and try
3488 *expr_p
= error_mark_node
;
3492 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3493 decl. This allows us to eliminate redundant or useless
3494 calls to "const" functions. */
3495 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
3497 int flags
= call_expr_flags (*expr_p
);
3498 if (flags
& (ECF_CONST
| ECF_PURE
)
3499 /* An infinite loop is considered a side effect. */
3500 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
3501 TREE_SIDE_EFFECTS (*expr_p
) = 0;
3504 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3505 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3506 form and delegate the creation of a GIMPLE_CALL to
3507 gimplify_modify_expr. This is always possible because when
3508 WANT_VALUE is true, the caller wants the result of this call into
3509 a temporary, which means that we will emit an INIT_EXPR in
3510 internal_get_tmp_var which will then be handled by
3511 gimplify_modify_expr. */
3514 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3515 have to do is replicate it as a GIMPLE_CALL tuple. */
3516 gimple_stmt_iterator gsi
;
3517 call
= gimple_build_call_from_tree (*expr_p
, fnptrtype
);
3518 notice_special_calls (call
);
3519 gimplify_seq_add_stmt (pre_p
, call
);
3520 gsi
= gsi_last (*pre_p
);
3521 maybe_fold_stmt (&gsi
);
3522 *expr_p
= NULL_TREE
;
3525 /* Remember the original function type. */
3526 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
3527 CALL_EXPR_FN (*expr_p
));
3532 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3533 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3535 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3536 condition is true or false, respectively. If null, we should generate
3537 our own to skip over the evaluation of this specific expression.
3539 LOCUS is the source location of the COND_EXPR.
3541 This function is the tree equivalent of do_jump.
3543 shortcut_cond_r should only be called by shortcut_cond_expr. */
3546 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
3549 tree local_label
= NULL_TREE
;
3550 tree t
, expr
= NULL
;
3552 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3553 retain the shortcut semantics. Just insert the gotos here;
3554 shortcut_cond_expr will append the real blocks later. */
3555 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3557 location_t new_locus
;
3559 /* Turn if (a && b) into
3561 if (a); else goto no;
3562 if (b) goto yes; else goto no;
3565 if (false_label_p
== NULL
)
3566 false_label_p
= &local_label
;
3568 /* Keep the original source location on the first 'if'. */
3569 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
3570 append_to_statement_list (t
, &expr
);
3572 /* Set the source location of the && on the second 'if'. */
3573 new_locus
= rexpr_location (pred
, locus
);
3574 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3576 append_to_statement_list (t
, &expr
);
3578 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3580 location_t new_locus
;
3582 /* Turn if (a || b) into
3585 if (b) goto yes; else goto no;
3588 if (true_label_p
== NULL
)
3589 true_label_p
= &local_label
;
3591 /* Keep the original source location on the first 'if'. */
3592 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
3593 append_to_statement_list (t
, &expr
);
3595 /* Set the source location of the || on the second 'if'. */
3596 new_locus
= rexpr_location (pred
, locus
);
3597 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3599 append_to_statement_list (t
, &expr
);
3601 else if (TREE_CODE (pred
) == COND_EXPR
3602 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
3603 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
3605 location_t new_locus
;
3607 /* As long as we're messing with gotos, turn if (a ? b : c) into
3609 if (b) goto yes; else goto no;
3611 if (c) goto yes; else goto no;
3613 Don't do this if one of the arms has void type, which can happen
3614 in C++ when the arm is throw. */
3616 /* Keep the original source location on the first 'if'. Set the source
3617 location of the ? on the second 'if'. */
3618 new_locus
= rexpr_location (pred
, locus
);
3619 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
3620 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
3621 false_label_p
, locus
),
3622 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
3623 false_label_p
, new_locus
));
3627 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
3628 build_and_jump (true_label_p
),
3629 build_and_jump (false_label_p
));
3630 SET_EXPR_LOCATION (expr
, locus
);
3635 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
3636 append_to_statement_list (t
, &expr
);
3642 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3643 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3644 statement, if it is the last one. Otherwise, return NULL. */
3647 find_goto (tree expr
)
3652 if (TREE_CODE (expr
) == GOTO_EXPR
)
3655 if (TREE_CODE (expr
) != STATEMENT_LIST
)
3658 tree_stmt_iterator i
= tsi_start (expr
);
3660 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
3663 if (!tsi_one_before_end_p (i
))
3666 return find_goto (tsi_stmt (i
));
3669 /* Same as find_goto, except that it returns NULL if the destination
3670 is not a LABEL_DECL. */
3673 find_goto_label (tree expr
)
3675 tree dest
= find_goto (expr
);
3676 if (dest
&& TREE_CODE (GOTO_DESTINATION (dest
)) == LABEL_DECL
)
3681 /* Given a conditional expression EXPR with short-circuit boolean
3682 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3683 predicate apart into the equivalent sequence of conditionals. */
3686 shortcut_cond_expr (tree expr
)
3688 tree pred
= TREE_OPERAND (expr
, 0);
3689 tree then_
= TREE_OPERAND (expr
, 1);
3690 tree else_
= TREE_OPERAND (expr
, 2);
3691 tree true_label
, false_label
, end_label
, t
;
3693 tree
*false_label_p
;
3694 bool emit_end
, emit_false
, jump_over_else
;
3695 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3696 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3698 /* First do simple transformations. */
3701 /* If there is no 'else', turn
3704 if (a) if (b) then c. */
3705 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3707 /* Keep the original source location on the first 'if'. */
3708 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3709 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3710 /* Set the source location of the && on the second 'if'. */
3711 if (rexpr_has_location (pred
))
3712 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
3713 then_
= shortcut_cond_expr (expr
);
3714 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3715 pred
= TREE_OPERAND (pred
, 0);
3716 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
3717 SET_EXPR_LOCATION (expr
, locus
);
3723 /* If there is no 'then', turn
3726 if (a); else if (b); else d. */
3727 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3729 /* Keep the original source location on the first 'if'. */
3730 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3731 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3732 /* Set the source location of the || on the second 'if'. */
3733 if (rexpr_has_location (pred
))
3734 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
3735 else_
= shortcut_cond_expr (expr
);
3736 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3737 pred
= TREE_OPERAND (pred
, 0);
3738 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
3739 SET_EXPR_LOCATION (expr
, locus
);
3743 /* If we're done, great. */
3744 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
3745 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
3748 /* Otherwise we need to mess with gotos. Change
3751 if (a); else goto no;
3754 and recursively gimplify the condition. */
3756 true_label
= false_label
= end_label
= NULL_TREE
;
3758 /* If our arms just jump somewhere, hijack those labels so we don't
3759 generate jumps to jumps. */
3761 if (tree then_goto
= find_goto_label (then_
))
3763 true_label
= GOTO_DESTINATION (then_goto
);
3768 if (tree else_goto
= find_goto_label (else_
))
3770 false_label
= GOTO_DESTINATION (else_goto
);
3775 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3777 true_label_p
= &true_label
;
3779 true_label_p
= NULL
;
3781 /* The 'else' branch also needs a label if it contains interesting code. */
3782 if (false_label
|| else_se
)
3783 false_label_p
= &false_label
;
3785 false_label_p
= NULL
;
3787 /* If there was nothing else in our arms, just forward the label(s). */
3788 if (!then_se
&& !else_se
)
3789 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3790 EXPR_LOC_OR_LOC (expr
, input_location
));
3792 /* If our last subexpression already has a terminal label, reuse it. */
3794 t
= expr_last (else_
);
3796 t
= expr_last (then_
);
3799 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
3800 end_label
= LABEL_EXPR_LABEL (t
);
3802 /* If we don't care about jumping to the 'else' branch, jump to the end
3803 if the condition is false. */
3805 false_label_p
= &end_label
;
3807 /* We only want to emit these labels if we aren't hijacking them. */
3808 emit_end
= (end_label
== NULL_TREE
);
3809 emit_false
= (false_label
== NULL_TREE
);
3811 /* We only emit the jump over the else clause if we have to--if the
3812 then clause may fall through. Otherwise we can wind up with a
3813 useless jump and a useless label at the end of gimplified code,
3814 which will cause us to think that this conditional as a whole
3815 falls through even if it doesn't. If we then inline a function
3816 which ends with such a condition, that can cause us to issue an
3817 inappropriate warning about control reaching the end of a
3818 non-void function. */
3819 jump_over_else
= block_may_fallthru (then_
);
3821 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3822 EXPR_LOC_OR_LOC (expr
, input_location
));
3825 append_to_statement_list (pred
, &expr
);
3827 append_to_statement_list (then_
, &expr
);
3832 tree last
= expr_last (expr
);
3833 t
= build_and_jump (&end_label
);
3834 if (rexpr_has_location (last
))
3835 SET_EXPR_LOCATION (t
, rexpr_location (last
));
3836 append_to_statement_list (t
, &expr
);
3840 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
3841 append_to_statement_list (t
, &expr
);
3843 append_to_statement_list (else_
, &expr
);
3845 if (emit_end
&& end_label
)
3847 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
3848 append_to_statement_list (t
, &expr
);
3854 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3857 gimple_boolify (tree expr
)
3859 tree type
= TREE_TYPE (expr
);
3860 location_t loc
= EXPR_LOCATION (expr
);
3862 if (TREE_CODE (expr
) == NE_EXPR
3863 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
3864 && integer_zerop (TREE_OPERAND (expr
, 1)))
3866 tree call
= TREE_OPERAND (expr
, 0);
3867 tree fn
= get_callee_fndecl (call
);
3869 /* For __builtin_expect ((long) (x), y) recurse into x as well
3870 if x is truth_value_p. */
3872 && fndecl_built_in_p (fn
, BUILT_IN_EXPECT
)
3873 && call_expr_nargs (call
) == 2)
3875 tree arg
= CALL_EXPR_ARG (call
, 0);
3878 if (TREE_CODE (arg
) == NOP_EXPR
3879 && TREE_TYPE (arg
) == TREE_TYPE (call
))
3880 arg
= TREE_OPERAND (arg
, 0);
3881 if (truth_value_p (TREE_CODE (arg
)))
3883 arg
= gimple_boolify (arg
);
3884 CALL_EXPR_ARG (call
, 0)
3885 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
3891 switch (TREE_CODE (expr
))
3893 case TRUTH_AND_EXPR
:
3895 case TRUTH_XOR_EXPR
:
3896 case TRUTH_ANDIF_EXPR
:
3897 case TRUTH_ORIF_EXPR
:
3898 /* Also boolify the arguments of truth exprs. */
3899 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
3902 case TRUTH_NOT_EXPR
:
3903 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3905 /* These expressions always produce boolean results. */
3906 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3907 TREE_TYPE (expr
) = boolean_type_node
;
3911 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
3913 case annot_expr_ivdep_kind
:
3914 case annot_expr_unroll_kind
:
3915 case annot_expr_no_vector_kind
:
3916 case annot_expr_vector_kind
:
3917 case annot_expr_parallel_kind
:
3918 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3919 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3920 TREE_TYPE (expr
) = boolean_type_node
;
3927 if (COMPARISON_CLASS_P (expr
))
3929 /* There expressions always prduce boolean results. */
3930 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3931 TREE_TYPE (expr
) = boolean_type_node
;
3934 /* Other expressions that get here must have boolean values, but
3935 might need to be converted to the appropriate mode. */
3936 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
3938 return fold_convert_loc (loc
, boolean_type_node
, expr
);
3942 /* Given a conditional expression *EXPR_P without side effects, gimplify
3943 its operands. New statements are inserted to PRE_P. */
3945 static enum gimplify_status
3946 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
3948 tree expr
= *expr_p
, cond
;
3949 enum gimplify_status ret
, tret
;
3950 enum tree_code code
;
3952 cond
= gimple_boolify (COND_EXPR_COND (expr
));
3954 /* We need to handle && and || specially, as their gimplification
3955 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3956 code
= TREE_CODE (cond
);
3957 if (code
== TRUTH_ANDIF_EXPR
)
3958 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
3959 else if (code
== TRUTH_ORIF_EXPR
)
3960 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
3961 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_condexpr
, fb_rvalue
);
3962 COND_EXPR_COND (*expr_p
) = cond
;
3964 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
3965 is_gimple_val
, fb_rvalue
);
3966 ret
= MIN (ret
, tret
);
3967 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
3968 is_gimple_val
, fb_rvalue
);
3970 return MIN (ret
, tret
);
3973 /* Return true if evaluating EXPR could trap.
3974 EXPR is GENERIC, while tree_could_trap_p can be called
3978 generic_expr_could_trap_p (tree expr
)
3982 if (!expr
|| is_gimple_val (expr
))
3985 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
3988 n
= TREE_OPERAND_LENGTH (expr
);
3989 for (i
= 0; i
< n
; i
++)
3990 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
3996 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4005 The second form is used when *EXPR_P is of type void.
4007 PRE_P points to the list where side effects that must happen before
4008 *EXPR_P should be stored. */
4010 static enum gimplify_status
4011 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
4013 tree expr
= *expr_p
;
4014 tree type
= TREE_TYPE (expr
);
4015 location_t loc
= EXPR_LOCATION (expr
);
4016 tree tmp
, arm1
, arm2
;
4017 enum gimplify_status ret
;
4018 tree label_true
, label_false
, label_cont
;
4019 bool have_then_clause_p
, have_else_clause_p
;
4021 enum tree_code pred_code
;
4022 gimple_seq seq
= NULL
;
4024 /* If this COND_EXPR has a value, copy the values into a temporary within
4026 if (!VOID_TYPE_P (type
))
4028 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
4031 /* If either an rvalue is ok or we do not require an lvalue, create the
4032 temporary. But we cannot do that if the type is addressable. */
4033 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
4034 && !TREE_ADDRESSABLE (type
))
4036 if (gimplify_ctxp
->allow_rhs_cond_expr
4037 /* If either branch has side effects or could trap, it can't be
4038 evaluated unconditionally. */
4039 && !TREE_SIDE_EFFECTS (then_
)
4040 && !generic_expr_could_trap_p (then_
)
4041 && !TREE_SIDE_EFFECTS (else_
)
4042 && !generic_expr_could_trap_p (else_
))
4043 return gimplify_pure_cond_expr (expr_p
, pre_p
);
4045 tmp
= create_tmp_var (type
, "iftmp");
4049 /* Otherwise, only create and copy references to the values. */
4052 type
= build_pointer_type (type
);
4054 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4055 then_
= build_fold_addr_expr_loc (loc
, then_
);
4057 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4058 else_
= build_fold_addr_expr_loc (loc
, else_
);
4061 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
4063 tmp
= create_tmp_var (type
, "iftmp");
4064 result
= build_simple_mem_ref_loc (loc
, tmp
);
4067 /* Build the new then clause, `tmp = then_;'. But don't build the
4068 assignment if the value is void; in C++ it can be if it's a throw. */
4069 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4070 TREE_OPERAND (expr
, 1) = build2 (INIT_EXPR
, type
, tmp
, then_
);
4072 /* Similarly, build the new else clause, `tmp = else_;'. */
4073 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4074 TREE_OPERAND (expr
, 2) = build2 (INIT_EXPR
, type
, tmp
, else_
);
4076 TREE_TYPE (expr
) = void_type_node
;
4077 recalculate_side_effects (expr
);
4079 /* Move the COND_EXPR to the prequeue. */
4080 gimplify_stmt (&expr
, pre_p
);
4086 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4087 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
4088 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
4089 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
4091 /* Make sure the condition has BOOLEAN_TYPE. */
4092 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4094 /* Break apart && and || conditions. */
4095 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
4096 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
4098 expr
= shortcut_cond_expr (expr
);
4100 if (expr
!= *expr_p
)
4104 /* We can't rely on gimplify_expr to re-gimplify the expanded
4105 form properly, as cleanups might cause the target labels to be
4106 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4107 set up a conditional context. */
4108 gimple_push_condition ();
4109 gimplify_stmt (expr_p
, &seq
);
4110 gimple_pop_condition (pre_p
);
4111 gimple_seq_add_seq (pre_p
, seq
);
4117 /* Now do the normal gimplification. */
4119 /* Gimplify condition. */
4120 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
, is_gimple_condexpr
,
4122 if (ret
== GS_ERROR
)
4124 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
4126 gimple_push_condition ();
4128 have_then_clause_p
= have_else_clause_p
= false;
4129 label_true
= find_goto_label (TREE_OPERAND (expr
, 1));
4131 && DECL_CONTEXT (GOTO_DESTINATION (label_true
)) == current_function_decl
4132 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4133 have different locations, otherwise we end up with incorrect
4134 location information on the branches. */
4136 || !EXPR_HAS_LOCATION (expr
)
4137 || !rexpr_has_location (label_true
)
4138 || EXPR_LOCATION (expr
) == rexpr_location (label_true
)))
4140 have_then_clause_p
= true;
4141 label_true
= GOTO_DESTINATION (label_true
);
4144 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
4145 label_false
= find_goto_label (TREE_OPERAND (expr
, 2));
4147 && DECL_CONTEXT (GOTO_DESTINATION (label_false
)) == current_function_decl
4148 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4149 have different locations, otherwise we end up with incorrect
4150 location information on the branches. */
4152 || !EXPR_HAS_LOCATION (expr
)
4153 || !rexpr_has_location (label_false
)
4154 || EXPR_LOCATION (expr
) == rexpr_location (label_false
)))
4156 have_else_clause_p
= true;
4157 label_false
= GOTO_DESTINATION (label_false
);
4160 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
4162 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
4164 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
4166 gimple_set_no_warning (cond_stmt
, TREE_NO_WARNING (COND_EXPR_COND (expr
)));
4167 gimplify_seq_add_stmt (&seq
, cond_stmt
);
4168 gimple_stmt_iterator gsi
= gsi_last (seq
);
4169 maybe_fold_stmt (&gsi
);
4171 label_cont
= NULL_TREE
;
4172 if (!have_then_clause_p
)
4174 /* For if (...) {} else { code; } put label_true after
4176 if (TREE_OPERAND (expr
, 1) == NULL_TREE
4177 && !have_else_clause_p
4178 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
4179 label_cont
= label_true
;
4182 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
4183 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
4184 /* For if (...) { code; } else {} or
4185 if (...) { code; } else goto label; or
4186 if (...) { code; return; } else { ... }
4187 label_cont isn't needed. */
4188 if (!have_else_clause_p
4189 && TREE_OPERAND (expr
, 2) != NULL_TREE
4190 && gimple_seq_may_fallthru (seq
))
4193 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
4195 g
= gimple_build_goto (label_cont
);
4197 /* GIMPLE_COND's are very low level; they have embedded
4198 gotos. This particular embedded goto should not be marked
4199 with the location of the original COND_EXPR, as it would
4200 correspond to the COND_EXPR's condition, not the ELSE or the
4201 THEN arms. To avoid marking it with the wrong location, flag
4202 it as "no location". */
4203 gimple_set_do_not_emit_location (g
);
4205 gimplify_seq_add_stmt (&seq
, g
);
4209 if (!have_else_clause_p
)
4211 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
4212 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
4215 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
4217 gimple_pop_condition (pre_p
);
4218 gimple_seq_add_seq (pre_p
, seq
);
4220 if (ret
== GS_ERROR
)
4222 else if (have_then_clause_p
|| have_else_clause_p
)
4226 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4227 expr
= TREE_OPERAND (expr
, 0);
4228 gimplify_stmt (&expr
, pre_p
);
4235 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4236 to be marked addressable.
4238 We cannot rely on such an expression being directly markable if a temporary
4239 has been created by the gimplification. In this case, we create another
4240 temporary and initialize it with a copy, which will become a store after we
4241 mark it addressable. This can happen if the front-end passed us something
4242 that it could not mark addressable yet, like a Fortran pass-by-reference
4243 parameter (int) floatvar. */
4246 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
4248 while (handled_component_p (*expr_p
))
4249 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4250 if (is_gimple_reg (*expr_p
))
4252 /* Do not allow an SSA name as the temporary. */
4253 tree var
= get_initialized_tmp_var (*expr_p
, seq_p
, NULL
, false);
4254 DECL_GIMPLE_REG_P (var
) = 0;
4259 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4260 a call to __builtin_memcpy. */
4262 static enum gimplify_status
4263 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4266 tree t
, to
, to_ptr
, from
, from_ptr
;
4268 location_t loc
= EXPR_LOCATION (*expr_p
);
4270 to
= TREE_OPERAND (*expr_p
, 0);
4271 from
= TREE_OPERAND (*expr_p
, 1);
4273 /* Mark the RHS addressable. Beware that it may not be possible to do so
4274 directly if a temporary has been created by the gimplification. */
4275 prepare_gimple_addressable (&from
, seq_p
);
4277 mark_addressable (from
);
4278 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4279 gimplify_arg (&from_ptr
, seq_p
, loc
);
4281 mark_addressable (to
);
4282 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4283 gimplify_arg (&to_ptr
, seq_p
, loc
);
4285 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4287 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
4291 /* tmp = memcpy() */
4292 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4293 gimple_call_set_lhs (gs
, t
);
4294 gimplify_seq_add_stmt (seq_p
, gs
);
4296 *expr_p
= build_simple_mem_ref (t
);
4300 gimplify_seq_add_stmt (seq_p
, gs
);
4305 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4306 a call to __builtin_memset. In this case we know that the RHS is
4307 a CONSTRUCTOR with an empty element list. */
4309 static enum gimplify_status
4310 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4313 tree t
, from
, to
, to_ptr
;
4315 location_t loc
= EXPR_LOCATION (*expr_p
);
4317 /* Assert our assumptions, to abort instead of producing wrong code
4318 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4319 not be immediately exposed. */
4320 from
= TREE_OPERAND (*expr_p
, 1);
4321 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4322 from
= TREE_OPERAND (from
, 0);
4324 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4325 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4328 to
= TREE_OPERAND (*expr_p
, 0);
4330 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4331 gimplify_arg (&to_ptr
, seq_p
, loc
);
4332 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4334 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4338 /* tmp = memset() */
4339 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4340 gimple_call_set_lhs (gs
, t
);
4341 gimplify_seq_add_stmt (seq_p
, gs
);
4343 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
4347 gimplify_seq_add_stmt (seq_p
, gs
);
4352 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4353 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4354 assignment. Return non-null if we detect a potential overlap. */
4356 struct gimplify_init_ctor_preeval_data
4358 /* The base decl of the lhs object. May be NULL, in which case we
4359 have to assume the lhs is indirect. */
4362 /* The alias set of the lhs object. */
4363 alias_set_type lhs_alias_set
;
4367 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4369 struct gimplify_init_ctor_preeval_data
*data
4370 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4373 /* If we find the base object, obviously we have overlap. */
4374 if (data
->lhs_base_decl
== t
)
4377 /* If the constructor component is indirect, determine if we have a
4378 potential overlap with the lhs. The only bits of information we
4379 have to go on at this point are addressability and alias sets. */
4380 if ((INDIRECT_REF_P (t
)
4381 || TREE_CODE (t
) == MEM_REF
)
4382 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4383 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
4386 /* If the constructor component is a call, determine if it can hide a
4387 potential overlap with the lhs through an INDIRECT_REF like above.
4388 ??? Ugh - this is completely broken. In fact this whole analysis
4389 doesn't look conservative. */
4390 if (TREE_CODE (t
) == CALL_EXPR
)
4392 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
4394 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
4395 if (POINTER_TYPE_P (TREE_VALUE (type
))
4396 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4397 && alias_sets_conflict_p (data
->lhs_alias_set
,
4399 (TREE_TYPE (TREE_VALUE (type
)))))
4403 if (IS_TYPE_OR_DECL_P (t
))
4408 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4409 force values that overlap with the lhs (as described by *DATA)
4410 into temporaries. */
4413 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4414 struct gimplify_init_ctor_preeval_data
*data
)
4416 enum gimplify_status one
;
4418 /* If the value is constant, then there's nothing to pre-evaluate. */
4419 if (TREE_CONSTANT (*expr_p
))
4421 /* Ensure it does not have side effects, it might contain a reference to
4422 the object we're initializing. */
4423 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
4427 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4428 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
4431 /* Recurse for nested constructors. */
4432 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
4434 unsigned HOST_WIDE_INT ix
;
4435 constructor_elt
*ce
;
4436 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
4438 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
4439 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
4444 /* If this is a variable sized type, we must remember the size. */
4445 maybe_with_size_expr (expr_p
);
4447 /* Gimplify the constructor element to something appropriate for the rhs
4448 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4449 the gimplifier will consider this a store to memory. Doing this
4450 gimplification now means that we won't have to deal with complicated
4451 language-specific trees, nor trees like SAVE_EXPR that can induce
4452 exponential search behavior. */
4453 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
4454 if (one
== GS_ERROR
)
4460 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4461 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4462 always be true for all scalars, since is_gimple_mem_rhs insists on a
4463 temporary variable for them. */
4464 if (DECL_P (*expr_p
))
4467 /* If this is of variable size, we have no choice but to assume it doesn't
4468 overlap since we can't make a temporary for it. */
4469 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
4472 /* Otherwise, we must search for overlap ... */
4473 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
4476 /* ... and if found, force the value into a temporary. */
4477 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
4480 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4481 a RANGE_EXPR in a CONSTRUCTOR for an array.
4485 object[var] = value;
4492 We increment var _after_ the loop exit check because we might otherwise
4493 fail if upper == TYPE_MAX_VALUE (type for upper).
4495 Note that we never have to deal with SAVE_EXPRs here, because this has
4496 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4498 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
4499 gimple_seq
*, bool);
4502 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
4503 tree value
, tree array_elt_type
,
4504 gimple_seq
*pre_p
, bool cleared
)
4506 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
4507 tree var
, var_type
, cref
, tmp
;
4509 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
4510 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
4511 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
4513 /* Create and initialize the index variable. */
4514 var_type
= TREE_TYPE (upper
);
4515 var
= create_tmp_var (var_type
);
4516 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
4518 /* Add the loop entry label. */
4519 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
4521 /* Build the reference. */
4522 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4523 var
, NULL_TREE
, NULL_TREE
);
4525 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4526 the store. Otherwise just assign value to the reference. */
4528 if (TREE_CODE (value
) == CONSTRUCTOR
)
4529 /* NB we might have to call ourself recursively through
4530 gimplify_init_ctor_eval if the value is a constructor. */
4531 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4534 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
4536 /* We exit the loop when the index var is equal to the upper bound. */
4537 gimplify_seq_add_stmt (pre_p
,
4538 gimple_build_cond (EQ_EXPR
, var
, upper
,
4539 loop_exit_label
, fall_thru_label
));
4541 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
4543 /* Otherwise, increment the index var... */
4544 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
4545 fold_convert (var_type
, integer_one_node
));
4546 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
4548 /* ...and jump back to the loop entry. */
4549 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
4551 /* Add the loop exit label. */
4552 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
4555 /* Return true if FDECL is accessing a field that is zero sized. */
4558 zero_sized_field_decl (const_tree fdecl
)
4560 if (TREE_CODE (fdecl
) == FIELD_DECL
&& DECL_SIZE (fdecl
)
4561 && integer_zerop (DECL_SIZE (fdecl
)))
4566 /* Return true if TYPE is zero sized. */
4569 zero_sized_type (const_tree type
)
4571 if (AGGREGATE_TYPE_P (type
) && TYPE_SIZE (type
)
4572 && integer_zerop (TYPE_SIZE (type
)))
4577 /* A subroutine of gimplify_init_constructor. Generate individual
4578 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4579 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4580 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4584 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
4585 gimple_seq
*pre_p
, bool cleared
)
4587 tree array_elt_type
= NULL
;
4588 unsigned HOST_WIDE_INT ix
;
4589 tree purpose
, value
;
4591 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
4592 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
4594 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
4598 /* NULL values are created above for gimplification errors. */
4602 if (cleared
&& initializer_zerop (value
))
4605 /* ??? Here's to hoping the front end fills in all of the indices,
4606 so we don't have to figure out what's missing ourselves. */
4607 gcc_assert (purpose
);
4609 /* Skip zero-sized fields, unless value has side-effects. This can
4610 happen with calls to functions returning a zero-sized type, which
4611 we shouldn't discard. As a number of downstream passes don't
4612 expect sets of zero-sized fields, we rely on the gimplification of
4613 the MODIFY_EXPR we make below to drop the assignment statement. */
4614 if (! TREE_SIDE_EFFECTS (value
) && zero_sized_field_decl (purpose
))
4617 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4619 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4621 tree lower
= TREE_OPERAND (purpose
, 0);
4622 tree upper
= TREE_OPERAND (purpose
, 1);
4624 /* If the lower bound is equal to upper, just treat it as if
4625 upper was the index. */
4626 if (simple_cst_equal (lower
, upper
))
4630 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
4631 array_elt_type
, pre_p
, cleared
);
4638 /* Do not use bitsizetype for ARRAY_REF indices. */
4639 if (TYPE_DOMAIN (TREE_TYPE (object
)))
4641 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
4643 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4644 purpose
, NULL_TREE
, NULL_TREE
);
4648 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
4649 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
4650 unshare_expr (object
), purpose
, NULL_TREE
);
4653 if (TREE_CODE (value
) == CONSTRUCTOR
4654 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
4655 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4659 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
4660 gimplify_and_add (init
, pre_p
);
4666 /* Return the appropriate RHS predicate for this LHS. */
4669 rhs_predicate_for (tree lhs
)
4671 if (is_gimple_reg (lhs
))
4672 return is_gimple_reg_rhs_or_call
;
4674 return is_gimple_mem_rhs_or_call
;
4677 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4678 before the LHS has been gimplified. */
4680 static gimple_predicate
4681 initial_rhs_predicate_for (tree lhs
)
4683 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
4684 return is_gimple_reg_rhs_or_call
;
4686 return is_gimple_mem_rhs_or_call
;
4689 /* Gimplify a C99 compound literal expression. This just means adding
4690 the DECL_EXPR before the current statement and using its anonymous
4693 static enum gimplify_status
4694 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
4695 bool (*gimple_test_f
) (tree
),
4696 fallback_t fallback
)
4698 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
4699 tree decl
= DECL_EXPR_DECL (decl_s
);
4700 tree init
= DECL_INITIAL (decl
);
4701 /* Mark the decl as addressable if the compound literal
4702 expression is addressable now, otherwise it is marked too late
4703 after we gimplify the initialization expression. */
4704 if (TREE_ADDRESSABLE (*expr_p
))
4705 TREE_ADDRESSABLE (decl
) = 1;
4706 /* Otherwise, if we don't need an lvalue and have a literal directly
4707 substitute it. Check if it matches the gimple predicate, as
4708 otherwise we'd generate a new temporary, and we can as well just
4709 use the decl we already have. */
4710 else if (!TREE_ADDRESSABLE (decl
)
4711 && !TREE_THIS_VOLATILE (decl
)
4713 && (fallback
& fb_lvalue
) == 0
4714 && gimple_test_f (init
))
4720 /* Preliminarily mark non-addressed complex variables as eligible
4721 for promotion to gimple registers. We'll transform their uses
4723 if ((TREE_CODE (TREE_TYPE (decl
)) == COMPLEX_TYPE
4724 || TREE_CODE (TREE_TYPE (decl
)) == VECTOR_TYPE
)
4725 && !TREE_THIS_VOLATILE (decl
)
4726 && !needs_to_live_in_memory (decl
))
4727 DECL_GIMPLE_REG_P (decl
) = 1;
4729 /* If the decl is not addressable, then it is being used in some
4730 expression or on the right hand side of a statement, and it can
4731 be put into a readonly data section. */
4732 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
4733 TREE_READONLY (decl
) = 1;
4735 /* This decl isn't mentioned in the enclosing block, so add it to the
4736 list of temps. FIXME it seems a bit of a kludge to say that
4737 anonymous artificial vars aren't pushed, but everything else is. */
4738 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
4739 gimple_add_tmp_var (decl
);
4741 gimplify_and_add (decl_s
, pre_p
);
4746 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4747 return a new CONSTRUCTOR if something changed. */
4750 optimize_compound_literals_in_ctor (tree orig_ctor
)
4752 tree ctor
= orig_ctor
;
4753 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
4754 unsigned int idx
, num
= vec_safe_length (elts
);
4756 for (idx
= 0; idx
< num
; idx
++)
4758 tree value
= (*elts
)[idx
].value
;
4759 tree newval
= value
;
4760 if (TREE_CODE (value
) == CONSTRUCTOR
)
4761 newval
= optimize_compound_literals_in_ctor (value
);
4762 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
4764 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
4765 tree decl
= DECL_EXPR_DECL (decl_s
);
4766 tree init
= DECL_INITIAL (decl
);
4768 if (!TREE_ADDRESSABLE (value
)
4769 && !TREE_ADDRESSABLE (decl
)
4771 && TREE_CODE (init
) == CONSTRUCTOR
)
4772 newval
= optimize_compound_literals_in_ctor (init
);
4774 if (newval
== value
)
4777 if (ctor
== orig_ctor
)
4779 ctor
= copy_node (orig_ctor
);
4780 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
4781 elts
= CONSTRUCTOR_ELTS (ctor
);
4783 (*elts
)[idx
].value
= newval
;
4788 /* A subroutine of gimplify_modify_expr. Break out elements of a
4789 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4791 Note that we still need to clear any elements that don't have explicit
4792 initializers, so if not all elements are initialized we keep the
4793 original MODIFY_EXPR, we just remove all of the constructor elements.
4795 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4796 GS_ERROR if we would have to create a temporary when gimplifying
4797 this constructor. Otherwise, return GS_OK.
4799 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4801 static enum gimplify_status
4802 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4803 bool want_value
, bool notify_temp_creation
)
4805 tree object
, ctor
, type
;
4806 enum gimplify_status ret
;
4807 vec
<constructor_elt
, va_gc
> *elts
;
4809 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
4811 if (!notify_temp_creation
)
4813 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
4814 is_gimple_lvalue
, fb_lvalue
);
4815 if (ret
== GS_ERROR
)
4819 object
= TREE_OPERAND (*expr_p
, 0);
4820 ctor
= TREE_OPERAND (*expr_p
, 1)
4821 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
4822 type
= TREE_TYPE (ctor
);
4823 elts
= CONSTRUCTOR_ELTS (ctor
);
4826 switch (TREE_CODE (type
))
4830 case QUAL_UNION_TYPE
:
4833 struct gimplify_init_ctor_preeval_data preeval_data
;
4834 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
4835 HOST_WIDE_INT num_unique_nonzero_elements
;
4836 bool cleared
, complete_p
, valid_const_initializer
;
4837 /* Use readonly data for initializers of this or smaller size
4838 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4840 const HOST_WIDE_INT min_unique_size
= 64;
4841 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4842 is smaller than this, use readonly data. */
4843 const int unique_nonzero_ratio
= 8;
4845 /* Aggregate types must lower constructors to initialization of
4846 individual elements. The exception is that a CONSTRUCTOR node
4847 with no elements indicates zero-initialization of the whole. */
4848 if (vec_safe_is_empty (elts
))
4850 if (notify_temp_creation
)
4855 /* Fetch information about the constructor to direct later processing.
4856 We might want to make static versions of it in various cases, and
4857 can only do so if it known to be a valid constant initializer. */
4858 valid_const_initializer
4859 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
4860 &num_unique_nonzero_elements
,
4861 &num_ctor_elements
, &complete_p
);
4863 /* If a const aggregate variable is being initialized, then it
4864 should never be a lose to promote the variable to be static. */
4865 if (valid_const_initializer
4866 && num_nonzero_elements
> 1
4867 && TREE_READONLY (object
)
4869 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
))
4870 /* For ctors that have many repeated nonzero elements
4871 represented through RANGE_EXPRs, prefer initializing
4872 those through runtime loops over copies of large amounts
4873 of data from readonly data section. */
4874 && (num_unique_nonzero_elements
4875 > num_nonzero_elements
/ unique_nonzero_ratio
4876 || ((unsigned HOST_WIDE_INT
) int_size_in_bytes (type
)
4877 <= (unsigned HOST_WIDE_INT
) min_unique_size
)))
4879 if (notify_temp_creation
)
4881 DECL_INITIAL (object
) = ctor
;
4882 TREE_STATIC (object
) = 1;
4883 if (!DECL_NAME (object
))
4884 DECL_NAME (object
) = create_tmp_var_name ("C");
4885 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
4887 /* ??? C++ doesn't automatically append a .<number> to the
4888 assembler name, and even when it does, it looks at FE private
4889 data structures to figure out what that number should be,
4890 which are not set for this variable. I suppose this is
4891 important for local statics for inline functions, which aren't
4892 "local" in the object file sense. So in order to get a unique
4893 TU-local symbol, we must invoke the lhd version now. */
4894 lhd_set_decl_assembler_name (object
);
4896 *expr_p
= NULL_TREE
;
4900 /* If there are "lots" of initialized elements, even discounting
4901 those that are not address constants (and thus *must* be
4902 computed at runtime), then partition the constructor into
4903 constant and non-constant parts. Block copy the constant
4904 parts in, then generate code for the non-constant parts. */
4905 /* TODO. There's code in cp/typeck.c to do this. */
4907 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
4908 /* store_constructor will ignore the clearing of variable-sized
4909 objects. Initializers for such objects must explicitly set
4910 every field that needs to be set. */
4912 else if (!complete_p
)
4913 /* If the constructor isn't complete, clear the whole object
4914 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4916 ??? This ought not to be needed. For any element not present
4917 in the initializer, we should simply set them to zero. Except
4918 we'd need to *find* the elements that are not present, and that
4919 requires trickery to avoid quadratic compile-time behavior in
4920 large cases or excessive memory use in small cases. */
4921 cleared
= !CONSTRUCTOR_NO_CLEARING (ctor
);
4922 else if (num_ctor_elements
- num_nonzero_elements
4923 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
4924 && num_nonzero_elements
< num_ctor_elements
/ 4)
4925 /* If there are "lots" of zeros, it's more efficient to clear
4926 the memory and then set the nonzero elements. */
4931 /* If there are "lots" of initialized elements, and all of them
4932 are valid address constants, then the entire initializer can
4933 be dropped to memory, and then memcpy'd out. Don't do this
4934 for sparse arrays, though, as it's more efficient to follow
4935 the standard CONSTRUCTOR behavior of memset followed by
4936 individual element initialization. Also don't do this for small
4937 all-zero initializers (which aren't big enough to merit
4938 clearing), and don't try to make bitwise copies of
4939 TREE_ADDRESSABLE types. */
4941 if (valid_const_initializer
4942 && !(cleared
|| num_nonzero_elements
== 0)
4943 && !TREE_ADDRESSABLE (type
))
4945 HOST_WIDE_INT size
= int_size_in_bytes (type
);
4948 /* ??? We can still get unbounded array types, at least
4949 from the C++ front end. This seems wrong, but attempt
4950 to work around it for now. */
4953 size
= int_size_in_bytes (TREE_TYPE (object
));
4955 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
4958 /* Find the maximum alignment we can assume for the object. */
4959 /* ??? Make use of DECL_OFFSET_ALIGN. */
4960 if (DECL_P (object
))
4961 align
= DECL_ALIGN (object
);
4963 align
= TYPE_ALIGN (type
);
4965 /* Do a block move either if the size is so small as to make
4966 each individual move a sub-unit move on average, or if it
4967 is so large as to make individual moves inefficient. */
4969 && num_nonzero_elements
> 1
4970 /* For ctors that have many repeated nonzero elements
4971 represented through RANGE_EXPRs, prefer initializing
4972 those through runtime loops over copies of large amounts
4973 of data from readonly data section. */
4974 && (num_unique_nonzero_elements
4975 > num_nonzero_elements
/ unique_nonzero_ratio
4976 || size
<= min_unique_size
)
4977 && (size
< num_nonzero_elements
4978 || !can_move_by_pieces (size
, align
)))
4980 if (notify_temp_creation
)
4983 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
4984 ctor
= tree_output_constant_def (ctor
);
4985 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
4986 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
4987 TREE_OPERAND (*expr_p
, 1) = ctor
;
4989 /* This is no longer an assignment of a CONSTRUCTOR, but
4990 we still may have processing to do on the LHS. So
4991 pretend we didn't do anything here to let that happen. */
4992 return GS_UNHANDLED
;
4996 /* If the target is volatile, we have non-zero elements and more than
4997 one field to assign, initialize the target from a temporary. */
4998 if (TREE_THIS_VOLATILE (object
)
4999 && !TREE_ADDRESSABLE (type
)
5000 && num_nonzero_elements
> 0
5001 && vec_safe_length (elts
) > 1)
5003 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
5004 TREE_OPERAND (*expr_p
, 0) = temp
;
5005 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
5007 build2 (MODIFY_EXPR
, void_type_node
,
5012 if (notify_temp_creation
)
5015 /* If there are nonzero elements and if needed, pre-evaluate to capture
5016 elements overlapping with the lhs into temporaries. We must do this
5017 before clearing to fetch the values before they are zeroed-out. */
5018 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
5020 preeval_data
.lhs_base_decl
= get_base_address (object
);
5021 if (!DECL_P (preeval_data
.lhs_base_decl
))
5022 preeval_data
.lhs_base_decl
= NULL
;
5023 preeval_data
.lhs_alias_set
= get_alias_set (object
);
5025 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
5026 pre_p
, post_p
, &preeval_data
);
5029 bool ctor_has_side_effects_p
5030 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
5034 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5035 Note that we still have to gimplify, in order to handle the
5036 case of variable sized types. Avoid shared tree structures. */
5037 CONSTRUCTOR_ELTS (ctor
) = NULL
;
5038 TREE_SIDE_EFFECTS (ctor
) = 0;
5039 object
= unshare_expr (object
);
5040 gimplify_stmt (expr_p
, pre_p
);
5043 /* If we have not block cleared the object, or if there are nonzero
5044 elements in the constructor, or if the constructor has side effects,
5045 add assignments to the individual scalar fields of the object. */
5047 || num_nonzero_elements
> 0
5048 || ctor_has_side_effects_p
)
5049 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
);
5051 *expr_p
= NULL_TREE
;
5059 if (notify_temp_creation
)
5062 /* Extract the real and imaginary parts out of the ctor. */
5063 gcc_assert (elts
->length () == 2);
5064 r
= (*elts
)[0].value
;
5065 i
= (*elts
)[1].value
;
5066 if (r
== NULL
|| i
== NULL
)
5068 tree zero
= build_zero_cst (TREE_TYPE (type
));
5075 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5076 represent creation of a complex value. */
5077 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
5079 ctor
= build_complex (type
, r
, i
);
5080 TREE_OPERAND (*expr_p
, 1) = ctor
;
5084 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
5085 TREE_OPERAND (*expr_p
, 1) = ctor
;
5086 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
5089 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
5097 unsigned HOST_WIDE_INT ix
;
5098 constructor_elt
*ce
;
5100 if (notify_temp_creation
)
5103 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5104 if (TREE_CONSTANT (ctor
))
5106 bool constant_p
= true;
5109 /* Even when ctor is constant, it might contain non-*_CST
5110 elements, such as addresses or trapping values like
5111 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5112 in VECTOR_CST nodes. */
5113 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
5114 if (!CONSTANT_CLASS_P (value
))
5122 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
5126 TREE_CONSTANT (ctor
) = 0;
5129 /* Vector types use CONSTRUCTOR all the way through gimple
5130 compilation as a general initializer. */
5131 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
5133 enum gimplify_status tret
;
5134 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
5136 if (tret
== GS_ERROR
)
5138 else if (TREE_STATIC (ctor
)
5139 && !initializer_constant_valid_p (ce
->value
,
5140 TREE_TYPE (ce
->value
)))
5141 TREE_STATIC (ctor
) = 0;
5143 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
5144 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
5149 /* So how did we get a CONSTRUCTOR for a scalar type? */
5153 if (ret
== GS_ERROR
)
5155 /* If we have gimplified both sides of the initializer but have
5156 not emitted an assignment, do so now. */
5159 tree lhs
= TREE_OPERAND (*expr_p
, 0);
5160 tree rhs
= TREE_OPERAND (*expr_p
, 1);
5161 if (want_value
&& object
== lhs
)
5162 lhs
= unshare_expr (lhs
);
5163 gassign
*init
= gimple_build_assign (lhs
, rhs
);
5164 gimplify_seq_add_stmt (pre_p
, init
);
5178 /* Given a pointer value OP0, return a simplified version of an
5179 indirection through OP0, or NULL_TREE if no simplification is
5180 possible. This may only be applied to a rhs of an expression.
5181 Note that the resulting type may be different from the type pointed
5182 to in the sense that it is still compatible from the langhooks
5186 gimple_fold_indirect_ref_rhs (tree t
)
5188 return gimple_fold_indirect_ref (t
);
5191 /* Subroutine of gimplify_modify_expr to do simplifications of
5192 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5193 something changes. */
5195 static enum gimplify_status
5196 gimplify_modify_expr_rhs (tree
*expr_p
, tree
*from_p
, tree
*to_p
,
5197 gimple_seq
*pre_p
, gimple_seq
*post_p
,
5200 enum gimplify_status ret
= GS_UNHANDLED
;
5206 switch (TREE_CODE (*from_p
))
5209 /* If we're assigning from a read-only variable initialized with
5210 a constructor, do the direct assignment from the constructor,
5211 but only if neither source nor target are volatile since this
5212 latter assignment might end up being done on a per-field basis. */
5213 if (DECL_INITIAL (*from_p
)
5214 && TREE_READONLY (*from_p
)
5215 && !TREE_THIS_VOLATILE (*from_p
)
5216 && !TREE_THIS_VOLATILE (*to_p
)
5217 && TREE_CODE (DECL_INITIAL (*from_p
)) == CONSTRUCTOR
)
5219 tree old_from
= *from_p
;
5220 enum gimplify_status subret
;
5222 /* Move the constructor into the RHS. */
5223 *from_p
= unshare_expr (DECL_INITIAL (*from_p
));
5225 /* Let's see if gimplify_init_constructor will need to put
5227 subret
= gimplify_init_constructor (expr_p
, NULL
, NULL
,
5229 if (subret
== GS_ERROR
)
5231 /* If so, revert the change. */
5243 /* If we have code like
5247 where the type of "x" is a (possibly cv-qualified variant
5248 of "A"), treat the entire expression as identical to "x".
5249 This kind of code arises in C++ when an object is bound
5250 to a const reference, and if "x" is a TARGET_EXPR we want
5251 to take advantage of the optimization below. */
5252 bool volatile_p
= TREE_THIS_VOLATILE (*from_p
);
5253 tree t
= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p
, 0));
5256 if (TREE_THIS_VOLATILE (t
) != volatile_p
)
5259 t
= build_simple_mem_ref_loc (EXPR_LOCATION (*from_p
),
5260 build_fold_addr_expr (t
));
5261 if (REFERENCE_CLASS_P (t
))
5262 TREE_THIS_VOLATILE (t
) = volatile_p
;
5273 /* If we are initializing something from a TARGET_EXPR, strip the
5274 TARGET_EXPR and initialize it directly, if possible. This can't
5275 be done if the initializer is void, since that implies that the
5276 temporary is set in some non-trivial way.
5278 ??? What about code that pulls out the temp and uses it
5279 elsewhere? I think that such code never uses the TARGET_EXPR as
5280 an initializer. If I'm wrong, we'll die because the temp won't
5281 have any RTL. In that case, I guess we'll need to replace
5282 references somehow. */
5283 tree init
= TARGET_EXPR_INITIAL (*from_p
);
5286 && (TREE_CODE (*expr_p
) != MODIFY_EXPR
5287 || !TARGET_EXPR_NO_ELIDE (*from_p
))
5288 && !VOID_TYPE_P (TREE_TYPE (init
)))
5298 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5300 gimplify_compound_expr (from_p
, pre_p
, true);
5306 /* If we already made some changes, let the front end have a
5307 crack at this before we break it down. */
5308 if (ret
!= GS_UNHANDLED
)
5310 /* If we're initializing from a CONSTRUCTOR, break this into
5311 individual MODIFY_EXPRs. */
5312 return gimplify_init_constructor (expr_p
, pre_p
, post_p
, want_value
,
5316 /* If we're assigning to a non-register type, push the assignment
5317 down into the branches. This is mandatory for ADDRESSABLE types,
5318 since we cannot generate temporaries for such, but it saves a
5319 copy in other cases as well. */
5320 if (!is_gimple_reg_type (TREE_TYPE (*from_p
)))
5322 /* This code should mirror the code in gimplify_cond_expr. */
5323 enum tree_code code
= TREE_CODE (*expr_p
);
5324 tree cond
= *from_p
;
5325 tree result
= *to_p
;
5327 ret
= gimplify_expr (&result
, pre_p
, post_p
,
5328 is_gimple_lvalue
, fb_lvalue
);
5329 if (ret
!= GS_ERROR
)
5332 /* If we are going to write RESULT more than once, clear
5333 TREE_READONLY flag, otherwise we might incorrectly promote
5334 the variable to static const and initialize it at compile
5335 time in one of the branches. */
5337 && TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
5338 && TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5339 TREE_READONLY (result
) = 0;
5340 if (TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
)
5341 TREE_OPERAND (cond
, 1)
5342 = build2 (code
, void_type_node
, result
,
5343 TREE_OPERAND (cond
, 1));
5344 if (TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5345 TREE_OPERAND (cond
, 2)
5346 = build2 (code
, void_type_node
, unshare_expr (result
),
5347 TREE_OPERAND (cond
, 2));
5349 TREE_TYPE (cond
) = void_type_node
;
5350 recalculate_side_effects (cond
);
5354 gimplify_and_add (cond
, pre_p
);
5355 *expr_p
= unshare_expr (result
);
5364 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5365 return slot so that we don't generate a temporary. */
5366 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p
)
5367 && aggregate_value_p (*from_p
, *from_p
))
5371 if (!(rhs_predicate_for (*to_p
))(*from_p
))
5372 /* If we need a temporary, *to_p isn't accurate. */
5374 /* It's OK to use the return slot directly unless it's an NRV. */
5375 else if (TREE_CODE (*to_p
) == RESULT_DECL
5376 && DECL_NAME (*to_p
) == NULL_TREE
5377 && needs_to_live_in_memory (*to_p
))
5379 else if (is_gimple_reg_type (TREE_TYPE (*to_p
))
5380 || (DECL_P (*to_p
) && DECL_REGISTER (*to_p
)))
5381 /* Don't force regs into memory. */
5383 else if (TREE_CODE (*expr_p
) == INIT_EXPR
)
5384 /* It's OK to use the target directly if it's being
5387 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p
)))
5389 /* Always use the target and thus RSO for variable-sized types.
5390 GIMPLE cannot deal with a variable-sized assignment
5391 embedded in a call statement. */
5393 else if (TREE_CODE (*to_p
) != SSA_NAME
5394 && (!is_gimple_variable (*to_p
)
5395 || needs_to_live_in_memory (*to_p
)))
5396 /* Don't use the original target if it's already addressable;
5397 if its address escapes, and the called function uses the
5398 NRV optimization, a conforming program could see *to_p
5399 change before the called function returns; see c++/19317.
5400 When optimizing, the return_slot pass marks more functions
5401 as safe after we have escape info. */
5408 CALL_EXPR_RETURN_SLOT_OPT (*from_p
) = 1;
5409 mark_addressable (*to_p
);
5414 case WITH_SIZE_EXPR
:
5415 /* Likewise for calls that return an aggregate of non-constant size,
5416 since we would not be able to generate a temporary at all. */
5417 if (TREE_CODE (TREE_OPERAND (*from_p
, 0)) == CALL_EXPR
)
5419 *from_p
= TREE_OPERAND (*from_p
, 0);
5420 /* We don't change ret in this case because the
5421 WITH_SIZE_EXPR might have been added in
5422 gimplify_modify_expr, so returning GS_OK would lead to an
5428 /* If we're initializing from a container, push the initialization
5430 case CLEANUP_POINT_EXPR
:
5432 case STATEMENT_LIST
:
5434 tree wrap
= *from_p
;
5437 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_min_lval
,
5439 if (ret
!= GS_ERROR
)
5442 t
= voidify_wrapper_expr (wrap
, *expr_p
);
5443 gcc_assert (t
== *expr_p
);
5447 gimplify_and_add (wrap
, pre_p
);
5448 *expr_p
= unshare_expr (*to_p
);
5455 case COMPOUND_LITERAL_EXPR
:
5457 tree complit
= TREE_OPERAND (*expr_p
, 1);
5458 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (complit
);
5459 tree decl
= DECL_EXPR_DECL (decl_s
);
5460 tree init
= DECL_INITIAL (decl
);
5462 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5463 into struct T x = { 0, 1, 2 } if the address of the
5464 compound literal has never been taken. */
5465 if (!TREE_ADDRESSABLE (complit
)
5466 && !TREE_ADDRESSABLE (decl
)
5469 *expr_p
= copy_node (*expr_p
);
5470 TREE_OPERAND (*expr_p
, 1) = init
;
5485 /* Return true if T looks like a valid GIMPLE statement. */
5488 is_gimple_stmt (tree t
)
5490 const enum tree_code code
= TREE_CODE (t
);
5495 /* The only valid NOP_EXPR is the empty statement. */
5496 return IS_EMPTY_STMT (t
);
5500 /* These are only valid if they're void. */
5501 return TREE_TYPE (t
) == NULL
|| VOID_TYPE_P (TREE_TYPE (t
));
5507 case CASE_LABEL_EXPR
:
5508 case TRY_CATCH_EXPR
:
5509 case TRY_FINALLY_EXPR
:
5510 case EH_FILTER_EXPR
:
5513 case STATEMENT_LIST
:
5517 case OACC_HOST_DATA
:
5520 case OACC_ENTER_DATA
:
5521 case OACC_EXIT_DATA
:
5526 case OMP_DISTRIBUTE
:
5538 case OMP_TARGET_DATA
:
5539 case OMP_TARGET_UPDATE
:
5540 case OMP_TARGET_ENTER_DATA
:
5541 case OMP_TARGET_EXIT_DATA
:
5544 /* These are always void. */
5550 /* These are valid regardless of their type. */
5559 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5560 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5561 DECL_GIMPLE_REG_P set.
5563 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5564 other, unmodified part of the complex object just before the total store.
5565 As a consequence, if the object is still uninitialized, an undefined value
5566 will be loaded into a register, which may result in a spurious exception
5567 if the register is floating-point and the value happens to be a signaling
5568 NaN for example. Then the fully-fledged complex operations lowering pass
5569 followed by a DCE pass are necessary in order to fix things up. */
5571 static enum gimplify_status
5572 gimplify_modify_expr_complex_part (tree
*expr_p
, gimple_seq
*pre_p
,
5575 enum tree_code code
, ocode
;
5576 tree lhs
, rhs
, new_rhs
, other
, realpart
, imagpart
;
5578 lhs
= TREE_OPERAND (*expr_p
, 0);
5579 rhs
= TREE_OPERAND (*expr_p
, 1);
5580 code
= TREE_CODE (lhs
);
5581 lhs
= TREE_OPERAND (lhs
, 0);
5583 ocode
= code
== REALPART_EXPR
? IMAGPART_EXPR
: REALPART_EXPR
;
5584 other
= build1 (ocode
, TREE_TYPE (rhs
), lhs
);
5585 TREE_NO_WARNING (other
) = 1;
5586 other
= get_formal_tmp_var (other
, pre_p
);
5588 realpart
= code
== REALPART_EXPR
? rhs
: other
;
5589 imagpart
= code
== REALPART_EXPR
? other
: rhs
;
5591 if (TREE_CONSTANT (realpart
) && TREE_CONSTANT (imagpart
))
5592 new_rhs
= build_complex (TREE_TYPE (lhs
), realpart
, imagpart
);
5594 new_rhs
= build2 (COMPLEX_EXPR
, TREE_TYPE (lhs
), realpart
, imagpart
);
5596 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (lhs
, new_rhs
));
5597 *expr_p
= (want_value
) ? rhs
: NULL_TREE
;
5602 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5608 PRE_P points to the list where side effects that must happen before
5609 *EXPR_P should be stored.
5611 POST_P points to the list where side effects that must happen after
5612 *EXPR_P should be stored.
5614 WANT_VALUE is nonzero iff we want to use the value of this expression
5615 in another expression. */
5617 static enum gimplify_status
5618 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5621 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
5622 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
5623 enum gimplify_status ret
= GS_UNHANDLED
;
5625 location_t loc
= EXPR_LOCATION (*expr_p
);
5626 gimple_stmt_iterator gsi
;
5628 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
5629 || TREE_CODE (*expr_p
) == INIT_EXPR
);
5631 /* Trying to simplify a clobber using normal logic doesn't work,
5632 so handle it here. */
5633 if (TREE_CLOBBER_P (*from_p
))
5635 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5636 if (ret
== GS_ERROR
)
5638 gcc_assert (!want_value
);
5639 if (!VAR_P (*to_p
) && TREE_CODE (*to_p
) != MEM_REF
)
5641 tree addr
= get_initialized_tmp_var (build_fold_addr_expr (*to_p
),
5643 *to_p
= build_simple_mem_ref_loc (EXPR_LOCATION (*to_p
), addr
);
5645 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
5650 /* Insert pointer conversions required by the middle-end that are not
5651 required by the frontend. This fixes middle-end type checking for
5652 for example gcc.dg/redecl-6.c. */
5653 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
5655 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
5656 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
5657 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
5660 /* See if any simplifications can be done based on what the RHS is. */
5661 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5663 if (ret
!= GS_UNHANDLED
)
5666 /* For zero sized types only gimplify the left hand side and right hand
5667 side as statements and throw away the assignment. Do this after
5668 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5670 if (zero_sized_type (TREE_TYPE (*from_p
))
5672 /* Don't do this for calls that return addressable types, expand_call
5673 relies on those having a lhs. */
5674 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p
))
5675 && TREE_CODE (*from_p
) == CALL_EXPR
))
5677 gimplify_stmt (from_p
, pre_p
);
5678 gimplify_stmt (to_p
, pre_p
);
5679 *expr_p
= NULL_TREE
;
5683 /* If the value being copied is of variable width, compute the length
5684 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5685 before gimplifying any of the operands so that we can resolve any
5686 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5687 the size of the expression to be copied, not of the destination, so
5688 that is what we must do here. */
5689 maybe_with_size_expr (from_p
);
5691 /* As a special case, we have to temporarily allow for assignments
5692 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5693 a toplevel statement, when gimplifying the GENERIC expression
5694 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5695 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5697 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5698 prevent gimplify_expr from trying to create a new temporary for
5699 foo's LHS, we tell it that it should only gimplify until it
5700 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5701 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5702 and all we need to do here is set 'a' to be its LHS. */
5704 /* Gimplify the RHS first for C++17 and bug 71104. */
5705 gimple_predicate initial_pred
= initial_rhs_predicate_for (*to_p
);
5706 ret
= gimplify_expr (from_p
, pre_p
, post_p
, initial_pred
, fb_rvalue
);
5707 if (ret
== GS_ERROR
)
5710 /* Then gimplify the LHS. */
5711 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5712 twice we have to make sure to gimplify into non-SSA as otherwise
5713 the abnormal edge added later will make those defs not dominate
5715 ??? Technically this applies only to the registers used in the
5716 resulting non-register *TO_P. */
5717 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
5719 && TREE_CODE (*from_p
) == CALL_EXPR
5720 && call_expr_flags (*from_p
) & ECF_RETURNS_TWICE
)
5721 gimplify_ctxp
->into_ssa
= false;
5722 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5723 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
5724 if (ret
== GS_ERROR
)
5727 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5728 guess for the predicate was wrong. */
5729 gimple_predicate final_pred
= rhs_predicate_for (*to_p
);
5730 if (final_pred
!= initial_pred
)
5732 ret
= gimplify_expr (from_p
, pre_p
, post_p
, final_pred
, fb_rvalue
);
5733 if (ret
== GS_ERROR
)
5737 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5738 size as argument to the call. */
5739 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5741 tree call
= TREE_OPERAND (*from_p
, 0);
5742 tree vlasize
= TREE_OPERAND (*from_p
, 1);
5744 if (TREE_CODE (call
) == CALL_EXPR
5745 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
5747 int nargs
= call_expr_nargs (call
);
5748 tree type
= TREE_TYPE (call
);
5749 tree ap
= CALL_EXPR_ARG (call
, 0);
5750 tree tag
= CALL_EXPR_ARG (call
, 1);
5751 tree aptag
= CALL_EXPR_ARG (call
, 2);
5752 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
5756 TREE_OPERAND (*from_p
, 0) = newcall
;
5760 /* Now see if the above changed *from_p to something we handle specially. */
5761 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5763 if (ret
!= GS_UNHANDLED
)
5766 /* If we've got a variable sized assignment between two lvalues (i.e. does
5767 not involve a call), then we can make things a bit more straightforward
5768 by converting the assignment to memcpy or memset. */
5769 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5771 tree from
= TREE_OPERAND (*from_p
, 0);
5772 tree size
= TREE_OPERAND (*from_p
, 1);
5774 if (TREE_CODE (from
) == CONSTRUCTOR
)
5775 return gimplify_modify_expr_to_memset (expr_p
, size
, want_value
, pre_p
);
5777 if (is_gimple_addressable (from
))
5780 return gimplify_modify_expr_to_memcpy (expr_p
, size
, want_value
,
5785 /* Transform partial stores to non-addressable complex variables into
5786 total stores. This allows us to use real instead of virtual operands
5787 for these variables, which improves optimization. */
5788 if ((TREE_CODE (*to_p
) == REALPART_EXPR
5789 || TREE_CODE (*to_p
) == IMAGPART_EXPR
)
5790 && is_gimple_reg (TREE_OPERAND (*to_p
, 0)))
5791 return gimplify_modify_expr_complex_part (expr_p
, pre_p
, want_value
);
5793 /* Try to alleviate the effects of the gimplification creating artificial
5794 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5795 make sure not to create DECL_DEBUG_EXPR links across functions. */
5796 if (!gimplify_ctxp
->into_ssa
5798 && DECL_IGNORED_P (*from_p
)
5800 && !DECL_IGNORED_P (*to_p
)
5801 && decl_function_context (*to_p
) == current_function_decl
5802 && decl_function_context (*from_p
) == current_function_decl
)
5804 if (!DECL_NAME (*from_p
) && DECL_NAME (*to_p
))
5806 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p
)));
5807 DECL_HAS_DEBUG_EXPR_P (*from_p
) = 1;
5808 SET_DECL_DEBUG_EXPR (*from_p
, *to_p
);
5811 if (want_value
&& TREE_THIS_VOLATILE (*to_p
))
5812 *from_p
= get_initialized_tmp_var (*from_p
, pre_p
, post_p
);
5814 if (TREE_CODE (*from_p
) == CALL_EXPR
)
5816 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5817 instead of a GIMPLE_ASSIGN. */
5819 if (CALL_EXPR_FN (*from_p
) == NULL_TREE
)
5821 /* Gimplify internal functions created in the FEs. */
5822 int nargs
= call_expr_nargs (*from_p
), i
;
5823 enum internal_fn ifn
= CALL_EXPR_IFN (*from_p
);
5824 auto_vec
<tree
> vargs (nargs
);
5826 for (i
= 0; i
< nargs
; i
++)
5828 gimplify_arg (&CALL_EXPR_ARG (*from_p
, i
), pre_p
,
5829 EXPR_LOCATION (*from_p
));
5830 vargs
.quick_push (CALL_EXPR_ARG (*from_p
, i
));
5832 call_stmt
= gimple_build_call_internal_vec (ifn
, vargs
);
5833 gimple_call_set_nothrow (call_stmt
, TREE_NOTHROW (*from_p
));
5834 gimple_set_location (call_stmt
, EXPR_LOCATION (*expr_p
));
5838 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*from_p
));
5839 CALL_EXPR_FN (*from_p
) = TREE_OPERAND (CALL_EXPR_FN (*from_p
), 0);
5840 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p
));
5841 tree fndecl
= get_callee_fndecl (*from_p
);
5843 && fndecl_built_in_p (fndecl
, BUILT_IN_EXPECT
)
5844 && call_expr_nargs (*from_p
) == 3)
5845 call_stmt
= gimple_build_call_internal (IFN_BUILTIN_EXPECT
, 3,
5846 CALL_EXPR_ARG (*from_p
, 0),
5847 CALL_EXPR_ARG (*from_p
, 1),
5848 CALL_EXPR_ARG (*from_p
, 2));
5851 call_stmt
= gimple_build_call_from_tree (*from_p
, fnptrtype
);
5854 notice_special_calls (call_stmt
);
5855 if (!gimple_call_noreturn_p (call_stmt
) || !should_remove_lhs_p (*to_p
))
5856 gimple_call_set_lhs (call_stmt
, *to_p
);
5857 else if (TREE_CODE (*to_p
) == SSA_NAME
)
5858 /* The above is somewhat premature, avoid ICEing later for a
5859 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5860 ??? This doesn't make it a default-def. */
5861 SSA_NAME_DEF_STMT (*to_p
) = gimple_build_nop ();
5867 assign
= gimple_build_assign (*to_p
, *from_p
);
5868 gimple_set_location (assign
, EXPR_LOCATION (*expr_p
));
5869 if (COMPARISON_CLASS_P (*from_p
))
5870 gimple_set_no_warning (assign
, TREE_NO_WARNING (*from_p
));
5873 if (gimplify_ctxp
->into_ssa
&& is_gimple_reg (*to_p
))
5875 /* We should have got an SSA name from the start. */
5876 gcc_assert (TREE_CODE (*to_p
) == SSA_NAME
5877 || ! gimple_in_ssa_p (cfun
));
5880 gimplify_seq_add_stmt (pre_p
, assign
);
5881 gsi
= gsi_last (*pre_p
);
5882 maybe_fold_stmt (&gsi
);
5886 *expr_p
= TREE_THIS_VOLATILE (*to_p
) ? *from_p
: unshare_expr (*to_p
);
5895 /* Gimplify a comparison between two variable-sized objects. Do this
5896 with a call to BUILT_IN_MEMCMP. */
5898 static enum gimplify_status
5899 gimplify_variable_sized_compare (tree
*expr_p
)
5901 location_t loc
= EXPR_LOCATION (*expr_p
);
5902 tree op0
= TREE_OPERAND (*expr_p
, 0);
5903 tree op1
= TREE_OPERAND (*expr_p
, 1);
5904 tree t
, arg
, dest
, src
, expr
;
5906 arg
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
5907 arg
= unshare_expr (arg
);
5908 arg
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg
, op0
);
5909 src
= build_fold_addr_expr_loc (loc
, op1
);
5910 dest
= build_fold_addr_expr_loc (loc
, op0
);
5911 t
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
5912 t
= build_call_expr_loc (loc
, t
, 3, dest
, src
, arg
);
5915 = build2 (TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), t
, integer_zero_node
);
5916 SET_EXPR_LOCATION (expr
, loc
);
5922 /* Gimplify a comparison between two aggregate objects of integral scalar
5923 mode as a comparison between the bitwise equivalent scalar values. */
5925 static enum gimplify_status
5926 gimplify_scalar_mode_aggregate_compare (tree
*expr_p
)
5928 location_t loc
= EXPR_LOCATION (*expr_p
);
5929 tree op0
= TREE_OPERAND (*expr_p
, 0);
5930 tree op1
= TREE_OPERAND (*expr_p
, 1);
5932 tree type
= TREE_TYPE (op0
);
5933 tree scalar_type
= lang_hooks
.types
.type_for_mode (TYPE_MODE (type
), 1);
5935 op0
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op0
);
5936 op1
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op1
);
5939 = fold_build2_loc (loc
, TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), op0
, op1
);
5944 /* Gimplify an expression sequence. This function gimplifies each
5945 expression and rewrites the original expression with the last
5946 expression of the sequence in GIMPLE form.
5948 PRE_P points to the list where the side effects for all the
5949 expressions in the sequence will be emitted.
5951 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5953 static enum gimplify_status
5954 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
5960 tree
*sub_p
= &TREE_OPERAND (t
, 0);
5962 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
5963 gimplify_compound_expr (sub_p
, pre_p
, false);
5965 gimplify_stmt (sub_p
, pre_p
);
5967 t
= TREE_OPERAND (t
, 1);
5969 while (TREE_CODE (t
) == COMPOUND_EXPR
);
5976 gimplify_stmt (expr_p
, pre_p
);
5981 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5982 gimplify. After gimplification, EXPR_P will point to a new temporary
5983 that holds the original value of the SAVE_EXPR node.
5985 PRE_P points to the list where side effects that must happen before
5986 *EXPR_P should be stored. */
5988 static enum gimplify_status
5989 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
5991 enum gimplify_status ret
= GS_ALL_DONE
;
5994 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
5995 val
= TREE_OPERAND (*expr_p
, 0);
5997 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5998 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
6000 /* The operand may be a void-valued expression. It is
6001 being executed only for its side-effects. */
6002 if (TREE_TYPE (val
) == void_type_node
)
6004 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
6005 is_gimple_stmt
, fb_none
);
6009 /* The temporary may not be an SSA name as later abnormal and EH
6010 control flow may invalidate use/def domination. When in SSA
6011 form then assume there are no such issues and SAVE_EXPRs only
6012 appear via GENERIC foldings. */
6013 val
= get_initialized_tmp_var (val
, pre_p
, post_p
,
6014 gimple_in_ssa_p (cfun
));
6016 TREE_OPERAND (*expr_p
, 0) = val
;
6017 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
6025 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6032 PRE_P points to the list where side effects that must happen before
6033 *EXPR_P should be stored.
6035 POST_P points to the list where side effects that must happen after
6036 *EXPR_P should be stored. */
6038 static enum gimplify_status
6039 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6041 tree expr
= *expr_p
;
6042 tree op0
= TREE_OPERAND (expr
, 0);
6043 enum gimplify_status ret
;
6044 location_t loc
= EXPR_LOCATION (*expr_p
);
6046 switch (TREE_CODE (op0
))
6050 /* Check if we are dealing with an expression of the form '&*ptr'.
6051 While the front end folds away '&*ptr' into 'ptr', these
6052 expressions may be generated internally by the compiler (e.g.,
6053 builtins like __builtin_va_end). */
6054 /* Caution: the silent array decomposition semantics we allow for
6055 ADDR_EXPR means we can't always discard the pair. */
6056 /* Gimplification of the ADDR_EXPR operand may drop
6057 cv-qualification conversions, so make sure we add them if
6060 tree op00
= TREE_OPERAND (op0
, 0);
6061 tree t_expr
= TREE_TYPE (expr
);
6062 tree t_op00
= TREE_TYPE (op00
);
6064 if (!useless_type_conversion_p (t_expr
, t_op00
))
6065 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
6071 case VIEW_CONVERT_EXPR
:
6072 /* Take the address of our operand and then convert it to the type of
6075 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6076 all clear. The impact of this transformation is even less clear. */
6078 /* If the operand is a useless conversion, look through it. Doing so
6079 guarantees that the ADDR_EXPR and its operand will remain of the
6081 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
6082 op0
= TREE_OPERAND (op0
, 0);
6084 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
6085 build_fold_addr_expr_loc (loc
,
6086 TREE_OPERAND (op0
, 0)));
6091 if (integer_zerop (TREE_OPERAND (op0
, 1)))
6092 goto do_indirect_ref
;
6097 /* If we see a call to a declared builtin or see its address
6098 being taken (we can unify those cases here) then we can mark
6099 the builtin for implicit generation by GCC. */
6100 if (TREE_CODE (op0
) == FUNCTION_DECL
6101 && fndecl_built_in_p (op0
, BUILT_IN_NORMAL
)
6102 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
6103 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
6105 /* We use fb_either here because the C frontend sometimes takes
6106 the address of a call that returns a struct; see
6107 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6108 the implied temporary explicit. */
6110 /* Make the operand addressable. */
6111 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
6112 is_gimple_addressable
, fb_either
);
6113 if (ret
== GS_ERROR
)
6116 /* Then mark it. Beware that it may not be possible to do so directly
6117 if a temporary has been created by the gimplification. */
6118 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
6120 op0
= TREE_OPERAND (expr
, 0);
6122 /* For various reasons, the gimplification of the expression
6123 may have made a new INDIRECT_REF. */
6124 if (TREE_CODE (op0
) == INDIRECT_REF
)
6125 goto do_indirect_ref
;
6127 mark_addressable (TREE_OPERAND (expr
, 0));
6129 /* The FEs may end up building ADDR_EXPRs early on a decl with
6130 an incomplete type. Re-build ADDR_EXPRs in canonical form
6132 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
6133 *expr_p
= build_fold_addr_expr (op0
);
6135 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6136 recompute_tree_invariant_for_addr_expr (*expr_p
);
6138 /* If we re-built the ADDR_EXPR add a conversion to the original type
6140 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
6141 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
6149 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6150 value; output operands should be a gimple lvalue. */
6152 static enum gimplify_status
6153 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6157 const char **oconstraints
;
6160 const char *constraint
;
6161 bool allows_mem
, allows_reg
, is_inout
;
6162 enum gimplify_status ret
, tret
;
6164 vec
<tree
, va_gc
> *inputs
;
6165 vec
<tree
, va_gc
> *outputs
;
6166 vec
<tree
, va_gc
> *clobbers
;
6167 vec
<tree
, va_gc
> *labels
;
6171 noutputs
= list_length (ASM_OUTPUTS (expr
));
6172 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
6180 link_next
= NULL_TREE
;
6181 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
6184 size_t constraint_len
;
6186 link_next
= TREE_CHAIN (link
);
6190 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6191 constraint_len
= strlen (constraint
);
6192 if (constraint_len
== 0)
6195 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
6196 &allows_mem
, &allows_reg
, &is_inout
);
6203 /* If we can't make copies, we can only accept memory. */
6204 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link
))))
6210 error ("impossible constraint in %<asm%>");
6211 error ("non-memory output %d must stay in memory", i
);
6216 if (!allows_reg
&& allows_mem
)
6217 mark_addressable (TREE_VALUE (link
));
6219 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6220 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6221 fb_lvalue
| fb_mayfail
);
6222 if (tret
== GS_ERROR
)
6224 error ("invalid lvalue in %<asm%> output %d", i
);
6228 /* If the constraint does not allow memory make sure we gimplify
6229 it to a register if it is not already but its base is. This
6230 happens for complex and vector components. */
6233 tree op
= TREE_VALUE (link
);
6234 if (! is_gimple_val (op
)
6235 && is_gimple_reg_type (TREE_TYPE (op
))
6236 && is_gimple_reg (get_base_address (op
)))
6238 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6242 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6243 tem
, unshare_expr (op
));
6244 gimplify_and_add (ass
, pre_p
);
6246 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6247 gimplify_and_add (ass
, post_p
);
6249 TREE_VALUE (link
) = tem
;
6254 vec_safe_push (outputs
, link
);
6255 TREE_CHAIN (link
) = NULL_TREE
;
6259 /* An input/output operand. To give the optimizers more
6260 flexibility, split it into separate input and output
6263 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6266 /* Turn the in/out constraint into an output constraint. */
6267 char *p
= xstrdup (constraint
);
6269 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
6271 /* And add a matching input constraint. */
6274 sprintf (buf
, "%u", i
);
6276 /* If there are multiple alternatives in the constraint,
6277 handle each of them individually. Those that allow register
6278 will be replaced with operand number, the others will stay
6280 if (strchr (p
, ',') != NULL
)
6282 size_t len
= 0, buflen
= strlen (buf
);
6283 char *beg
, *end
, *str
, *dst
;
6287 end
= strchr (beg
, ',');
6289 end
= strchr (beg
, '\0');
6290 if ((size_t) (end
- beg
) < buflen
)
6293 len
+= end
- beg
+ 1;
6300 str
= (char *) alloca (len
);
6301 for (beg
= p
+ 1, dst
= str
;;)
6304 bool mem_p
, reg_p
, inout_p
;
6306 end
= strchr (beg
, ',');
6311 parse_output_constraint (&tem
, i
, 0, 0,
6312 &mem_p
, ®_p
, &inout_p
);
6317 memcpy (dst
, buf
, buflen
);
6326 memcpy (dst
, beg
, len
);
6335 input
= build_string (dst
- str
, str
);
6338 input
= build_string (strlen (buf
), buf
);
6341 input
= build_string (constraint_len
- 1, constraint
+ 1);
6345 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
6346 unshare_expr (TREE_VALUE (link
)));
6347 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
6351 link_next
= NULL_TREE
;
6352 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
6354 link_next
= TREE_CHAIN (link
);
6355 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6356 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
6357 oconstraints
, &allows_mem
, &allows_reg
);
6359 /* If we can't make copies, we can only accept memory. */
6360 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link
))))
6366 error ("impossible constraint in %<asm%>");
6367 error ("non-memory input %d must stay in memory", i
);
6372 /* If the operand is a memory input, it should be an lvalue. */
6373 if (!allows_reg
&& allows_mem
)
6375 tree inputv
= TREE_VALUE (link
);
6376 STRIP_NOPS (inputv
);
6377 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
6378 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
6379 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
6380 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
6381 || TREE_CODE (inputv
) == MODIFY_EXPR
)
6382 TREE_VALUE (link
) = error_mark_node
;
6383 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6384 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
6385 if (tret
!= GS_ERROR
)
6387 /* Unlike output operands, memory inputs are not guaranteed
6388 to be lvalues by the FE, and while the expressions are
6389 marked addressable there, if it is e.g. a statement
6390 expression, temporaries in it might not end up being
6391 addressable. They might be already used in the IL and thus
6392 it is too late to make them addressable now though. */
6393 tree x
= TREE_VALUE (link
);
6394 while (handled_component_p (x
))
6395 x
= TREE_OPERAND (x
, 0);
6396 if (TREE_CODE (x
) == MEM_REF
6397 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
6398 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
6400 || TREE_CODE (x
) == PARM_DECL
6401 || TREE_CODE (x
) == RESULT_DECL
)
6402 && !TREE_ADDRESSABLE (x
)
6403 && is_gimple_reg (x
))
6405 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
6407 "memory input %d is not directly addressable",
6409 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
6412 mark_addressable (TREE_VALUE (link
));
6413 if (tret
== GS_ERROR
)
6415 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
6416 "memory input %d is not directly addressable", i
);
6422 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6423 is_gimple_asm_val
, fb_rvalue
);
6424 if (tret
== GS_ERROR
)
6428 TREE_CHAIN (link
) = NULL_TREE
;
6429 vec_safe_push (inputs
, link
);
6432 link_next
= NULL_TREE
;
6433 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
6435 link_next
= TREE_CHAIN (link
);
6436 TREE_CHAIN (link
) = NULL_TREE
;
6437 vec_safe_push (clobbers
, link
);
6440 link_next
= NULL_TREE
;
6441 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
6443 link_next
= TREE_CHAIN (link
);
6444 TREE_CHAIN (link
) = NULL_TREE
;
6445 vec_safe_push (labels
, link
);
6448 /* Do not add ASMs with errors to the gimple IL stream. */
6449 if (ret
!= GS_ERROR
)
6451 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
6452 inputs
, outputs
, clobbers
, labels
);
6454 gimple_asm_set_volatile (stmt
, ASM_VOLATILE_P (expr
) || noutputs
== 0);
6455 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
6456 gimple_asm_set_inline (stmt
, ASM_INLINE_P (expr
));
6458 gimplify_seq_add_stmt (pre_p
, stmt
);
6464 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6465 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6466 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6467 return to this function.
6469 FIXME should we complexify the prequeue handling instead? Or use flags
6470 for all the cleanups and let the optimizer tighten them up? The current
6471 code seems pretty fragile; it will break on a cleanup within any
6472 non-conditional nesting. But any such nesting would be broken, anyway;
6473 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6474 and continues out of it. We can do that at the RTL level, though, so
6475 having an optimizer to tighten up try/finally regions would be a Good
6478 static enum gimplify_status
6479 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
6481 gimple_stmt_iterator iter
;
6482 gimple_seq body_sequence
= NULL
;
6484 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
6486 /* We only care about the number of conditions between the innermost
6487 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6488 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6489 int old_conds
= gimplify_ctxp
->conditions
;
6490 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
6491 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
6492 gimplify_ctxp
->conditions
= 0;
6493 gimplify_ctxp
->conditional_cleanups
= NULL
;
6494 gimplify_ctxp
->in_cleanup_point_expr
= true;
6496 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
6498 gimplify_ctxp
->conditions
= old_conds
;
6499 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
6500 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
6502 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
6504 gimple
*wce
= gsi_stmt (iter
);
6506 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
6508 if (gsi_one_before_end_p (iter
))
6510 /* Note that gsi_insert_seq_before and gsi_remove do not
6511 scan operands, unlike some other sequence mutators. */
6512 if (!gimple_wce_cleanup_eh_only (wce
))
6513 gsi_insert_seq_before_without_update (&iter
,
6514 gimple_wce_cleanup (wce
),
6516 gsi_remove (&iter
, true);
6523 enum gimple_try_flags kind
;
6525 if (gimple_wce_cleanup_eh_only (wce
))
6526 kind
= GIMPLE_TRY_CATCH
;
6528 kind
= GIMPLE_TRY_FINALLY
;
6529 seq
= gsi_split_seq_after (iter
);
6531 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
6532 /* Do not use gsi_replace here, as it may scan operands.
6533 We want to do a simple structural modification only. */
6534 gsi_set_stmt (&iter
, gtry
);
6535 iter
= gsi_start (gtry
->eval
);
6542 gimplify_seq_add_seq (pre_p
, body_sequence
);
6555 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6556 is the cleanup action required. EH_ONLY is true if the cleanup should
6557 only be executed if an exception is thrown, not on normal exit.
6558 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6559 only valid for clobbers. */
6562 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
,
6563 bool force_uncond
= false)
6566 gimple_seq cleanup_stmts
= NULL
;
6568 /* Errors can result in improperly nested cleanups. Which results in
6569 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6573 if (gimple_conditional_context ())
6575 /* If we're in a conditional context, this is more complex. We only
6576 want to run the cleanup if we actually ran the initialization that
6577 necessitates it, but we want to run it after the end of the
6578 conditional context. So we wrap the try/finally around the
6579 condition and use a flag to determine whether or not to actually
6580 run the destructor. Thus
6584 becomes (approximately)
6588 if (test) { A::A(temp); flag = 1; val = f(temp); }
6591 if (flag) A::~A(temp);
6597 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6598 wce
= gimple_build_wce (cleanup_stmts
);
6599 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6603 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
6604 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
6605 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
6607 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
6608 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6609 wce
= gimple_build_wce (cleanup_stmts
);
6611 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
6612 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6613 gimplify_seq_add_stmt (pre_p
, ftrue
);
6615 /* Because of this manipulation, and the EH edges that jump
6616 threading cannot redirect, the temporary (VAR) will appear
6617 to be used uninitialized. Don't warn. */
6618 TREE_NO_WARNING (var
) = 1;
6623 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6624 wce
= gimple_build_wce (cleanup_stmts
);
6625 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
6626 gimplify_seq_add_stmt (pre_p
, wce
);
6630 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6632 static enum gimplify_status
6633 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6635 tree targ
= *expr_p
;
6636 tree temp
= TARGET_EXPR_SLOT (targ
);
6637 tree init
= TARGET_EXPR_INITIAL (targ
);
6638 enum gimplify_status ret
;
6640 bool unpoison_empty_seq
= false;
6641 gimple_stmt_iterator unpoison_it
;
6645 tree cleanup
= NULL_TREE
;
6647 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6648 to the temps list. Handle also variable length TARGET_EXPRs. */
6649 if (TREE_CODE (DECL_SIZE (temp
)) != INTEGER_CST
)
6651 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
6652 gimplify_type_sizes (TREE_TYPE (temp
), pre_p
);
6653 gimplify_vla_decl (temp
, pre_p
);
6657 /* Save location where we need to place unpoisoning. It's possible
6658 that a variable will be converted to needs_to_live_in_memory. */
6659 unpoison_it
= gsi_last (*pre_p
);
6660 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
6662 gimple_add_tmp_var (temp
);
6665 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6666 expression is supposed to initialize the slot. */
6667 if (VOID_TYPE_P (TREE_TYPE (init
)))
6668 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6671 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
6673 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6675 ggc_free (init_expr
);
6677 if (ret
== GS_ERROR
)
6679 /* PR c++/28266 Make sure this is expanded only once. */
6680 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6684 gimplify_and_add (init
, pre_p
);
6686 /* If needed, push the cleanup for the temp. */
6687 if (TARGET_EXPR_CLEANUP (targ
))
6689 if (CLEANUP_EH_ONLY (targ
))
6690 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
6691 CLEANUP_EH_ONLY (targ
), pre_p
);
6693 cleanup
= TARGET_EXPR_CLEANUP (targ
);
6696 /* Add a clobber for the temporary going out of scope, like
6697 gimplify_bind_expr. */
6698 if (gimplify_ctxp
->in_cleanup_point_expr
6699 && needs_to_live_in_memory (temp
))
6701 if (flag_stack_reuse
== SR_ALL
)
6703 tree clobber
= build_clobber (TREE_TYPE (temp
));
6704 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
6705 gimple_push_cleanup (temp
, clobber
, false, pre_p
, true);
6707 if (asan_poisoned_variables
6708 && DECL_ALIGN (temp
) <= MAX_SUPPORTED_STACK_ALIGNMENT
6709 && !TREE_STATIC (temp
)
6710 && dbg_cnt (asan_use_after_scope
)
6711 && !gimplify_omp_ctxp
)
6713 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
6716 if (unpoison_empty_seq
)
6717 unpoison_it
= gsi_start (*pre_p
);
6719 asan_poison_variable (temp
, false, &unpoison_it
,
6720 unpoison_empty_seq
);
6721 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
6726 gimple_push_cleanup (temp
, cleanup
, false, pre_p
);
6728 /* Only expand this once. */
6729 TREE_OPERAND (targ
, 3) = init
;
6730 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6733 /* We should have expanded this before. */
6734 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
6740 /* Gimplification of expression trees. */
6742 /* Gimplify an expression which appears at statement context. The
6743 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6744 NULL, a new sequence is allocated.
6746 Return true if we actually added a statement to the queue. */
6749 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
6751 gimple_seq_node last
;
6753 last
= gimple_seq_last (*seq_p
);
6754 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
6755 return last
!= gimple_seq_last (*seq_p
);
6758 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6759 to CTX. If entries already exist, force them to be some flavor of private.
6760 If there is no enclosing parallel, do nothing. */
6763 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
6767 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
6772 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6775 if (n
->value
& GOVD_SHARED
)
6776 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
6777 else if (n
->value
& GOVD_MAP
)
6778 n
->value
|= GOVD_MAP_TO_ONLY
;
6782 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
6784 if (ctx
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
6785 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6787 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
6789 else if (ctx
->region_type
!= ORT_WORKSHARE
6790 && ctx
->region_type
!= ORT_TASKGROUP
6791 && ctx
->region_type
!= ORT_SIMD
6792 && ctx
->region_type
!= ORT_ACC
6793 && !(ctx
->region_type
& ORT_TARGET_DATA
))
6794 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6796 ctx
= ctx
->outer_context
;
6801 /* Similarly for each of the type sizes of TYPE. */
6804 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
6806 if (type
== NULL
|| type
== error_mark_node
)
6808 type
= TYPE_MAIN_VARIANT (type
);
6810 if (ctx
->privatized_types
->add (type
))
6813 switch (TREE_CODE (type
))
6819 case FIXED_POINT_TYPE
:
6820 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
6821 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
6825 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6826 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
6831 case QUAL_UNION_TYPE
:
6834 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
6835 if (TREE_CODE (field
) == FIELD_DECL
)
6837 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
6838 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
6844 case REFERENCE_TYPE
:
6845 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6852 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
6853 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
6854 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
6857 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6860 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
6863 unsigned int nflags
;
6866 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
6869 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6870 there are constructors involved somewhere. Exception is a shared clause,
6871 there is nothing privatized in that case. */
6872 if ((flags
& GOVD_SHARED
) == 0
6873 && (TREE_ADDRESSABLE (TREE_TYPE (decl
))
6874 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
))))
6877 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6878 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
6880 /* We shouldn't be re-adding the decl with the same data
6882 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
6883 nflags
= n
->value
| flags
;
6884 /* The only combination of data sharing classes we should see is
6885 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6886 reduction variables to be used in data sharing clauses. */
6887 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
6888 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
6889 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
6890 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
6895 /* When adding a variable-sized variable, we have to handle all sorts
6896 of additional bits of data: the pointer replacement variable, and
6897 the parameters of the type. */
6898 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
6900 /* Add the pointer replacement variable as PRIVATE if the variable
6901 replacement is private, else FIRSTPRIVATE since we'll need the
6902 address of the original variable either for SHARED, or for the
6903 copy into or out of the context. */
6904 if (!(flags
& GOVD_LOCAL
) && ctx
->region_type
!= ORT_TASKGROUP
)
6906 if (flags
& GOVD_MAP
)
6907 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
6908 else if (flags
& GOVD_PRIVATE
)
6909 nflags
= GOVD_PRIVATE
;
6910 else if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
6911 && (flags
& GOVD_FIRSTPRIVATE
))
6912 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
6914 nflags
= GOVD_FIRSTPRIVATE
;
6915 nflags
|= flags
& GOVD_SEEN
;
6916 t
= DECL_VALUE_EXPR (decl
);
6917 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
6918 t
= TREE_OPERAND (t
, 0);
6919 gcc_assert (DECL_P (t
));
6920 omp_add_variable (ctx
, t
, nflags
);
6923 /* Add all of the variable and type parameters (which should have
6924 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6925 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
6926 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
6927 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
6929 /* The variable-sized variable itself is never SHARED, only some form
6930 of PRIVATE. The sharing would take place via the pointer variable
6931 which we remapped above. */
6932 if (flags
& GOVD_SHARED
)
6933 flags
= GOVD_SHARED
| GOVD_DEBUG_PRIVATE
6934 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
6936 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6937 alloca statement we generate for the variable, so make sure it
6938 is available. This isn't automatically needed for the SHARED
6939 case, since we won't be allocating local storage then.
6940 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6941 in this case omp_notice_variable will be called later
6942 on when it is gimplified. */
6943 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
6944 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
6945 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
6947 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
6948 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
6950 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
6952 /* Similar to the direct variable sized case above, we'll need the
6953 size of references being privatized. */
6954 if ((flags
& GOVD_SHARED
) == 0)
6956 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
6958 omp_notice_variable (ctx
, t
, true);
6965 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
6967 /* For reductions clauses in OpenACC loop directives, by default create a
6968 copy clause on the enclosing parallel construct for carrying back the
6970 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
6972 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
6975 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
6978 /* Ignore local variables and explicitly declared clauses. */
6979 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
6981 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
6983 /* According to the OpenACC spec, such a reduction variable
6984 should already have a copy map on a kernels construct,
6985 verify that here. */
6986 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
6987 && (n
->value
& GOVD_MAP
));
6989 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
6991 /* Remove firstprivate and make it a copy map. */
6992 n
->value
&= ~GOVD_FIRSTPRIVATE
;
6993 n
->value
|= GOVD_MAP
;
6996 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
6998 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
6999 GOVD_MAP
| GOVD_SEEN
);
7002 outer_ctx
= outer_ctx
->outer_context
;
7007 /* Notice a threadprivate variable DECL used in OMP context CTX.
7008 This just prints out diagnostics about threadprivate variable uses
7009 in untied tasks. If DECL2 is non-NULL, prevent this warning
7010 on that variable. */
7013 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
7017 struct gimplify_omp_ctx
*octx
;
7019 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
7020 if ((octx
->region_type
& ORT_TARGET
) != 0)
7022 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
7025 error ("threadprivate variable %qE used in target region",
7027 error_at (octx
->location
, "enclosing target region");
7028 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
7031 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
7034 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
7036 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7039 error ("threadprivate variable %qE used in untied task",
7041 error_at (ctx
->location
, "enclosing task");
7042 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
7045 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
7049 /* Return true if global var DECL is device resident. */
7052 device_resident_p (tree decl
)
7054 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
7059 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
7061 tree c
= TREE_VALUE (t
);
7062 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
7069 /* Return true if DECL has an ACC DECLARE attribute. */
7072 is_oacc_declared (tree decl
)
7074 tree t
= TREE_CODE (decl
) == MEM_REF
? TREE_OPERAND (decl
, 0) : decl
;
7075 tree declared
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t
));
7076 return declared
!= NULL_TREE
;
7079 /* Determine outer default flags for DECL mentioned in an OMP region
7080 but not declared in an enclosing clause.
7082 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7083 remapped firstprivate instead of shared. To some extent this is
7084 addressed in omp_firstprivatize_type_sizes, but not
7088 omp_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
,
7089 bool in_code
, unsigned flags
)
7091 enum omp_clause_default_kind default_kind
= ctx
->default_kind
;
7092 enum omp_clause_default_kind kind
;
7094 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
7095 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
7096 default_kind
= kind
;
7098 switch (default_kind
)
7100 case OMP_CLAUSE_DEFAULT_NONE
:
7104 if (ctx
->region_type
& ORT_PARALLEL
)
7106 else if ((ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
)
7108 else if (ctx
->region_type
& ORT_TASK
)
7110 else if (ctx
->region_type
& ORT_TEAMS
)
7115 error ("%qE not specified in enclosing %qs",
7116 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rtype
);
7117 error_at (ctx
->location
, "enclosing %qs", rtype
);
7120 case OMP_CLAUSE_DEFAULT_SHARED
:
7121 flags
|= GOVD_SHARED
;
7123 case OMP_CLAUSE_DEFAULT_PRIVATE
:
7124 flags
|= GOVD_PRIVATE
;
7126 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
7127 flags
|= GOVD_FIRSTPRIVATE
;
7129 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
7130 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7131 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
7132 if (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
)
7134 omp_notice_variable (octx
, decl
, in_code
);
7135 for (; octx
; octx
= octx
->outer_context
)
7139 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
7140 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0
7141 && (n2
== NULL
|| (n2
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7143 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
7145 flags
|= GOVD_FIRSTPRIVATE
;
7148 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
7150 flags
|= GOVD_SHARED
;
7156 if (TREE_CODE (decl
) == PARM_DECL
7157 || (!is_global_var (decl
)
7158 && DECL_CONTEXT (decl
) == current_function_decl
))
7159 flags
|= GOVD_FIRSTPRIVATE
;
7161 flags
|= GOVD_SHARED
;
7173 /* Determine outer default flags for DECL mentioned in an OACC region
7174 but not declared in an enclosing clause. */
7177 oacc_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned flags
)
7180 bool on_device
= false;
7181 bool declared
= is_oacc_declared (decl
);
7182 tree type
= TREE_TYPE (decl
);
7184 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7185 type
= TREE_TYPE (type
);
7187 if ((ctx
->region_type
& (ORT_ACC_PARALLEL
| ORT_ACC_KERNELS
)) != 0
7188 && is_global_var (decl
)
7189 && device_resident_p (decl
))
7192 flags
|= GOVD_MAP_TO_ONLY
;
7195 switch (ctx
->region_type
)
7197 case ORT_ACC_KERNELS
:
7200 if (AGGREGATE_TYPE_P (type
))
7202 /* Aggregates default to 'present_or_copy', or 'present'. */
7203 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7206 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7209 /* Scalars default to 'copy'. */
7210 flags
|= GOVD_MAP
| GOVD_MAP_FORCE
;
7214 case ORT_ACC_PARALLEL
:
7217 if (on_device
|| declared
)
7219 else if (AGGREGATE_TYPE_P (type
))
7221 /* Aggregates default to 'present_or_copy', or 'present'. */
7222 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7225 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7228 /* Scalars default to 'firstprivate'. */
7229 flags
|= GOVD_FIRSTPRIVATE
;
7237 if (DECL_ARTIFICIAL (decl
))
7238 ; /* We can get compiler-generated decls, and should not complain
7240 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_NONE
)
7242 error ("%qE not specified in enclosing OpenACC %qs construct",
7243 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rkind
);
7244 inform (ctx
->location
, "enclosing OpenACC %qs construct", rkind
);
7246 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_PRESENT
)
7247 ; /* Handled above. */
7249 gcc_checking_assert (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
);
7254 /* Record the fact that DECL was used within the OMP context CTX.
7255 IN_CODE is true when real code uses DECL, and false when we should
7256 merely emit default(none) errors. Return true if DECL is going to
7257 be remapped and thus DECL shouldn't be gimplified into its
7258 DECL_VALUE_EXPR (if any). */
7261 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
7264 unsigned flags
= in_code
? GOVD_SEEN
: 0;
7265 bool ret
= false, shared
;
7267 if (error_operand_p (decl
))
7270 if (ctx
->region_type
== ORT_NONE
)
7271 return lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7273 if (is_global_var (decl
))
7275 /* Threadprivate variables are predetermined. */
7276 if (DECL_THREAD_LOCAL_P (decl
))
7277 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
7279 if (DECL_HAS_VALUE_EXPR_P (decl
))
7281 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
7283 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
7284 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
7287 if (gimplify_omp_ctxp
->outer_context
== NULL
7289 && oacc_get_fn_attrib (current_function_decl
))
7291 location_t loc
= DECL_SOURCE_LOCATION (decl
);
7293 if (lookup_attribute ("omp declare target link",
7294 DECL_ATTRIBUTES (decl
)))
7297 "%qE with %<link%> clause used in %<routine%> function",
7301 else if (!lookup_attribute ("omp declare target",
7302 DECL_ATTRIBUTES (decl
)))
7305 "%qE requires a %<declare%> directive for use "
7306 "in a %<routine%> function", DECL_NAME (decl
));
7312 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7313 if ((ctx
->region_type
& ORT_TARGET
) != 0)
7315 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, true);
7318 unsigned nflags
= flags
;
7319 if ((ctx
->region_type
& ORT_ACC
) == 0)
7321 bool is_declare_target
= false;
7322 if (is_global_var (decl
)
7323 && varpool_node::get_create (decl
)->offloadable
)
7325 struct gimplify_omp_ctx
*octx
;
7326 for (octx
= ctx
->outer_context
;
7327 octx
; octx
= octx
->outer_context
)
7329 n
= splay_tree_lookup (octx
->variables
,
7330 (splay_tree_key
)decl
);
7332 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
7333 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7336 is_declare_target
= octx
== NULL
;
7338 if (!is_declare_target
)
7341 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
7342 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
7343 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
7345 gdmk
= GDMK_POINTER
;
7346 else if (lang_hooks
.decls
.omp_scalar_p (decl
))
7349 gdmk
= GDMK_AGGREGATE
;
7350 if (ctx
->defaultmap
[gdmk
] == 0)
7352 tree d
= lang_hooks
.decls
.omp_report_decl (decl
);
7353 error ("%qE not specified in enclosing %<target%>",
7355 error_at (ctx
->location
, "enclosing %<target%>");
7357 else if (ctx
->defaultmap
[gdmk
]
7358 & (GOVD_MAP_0LEN_ARRAY
| GOVD_FIRSTPRIVATE
))
7359 nflags
|= ctx
->defaultmap
[gdmk
];
7362 gcc_assert (ctx
->defaultmap
[gdmk
] & GOVD_MAP
);
7363 nflags
|= ctx
->defaultmap
[gdmk
] & ~GOVD_MAP
;
7368 struct gimplify_omp_ctx
*octx
= ctx
->outer_context
;
7369 if ((ctx
->region_type
& ORT_ACC
) && octx
)
7371 /* Look in outer OpenACC contexts, to see if there's a
7372 data attribute for this variable. */
7373 omp_notice_variable (octx
, decl
, in_code
);
7375 for (; octx
; octx
= octx
->outer_context
)
7377 if (!(octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)))
7380 = splay_tree_lookup (octx
->variables
,
7381 (splay_tree_key
) decl
);
7384 if (octx
->region_type
== ORT_ACC_HOST_DATA
)
7385 error ("variable %qE declared in enclosing "
7386 "%<host_data%> region", DECL_NAME (decl
));
7388 if (octx
->region_type
== ORT_ACC_DATA
7389 && (n2
->value
& GOVD_MAP_0LEN_ARRAY
))
7390 nflags
|= GOVD_MAP_0LEN_ARRAY
;
7396 if ((nflags
& ~(GOVD_MAP_TO_ONLY
| GOVD_MAP_FROM_ONLY
7397 | GOVD_MAP_ALLOC_ONLY
)) == flags
)
7399 tree type
= TREE_TYPE (decl
);
7401 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
7402 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7403 type
= TREE_TYPE (type
);
7404 if (!lang_hooks
.types
.omp_mappable_type (type
))
7406 error ("%qD referenced in target region does not have "
7407 "a mappable type", decl
);
7408 nflags
|= GOVD_MAP
| GOVD_EXPLICIT
;
7412 if ((ctx
->region_type
& ORT_ACC
) != 0)
7413 nflags
= oacc_default_clause (ctx
, decl
, flags
);
7419 omp_add_variable (ctx
, decl
, nflags
);
7423 /* If nothing changed, there's nothing left to do. */
7424 if ((n
->value
& flags
) == flags
)
7434 if (ctx
->region_type
== ORT_WORKSHARE
7435 || ctx
->region_type
== ORT_TASKGROUP
7436 || ctx
->region_type
== ORT_SIMD
7437 || ctx
->region_type
== ORT_ACC
7438 || (ctx
->region_type
& ORT_TARGET_DATA
) != 0)
7441 flags
= omp_default_clause (ctx
, decl
, in_code
, flags
);
7443 if ((flags
& GOVD_PRIVATE
)
7444 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
7445 flags
|= GOVD_PRIVATE_OUTER_REF
;
7447 omp_add_variable (ctx
, decl
, flags
);
7449 shared
= (flags
& GOVD_SHARED
) != 0;
7450 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7454 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
7455 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
7456 && DECL_SIZE (decl
))
7458 if (TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7461 tree t
= DECL_VALUE_EXPR (decl
);
7462 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7463 t
= TREE_OPERAND (t
, 0);
7464 gcc_assert (DECL_P (t
));
7465 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7466 n2
->value
|= GOVD_SEEN
;
7468 else if (lang_hooks
.decls
.omp_privatize_by_reference (decl
)
7469 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
7470 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
7474 tree t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7475 gcc_assert (DECL_P (t
));
7476 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7478 omp_notice_variable (ctx
, t
, true);
7482 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
7483 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7485 /* If nothing changed, there's nothing left to do. */
7486 if ((n
->value
& flags
) == flags
)
7492 /* If the variable is private in the current context, then we don't
7493 need to propagate anything to an outer context. */
7494 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
7496 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7497 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7499 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
7500 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7501 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7503 if (ctx
->outer_context
7504 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
7509 /* Verify that DECL is private within CTX. If there's specific information
7510 to the contrary in the innermost scope, generate an error. */
7513 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
7517 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7520 if (n
->value
& GOVD_SHARED
)
7522 if (ctx
== gimplify_omp_ctxp
)
7525 error ("iteration variable %qE is predetermined linear",
7528 error ("iteration variable %qE should be private",
7530 n
->value
= GOVD_PRIVATE
;
7536 else if ((n
->value
& GOVD_EXPLICIT
) != 0
7537 && (ctx
== gimplify_omp_ctxp
7538 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7539 && gimplify_omp_ctxp
->outer_context
== ctx
)))
7541 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
7542 error ("iteration variable %qE should not be firstprivate",
7544 else if ((n
->value
& GOVD_REDUCTION
) != 0)
7545 error ("iteration variable %qE should not be reduction",
7547 else if (simd
!= 1 && (n
->value
& GOVD_LINEAR
) != 0)
7548 error ("iteration variable %qE should not be linear",
7551 return (ctx
== gimplify_omp_ctxp
7552 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7553 && gimplify_omp_ctxp
->outer_context
== ctx
));
7556 if (ctx
->region_type
!= ORT_WORKSHARE
7557 && ctx
->region_type
!= ORT_TASKGROUP
7558 && ctx
->region_type
!= ORT_SIMD
7559 && ctx
->region_type
!= ORT_ACC
)
7561 else if (ctx
->outer_context
)
7562 return omp_is_private (ctx
->outer_context
, decl
, simd
);
7566 /* Return true if DECL is private within a parallel region
7567 that binds to the current construct's context or in parallel
7568 region's REDUCTION clause. */
7571 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
7577 ctx
= ctx
->outer_context
;
7580 if (is_global_var (decl
))
7583 /* References might be private, but might be shared too,
7584 when checking for copyprivate, assume they might be
7585 private, otherwise assume they might be shared. */
7589 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7592 /* Treat C++ privatized non-static data members outside
7593 of the privatization the same. */
7594 if (omp_member_access_dummy_var (decl
))
7600 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
7602 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7603 && (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7608 if ((n
->value
& GOVD_LOCAL
) != 0
7609 && omp_member_access_dummy_var (decl
))
7611 return (n
->value
& GOVD_SHARED
) == 0;
7614 while (ctx
->region_type
== ORT_WORKSHARE
7615 || ctx
->region_type
== ORT_TASKGROUP
7616 || ctx
->region_type
== ORT_SIMD
7617 || ctx
->region_type
== ORT_ACC
);
7621 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7624 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
7628 /* If this node has been visited, unmark it and keep looking. */
7629 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
7632 if (IS_TYPE_OR_DECL_P (t
))
7637 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7638 lower all the depend clauses by populating corresponding depend
7639 array. Returns 0 if there are no such depend clauses, or
7640 2 if all depend clauses should be removed, 1 otherwise. */
7643 gimplify_omp_depend (tree
*list_p
, gimple_seq
*pre_p
)
7647 size_t n
[4] = { 0, 0, 0, 0 };
7649 tree counts
[4] = { NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
};
7650 tree last_iter
= NULL_TREE
, last_count
= NULL_TREE
;
7652 location_t first_loc
= UNKNOWN_LOCATION
;
7654 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7655 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
7657 switch (OMP_CLAUSE_DEPEND_KIND (c
))
7659 case OMP_CLAUSE_DEPEND_IN
:
7662 case OMP_CLAUSE_DEPEND_OUT
:
7663 case OMP_CLAUSE_DEPEND_INOUT
:
7666 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
7669 case OMP_CLAUSE_DEPEND_DEPOBJ
:
7672 case OMP_CLAUSE_DEPEND_SOURCE
:
7673 case OMP_CLAUSE_DEPEND_SINK
:
7678 tree t
= OMP_CLAUSE_DECL (c
);
7679 if (first_loc
== UNKNOWN_LOCATION
)
7680 first_loc
= OMP_CLAUSE_LOCATION (c
);
7681 if (TREE_CODE (t
) == TREE_LIST
7683 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
7685 if (TREE_PURPOSE (t
) != last_iter
)
7687 tree tcnt
= size_one_node
;
7688 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
7690 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
7691 is_gimple_val
, fb_rvalue
) == GS_ERROR
7692 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
7693 is_gimple_val
, fb_rvalue
) == GS_ERROR
7694 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
7695 is_gimple_val
, fb_rvalue
) == GS_ERROR
7696 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
7697 is_gimple_val
, fb_rvalue
)
7700 tree var
= TREE_VEC_ELT (it
, 0);
7701 tree begin
= TREE_VEC_ELT (it
, 1);
7702 tree end
= TREE_VEC_ELT (it
, 2);
7703 tree step
= TREE_VEC_ELT (it
, 3);
7704 tree orig_step
= TREE_VEC_ELT (it
, 4);
7705 tree type
= TREE_TYPE (var
);
7706 tree stype
= TREE_TYPE (step
);
7707 location_t loc
= DECL_SOURCE_LOCATION (var
);
7709 /* Compute count for this iterator as
7711 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7712 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7713 and compute product of those for the entire depend
7715 if (POINTER_TYPE_P (type
))
7716 endmbegin
= fold_build2_loc (loc
, POINTER_DIFF_EXPR
,
7719 endmbegin
= fold_build2_loc (loc
, MINUS_EXPR
, type
,
7721 tree stepm1
= fold_build2_loc (loc
, MINUS_EXPR
, stype
,
7723 build_int_cst (stype
, 1));
7724 tree stepp1
= fold_build2_loc (loc
, PLUS_EXPR
, stype
, step
,
7725 build_int_cst (stype
, 1));
7726 tree pos
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
7727 unshare_expr (endmbegin
),
7729 pos
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
7731 tree neg
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
7733 if (TYPE_UNSIGNED (stype
))
7735 neg
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, neg
);
7736 step
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, step
);
7738 neg
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
7741 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
7744 pos
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, pos
,
7745 build_int_cst (stype
, 0));
7746 cond
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
,
7748 neg
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, neg
,
7749 build_int_cst (stype
, 0));
7750 tree osteptype
= TREE_TYPE (orig_step
);
7751 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
7753 build_int_cst (osteptype
, 0));
7754 tree cnt
= fold_build3_loc (loc
, COND_EXPR
, stype
,
7756 cnt
= fold_convert_loc (loc
, sizetype
, cnt
);
7757 if (gimplify_expr (&cnt
, pre_p
, NULL
, is_gimple_val
,
7758 fb_rvalue
) == GS_ERROR
)
7760 tcnt
= size_binop_loc (loc
, MULT_EXPR
, tcnt
, cnt
);
7762 if (gimplify_expr (&tcnt
, pre_p
, NULL
, is_gimple_val
,
7763 fb_rvalue
) == GS_ERROR
)
7765 last_iter
= TREE_PURPOSE (t
);
7768 if (counts
[i
] == NULL_TREE
)
7769 counts
[i
] = last_count
;
7771 counts
[i
] = size_binop_loc (OMP_CLAUSE_LOCATION (c
),
7772 PLUS_EXPR
, counts
[i
], last_count
);
7777 for (i
= 0; i
< 4; i
++)
7783 tree total
= size_zero_node
;
7784 for (i
= 0; i
< 4; i
++)
7786 unused
[i
] = counts
[i
] == NULL_TREE
&& n
[i
] == 0;
7787 if (counts
[i
] == NULL_TREE
)
7788 counts
[i
] = size_zero_node
;
7790 counts
[i
] = size_binop (PLUS_EXPR
, counts
[i
], size_int (n
[i
]));
7791 if (gimplify_expr (&counts
[i
], pre_p
, NULL
, is_gimple_val
,
7792 fb_rvalue
) == GS_ERROR
)
7794 total
= size_binop (PLUS_EXPR
, total
, counts
[i
]);
7797 if (gimplify_expr (&total
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
7800 bool is_old
= unused
[1] && unused
[3];
7801 tree totalpx
= size_binop (PLUS_EXPR
, unshare_expr (total
),
7802 size_int (is_old
? 1 : 4));
7803 tree type
= build_array_type (ptr_type_node
, build_index_type (totalpx
));
7804 tree array
= create_tmp_var_raw (type
);
7805 TREE_ADDRESSABLE (array
) = 1;
7806 if (TREE_CODE (totalpx
) != INTEGER_CST
)
7808 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array
)))
7809 gimplify_type_sizes (TREE_TYPE (array
), pre_p
);
7810 if (gimplify_omp_ctxp
)
7812 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
7814 && (ctx
->region_type
== ORT_WORKSHARE
7815 || ctx
->region_type
== ORT_TASKGROUP
7816 || ctx
->region_type
== ORT_SIMD
7817 || ctx
->region_type
== ORT_ACC
))
7818 ctx
= ctx
->outer_context
;
7820 omp_add_variable (ctx
, array
, GOVD_LOCAL
| GOVD_SEEN
);
7822 gimplify_vla_decl (array
, pre_p
);
7825 gimple_add_tmp_var (array
);
7826 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
7831 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
7832 build_int_cst (ptr_type_node
, 0));
7833 gimplify_and_add (tem
, pre_p
);
7834 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
7837 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
7838 fold_convert (ptr_type_node
, total
));
7839 gimplify_and_add (tem
, pre_p
);
7840 for (i
= 1; i
< (is_old
? 2 : 4); i
++)
7842 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (i
+ !is_old
),
7843 NULL_TREE
, NULL_TREE
);
7844 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, counts
[i
- 1]);
7845 gimplify_and_add (tem
, pre_p
);
7852 for (i
= 0; i
< 4; i
++)
7854 if (i
&& (i
>= j
|| unused
[i
- 1]))
7856 cnts
[i
] = cnts
[i
- 1];
7859 cnts
[i
] = create_tmp_var (sizetype
);
7861 g
= gimple_build_assign (cnts
[i
], size_int (is_old
? 2 : 5));
7866 t
= size_binop (PLUS_EXPR
, counts
[0], size_int (2));
7868 t
= size_binop (PLUS_EXPR
, cnts
[i
- 1], counts
[i
- 1]);
7869 if (gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
7872 g
= gimple_build_assign (cnts
[i
], t
);
7874 gimple_seq_add_stmt (pre_p
, g
);
7877 last_iter
= NULL_TREE
;
7878 tree last_bind
= NULL_TREE
;
7879 tree
*last_body
= NULL
;
7880 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7881 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
7883 switch (OMP_CLAUSE_DEPEND_KIND (c
))
7885 case OMP_CLAUSE_DEPEND_IN
:
7888 case OMP_CLAUSE_DEPEND_OUT
:
7889 case OMP_CLAUSE_DEPEND_INOUT
:
7892 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
7895 case OMP_CLAUSE_DEPEND_DEPOBJ
:
7898 case OMP_CLAUSE_DEPEND_SOURCE
:
7899 case OMP_CLAUSE_DEPEND_SINK
:
7904 tree t
= OMP_CLAUSE_DECL (c
);
7905 if (TREE_CODE (t
) == TREE_LIST
7907 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
7909 if (TREE_PURPOSE (t
) != last_iter
)
7912 gimplify_and_add (last_bind
, pre_p
);
7913 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
7914 last_bind
= build3 (BIND_EXPR
, void_type_node
,
7915 BLOCK_VARS (block
), NULL
, block
);
7916 TREE_SIDE_EFFECTS (last_bind
) = 1;
7917 SET_EXPR_LOCATION (last_bind
, OMP_CLAUSE_LOCATION (c
));
7918 tree
*p
= &BIND_EXPR_BODY (last_bind
);
7919 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
7921 tree var
= TREE_VEC_ELT (it
, 0);
7922 tree begin
= TREE_VEC_ELT (it
, 1);
7923 tree end
= TREE_VEC_ELT (it
, 2);
7924 tree step
= TREE_VEC_ELT (it
, 3);
7925 tree orig_step
= TREE_VEC_ELT (it
, 4);
7926 tree type
= TREE_TYPE (var
);
7927 location_t loc
= DECL_SOURCE_LOCATION (var
);
7935 if (orig_step > 0) {
7936 if (var < end) goto beg_label;
7938 if (var > end) goto beg_label;
7940 for each iterator, with inner iterators added to
7942 tree beg_label
= create_artificial_label (loc
);
7943 tree cond_label
= NULL_TREE
;
7944 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
7946 append_to_statement_list_force (tem
, p
);
7947 tem
= build_and_jump (&cond_label
);
7948 append_to_statement_list_force (tem
, p
);
7949 tem
= build1 (LABEL_EXPR
, void_type_node
, beg_label
);
7950 append_to_statement_list (tem
, p
);
7951 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
7952 NULL_TREE
, NULL_TREE
);
7953 TREE_SIDE_EFFECTS (bind
) = 1;
7954 SET_EXPR_LOCATION (bind
, loc
);
7955 append_to_statement_list_force (bind
, p
);
7956 if (POINTER_TYPE_P (type
))
7957 tem
= build2_loc (loc
, POINTER_PLUS_EXPR
, type
,
7958 var
, fold_convert_loc (loc
, sizetype
,
7961 tem
= build2_loc (loc
, PLUS_EXPR
, type
, var
, step
);
7962 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
7964 append_to_statement_list_force (tem
, p
);
7965 tem
= build1 (LABEL_EXPR
, void_type_node
, cond_label
);
7966 append_to_statement_list (tem
, p
);
7967 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
7971 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
7972 cond
, build_and_jump (&beg_label
),
7974 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
7977 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
7978 cond
, build_and_jump (&beg_label
),
7980 tree osteptype
= TREE_TYPE (orig_step
);
7981 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
7983 build_int_cst (osteptype
, 0));
7984 tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
7986 append_to_statement_list_force (tem
, p
);
7987 p
= &BIND_EXPR_BODY (bind
);
7991 last_iter
= TREE_PURPOSE (t
);
7992 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
7994 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
),
7996 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
7998 if (error_operand_p (TREE_VALUE (t
)))
8000 TREE_VALUE (t
) = build_fold_addr_expr (TREE_VALUE (t
));
8001 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8002 NULL_TREE
, NULL_TREE
);
8003 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8004 void_type_node
, r
, TREE_VALUE (t
));
8005 append_to_statement_list_force (tem
, last_body
);
8006 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8007 void_type_node
, cnts
[i
],
8008 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)));
8009 append_to_statement_list_force (tem
, last_body
);
8010 TREE_VALUE (t
) = null_pointer_node
;
8016 gimplify_and_add (last_bind
, pre_p
);
8017 last_bind
= NULL_TREE
;
8019 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8021 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8022 NULL
, is_gimple_val
, fb_rvalue
);
8023 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8025 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8027 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8028 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8029 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8031 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8032 NULL_TREE
, NULL_TREE
);
8033 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, OMP_CLAUSE_DECL (c
));
8034 gimplify_and_add (tem
, pre_p
);
8035 g
= gimple_build_assign (cnts
[i
], size_binop (PLUS_EXPR
, cnts
[i
],
8037 gimple_seq_add_stmt (pre_p
, g
);
8041 gimplify_and_add (last_bind
, pre_p
);
8042 tree cond
= boolean_false_node
;
8046 cond
= build2_loc (first_loc
, NE_EXPR
, boolean_type_node
, cnts
[0],
8047 size_binop_loc (first_loc
, PLUS_EXPR
, counts
[0],
8050 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8051 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8053 size_binop_loc (first_loc
, PLUS_EXPR
,
8059 tree prev
= size_int (5);
8060 for (i
= 0; i
< 4; i
++)
8064 prev
= size_binop_loc (first_loc
, PLUS_EXPR
, counts
[i
], prev
);
8065 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8066 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8067 cnts
[i
], unshare_expr (prev
)));
8070 tem
= build3_loc (first_loc
, COND_EXPR
, void_type_node
, cond
,
8071 build_call_expr_loc (first_loc
,
8072 builtin_decl_explicit (BUILT_IN_TRAP
),
8074 gimplify_and_add (tem
, pre_p
);
8075 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
8076 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
8077 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
8078 OMP_CLAUSE_CHAIN (c
) = *list_p
;
8083 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8084 and previous omp contexts. */
8087 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
8088 enum omp_region_type region_type
,
8089 enum tree_code code
)
8091 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
8093 hash_map
<tree
, tree
> *struct_map_to_clause
= NULL
;
8094 tree
*prev_list_p
= NULL
, *orig_list_p
= list_p
;
8095 int handled_depend_iterators
= -1;
8098 ctx
= new_omp_context (region_type
);
8099 outer_ctx
= ctx
->outer_context
;
8100 if (code
== OMP_TARGET
)
8102 if (!lang_GNU_Fortran ())
8103 ctx
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
8104 ctx
->defaultmap
[GDMK_SCALAR
] = GOVD_FIRSTPRIVATE
;
8106 if (!lang_GNU_Fortran ())
8110 case OMP_TARGET_DATA
:
8111 case OMP_TARGET_ENTER_DATA
:
8112 case OMP_TARGET_EXIT_DATA
:
8114 case OACC_HOST_DATA
:
8117 ctx
->target_firstprivatize_array_bases
= true;
8122 while ((c
= *list_p
) != NULL
)
8124 bool remove
= false;
8125 bool notice_outer
= true;
8126 const char *check_non_private
= NULL
;
8130 switch (OMP_CLAUSE_CODE (c
))
8132 case OMP_CLAUSE_PRIVATE
:
8133 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
8134 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
8136 flags
|= GOVD_PRIVATE_OUTER_REF
;
8137 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
8140 notice_outer
= false;
8142 case OMP_CLAUSE_SHARED
:
8143 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
8145 case OMP_CLAUSE_FIRSTPRIVATE
:
8146 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8147 check_non_private
= "firstprivate";
8149 case OMP_CLAUSE_LASTPRIVATE
:
8150 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
8153 case OMP_DISTRIBUTE
:
8154 error_at (OMP_CLAUSE_LOCATION (c
),
8155 "conditional %<lastprivate%> clause on "
8156 "%qs construct", "distribute");
8157 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8160 error_at (OMP_CLAUSE_LOCATION (c
),
8161 "conditional %<lastprivate%> clause on "
8162 "%qs construct", "taskloop");
8163 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8168 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
8169 check_non_private
= "lastprivate";
8170 decl
= OMP_CLAUSE_DECL (c
);
8171 if (error_operand_p (decl
))
8173 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
8174 && !lang_hooks
.decls
.omp_scalar_p (decl
))
8176 error_at (OMP_CLAUSE_LOCATION (c
),
8177 "non-scalar variable %qD in conditional "
8178 "%<lastprivate%> clause", decl
);
8179 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8181 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
8182 flags
|= GOVD_LASTPRIVATE_CONDITIONAL
;
8184 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
8185 || ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
8186 == ORT_COMBINED_TEAMS
))
8187 && splay_tree_lookup (outer_ctx
->variables
,
8188 (splay_tree_key
) decl
) == NULL
)
8190 omp_add_variable (outer_ctx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
8191 if (outer_ctx
->outer_context
)
8192 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8195 && (outer_ctx
->region_type
& ORT_TASK
) != 0
8196 && outer_ctx
->combined_loop
8197 && splay_tree_lookup (outer_ctx
->variables
,
8198 (splay_tree_key
) decl
) == NULL
)
8200 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8201 if (outer_ctx
->outer_context
)
8202 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8205 && (outer_ctx
->region_type
== ORT_WORKSHARE
8206 || outer_ctx
->region_type
== ORT_ACC
)
8207 && outer_ctx
->combined_loop
8208 && splay_tree_lookup (outer_ctx
->variables
,
8209 (splay_tree_key
) decl
) == NULL
8210 && !omp_check_private (outer_ctx
, decl
, false))
8212 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8213 if (outer_ctx
->outer_context
8214 && (outer_ctx
->outer_context
->region_type
8215 == ORT_COMBINED_PARALLEL
)
8216 && splay_tree_lookup (outer_ctx
->outer_context
->variables
,
8217 (splay_tree_key
) decl
) == NULL
)
8219 struct gimplify_omp_ctx
*octx
= outer_ctx
->outer_context
;
8220 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
8221 if (octx
->outer_context
)
8223 octx
= octx
->outer_context
;
8224 if (octx
->region_type
== ORT_WORKSHARE
8225 && octx
->combined_loop
8226 && splay_tree_lookup (octx
->variables
,
8227 (splay_tree_key
) decl
) == NULL
8228 && !omp_check_private (octx
, decl
, false))
8230 omp_add_variable (octx
, decl
,
8231 GOVD_LASTPRIVATE
| GOVD_SEEN
);
8232 octx
= octx
->outer_context
;
8234 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
8235 == ORT_COMBINED_TEAMS
)
8236 && (splay_tree_lookup (octx
->variables
,
8237 (splay_tree_key
) decl
)
8240 omp_add_variable (octx
, decl
,
8241 GOVD_SHARED
| GOVD_SEEN
);
8242 octx
= octx
->outer_context
;
8246 omp_notice_variable (octx
, decl
, true);
8249 else if (outer_ctx
->outer_context
)
8250 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8253 case OMP_CLAUSE_REDUCTION
:
8254 if (OMP_CLAUSE_REDUCTION_TASK (c
))
8256 if (region_type
== ORT_WORKSHARE
)
8259 nowait
= omp_find_clause (*list_p
,
8260 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8262 && (outer_ctx
== NULL
8263 || outer_ctx
->region_type
!= ORT_COMBINED_PARALLEL
))
8265 error_at (OMP_CLAUSE_LOCATION (c
),
8266 "%<task%> reduction modifier on a construct "
8267 "with a %<nowait%> clause");
8268 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
8271 else if ((region_type
& ORT_PARALLEL
) != ORT_PARALLEL
)
8273 error_at (OMP_CLAUSE_LOCATION (c
),
8274 "invalid %<task%> reduction modifier on construct "
8275 "other than %<parallel%>, %<for%> or %<sections%>");
8276 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
8279 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
8283 error_at (OMP_CLAUSE_LOCATION (c
),
8284 "%<inscan%> %<reduction%> clause on "
8285 "%qs construct", "sections");
8286 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8289 error_at (OMP_CLAUSE_LOCATION (c
),
8290 "%<inscan%> %<reduction%> clause on "
8291 "%qs construct", "parallel");
8292 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8295 error_at (OMP_CLAUSE_LOCATION (c
),
8296 "%<inscan%> %<reduction%> clause on "
8297 "%qs construct", "teams");
8298 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8301 error_at (OMP_CLAUSE_LOCATION (c
),
8302 "%<inscan%> %<reduction%> clause on "
8303 "%qs construct", "taskloop");
8304 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8310 case OMP_CLAUSE_IN_REDUCTION
:
8311 case OMP_CLAUSE_TASK_REDUCTION
:
8312 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
8313 /* OpenACC permits reductions on private variables. */
8314 if (!(region_type
& ORT_ACC
)
8315 /* taskgroup is actually not a worksharing region. */
8316 && code
!= OMP_TASKGROUP
)
8317 check_non_private
= omp_clause_code_name
[OMP_CLAUSE_CODE (c
)];
8318 decl
= OMP_CLAUSE_DECL (c
);
8319 if (TREE_CODE (decl
) == MEM_REF
)
8321 tree type
= TREE_TYPE (decl
);
8322 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
8323 NULL
, is_gimple_val
, fb_rvalue
, false)
8329 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8332 omp_firstprivatize_variable (ctx
, v
);
8333 omp_notice_variable (ctx
, v
, true);
8335 decl
= TREE_OPERAND (decl
, 0);
8336 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
8338 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
8339 NULL
, is_gimple_val
, fb_rvalue
, false)
8345 v
= TREE_OPERAND (decl
, 1);
8348 omp_firstprivatize_variable (ctx
, v
);
8349 omp_notice_variable (ctx
, v
, true);
8351 decl
= TREE_OPERAND (decl
, 0);
8353 if (TREE_CODE (decl
) == ADDR_EXPR
8354 || TREE_CODE (decl
) == INDIRECT_REF
)
8355 decl
= TREE_OPERAND (decl
, 0);
8358 case OMP_CLAUSE_LINEAR
:
8359 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
8360 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8367 if (code
== OMP_SIMD
8368 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8370 struct gimplify_omp_ctx
*octx
= outer_ctx
;
8372 && octx
->region_type
== ORT_WORKSHARE
8373 && octx
->combined_loop
8374 && !octx
->distribute
)
8376 if (octx
->outer_context
8377 && (octx
->outer_context
->region_type
8378 == ORT_COMBINED_PARALLEL
))
8379 octx
= octx
->outer_context
->outer_context
;
8381 octx
= octx
->outer_context
;
8384 && octx
->region_type
== ORT_WORKSHARE
8385 && octx
->combined_loop
8386 && octx
->distribute
)
8388 error_at (OMP_CLAUSE_LOCATION (c
),
8389 "%<linear%> clause for variable other than "
8390 "loop iterator specified on construct "
8391 "combined with %<distribute%>");
8396 /* For combined #pragma omp parallel for simd, need to put
8397 lastprivate and perhaps firstprivate too on the
8398 parallel. Similarly for #pragma omp for simd. */
8399 struct gimplify_omp_ctx
*octx
= outer_ctx
;
8403 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8404 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8406 decl
= OMP_CLAUSE_DECL (c
);
8407 if (error_operand_p (decl
))
8413 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8414 flags
|= GOVD_FIRSTPRIVATE
;
8415 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8416 flags
|= GOVD_LASTPRIVATE
;
8418 && octx
->region_type
== ORT_WORKSHARE
8419 && octx
->combined_loop
)
8421 if (octx
->outer_context
8422 && (octx
->outer_context
->region_type
8423 == ORT_COMBINED_PARALLEL
))
8424 octx
= octx
->outer_context
;
8425 else if (omp_check_private (octx
, decl
, false))
8429 && (octx
->region_type
& ORT_TASK
) != 0
8430 && octx
->combined_loop
)
8433 && octx
->region_type
== ORT_COMBINED_PARALLEL
8434 && ctx
->region_type
== ORT_WORKSHARE
8435 && octx
== outer_ctx
)
8436 flags
= GOVD_SEEN
| GOVD_SHARED
;
8438 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
8439 == ORT_COMBINED_TEAMS
))
8440 flags
= GOVD_SEEN
| GOVD_SHARED
;
8442 && octx
->region_type
== ORT_COMBINED_TARGET
)
8444 flags
&= ~GOVD_LASTPRIVATE
;
8445 if (flags
== GOVD_SEEN
)
8451 = splay_tree_lookup (octx
->variables
,
8452 (splay_tree_key
) decl
);
8453 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
8458 omp_add_variable (octx
, decl
, flags
);
8459 if (octx
->outer_context
== NULL
)
8461 octx
= octx
->outer_context
;
8466 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8467 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
8468 omp_notice_variable (octx
, decl
, true);
8470 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
8471 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8472 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8474 notice_outer
= false;
8475 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
8479 case OMP_CLAUSE_MAP
:
8480 decl
= OMP_CLAUSE_DECL (c
);
8481 if (error_operand_p (decl
))
8488 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
8491 case OMP_TARGET_DATA
:
8492 case OMP_TARGET_ENTER_DATA
:
8493 case OMP_TARGET_EXIT_DATA
:
8494 case OACC_ENTER_DATA
:
8495 case OACC_EXIT_DATA
:
8496 case OACC_HOST_DATA
:
8497 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8498 || (OMP_CLAUSE_MAP_KIND (c
)
8499 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
8500 /* For target {,enter ,exit }data only the array slice is
8501 mapped, but not the pointer to it. */
8509 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
8511 struct gimplify_omp_ctx
*octx
;
8512 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
8514 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
8517 = splay_tree_lookup (octx
->variables
,
8518 (splay_tree_key
) decl
);
8520 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
8521 "declared in enclosing %<host_data%> region",
8525 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
8526 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
8527 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8528 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
8529 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8534 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8535 || (OMP_CLAUSE_MAP_KIND (c
)
8536 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
8537 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
8540 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
8542 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
8543 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
8548 if (TREE_CODE (d
) == ARRAY_REF
)
8550 while (TREE_CODE (d
) == ARRAY_REF
)
8551 d
= TREE_OPERAND (d
, 0);
8552 if (TREE_CODE (d
) == COMPONENT_REF
8553 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
8556 pd
= &OMP_CLAUSE_DECL (c
);
8558 && TREE_CODE (decl
) == INDIRECT_REF
8559 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
8560 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
8563 pd
= &TREE_OPERAND (decl
, 0);
8564 decl
= TREE_OPERAND (decl
, 0);
8566 if (TREE_CODE (decl
) == COMPONENT_REF
)
8568 while (TREE_CODE (decl
) == COMPONENT_REF
)
8569 decl
= TREE_OPERAND (decl
, 0);
8570 if (TREE_CODE (decl
) == INDIRECT_REF
8571 && DECL_P (TREE_OPERAND (decl
, 0))
8572 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
8574 decl
= TREE_OPERAND (decl
, 0);
8576 if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
)
8584 if (error_operand_p (decl
))
8590 tree stype
= TREE_TYPE (decl
);
8591 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
8592 stype
= TREE_TYPE (stype
);
8593 if (TYPE_SIZE_UNIT (stype
) == NULL
8594 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
8596 error_at (OMP_CLAUSE_LOCATION (c
),
8597 "mapping field %qE of variable length "
8598 "structure", OMP_CLAUSE_DECL (c
));
8603 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
8605 /* Error recovery. */
8606 if (prev_list_p
== NULL
)
8611 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
8613 tree ch
= OMP_CLAUSE_CHAIN (*prev_list_p
);
8614 if (ch
== NULL_TREE
|| OMP_CLAUSE_CHAIN (ch
) != c
)
8623 poly_int64 bitsize
, bitpos
;
8625 int unsignedp
, reversep
, volatilep
= 0;
8626 tree base
= OMP_CLAUSE_DECL (c
);
8627 while (TREE_CODE (base
) == ARRAY_REF
)
8628 base
= TREE_OPERAND (base
, 0);
8629 if (TREE_CODE (base
) == INDIRECT_REF
)
8630 base
= TREE_OPERAND (base
, 0);
8631 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
8632 &mode
, &unsignedp
, &reversep
,
8634 tree orig_base
= base
;
8635 if ((TREE_CODE (base
) == INDIRECT_REF
8636 || (TREE_CODE (base
) == MEM_REF
8637 && integer_zerop (TREE_OPERAND (base
, 1))))
8638 && DECL_P (TREE_OPERAND (base
, 0))
8639 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
8641 base
= TREE_OPERAND (base
, 0);
8642 gcc_assert (base
== decl
8643 && (offset
== NULL_TREE
8644 || poly_int_tree_p (offset
)));
8647 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
8648 bool ptr
= (OMP_CLAUSE_MAP_KIND (c
)
8649 == GOMP_MAP_ALWAYS_POINTER
);
8650 if (n
== NULL
|| (n
->value
& GOVD_MAP
) == 0)
8652 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8654 OMP_CLAUSE_SET_MAP_KIND (l
, GOMP_MAP_STRUCT
);
8655 if (orig_base
!= base
)
8656 OMP_CLAUSE_DECL (l
) = unshare_expr (orig_base
);
8658 OMP_CLAUSE_DECL (l
) = decl
;
8659 OMP_CLAUSE_SIZE (l
) = size_int (1);
8660 if (struct_map_to_clause
== NULL
)
8661 struct_map_to_clause
= new hash_map
<tree
, tree
>;
8662 struct_map_to_clause
->put (decl
, l
);
8665 enum gomp_map_kind mkind
8666 = code
== OMP_TARGET_EXIT_DATA
8667 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
8668 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8670 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8671 OMP_CLAUSE_DECL (c2
)
8672 = unshare_expr (OMP_CLAUSE_DECL (c
));
8673 OMP_CLAUSE_CHAIN (c2
) = *prev_list_p
;
8674 OMP_CLAUSE_SIZE (c2
)
8675 = TYPE_SIZE_UNIT (ptr_type_node
);
8676 OMP_CLAUSE_CHAIN (l
) = c2
;
8677 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
8679 tree c4
= OMP_CLAUSE_CHAIN (*prev_list_p
);
8681 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8683 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
8684 OMP_CLAUSE_DECL (c3
)
8685 = unshare_expr (OMP_CLAUSE_DECL (c4
));
8686 OMP_CLAUSE_SIZE (c3
)
8687 = TYPE_SIZE_UNIT (ptr_type_node
);
8688 OMP_CLAUSE_CHAIN (c3
) = *prev_list_p
;
8689 OMP_CLAUSE_CHAIN (c2
) = c3
;
8696 OMP_CLAUSE_CHAIN (l
) = c
;
8698 list_p
= &OMP_CLAUSE_CHAIN (l
);
8700 if (orig_base
!= base
&& code
== OMP_TARGET
)
8702 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8704 enum gomp_map_kind mkind
8705 = GOMP_MAP_FIRSTPRIVATE_REFERENCE
;
8706 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8707 OMP_CLAUSE_DECL (c2
) = decl
;
8708 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
8709 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
8710 OMP_CLAUSE_CHAIN (l
) = c2
;
8712 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
8713 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) || ptr
)
8719 tree
*osc
= struct_map_to_clause
->get (decl
);
8720 tree
*sc
= NULL
, *scp
= NULL
;
8721 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) || ptr
)
8722 n
->value
|= GOVD_SEEN
;
8723 poly_offset_int o1
, o2
;
8725 o1
= wi::to_poly_offset (offset
);
8728 if (maybe_ne (bitpos
, 0))
8729 o1
+= bits_to_bytes_round_down (bitpos
);
8730 sc
= &OMP_CLAUSE_CHAIN (*osc
);
8732 && (OMP_CLAUSE_MAP_KIND (*sc
)
8733 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
8734 sc
= &OMP_CLAUSE_CHAIN (*sc
);
8735 for (; *sc
!= c
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
8736 if (ptr
&& sc
== prev_list_p
)
8738 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
8740 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
8742 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
8748 poly_int64 bitsize2
, bitpos2
;
8749 base
= OMP_CLAUSE_DECL (*sc
);
8750 if (TREE_CODE (base
) == ARRAY_REF
)
8752 while (TREE_CODE (base
) == ARRAY_REF
)
8753 base
= TREE_OPERAND (base
, 0);
8754 if (TREE_CODE (base
) != COMPONENT_REF
8755 || (TREE_CODE (TREE_TYPE (base
))
8759 else if (TREE_CODE (base
) == INDIRECT_REF
8760 && (TREE_CODE (TREE_OPERAND (base
, 0))
8762 && (TREE_CODE (TREE_TYPE
8763 (TREE_OPERAND (base
, 0)))
8765 base
= TREE_OPERAND (base
, 0);
8766 base
= get_inner_reference (base
, &bitsize2
,
8769 &reversep
, &volatilep
);
8770 if ((TREE_CODE (base
) == INDIRECT_REF
8771 || (TREE_CODE (base
) == MEM_REF
8772 && integer_zerop (TREE_OPERAND (base
,
8774 && DECL_P (TREE_OPERAND (base
, 0))
8775 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
,
8778 base
= TREE_OPERAND (base
, 0);
8783 gcc_assert (offset
== NULL_TREE
8784 || poly_int_tree_p (offset
));
8785 tree d1
= OMP_CLAUSE_DECL (*sc
);
8786 tree d2
= OMP_CLAUSE_DECL (c
);
8787 while (TREE_CODE (d1
) == ARRAY_REF
)
8788 d1
= TREE_OPERAND (d1
, 0);
8789 while (TREE_CODE (d2
) == ARRAY_REF
)
8790 d2
= TREE_OPERAND (d2
, 0);
8791 if (TREE_CODE (d1
) == INDIRECT_REF
)
8792 d1
= TREE_OPERAND (d1
, 0);
8793 if (TREE_CODE (d2
) == INDIRECT_REF
)
8794 d2
= TREE_OPERAND (d2
, 0);
8795 while (TREE_CODE (d1
) == COMPONENT_REF
)
8796 if (TREE_CODE (d2
) == COMPONENT_REF
8797 && TREE_OPERAND (d1
, 1)
8798 == TREE_OPERAND (d2
, 1))
8800 d1
= TREE_OPERAND (d1
, 0);
8801 d2
= TREE_OPERAND (d2
, 0);
8807 error_at (OMP_CLAUSE_LOCATION (c
),
8808 "%qE appears more than once in map "
8809 "clauses", OMP_CLAUSE_DECL (c
));
8814 o2
= wi::to_poly_offset (offset2
);
8817 o2
+= bits_to_bytes_round_down (bitpos2
);
8818 if (maybe_lt (o1
, o2
)
8819 || (known_eq (o1
, o2
)
8820 && maybe_lt (bitpos
, bitpos2
)))
8830 OMP_CLAUSE_SIZE (*osc
)
8831 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
),
8835 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8837 tree cl
= NULL_TREE
;
8838 enum gomp_map_kind mkind
8839 = code
== OMP_TARGET_EXIT_DATA
8840 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
8841 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8842 OMP_CLAUSE_DECL (c2
)
8843 = unshare_expr (OMP_CLAUSE_DECL (c
));
8844 OMP_CLAUSE_CHAIN (c2
) = scp
? *scp
: *prev_list_p
;
8845 OMP_CLAUSE_SIZE (c2
)
8846 = TYPE_SIZE_UNIT (ptr_type_node
);
8847 cl
= scp
? *prev_list_p
: c2
;
8848 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
8850 tree c4
= OMP_CLAUSE_CHAIN (*prev_list_p
);
8852 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
8854 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
8855 OMP_CLAUSE_DECL (c3
)
8856 = unshare_expr (OMP_CLAUSE_DECL (c4
));
8857 OMP_CLAUSE_SIZE (c3
)
8858 = TYPE_SIZE_UNIT (ptr_type_node
);
8859 OMP_CLAUSE_CHAIN (c3
) = *prev_list_p
;
8861 OMP_CLAUSE_CHAIN (c2
) = c3
;
8867 if (sc
== prev_list_p
)
8874 *prev_list_p
= OMP_CLAUSE_CHAIN (c
);
8875 list_p
= prev_list_p
;
8877 OMP_CLAUSE_CHAIN (c
) = *sc
;
8884 *list_p
= OMP_CLAUSE_CHAIN (c
);
8885 OMP_CLAUSE_CHAIN (c
) = *sc
;
8892 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_POINTER
8893 && OMP_CLAUSE_CHAIN (c
)
8894 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
)) == OMP_CLAUSE_MAP
8895 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
8896 == GOMP_MAP_ALWAYS_POINTER
))
8897 prev_list_p
= list_p
;
8900 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
8901 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
8902 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
8903 flags
|= GOVD_MAP_ALWAYS_TO
;
8906 case OMP_CLAUSE_DEPEND
:
8907 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
8909 tree deps
= OMP_CLAUSE_DECL (c
);
8910 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
8912 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
8913 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
8914 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
8915 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
8916 deps
= TREE_CHAIN (deps
);
8920 else if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
8922 if (handled_depend_iterators
== -1)
8923 handled_depend_iterators
= gimplify_omp_depend (list_p
, pre_p
);
8924 if (handled_depend_iterators
)
8926 if (handled_depend_iterators
== 2)
8930 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8932 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8933 NULL
, is_gimple_val
, fb_rvalue
);
8934 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8936 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8941 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8942 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8943 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8951 case OMP_CLAUSE_FROM
:
8952 case OMP_CLAUSE__CACHE_
:
8953 decl
= OMP_CLAUSE_DECL (c
);
8954 if (error_operand_p (decl
))
8959 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
8960 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
8961 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
8962 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
8963 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8970 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
8971 NULL
, is_gimple_lvalue
, fb_lvalue
)
8981 case OMP_CLAUSE_USE_DEVICE_PTR
:
8982 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8984 case OMP_CLAUSE_IS_DEVICE_PTR
:
8985 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8989 decl
= OMP_CLAUSE_DECL (c
);
8991 if (error_operand_p (decl
))
8996 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
8998 tree t
= omp_member_access_dummy_var (decl
);
9001 tree v
= DECL_VALUE_EXPR (decl
);
9002 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
9004 omp_notice_variable (outer_ctx
, t
, true);
9007 if (code
== OACC_DATA
9008 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9009 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
9010 flags
|= GOVD_MAP_0LEN_ARRAY
;
9011 omp_add_variable (ctx
, decl
, flags
);
9012 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9013 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
9014 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9015 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9017 omp_add_variable (ctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
9018 GOVD_LOCAL
| GOVD_SEEN
);
9019 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
9020 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
9022 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
9024 omp_add_variable (ctx
,
9025 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
9026 GOVD_LOCAL
| GOVD_SEEN
);
9027 gimplify_omp_ctxp
= ctx
;
9028 push_gimplify_context ();
9030 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9031 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9033 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
9034 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
9035 pop_gimplify_context
9036 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
9037 push_gimplify_context ();
9038 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
9039 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9040 pop_gimplify_context
9041 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
9042 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
9043 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
9045 gimplify_omp_ctxp
= outer_ctx
;
9047 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
9048 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
9050 gimplify_omp_ctxp
= ctx
;
9051 push_gimplify_context ();
9052 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
9054 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
9056 TREE_SIDE_EFFECTS (bind
) = 1;
9057 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
9058 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
9060 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
9061 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
9062 pop_gimplify_context
9063 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
9064 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
9066 gimplify_omp_ctxp
= outer_ctx
;
9068 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
9069 && OMP_CLAUSE_LINEAR_STMT (c
))
9071 gimplify_omp_ctxp
= ctx
;
9072 push_gimplify_context ();
9073 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
9075 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
9077 TREE_SIDE_EFFECTS (bind
) = 1;
9078 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
9079 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
9081 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
9082 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
9083 pop_gimplify_context
9084 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
9085 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
9087 gimplify_omp_ctxp
= outer_ctx
;
9093 case OMP_CLAUSE_COPYIN
:
9094 case OMP_CLAUSE_COPYPRIVATE
:
9095 decl
= OMP_CLAUSE_DECL (c
);
9096 if (error_operand_p (decl
))
9101 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
9103 && !omp_check_private (ctx
, decl
, true))
9106 if (is_global_var (decl
))
9108 if (DECL_THREAD_LOCAL_P (decl
))
9110 else if (DECL_HAS_VALUE_EXPR_P (decl
))
9112 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
9116 && DECL_THREAD_LOCAL_P (value
))
9121 error_at (OMP_CLAUSE_LOCATION (c
),
9122 "copyprivate variable %qE is not threadprivate"
9123 " or private in outer context", DECL_NAME (decl
));
9126 if ((region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
9128 && outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
9129 && (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9130 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
9131 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
))
9134 = splay_tree_lookup (outer_ctx
->variables
,
9135 (splay_tree_key
)decl
);
9136 if (on
== NULL
|| (on
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9138 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9139 && TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
9140 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
9141 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
9142 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
9144 omp_firstprivatize_variable (outer_ctx
, decl
);
9146 omp_add_variable (outer_ctx
, decl
,
9147 GOVD_SEEN
| GOVD_SHARED
);
9148 omp_notice_variable (outer_ctx
, decl
, true);
9152 omp_notice_variable (outer_ctx
, decl
, true);
9153 if (check_non_private
9154 && region_type
== ORT_WORKSHARE
9155 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
9156 || decl
== OMP_CLAUSE_DECL (c
)
9157 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
9158 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
9160 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
9161 == POINTER_PLUS_EXPR
9162 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9163 (OMP_CLAUSE_DECL (c
), 0), 0))
9165 && omp_check_private (ctx
, decl
, false))
9167 error ("%s variable %qE is private in outer context",
9168 check_non_private
, DECL_NAME (decl
));
9174 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
9175 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
9178 for (int i
= 0; i
< 2; i
++)
9179 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
9181 case VOID_CST
: p
[i
] = "cancel"; break;
9182 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
9183 case OMP_SIMD
: p
[i
] = "simd"; break;
9184 case OMP_TASK
: p
[i
] = "task"; break;
9185 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
9186 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
9187 case OMP_TARGET
: p
[i
] = "target"; break;
9188 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
9189 case OMP_TARGET_ENTER_DATA
:
9190 p
[i
] = "target enter data"; break;
9191 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
9192 default: gcc_unreachable ();
9194 error_at (OMP_CLAUSE_LOCATION (c
),
9195 "expected %qs %<if%> clause modifier rather than %qs",
9201 case OMP_CLAUSE_FINAL
:
9202 OMP_CLAUSE_OPERAND (c
, 0)
9203 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
9206 case OMP_CLAUSE_SCHEDULE
:
9207 case OMP_CLAUSE_NUM_THREADS
:
9208 case OMP_CLAUSE_NUM_TEAMS
:
9209 case OMP_CLAUSE_THREAD_LIMIT
:
9210 case OMP_CLAUSE_DIST_SCHEDULE
:
9211 case OMP_CLAUSE_DEVICE
:
9212 case OMP_CLAUSE_PRIORITY
:
9213 case OMP_CLAUSE_GRAINSIZE
:
9214 case OMP_CLAUSE_NUM_TASKS
:
9215 case OMP_CLAUSE_HINT
:
9216 case OMP_CLAUSE_ASYNC
:
9217 case OMP_CLAUSE_WAIT
:
9218 case OMP_CLAUSE_NUM_GANGS
:
9219 case OMP_CLAUSE_NUM_WORKERS
:
9220 case OMP_CLAUSE_VECTOR_LENGTH
:
9221 case OMP_CLAUSE_WORKER
:
9222 case OMP_CLAUSE_VECTOR
:
9223 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
9224 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9228 case OMP_CLAUSE_GANG
:
9229 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
9230 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9232 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
9233 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9237 case OMP_CLAUSE_NOWAIT
:
9241 case OMP_CLAUSE_ORDERED
:
9242 case OMP_CLAUSE_UNTIED
:
9243 case OMP_CLAUSE_COLLAPSE
:
9244 case OMP_CLAUSE_TILE
:
9245 case OMP_CLAUSE_AUTO
:
9246 case OMP_CLAUSE_SEQ
:
9247 case OMP_CLAUSE_INDEPENDENT
:
9248 case OMP_CLAUSE_MERGEABLE
:
9249 case OMP_CLAUSE_PROC_BIND
:
9250 case OMP_CLAUSE_SAFELEN
:
9251 case OMP_CLAUSE_SIMDLEN
:
9252 case OMP_CLAUSE_NOGROUP
:
9253 case OMP_CLAUSE_THREADS
:
9254 case OMP_CLAUSE_SIMD
:
9255 case OMP_CLAUSE_IF_PRESENT
:
9256 case OMP_CLAUSE_FINALIZE
:
9259 case OMP_CLAUSE_DEFAULTMAP
:
9260 enum gimplify_defaultmap_kind gdmkmin
, gdmkmax
;
9261 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c
))
9263 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
:
9264 gdmkmin
= GDMK_SCALAR
;
9265 gdmkmax
= GDMK_POINTER
;
9267 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
:
9268 gdmkmin
= gdmkmax
= GDMK_SCALAR
;
9270 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
:
9271 gdmkmin
= gdmkmax
= GDMK_AGGREGATE
;
9273 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
:
9274 gdmkmin
= gdmkmax
= GDMK_ALLOCATABLE
;
9276 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
:
9277 gdmkmin
= gdmkmax
= GDMK_POINTER
;
9282 for (int gdmk
= gdmkmin
; gdmk
<= gdmkmax
; gdmk
++)
9283 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c
))
9285 case OMP_CLAUSE_DEFAULTMAP_ALLOC
:
9286 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_ALLOC_ONLY
;
9288 case OMP_CLAUSE_DEFAULTMAP_TO
:
9289 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_TO_ONLY
;
9291 case OMP_CLAUSE_DEFAULTMAP_FROM
:
9292 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FROM_ONLY
;
9294 case OMP_CLAUSE_DEFAULTMAP_TOFROM
:
9295 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
9297 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
:
9298 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
9300 case OMP_CLAUSE_DEFAULTMAP_NONE
:
9301 ctx
->defaultmap
[gdmk
] = 0;
9303 case OMP_CLAUSE_DEFAULTMAP_DEFAULT
:
9307 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
9309 case GDMK_AGGREGATE
:
9310 case GDMK_ALLOCATABLE
:
9311 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
9314 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
9325 case OMP_CLAUSE_ALIGNED
:
9326 decl
= OMP_CLAUSE_DECL (c
);
9327 if (error_operand_p (decl
))
9332 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
9333 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9338 if (!is_global_var (decl
)
9339 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
9340 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
9343 case OMP_CLAUSE_NONTEMPORAL
:
9344 decl
= OMP_CLAUSE_DECL (c
);
9345 if (error_operand_p (decl
))
9350 omp_add_variable (ctx
, decl
, GOVD_NONTEMPORAL
);
9353 case OMP_CLAUSE_DEFAULT
:
9354 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
9357 case OMP_CLAUSE_INCLUSIVE
:
9358 case OMP_CLAUSE_EXCLUSIVE
:
9359 decl
= OMP_CLAUSE_DECL (c
);
9361 splay_tree_node n
= splay_tree_lookup (outer_ctx
->variables
,
9362 (splay_tree_key
) decl
);
9363 if (n
== NULL
|| (n
->value
& GOVD_REDUCTION
) == 0)
9365 error_at (OMP_CLAUSE_LOCATION (c
),
9366 "%qD specified in %qs clause but not in %<inscan%> "
9367 "%<reduction%> clause on the containing construct",
9368 decl
, omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
9373 n
->value
|= GOVD_REDUCTION_INSCAN
;
9374 if (outer_ctx
->region_type
== ORT_SIMD
9375 && outer_ctx
->outer_context
9376 && outer_ctx
->outer_context
->region_type
== ORT_WORKSHARE
)
9378 n
= splay_tree_lookup (outer_ctx
->outer_context
->variables
,
9379 (splay_tree_key
) decl
);
9380 if (n
&& (n
->value
& GOVD_REDUCTION
) != 0)
9381 n
->value
|= GOVD_REDUCTION_INSCAN
;
9391 if (code
== OACC_DATA
9392 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9393 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9394 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9397 *list_p
= OMP_CLAUSE_CHAIN (c
);
9399 list_p
= &OMP_CLAUSE_CHAIN (c
);
9402 ctx
->clauses
= *orig_list_p
;
9403 gimplify_omp_ctxp
= ctx
;
9404 if (struct_map_to_clause
)
9405 delete struct_map_to_clause
;
9408 /* Return true if DECL is a candidate for shared to firstprivate
9409 optimization. We only consider non-addressable scalars, not
9410 too big, and not references. */
9413 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
9415 if (TREE_ADDRESSABLE (decl
))
9417 tree type
= TREE_TYPE (decl
);
9418 if (!is_gimple_reg_type (type
)
9419 || TREE_CODE (type
) == REFERENCE_TYPE
9420 || TREE_ADDRESSABLE (type
))
9422 /* Don't optimize too large decls, as each thread/task will have
9424 HOST_WIDE_INT len
= int_size_in_bytes (type
);
9425 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
9427 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
9432 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9433 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
9434 GOVD_WRITTEN in outer contexts. */
9437 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
9439 for (; ctx
; ctx
= ctx
->outer_context
)
9441 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
9442 (splay_tree_key
) decl
);
9445 else if (n
->value
& GOVD_SHARED
)
9447 n
->value
|= GOVD_WRITTEN
;
9450 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
9455 /* Helper callback for walk_gimple_seq to discover possible stores
9456 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9457 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9461 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
9463 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
9472 if (handled_component_p (op
))
9473 op
= TREE_OPERAND (op
, 0);
9474 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
9475 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
9476 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
9481 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
9484 omp_mark_stores (gimplify_omp_ctxp
, op
);
9488 /* Helper callback for walk_gimple_seq to discover possible stores
9489 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9490 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9494 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
9495 bool *handled_ops_p
,
9496 struct walk_stmt_info
*wi
)
9498 gimple
*stmt
= gsi_stmt (*gsi_p
);
9499 switch (gimple_code (stmt
))
9501 /* Don't recurse on OpenMP constructs for which
9502 gimplify_adjust_omp_clauses already handled the bodies,
9503 except handle gimple_omp_for_pre_body. */
9504 case GIMPLE_OMP_FOR
:
9505 *handled_ops_p
= true;
9506 if (gimple_omp_for_pre_body (stmt
))
9507 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
9508 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
9510 case GIMPLE_OMP_PARALLEL
:
9511 case GIMPLE_OMP_TASK
:
9512 case GIMPLE_OMP_SECTIONS
:
9513 case GIMPLE_OMP_SINGLE
:
9514 case GIMPLE_OMP_TARGET
:
9515 case GIMPLE_OMP_TEAMS
:
9516 case GIMPLE_OMP_CRITICAL
:
9517 *handled_ops_p
= true;
9525 struct gimplify_adjust_omp_clauses_data
9531 /* For all variables that were not actually used within the context,
9532 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
9535 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
9537 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
9539 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
9540 tree decl
= (tree
) n
->key
;
9541 unsigned flags
= n
->value
;
9542 enum omp_clause_code code
;
9546 if (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
9547 && (flags
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0)
9548 flags
= GOVD_SHARED
| GOVD_SEEN
| GOVD_WRITTEN
;
9549 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
9551 if ((flags
& GOVD_SEEN
) == 0)
9553 if (flags
& GOVD_DEBUG_PRIVATE
)
9555 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_SHARED
);
9556 private_debug
= true;
9558 else if (flags
& GOVD_MAP
)
9559 private_debug
= false;
9562 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
9563 !!(flags
& GOVD_SHARED
));
9565 code
= OMP_CLAUSE_PRIVATE
;
9566 else if (flags
& GOVD_MAP
)
9568 code
= OMP_CLAUSE_MAP
;
9569 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
9570 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
9572 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
9576 else if (flags
& GOVD_SHARED
)
9578 if (is_global_var (decl
))
9580 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
9584 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9585 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
9586 | GOVD_PRIVATE
| GOVD_REDUCTION
9587 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
9589 ctx
= ctx
->outer_context
;
9594 code
= OMP_CLAUSE_SHARED
;
9596 else if (flags
& GOVD_PRIVATE
)
9597 code
= OMP_CLAUSE_PRIVATE
;
9598 else if (flags
& GOVD_FIRSTPRIVATE
)
9600 code
= OMP_CLAUSE_FIRSTPRIVATE
;
9601 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
9602 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
9603 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
9605 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
9606 "%<target%> construct", decl
);
9610 else if (flags
& GOVD_LASTPRIVATE
)
9611 code
= OMP_CLAUSE_LASTPRIVATE
;
9612 else if (flags
& (GOVD_ALIGNED
| GOVD_NONTEMPORAL
))
9614 else if (flags
& GOVD_CONDTEMP
)
9616 code
= OMP_CLAUSE__CONDTEMP_
;
9617 gimple_add_tmp_var (decl
);
9622 if (((flags
& GOVD_LASTPRIVATE
)
9623 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
9624 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9625 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
9627 tree chain
= *list_p
;
9628 clause
= build_omp_clause (input_location
, code
);
9629 OMP_CLAUSE_DECL (clause
) = decl
;
9630 OMP_CLAUSE_CHAIN (clause
) = chain
;
9632 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
9633 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
9634 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
9635 else if (code
== OMP_CLAUSE_SHARED
9636 && (flags
& GOVD_WRITTEN
) == 0
9637 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9638 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
9639 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
9640 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
9641 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
9643 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
9644 OMP_CLAUSE_DECL (nc
) = decl
;
9645 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
9646 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
9647 OMP_CLAUSE_DECL (clause
)
9648 = build_simple_mem_ref_loc (input_location
, decl
);
9649 OMP_CLAUSE_DECL (clause
)
9650 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
9651 build_int_cst (build_pointer_type (char_type_node
), 0));
9652 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
9653 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
9654 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
9655 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
9656 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
9657 OMP_CLAUSE_CHAIN (nc
) = chain
;
9658 OMP_CLAUSE_CHAIN (clause
) = nc
;
9659 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9660 gimplify_omp_ctxp
= ctx
->outer_context
;
9661 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
9662 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
9663 gimplify_omp_ctxp
= ctx
;
9665 else if (code
== OMP_CLAUSE_MAP
)
9668 /* Not all combinations of these GOVD_MAP flags are actually valid. */
9669 switch (flags
& (GOVD_MAP_TO_ONLY
9671 | GOVD_MAP_FORCE_PRESENT
9672 | GOVD_MAP_ALLOC_ONLY
9673 | GOVD_MAP_FROM_ONLY
))
9676 kind
= GOMP_MAP_TOFROM
;
9678 case GOVD_MAP_FORCE
:
9679 kind
= GOMP_MAP_TOFROM
| GOMP_MAP_FLAG_FORCE
;
9681 case GOVD_MAP_TO_ONLY
:
9684 case GOVD_MAP_FROM_ONLY
:
9685 kind
= GOMP_MAP_FROM
;
9687 case GOVD_MAP_ALLOC_ONLY
:
9688 kind
= GOMP_MAP_ALLOC
;
9690 case GOVD_MAP_TO_ONLY
| GOVD_MAP_FORCE
:
9691 kind
= GOMP_MAP_TO
| GOMP_MAP_FLAG_FORCE
;
9693 case GOVD_MAP_FORCE_PRESENT
:
9694 kind
= GOMP_MAP_FORCE_PRESENT
;
9699 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
9700 if (DECL_SIZE (decl
)
9701 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
9703 tree decl2
= DECL_VALUE_EXPR (decl
);
9704 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
9705 decl2
= TREE_OPERAND (decl2
, 0);
9706 gcc_assert (DECL_P (decl2
));
9707 tree mem
= build_simple_mem_ref (decl2
);
9708 OMP_CLAUSE_DECL (clause
) = mem
;
9709 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9710 if (gimplify_omp_ctxp
->outer_context
)
9712 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
9713 omp_notice_variable (ctx
, decl2
, true);
9714 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
9716 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
9718 OMP_CLAUSE_DECL (nc
) = decl
;
9719 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
9720 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
9721 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
9723 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
9724 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
9725 OMP_CLAUSE_CHAIN (clause
) = nc
;
9727 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
9728 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
9730 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
9731 OMP_CLAUSE_SIZE (clause
)
9732 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
9733 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9734 gimplify_omp_ctxp
= ctx
->outer_context
;
9735 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
9736 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
9737 gimplify_omp_ctxp
= ctx
;
9738 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
9740 OMP_CLAUSE_DECL (nc
) = decl
;
9741 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
9742 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
9743 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
9744 OMP_CLAUSE_CHAIN (clause
) = nc
;
9747 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
9749 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
9751 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
9752 OMP_CLAUSE_DECL (nc
) = decl
;
9753 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
9754 OMP_CLAUSE_CHAIN (nc
) = chain
;
9755 OMP_CLAUSE_CHAIN (clause
) = nc
;
9756 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9757 gimplify_omp_ctxp
= ctx
->outer_context
;
9758 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
9759 gimplify_omp_ctxp
= ctx
;
9762 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9763 gimplify_omp_ctxp
= ctx
->outer_context
;
9764 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
);
9765 if (gimplify_omp_ctxp
)
9766 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
9767 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
9768 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
9769 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
9771 gimplify_omp_ctxp
= ctx
;
9776 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
9777 enum tree_code code
)
9779 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
9780 tree
*orig_list_p
= list_p
;
9782 bool has_inscan_reductions
= false;
9786 struct gimplify_omp_ctx
*octx
;
9787 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
9788 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
9792 struct walk_stmt_info wi
;
9793 memset (&wi
, 0, sizeof (wi
));
9794 walk_gimple_seq (body
, omp_find_stores_stmt
,
9795 omp_find_stores_op
, &wi
);
9799 if (ctx
->region_type
== ORT_WORKSHARE
9800 && ctx
->outer_context
9801 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
)
9803 for (c
= ctx
->outer_context
->clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
9804 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
9805 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
9807 decl
= OMP_CLAUSE_DECL (c
);
9809 = splay_tree_lookup (ctx
->outer_context
->variables
,
9810 (splay_tree_key
) decl
);
9811 gcc_checking_assert (!splay_tree_lookup (ctx
->variables
,
9812 (splay_tree_key
) decl
));
9813 omp_add_variable (ctx
, decl
, n
->value
);
9814 tree c2
= copy_node (c
);
9815 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
9817 if ((n
->value
& GOVD_FIRSTPRIVATE
) == 0)
9819 c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9820 OMP_CLAUSE_FIRSTPRIVATE
);
9821 OMP_CLAUSE_DECL (c2
) = decl
;
9822 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
9826 while ((c
= *list_p
) != NULL
)
9829 bool remove
= false;
9831 switch (OMP_CLAUSE_CODE (c
))
9833 case OMP_CLAUSE_FIRSTPRIVATE
:
9834 if ((ctx
->region_type
& ORT_TARGET
)
9835 && (ctx
->region_type
& ORT_ACC
) == 0
9836 && TYPE_ATOMIC (strip_array_types
9837 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
9839 error_at (OMP_CLAUSE_LOCATION (c
),
9840 "%<_Atomic%> %qD in %<firstprivate%> clause on "
9841 "%<target%> construct", OMP_CLAUSE_DECL (c
));
9846 case OMP_CLAUSE_PRIVATE
:
9847 case OMP_CLAUSE_SHARED
:
9848 case OMP_CLAUSE_LINEAR
:
9849 decl
= OMP_CLAUSE_DECL (c
);
9850 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9851 remove
= !(n
->value
& GOVD_SEEN
);
9852 if ((n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0
9853 && code
== OMP_PARALLEL
9854 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
9858 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
9859 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
9860 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
9862 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
9863 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
9865 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
9866 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
9868 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
9869 && (n
->value
& GOVD_WRITTEN
) == 0
9871 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9872 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
9873 else if (DECL_P (decl
)
9874 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
9875 && (n
->value
& GOVD_WRITTEN
) != 0)
9876 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
9877 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
9878 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9879 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
9883 case OMP_CLAUSE_LASTPRIVATE
:
9884 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
9885 accurately reflect the presence of a FIRSTPRIVATE clause. */
9886 decl
= OMP_CLAUSE_DECL (c
);
9887 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9888 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
9889 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
9890 if (code
== OMP_DISTRIBUTE
9891 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
9894 error_at (OMP_CLAUSE_LOCATION (c
),
9895 "same variable used in %<firstprivate%> and "
9896 "%<lastprivate%> clauses on %<distribute%> "
9900 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
9902 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
9903 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
9904 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) && code
== OMP_PARALLEL
)
9908 case OMP_CLAUSE_ALIGNED
:
9909 decl
= OMP_CLAUSE_DECL (c
);
9910 if (!is_global_var (decl
))
9912 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9913 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
9914 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
9916 struct gimplify_omp_ctx
*octx
;
9918 && (n
->value
& (GOVD_DATA_SHARE_CLASS
9919 & ~GOVD_FIRSTPRIVATE
)))
9922 for (octx
= ctx
->outer_context
; octx
;
9923 octx
= octx
->outer_context
)
9925 n
= splay_tree_lookup (octx
->variables
,
9926 (splay_tree_key
) decl
);
9929 if (n
->value
& GOVD_LOCAL
)
9931 /* We have to avoid assigning a shared variable
9932 to itself when trying to add
9933 __builtin_assume_aligned. */
9934 if (n
->value
& GOVD_SHARED
)
9942 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
9944 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9945 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
9950 case OMP_CLAUSE_NONTEMPORAL
:
9951 decl
= OMP_CLAUSE_DECL (c
);
9952 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
9953 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
9956 case OMP_CLAUSE_MAP
:
9957 if (code
== OMP_TARGET_EXIT_DATA
9958 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
9963 decl
= OMP_CLAUSE_DECL (c
);
9964 /* Data clauses associated with acc parallel reductions must be
9965 compatible with present_or_copy. Warn and adjust the clause
9966 if that is not the case. */
9967 if (ctx
->region_type
== ORT_ACC_PARALLEL
)
9969 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
9973 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
9975 if (n
&& (n
->value
& GOVD_REDUCTION
))
9977 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
9979 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
9980 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
9981 && kind
!= GOMP_MAP_FORCE_PRESENT
9982 && kind
!= GOMP_MAP_POINTER
)
9984 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
9985 "incompatible data clause with reduction "
9986 "on %qE; promoting to %<present_or_copy%>",
9988 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
9994 if ((ctx
->region_type
& ORT_TARGET
) != 0
9995 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
9997 if (TREE_CODE (decl
) == INDIRECT_REF
9998 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
9999 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
10000 == REFERENCE_TYPE
))
10001 decl
= TREE_OPERAND (decl
, 0);
10002 if (TREE_CODE (decl
) == COMPONENT_REF
)
10004 while (TREE_CODE (decl
) == COMPONENT_REF
)
10005 decl
= TREE_OPERAND (decl
, 0);
10008 n
= splay_tree_lookup (ctx
->variables
,
10009 (splay_tree_key
) decl
);
10010 if (!(n
->value
& GOVD_SEEN
))
10017 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10018 if ((ctx
->region_type
& ORT_TARGET
) != 0
10019 && !(n
->value
& GOVD_SEEN
)
10020 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
10021 && (!is_global_var (decl
)
10022 || !lookup_attribute ("omp declare target link",
10023 DECL_ATTRIBUTES (decl
))))
10026 /* For struct element mapping, if struct is never referenced
10027 in target block and none of the mapping has always modifier,
10028 remove all the struct element mappings, which immediately
10029 follow the GOMP_MAP_STRUCT map clause. */
10030 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
10032 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
10034 OMP_CLAUSE_CHAIN (c
)
10035 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
10038 else if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
10039 && code
== OMP_TARGET_EXIT_DATA
)
10041 else if (DECL_SIZE (decl
)
10042 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
10043 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
10044 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
10045 && (OMP_CLAUSE_MAP_KIND (c
)
10046 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10048 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10049 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10051 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
10053 tree decl2
= DECL_VALUE_EXPR (decl
);
10054 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10055 decl2
= TREE_OPERAND (decl2
, 0);
10056 gcc_assert (DECL_P (decl2
));
10057 tree mem
= build_simple_mem_ref (decl2
);
10058 OMP_CLAUSE_DECL (c
) = mem
;
10059 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10060 if (ctx
->outer_context
)
10062 omp_notice_variable (ctx
->outer_context
, decl2
, true);
10063 omp_notice_variable (ctx
->outer_context
,
10064 OMP_CLAUSE_SIZE (c
), true);
10066 if (((ctx
->region_type
& ORT_TARGET
) != 0
10067 || !ctx
->target_firstprivatize_array_bases
)
10068 && ((n
->value
& GOVD_SEEN
) == 0
10069 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
10071 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10073 OMP_CLAUSE_DECL (nc
) = decl
;
10074 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10075 if (ctx
->target_firstprivatize_array_bases
)
10076 OMP_CLAUSE_SET_MAP_KIND (nc
,
10077 GOMP_MAP_FIRSTPRIVATE_POINTER
);
10079 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
10080 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
10081 OMP_CLAUSE_CHAIN (c
) = nc
;
10087 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
10088 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
10089 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
10090 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
10095 case OMP_CLAUSE_TO
:
10096 case OMP_CLAUSE_FROM
:
10097 case OMP_CLAUSE__CACHE_
:
10098 decl
= OMP_CLAUSE_DECL (c
);
10099 if (!DECL_P (decl
))
10101 if (DECL_SIZE (decl
)
10102 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
10104 tree decl2
= DECL_VALUE_EXPR (decl
);
10105 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10106 decl2
= TREE_OPERAND (decl2
, 0);
10107 gcc_assert (DECL_P (decl2
));
10108 tree mem
= build_simple_mem_ref (decl2
);
10109 OMP_CLAUSE_DECL (c
) = mem
;
10110 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10111 if (ctx
->outer_context
)
10113 omp_notice_variable (ctx
->outer_context
, decl2
, true);
10114 omp_notice_variable (ctx
->outer_context
,
10115 OMP_CLAUSE_SIZE (c
), true);
10118 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
10119 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
10122 case OMP_CLAUSE_REDUCTION
:
10123 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
10125 decl
= OMP_CLAUSE_DECL (c
);
10126 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10127 if ((n
->value
& GOVD_REDUCTION_INSCAN
) == 0)
10130 error_at (OMP_CLAUSE_LOCATION (c
),
10131 "%qD specified in %<inscan%> %<reduction%> clause "
10132 "but not in %<scan%> directive clause", decl
);
10135 has_inscan_reductions
= true;
10138 case OMP_CLAUSE_IN_REDUCTION
:
10139 case OMP_CLAUSE_TASK_REDUCTION
:
10140 decl
= OMP_CLAUSE_DECL (c
);
10141 /* OpenACC reductions need a present_or_copy data clause.
10142 Add one if necessary. Emit error when the reduction is private. */
10143 if (ctx
->region_type
== ORT_ACC_PARALLEL
)
10145 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10146 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
10149 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
10150 "reduction on %qE", DECL_NAME (decl
));
10152 else if ((n
->value
& GOVD_MAP
) == 0)
10154 tree next
= OMP_CLAUSE_CHAIN (c
);
10155 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
10156 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
10157 OMP_CLAUSE_DECL (nc
) = decl
;
10158 OMP_CLAUSE_CHAIN (c
) = nc
;
10159 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
);
10162 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
10163 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
10165 nc
= OMP_CLAUSE_CHAIN (nc
);
10167 OMP_CLAUSE_CHAIN (nc
) = next
;
10168 n
->value
|= GOVD_MAP
;
10172 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10173 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10175 case OMP_CLAUSE_COPYIN
:
10176 case OMP_CLAUSE_COPYPRIVATE
:
10177 case OMP_CLAUSE_IF
:
10178 case OMP_CLAUSE_NUM_THREADS
:
10179 case OMP_CLAUSE_NUM_TEAMS
:
10180 case OMP_CLAUSE_THREAD_LIMIT
:
10181 case OMP_CLAUSE_DIST_SCHEDULE
:
10182 case OMP_CLAUSE_DEVICE
:
10183 case OMP_CLAUSE_SCHEDULE
:
10184 case OMP_CLAUSE_NOWAIT
:
10185 case OMP_CLAUSE_ORDERED
:
10186 case OMP_CLAUSE_DEFAULT
:
10187 case OMP_CLAUSE_UNTIED
:
10188 case OMP_CLAUSE_COLLAPSE
:
10189 case OMP_CLAUSE_FINAL
:
10190 case OMP_CLAUSE_MERGEABLE
:
10191 case OMP_CLAUSE_PROC_BIND
:
10192 case OMP_CLAUSE_SAFELEN
:
10193 case OMP_CLAUSE_SIMDLEN
:
10194 case OMP_CLAUSE_DEPEND
:
10195 case OMP_CLAUSE_PRIORITY
:
10196 case OMP_CLAUSE_GRAINSIZE
:
10197 case OMP_CLAUSE_NUM_TASKS
:
10198 case OMP_CLAUSE_NOGROUP
:
10199 case OMP_CLAUSE_THREADS
:
10200 case OMP_CLAUSE_SIMD
:
10201 case OMP_CLAUSE_HINT
:
10202 case OMP_CLAUSE_DEFAULTMAP
:
10203 case OMP_CLAUSE_USE_DEVICE_PTR
:
10204 case OMP_CLAUSE_IS_DEVICE_PTR
:
10205 case OMP_CLAUSE_ASYNC
:
10206 case OMP_CLAUSE_WAIT
:
10207 case OMP_CLAUSE_INDEPENDENT
:
10208 case OMP_CLAUSE_NUM_GANGS
:
10209 case OMP_CLAUSE_NUM_WORKERS
:
10210 case OMP_CLAUSE_VECTOR_LENGTH
:
10211 case OMP_CLAUSE_GANG
:
10212 case OMP_CLAUSE_WORKER
:
10213 case OMP_CLAUSE_VECTOR
:
10214 case OMP_CLAUSE_AUTO
:
10215 case OMP_CLAUSE_SEQ
:
10216 case OMP_CLAUSE_TILE
:
10217 case OMP_CLAUSE_IF_PRESENT
:
10218 case OMP_CLAUSE_FINALIZE
:
10219 case OMP_CLAUSE_INCLUSIVE
:
10220 case OMP_CLAUSE_EXCLUSIVE
:
10224 gcc_unreachable ();
10228 *list_p
= OMP_CLAUSE_CHAIN (c
);
10230 list_p
= &OMP_CLAUSE_CHAIN (c
);
10233 /* Add in any implicit data sharing. */
10234 struct gimplify_adjust_omp_clauses_data data
;
10235 data
.list_p
= list_p
;
10236 data
.pre_p
= pre_p
;
10237 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
10239 if (has_inscan_reductions
)
10240 for (c
= *orig_list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10241 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10242 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10244 error_at (OMP_CLAUSE_LOCATION (c
),
10245 "%<inscan%> %<reduction%> clause used together with "
10246 "%<linear%> clause for a variable other than loop "
10251 gimplify_omp_ctxp
= ctx
->outer_context
;
10252 delete_omp_context (ctx
);
10255 /* Gimplify OACC_CACHE. */
10258 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
10260 tree expr
= *expr_p
;
10262 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
10264 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
10267 /* TODO: Do something sensible with this information. */
10269 *expr_p
= NULL_TREE
;
10272 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
10273 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
10274 kind. The entry kind will replace the one in CLAUSE, while the exit
10275 kind will be used in a new omp_clause and returned to the caller. */
10278 gimplify_oacc_declare_1 (tree clause
)
10280 HOST_WIDE_INT kind
, new_op
;
10284 kind
= OMP_CLAUSE_MAP_KIND (clause
);
10288 case GOMP_MAP_ALLOC
:
10289 new_op
= GOMP_MAP_RELEASE
;
10293 case GOMP_MAP_FROM
:
10294 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
10295 new_op
= GOMP_MAP_FROM
;
10299 case GOMP_MAP_TOFROM
:
10300 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
10301 new_op
= GOMP_MAP_FROM
;
10305 case GOMP_MAP_DEVICE_RESIDENT
:
10306 case GOMP_MAP_FORCE_DEVICEPTR
:
10307 case GOMP_MAP_FORCE_PRESENT
:
10308 case GOMP_MAP_LINK
:
10309 case GOMP_MAP_POINTER
:
10314 gcc_unreachable ();
10320 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
10321 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
10322 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
10328 /* Gimplify OACC_DECLARE. */
10331 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
10333 tree expr
= *expr_p
;
10335 tree clauses
, t
, decl
;
10337 clauses
= OACC_DECLARE_CLAUSES (expr
);
10339 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
10340 gimplify_adjust_omp_clauses (pre_p
, NULL
, &clauses
, OACC_DECLARE
);
10342 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
10344 decl
= OMP_CLAUSE_DECL (t
);
10346 if (TREE_CODE (decl
) == MEM_REF
)
10347 decl
= TREE_OPERAND (decl
, 0);
10349 if (VAR_P (decl
) && !is_oacc_declared (decl
))
10351 tree attr
= get_identifier ("oacc declare target");
10352 DECL_ATTRIBUTES (decl
) = tree_cons (attr
, NULL_TREE
,
10353 DECL_ATTRIBUTES (decl
));
10357 && !is_global_var (decl
)
10358 && DECL_CONTEXT (decl
) == current_function_decl
)
10360 tree c
= gimplify_oacc_declare_1 (t
);
10363 if (oacc_declare_returns
== NULL
)
10364 oacc_declare_returns
= new hash_map
<tree
, tree
>;
10366 oacc_declare_returns
->put (decl
, c
);
10370 if (gimplify_omp_ctxp
)
10371 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
10374 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
10377 gimplify_seq_add_stmt (pre_p
, stmt
);
10379 *expr_p
= NULL_TREE
;
10382 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
10383 gimplification of the body, as well as scanning the body for used
10384 variables. We need to do this scan now, because variable-sized
10385 decls will be decomposed during gimplification. */
10388 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
10390 tree expr
= *expr_p
;
10392 gimple_seq body
= NULL
;
10394 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
10395 OMP_PARALLEL_COMBINED (expr
)
10396 ? ORT_COMBINED_PARALLEL
10397 : ORT_PARALLEL
, OMP_PARALLEL
);
10399 push_gimplify_context ();
10401 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
10402 if (gimple_code (g
) == GIMPLE_BIND
)
10403 pop_gimplify_context (g
);
10405 pop_gimplify_context (NULL
);
10407 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
10410 g
= gimple_build_omp_parallel (body
,
10411 OMP_PARALLEL_CLAUSES (expr
),
10412 NULL_TREE
, NULL_TREE
);
10413 if (OMP_PARALLEL_COMBINED (expr
))
10414 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
10415 gimplify_seq_add_stmt (pre_p
, g
);
10416 *expr_p
= NULL_TREE
;
10419 /* Gimplify the contents of an OMP_TASK statement. This involves
10420 gimplification of the body, as well as scanning the body for used
10421 variables. We need to do this scan now, because variable-sized
10422 decls will be decomposed during gimplification. */
10425 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
10427 tree expr
= *expr_p
;
10429 gimple_seq body
= NULL
;
10431 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
10432 for (tree c
= OMP_TASK_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10433 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
10434 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET
)
10436 error_at (OMP_CLAUSE_LOCATION (c
),
10437 "%<mutexinoutset%> kind in %<depend%> clause on a "
10438 "%<taskwait%> construct");
10442 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
10443 omp_find_clause (OMP_TASK_CLAUSES (expr
),
10445 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
10447 if (OMP_TASK_BODY (expr
))
10449 push_gimplify_context ();
10451 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
10452 if (gimple_code (g
) == GIMPLE_BIND
)
10453 pop_gimplify_context (g
);
10455 pop_gimplify_context (NULL
);
10458 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
10461 g
= gimple_build_omp_task (body
,
10462 OMP_TASK_CLAUSES (expr
),
10463 NULL_TREE
, NULL_TREE
,
10464 NULL_TREE
, NULL_TREE
, NULL_TREE
);
10465 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
10466 gimple_omp_task_set_taskwait_p (g
, true);
10467 gimplify_seq_add_stmt (pre_p
, g
);
10468 *expr_p
= NULL_TREE
;
10471 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
10472 with non-NULL OMP_FOR_INIT. Also, fill in pdata array,
10473 pdata[0] non-NULL if there is anything non-trivial in between, pdata[1]
10474 is address of OMP_PARALLEL in between if any, pdata[2] is address of
10475 OMP_FOR in between if any and pdata[3] is address of the inner
10476 OMP_FOR/OMP_SIMD. */
10479 find_combined_omp_for (tree
*tp
, int *walk_subtrees
, void *data
)
10481 tree
**pdata
= (tree
**) data
;
10482 *walk_subtrees
= 0;
10483 switch (TREE_CODE (*tp
))
10486 if (OMP_FOR_INIT (*tp
) != NULL_TREE
)
10492 *walk_subtrees
= 1;
10495 if (OMP_FOR_INIT (*tp
) != NULL_TREE
)
10502 if (BIND_EXPR_VARS (*tp
)
10503 || (BIND_EXPR_BLOCK (*tp
)
10504 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp
))))
10506 *walk_subtrees
= 1;
10508 case STATEMENT_LIST
:
10509 if (!tsi_one_before_end_p (tsi_start (*tp
)))
10511 *walk_subtrees
= 1;
10513 case TRY_FINALLY_EXPR
:
10515 *walk_subtrees
= 1;
10519 *walk_subtrees
= 1;
10527 /* Gimplify the gross structure of an OMP_FOR statement. */
10529 static enum gimplify_status
10530 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
10532 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
10533 enum gimplify_status ret
= GS_ALL_DONE
;
10534 enum gimplify_status tret
;
10536 gimple_seq for_body
, for_pre_body
;
10538 bitmap has_decl_expr
= NULL
;
10539 enum omp_region_type ort
= ORT_WORKSHARE
;
10541 orig_for_stmt
= for_stmt
= *expr_p
;
10543 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
10545 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
10546 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
10547 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
10548 find_combined_omp_for
, data
, NULL
);
10549 if (inner_for_stmt
== NULL_TREE
)
10551 gcc_assert (seen_error ());
10552 *expr_p
= NULL_TREE
;
10555 if (data
[2] && OMP_FOR_PRE_BODY (*data
[2]))
10557 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data
[2]),
10558 &OMP_FOR_PRE_BODY (for_stmt
));
10559 OMP_FOR_PRE_BODY (*data
[2]) = NULL_TREE
;
10561 if (OMP_FOR_PRE_BODY (inner_for_stmt
))
10563 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt
),
10564 &OMP_FOR_PRE_BODY (for_stmt
));
10565 OMP_FOR_PRE_BODY (inner_for_stmt
) = NULL_TREE
;
10570 /* We have some statements or variable declarations in between
10571 the composite construct directives. Move them around the
10574 for (i
= 0; i
< 3; i
++)
10578 if (i
< 2 && data
[i
+ 1] == &OMP_BODY (t
))
10579 data
[i
+ 1] = data
[i
];
10580 *data
[i
] = OMP_BODY (t
);
10581 tree body
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
10582 NULL_TREE
, make_node (BLOCK
));
10583 OMP_BODY (t
) = body
;
10584 append_to_statement_list_force (inner_for_stmt
,
10585 &BIND_EXPR_BODY (body
));
10587 data
[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body
)));
10588 gcc_assert (*data
[3] == inner_for_stmt
);
10593 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
10594 if (OMP_FOR_ORIG_DECLS (inner_for_stmt
)
10595 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
10597 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
10600 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
10601 /* Class iterators aren't allowed on OMP_SIMD, so the only
10602 case we need to solve is distribute parallel for. */
10603 gcc_assert (TREE_CODE (inner_for_stmt
) == OMP_FOR
10604 && TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
10606 tree orig_decl
= TREE_PURPOSE (orig
);
10607 tree last
= TREE_VALUE (orig
);
10609 for (pc
= &OMP_FOR_CLAUSES (inner_for_stmt
);
10610 *pc
; pc
= &OMP_CLAUSE_CHAIN (*pc
))
10611 if ((OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
10612 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LASTPRIVATE
)
10613 && OMP_CLAUSE_DECL (*pc
) == orig_decl
)
10615 if (*pc
== NULL_TREE
)
10617 else if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
)
10619 /* private clause will appear only on inner_for_stmt.
10620 Change it into firstprivate, and add private clause
10622 tree c
= copy_node (*pc
);
10623 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
10624 OMP_FOR_CLAUSES (for_stmt
) = c
;
10625 OMP_CLAUSE_CODE (*pc
) = OMP_CLAUSE_FIRSTPRIVATE
;
10626 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
);
10630 /* lastprivate clause will appear on both inner_for_stmt
10631 and for_stmt. Add firstprivate clause to
10633 tree c
= build_omp_clause (OMP_CLAUSE_LOCATION (*pc
),
10634 OMP_CLAUSE_FIRSTPRIVATE
);
10635 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (*pc
);
10636 OMP_CLAUSE_CHAIN (c
) = *pc
;
10638 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
);
10640 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
10641 OMP_CLAUSE_FIRSTPRIVATE
);
10642 OMP_CLAUSE_DECL (c
) = last
;
10643 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
10644 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
10645 c
= build_omp_clause (UNKNOWN_LOCATION
,
10646 *pc
? OMP_CLAUSE_SHARED
10647 : OMP_CLAUSE_FIRSTPRIVATE
);
10648 OMP_CLAUSE_DECL (c
) = orig_decl
;
10649 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
10650 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
10652 /* Similarly, take care of C++ range for temporaries, those should
10653 be firstprivate on OMP_PARALLEL if any. */
10655 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
10656 if (OMP_FOR_ORIG_DECLS (inner_for_stmt
)
10657 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
10659 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
10663 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
10664 tree v
= TREE_CHAIN (orig
);
10665 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
10666 OMP_CLAUSE_FIRSTPRIVATE
);
10667 /* First add firstprivate clause for the __for_end artificial
10669 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 1);
10670 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
10672 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
10673 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
10674 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
10675 if (TREE_VEC_ELT (v
, 0))
10677 /* And now the same for __for_range artificial decl if it
10679 c
= build_omp_clause (UNKNOWN_LOCATION
,
10680 OMP_CLAUSE_FIRSTPRIVATE
);
10681 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 0);
10682 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
10684 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
10685 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
10686 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
10691 switch (TREE_CODE (for_stmt
))
10694 case OMP_DISTRIBUTE
:
10700 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
10701 ort
= ORT_UNTIED_TASKLOOP
;
10703 ort
= ORT_TASKLOOP
;
10709 gcc_unreachable ();
10712 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
10713 clause for the IV. */
10714 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
10716 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
10717 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
10718 decl
= TREE_OPERAND (t
, 0);
10719 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
10720 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10721 && OMP_CLAUSE_DECL (c
) == decl
)
10723 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
10728 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
10729 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
10730 TREE_CODE (for_stmt
));
10732 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
10733 gimplify_omp_ctxp
->distribute
= true;
10735 /* Handle OMP_FOR_INIT. */
10736 for_pre_body
= NULL
;
10737 if ((ort
== ORT_SIMD
10738 || (inner_for_stmt
&& TREE_CODE (inner_for_stmt
) == OMP_SIMD
))
10739 && OMP_FOR_PRE_BODY (for_stmt
))
10741 has_decl_expr
= BITMAP_ALLOC (NULL
);
10742 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
10743 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
10746 t
= OMP_FOR_PRE_BODY (for_stmt
);
10747 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
10749 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
10751 tree_stmt_iterator si
;
10752 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
10756 if (TREE_CODE (t
) == DECL_EXPR
10757 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
10758 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
10762 if (OMP_FOR_PRE_BODY (for_stmt
))
10764 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
10765 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
10768 struct gimplify_omp_ctx ctx
;
10769 memset (&ctx
, 0, sizeof (ctx
));
10770 ctx
.region_type
= ORT_NONE
;
10771 gimplify_omp_ctxp
= &ctx
;
10772 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
10773 gimplify_omp_ctxp
= NULL
;
10776 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
10778 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
10779 for_stmt
= inner_for_stmt
;
10781 /* For taskloop, need to gimplify the start, end and step before the
10782 taskloop, outside of the taskloop omp context. */
10783 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
10785 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
10787 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
10788 if (!is_gimple_constant (TREE_OPERAND (t
, 1)))
10790 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
10791 TREE_OPERAND (t
, 1)
10792 = get_initialized_tmp_var (TREE_OPERAND (t
, 1),
10793 gimple_seq_empty_p (for_pre_body
)
10794 ? pre_p
: &for_pre_body
, NULL
,
10796 /* Reference to pointer conversion is considered useless,
10797 but is significant for firstprivate clause. Force it
10799 if (TREE_CODE (type
) == POINTER_TYPE
10800 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t
, 1)))
10801 == REFERENCE_TYPE
))
10803 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
10804 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
,
10805 TREE_OPERAND (t
, 1));
10806 gimplify_and_add (m
, gimple_seq_empty_p (for_pre_body
)
10807 ? pre_p
: &for_pre_body
);
10808 TREE_OPERAND (t
, 1) = v
;
10810 tree c
= build_omp_clause (input_location
,
10811 OMP_CLAUSE_FIRSTPRIVATE
);
10812 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (t
, 1);
10813 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
10814 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
10817 /* Handle OMP_FOR_COND. */
10818 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
10819 if (!is_gimple_constant (TREE_OPERAND (t
, 1)))
10821 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
10822 TREE_OPERAND (t
, 1)
10823 = get_initialized_tmp_var (TREE_OPERAND (t
, 1),
10824 gimple_seq_empty_p (for_pre_body
)
10825 ? pre_p
: &for_pre_body
, NULL
,
10827 /* Reference to pointer conversion is considered useless,
10828 but is significant for firstprivate clause. Force it
10830 if (TREE_CODE (type
) == POINTER_TYPE
10831 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t
, 1)))
10832 == REFERENCE_TYPE
))
10834 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
10835 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
,
10836 TREE_OPERAND (t
, 1));
10837 gimplify_and_add (m
, gimple_seq_empty_p (for_pre_body
)
10838 ? pre_p
: &for_pre_body
);
10839 TREE_OPERAND (t
, 1) = v
;
10841 tree c
= build_omp_clause (input_location
,
10842 OMP_CLAUSE_FIRSTPRIVATE
);
10843 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (t
, 1);
10844 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
10845 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
10848 /* Handle OMP_FOR_INCR. */
10849 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
10850 if (TREE_CODE (t
) == MODIFY_EXPR
)
10852 decl
= TREE_OPERAND (t
, 0);
10853 t
= TREE_OPERAND (t
, 1);
10854 tree
*tp
= &TREE_OPERAND (t
, 1);
10855 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
10856 tp
= &TREE_OPERAND (t
, 0);
10858 if (!is_gimple_constant (*tp
))
10860 gimple_seq
*seq
= gimple_seq_empty_p (for_pre_body
)
10861 ? pre_p
: &for_pre_body
;
10862 *tp
= get_initialized_tmp_var (*tp
, seq
, NULL
, false);
10863 tree c
= build_omp_clause (input_location
,
10864 OMP_CLAUSE_FIRSTPRIVATE
);
10865 OMP_CLAUSE_DECL (c
) = *tp
;
10866 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
10867 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
10872 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
10876 if (orig_for_stmt
!= for_stmt
)
10877 gimplify_omp_ctxp
->combined_loop
= true;
10880 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
10881 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
10882 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
10883 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
10885 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
10886 bool is_doacross
= false;
10887 if (c
&& OMP_CLAUSE_ORDERED_EXPR (c
))
10889 is_doacross
= true;
10890 gimplify_omp_ctxp
->loop_iter_var
.create (TREE_VEC_LENGTH
10891 (OMP_FOR_INIT (for_stmt
))
10894 int collapse
= 1, tile
= 0;
10895 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
10897 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
10898 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
10900 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
10901 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
10903 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
10904 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
10905 decl
= TREE_OPERAND (t
, 0);
10906 gcc_assert (DECL_P (decl
));
10907 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
10908 || POINTER_TYPE_P (TREE_TYPE (decl
)));
10911 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
10913 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
10914 if (TREE_CODE (orig_decl
) == TREE_LIST
)
10916 orig_decl
= TREE_PURPOSE (orig_decl
);
10920 gimplify_omp_ctxp
->loop_iter_var
.quick_push (orig_decl
);
10923 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
10924 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
10927 /* Make sure the iteration variable is private. */
10928 tree c
= NULL_TREE
;
10929 tree c2
= NULL_TREE
;
10930 if (orig_for_stmt
!= for_stmt
)
10932 /* Preserve this information until we gimplify the inner simd. */
10934 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
10935 TREE_PRIVATE (t
) = 1;
10937 else if (ort
== ORT_SIMD
)
10939 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
10940 (splay_tree_key
) decl
);
10941 omp_is_private (gimplify_omp_ctxp
, decl
,
10942 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
10944 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
10946 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
10947 if (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
)
10948 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
10949 OMP_CLAUSE_LASTPRIVATE
);
10950 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
10951 OMP_CLAUSE_LASTPRIVATE
))
10952 if (OMP_CLAUSE_DECL (c3
) == decl
)
10954 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
10955 "conditional %<lastprivate%> on loop "
10956 "iterator %qD ignored", decl
);
10957 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
10958 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
10961 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
10963 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
10964 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
10965 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
10967 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
10968 || TREE_PRIVATE (t
))
10970 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
10971 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
10973 struct gimplify_omp_ctx
*outer
10974 = gimplify_omp_ctxp
->outer_context
;
10975 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
10977 if (outer
->region_type
== ORT_WORKSHARE
10978 && outer
->combined_loop
)
10980 n
= splay_tree_lookup (outer
->variables
,
10981 (splay_tree_key
)decl
);
10982 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
10984 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
10985 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
10989 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
10991 && octx
->region_type
== ORT_COMBINED_PARALLEL
10992 && octx
->outer_context
10993 && (octx
->outer_context
->region_type
10995 && octx
->outer_context
->combined_loop
)
10997 octx
= octx
->outer_context
;
10998 n
= splay_tree_lookup (octx
->variables
,
10999 (splay_tree_key
)decl
);
11000 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11002 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11003 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11010 OMP_CLAUSE_DECL (c
) = decl
;
11011 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11012 OMP_FOR_CLAUSES (for_stmt
) = c
;
11013 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
11014 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11016 if (outer
->region_type
== ORT_WORKSHARE
11017 && outer
->combined_loop
)
11019 if (outer
->outer_context
11020 && (outer
->outer_context
->region_type
11021 == ORT_COMBINED_PARALLEL
))
11022 outer
= outer
->outer_context
;
11023 else if (omp_check_private (outer
, decl
, false))
11026 else if (((outer
->region_type
& ORT_TASKLOOP
)
11028 && outer
->combined_loop
11029 && !omp_check_private (gimplify_omp_ctxp
,
11032 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
11034 omp_notice_variable (outer
, decl
, true);
11039 n
= splay_tree_lookup (outer
->variables
,
11040 (splay_tree_key
)decl
);
11041 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11043 omp_add_variable (outer
, decl
,
11044 GOVD_LASTPRIVATE
| GOVD_SEEN
);
11045 if (outer
->region_type
== ORT_COMBINED_PARALLEL
11046 && outer
->outer_context
11047 && (outer
->outer_context
->region_type
11049 && outer
->outer_context
->combined_loop
)
11051 outer
= outer
->outer_context
;
11052 n
= splay_tree_lookup (outer
->variables
,
11053 (splay_tree_key
)decl
);
11054 if (omp_check_private (outer
, decl
, false))
11057 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
11059 omp_add_variable (outer
, decl
,
11065 if (outer
&& outer
->outer_context
11066 && ((outer
->outer_context
->region_type
11067 & ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
11068 || (((outer
->region_type
& ORT_TASKLOOP
)
11070 && (outer
->outer_context
->region_type
11071 == ORT_COMBINED_PARALLEL
))))
11073 outer
= outer
->outer_context
;
11074 n
= splay_tree_lookup (outer
->variables
,
11075 (splay_tree_key
)decl
);
11077 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11078 omp_add_variable (outer
, decl
,
11079 GOVD_SHARED
| GOVD_SEEN
);
11083 if (outer
&& outer
->outer_context
)
11084 omp_notice_variable (outer
->outer_context
, decl
,
11094 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
11095 if (TREE_PRIVATE (t
))
11096 lastprivate
= false;
11097 struct gimplify_omp_ctx
*outer
11098 = gimplify_omp_ctxp
->outer_context
;
11099 if (outer
&& lastprivate
)
11101 if (outer
->region_type
== ORT_WORKSHARE
11102 && outer
->combined_loop
)
11104 n
= splay_tree_lookup (outer
->variables
,
11105 (splay_tree_key
)decl
);
11106 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11108 lastprivate
= false;
11111 else if (outer
->outer_context
11112 && (outer
->outer_context
->region_type
11113 == ORT_COMBINED_PARALLEL
))
11114 outer
= outer
->outer_context
;
11115 else if (omp_check_private (outer
, decl
, false))
11118 else if (((outer
->region_type
& ORT_TASKLOOP
)
11120 && outer
->combined_loop
11121 && !omp_check_private (gimplify_omp_ctxp
,
11124 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
11126 omp_notice_variable (outer
, decl
, true);
11131 n
= splay_tree_lookup (outer
->variables
,
11132 (splay_tree_key
)decl
);
11133 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11135 omp_add_variable (outer
, decl
,
11136 GOVD_LASTPRIVATE
| GOVD_SEEN
);
11137 if (outer
->region_type
== ORT_COMBINED_PARALLEL
11138 && outer
->outer_context
11139 && (outer
->outer_context
->region_type
11141 && outer
->outer_context
->combined_loop
)
11143 outer
= outer
->outer_context
;
11144 n
= splay_tree_lookup (outer
->variables
,
11145 (splay_tree_key
)decl
);
11146 if (omp_check_private (outer
, decl
, false))
11149 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
11151 omp_add_variable (outer
, decl
,
11157 if (outer
&& outer
->outer_context
11158 && ((outer
->outer_context
->region_type
11159 & ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
11160 || (((outer
->region_type
& ORT_TASKLOOP
)
11162 && (outer
->outer_context
->region_type
11163 == ORT_COMBINED_PARALLEL
))))
11165 outer
= outer
->outer_context
;
11166 n
= splay_tree_lookup (outer
->variables
,
11167 (splay_tree_key
)decl
);
11169 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11170 omp_add_variable (outer
, decl
,
11171 GOVD_SHARED
| GOVD_SEEN
);
11175 if (outer
&& outer
->outer_context
)
11176 omp_notice_variable (outer
->outer_context
, decl
,
11182 c
= build_omp_clause (input_location
,
11183 lastprivate
? OMP_CLAUSE_LASTPRIVATE
11184 : OMP_CLAUSE_PRIVATE
);
11185 OMP_CLAUSE_DECL (c
) = decl
;
11186 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11187 OMP_FOR_CLAUSES (for_stmt
) = c
;
11188 omp_add_variable (gimplify_omp_ctxp
, decl
,
11189 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
11190 | GOVD_EXPLICIT
| GOVD_SEEN
);
11194 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
11196 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
11197 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
11198 (splay_tree_key
) decl
);
11199 if (n
&& (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
))
11200 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11201 OMP_CLAUSE_LASTPRIVATE
);
11202 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
11203 OMP_CLAUSE_LASTPRIVATE
))
11204 if (OMP_CLAUSE_DECL (c3
) == decl
)
11206 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
11207 "conditional %<lastprivate%> on loop "
11208 "iterator %qD ignored", decl
);
11209 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
11210 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
11214 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
11216 /* If DECL is not a gimple register, create a temporary variable to act
11217 as an iteration counter. This is valid, since DECL cannot be
11218 modified in the body of the loop. Similarly for any iteration vars
11219 in simd with collapse > 1 where the iterator vars must be
11221 if (orig_for_stmt
!= for_stmt
)
11223 else if (!is_gimple_reg (decl
)
11224 || (ort
== ORT_SIMD
11225 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1))
11227 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11228 /* Make sure omp_add_variable is not called on it prematurely.
11229 We call it ourselves a few lines later. */
11230 gimplify_omp_ctxp
= NULL
;
11231 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
11232 gimplify_omp_ctxp
= ctx
;
11233 TREE_OPERAND (t
, 0) = var
;
11235 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
11237 if (ort
== ORT_SIMD
11238 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
11240 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
11241 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
11242 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
11243 OMP_CLAUSE_DECL (c2
) = var
;
11244 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
11245 OMP_FOR_CLAUSES (for_stmt
) = c2
;
11246 omp_add_variable (gimplify_omp_ctxp
, var
,
11247 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
11248 if (c
== NULL_TREE
)
11255 omp_add_variable (gimplify_omp_ctxp
, var
,
11256 GOVD_PRIVATE
| GOVD_SEEN
);
11261 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
11262 is_gimple_val
, fb_rvalue
, false);
11263 ret
= MIN (ret
, tret
);
11264 if (ret
== GS_ERROR
)
11267 /* Handle OMP_FOR_COND. */
11268 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
11269 gcc_assert (COMPARISON_CLASS_P (t
));
11270 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
11272 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
11273 is_gimple_val
, fb_rvalue
, false);
11274 ret
= MIN (ret
, tret
);
11276 /* Handle OMP_FOR_INCR. */
11277 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11278 switch (TREE_CODE (t
))
11280 case PREINCREMENT_EXPR
:
11281 case POSTINCREMENT_EXPR
:
11283 tree decl
= TREE_OPERAND (t
, 0);
11284 /* c_omp_for_incr_canonicalize_ptr() should have been
11285 called to massage things appropriately. */
11286 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
11288 if (orig_for_stmt
!= for_stmt
)
11290 t
= build_int_cst (TREE_TYPE (decl
), 1);
11292 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
11293 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
11294 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
11295 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
11299 case PREDECREMENT_EXPR
:
11300 case POSTDECREMENT_EXPR
:
11301 /* c_omp_for_incr_canonicalize_ptr() should have been
11302 called to massage things appropriately. */
11303 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
11304 if (orig_for_stmt
!= for_stmt
)
11306 t
= build_int_cst (TREE_TYPE (decl
), -1);
11308 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
11309 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
11310 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
11311 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
11315 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
11316 TREE_OPERAND (t
, 0) = var
;
11318 t
= TREE_OPERAND (t
, 1);
11319 switch (TREE_CODE (t
))
11322 if (TREE_OPERAND (t
, 1) == decl
)
11324 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
11325 TREE_OPERAND (t
, 0) = var
;
11331 case POINTER_PLUS_EXPR
:
11332 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
11333 TREE_OPERAND (t
, 0) = var
;
11336 gcc_unreachable ();
11339 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
11340 is_gimple_val
, fb_rvalue
, false);
11341 ret
= MIN (ret
, tret
);
11344 tree step
= TREE_OPERAND (t
, 1);
11345 tree stept
= TREE_TYPE (decl
);
11346 if (POINTER_TYPE_P (stept
))
11348 step
= fold_convert (stept
, step
);
11349 if (TREE_CODE (t
) == MINUS_EXPR
)
11350 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
11351 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
11352 if (step
!= TREE_OPERAND (t
, 1))
11354 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
11355 &for_pre_body
, NULL
,
11356 is_gimple_val
, fb_rvalue
, false);
11357 ret
= MIN (ret
, tret
);
11363 gcc_unreachable ();
11369 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
11372 if ((var
!= decl
|| collapse
> 1 || tile
) && orig_for_stmt
== for_stmt
)
11374 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11375 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
11376 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
11377 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11378 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
11379 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
11380 && OMP_CLAUSE_DECL (c
) == decl
)
11382 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
11386 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11387 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
11388 gcc_assert (TREE_OPERAND (t
, 0) == var
);
11389 t
= TREE_OPERAND (t
, 1);
11390 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
11391 || TREE_CODE (t
) == MINUS_EXPR
11392 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
11393 gcc_assert (TREE_OPERAND (t
, 0) == var
);
11394 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
11395 is_doacross
? var
: decl
,
11396 TREE_OPERAND (t
, 1));
11399 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
11400 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
11402 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
11403 push_gimplify_context ();
11404 gimplify_assign (decl
, t
, seq
);
11405 gimple
*bind
= NULL
;
11406 if (gimplify_ctxp
->temps
)
11408 bind
= gimple_build_bind (NULL_TREE
, *seq
, NULL_TREE
);
11410 gimplify_seq_add_stmt (seq
, bind
);
11412 pop_gimplify_context (bind
);
11417 BITMAP_FREE (has_decl_expr
);
11419 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
11421 push_gimplify_context ();
11422 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
11424 OMP_FOR_BODY (orig_for_stmt
)
11425 = build3 (BIND_EXPR
, void_type_node
, NULL
,
11426 OMP_FOR_BODY (orig_for_stmt
), NULL
);
11427 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
11431 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
11434 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
11436 if (gimple_code (g
) == GIMPLE_BIND
)
11437 pop_gimplify_context (g
);
11439 pop_gimplify_context (NULL
);
11442 if (orig_for_stmt
!= for_stmt
)
11443 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11445 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11446 decl
= TREE_OPERAND (t
, 0);
11447 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11448 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
11449 gimplify_omp_ctxp
= ctx
->outer_context
;
11450 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
11451 gimplify_omp_ctxp
= ctx
;
11452 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
11453 TREE_OPERAND (t
, 0) = var
;
11454 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11455 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
11456 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
11459 gimplify_adjust_omp_clauses (pre_p
, for_body
,
11460 &OMP_FOR_CLAUSES (orig_for_stmt
),
11461 TREE_CODE (orig_for_stmt
));
11464 switch (TREE_CODE (orig_for_stmt
))
11466 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
11467 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
11468 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
11469 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
11470 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
11472 gcc_unreachable ();
11474 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
11475 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
11477 if (orig_for_stmt
!= for_stmt
)
11478 gimple_omp_for_set_combined_p (gfor
, true);
11479 if (gimplify_omp_ctxp
11480 && (gimplify_omp_ctxp
->combined_loop
11481 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
11482 && gimplify_omp_ctxp
->outer_context
11483 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
11485 gimple_omp_for_set_combined_into_p (gfor
, true);
11486 if (gimplify_omp_ctxp
->combined_loop
)
11487 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
11489 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
11492 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11494 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11495 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
11496 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
11497 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
11498 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
11499 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
11500 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11501 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
11504 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
11505 constructs with GIMPLE_OMP_TASK sandwiched in between them.
11506 The outer taskloop stands for computing the number of iterations,
11507 counts for collapsed loops and holding taskloop specific clauses.
11508 The task construct stands for the effect of data sharing on the
11509 explicit task it creates and the inner taskloop stands for expansion
11510 of the static loop inside of the explicit task construct. */
11511 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
11513 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
11514 tree task_clauses
= NULL_TREE
;
11515 tree c
= *gfor_clauses_ptr
;
11516 tree
*gtask_clauses_ptr
= &task_clauses
;
11517 tree outer_for_clauses
= NULL_TREE
;
11518 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
11519 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
11520 switch (OMP_CLAUSE_CODE (c
))
11522 /* These clauses are allowed on task, move them there. */
11523 case OMP_CLAUSE_SHARED
:
11524 case OMP_CLAUSE_FIRSTPRIVATE
:
11525 case OMP_CLAUSE_DEFAULT
:
11526 case OMP_CLAUSE_IF
:
11527 case OMP_CLAUSE_UNTIED
:
11528 case OMP_CLAUSE_FINAL
:
11529 case OMP_CLAUSE_MERGEABLE
:
11530 case OMP_CLAUSE_PRIORITY
:
11531 case OMP_CLAUSE_REDUCTION
:
11532 case OMP_CLAUSE_IN_REDUCTION
:
11533 *gtask_clauses_ptr
= c
;
11534 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
11536 case OMP_CLAUSE_PRIVATE
:
11537 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
11539 /* We want private on outer for and firstprivate
11542 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
11543 OMP_CLAUSE_FIRSTPRIVATE
);
11544 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
11545 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
);
11546 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
11547 *gforo_clauses_ptr
= c
;
11548 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
11552 *gtask_clauses_ptr
= c
;
11553 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
11556 /* These clauses go into outer taskloop clauses. */
11557 case OMP_CLAUSE_GRAINSIZE
:
11558 case OMP_CLAUSE_NUM_TASKS
:
11559 case OMP_CLAUSE_NOGROUP
:
11560 *gforo_clauses_ptr
= c
;
11561 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
11563 /* Taskloop clause we duplicate on both taskloops. */
11564 case OMP_CLAUSE_COLLAPSE
:
11565 *gfor_clauses_ptr
= c
;
11566 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
11567 *gforo_clauses_ptr
= copy_node (c
);
11568 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
11570 /* For lastprivate, keep the clause on inner taskloop, and add
11571 a shared clause on task. If the same decl is also firstprivate,
11572 add also firstprivate clause on the inner taskloop. */
11573 case OMP_CLAUSE_LASTPRIVATE
:
11574 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c
))
11576 /* For taskloop C++ lastprivate IVs, we want:
11577 1) private on outer taskloop
11578 2) firstprivate and shared on task
11579 3) lastprivate on inner taskloop */
11581 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
11582 OMP_CLAUSE_FIRSTPRIVATE
);
11583 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
11584 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
);
11585 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
11586 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
11587 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
11588 OMP_CLAUSE_PRIVATE
);
11589 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
11590 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
11591 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
11592 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
11594 *gfor_clauses_ptr
= c
;
11595 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
11597 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
11598 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
11599 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
11600 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
11602 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
11605 gcc_unreachable ();
11607 *gfor_clauses_ptr
= NULL_TREE
;
11608 *gtask_clauses_ptr
= NULL_TREE
;
11609 *gforo_clauses_ptr
= NULL_TREE
;
11610 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
11611 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
11612 NULL_TREE
, NULL_TREE
, NULL_TREE
);
11613 gimple_omp_task_set_taskloop_p (g
, true);
11614 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
11616 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
11617 gimple_omp_for_collapse (gfor
),
11618 gimple_omp_for_pre_body (gfor
));
11619 gimple_omp_for_set_pre_body (gfor
, NULL
);
11620 gimple_omp_for_set_combined_p (gforo
, true);
11621 gimple_omp_for_set_combined_into_p (gfor
, true);
11622 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
11624 tree type
= TREE_TYPE (gimple_omp_for_index (gfor
, i
));
11625 tree v
= create_tmp_var (type
);
11626 gimple_omp_for_set_index (gforo
, i
, v
);
11627 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
11628 gimple_omp_for_set_initial (gforo
, i
, t
);
11629 gimple_omp_for_set_cond (gforo
, i
,
11630 gimple_omp_for_cond (gfor
, i
));
11631 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
11632 gimple_omp_for_set_final (gforo
, i
, t
);
11633 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
11634 gcc_assert (TREE_OPERAND (t
, 0) == gimple_omp_for_index (gfor
, i
));
11635 TREE_OPERAND (t
, 0) = v
;
11636 gimple_omp_for_set_incr (gforo
, i
, t
);
11637 t
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
11638 OMP_CLAUSE_DECL (t
) = v
;
11639 OMP_CLAUSE_CHAIN (t
) = gimple_omp_for_clauses (gforo
);
11640 gimple_omp_for_set_clauses (gforo
, t
);
11642 gimplify_seq_add_stmt (pre_p
, gforo
);
11645 gimplify_seq_add_stmt (pre_p
, gfor
);
11647 if (TREE_CODE (orig_for_stmt
) == OMP_FOR
)
11649 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
11650 unsigned lastprivate_conditional
= 0;
11652 && (ctx
->region_type
== ORT_TARGET_DATA
11653 || ctx
->region_type
== ORT_TASKGROUP
))
11654 ctx
= ctx
->outer_context
;
11655 if (ctx
&& (ctx
->region_type
& ORT_PARALLEL
) != 0)
11656 for (tree c
= gimple_omp_for_clauses (gfor
);
11657 c
; c
= OMP_CLAUSE_CHAIN (c
))
11658 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
11659 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
11660 ++lastprivate_conditional
;
11661 if (lastprivate_conditional
)
11663 struct omp_for_data fd
;
11664 omp_extract_for_data (gfor
, &fd
, NULL
);
11665 tree type
= build_array_type_nelts (unsigned_type_for (fd
.iter_type
),
11666 lastprivate_conditional
);
11667 tree var
= create_tmp_var_raw (type
);
11668 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
11669 OMP_CLAUSE_DECL (c
) = var
;
11670 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
11671 gimple_omp_for_set_clauses (gfor
, c
);
11672 omp_add_variable (ctx
, var
, GOVD_CONDTEMP
| GOVD_SEEN
);
11675 else if (TREE_CODE (orig_for_stmt
) == OMP_SIMD
)
11677 unsigned lastprivate_conditional
= 0;
11678 for (tree c
= gimple_omp_for_clauses (gfor
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11679 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
11680 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
11681 ++lastprivate_conditional
;
11682 if (lastprivate_conditional
)
11684 struct omp_for_data fd
;
11685 omp_extract_for_data (gfor
, &fd
, NULL
);
11686 tree type
= unsigned_type_for (fd
.iter_type
);
11687 while (lastprivate_conditional
--)
11689 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
11690 OMP_CLAUSE__CONDTEMP_
);
11691 OMP_CLAUSE_DECL (c
) = create_tmp_var (type
);
11692 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
11693 gimple_omp_for_set_clauses (gfor
, c
);
11698 if (ret
!= GS_ALL_DONE
)
11700 *expr_p
= NULL_TREE
;
11701 return GS_ALL_DONE
;
/* NOTE(review): this region is a line-mangled extraction -- identifiers were
   split onto their own lines and structural lines (braces, case labels,
   returns) were dropped, as the gaps in the embedded original line numbers
   show.  Code below is kept byte-identical; only comments are added.  */
11704 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
11705 of OMP_TARGET's body. */
11708 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
/* walk_tree callback: by default do not recurse into this node's
   subtrees.  */
11710 *walk_subtrees
= 0;
11711 switch (TREE_CODE (*tp
))
/* Statement lists are transparent containers -- keep walking their
   children.  (The dropped lines 11712-11715 presumably returned *tp on an
   OMP_TEAMS hit -- TODO confirm against the full file.)  */
11716 case STATEMENT_LIST
:
11717 *walk_subtrees
= 1;
/* NOTE(review): line-mangled extraction (tokens split across lines,
   brace/label/return lines dropped).  Code kept byte-identical; comments
   only.  */
11725 /* Helper function of optimize_target_teams, determine if the expression
11726 can be computed safely before the target construct on the host. */
11729 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
11735 *walk_subtrees
= 0;
11738 switch (TREE_CODE (*tp
))
11743 *walk_subtrees
= 0;
/* Reject decls whose value cannot be safely evaluated on the host before
   entering the target region: error operands, non-integral types, decls
   with value-exprs, thread-locals, side effects, volatiles.  */
11744 if (error_operand_p (*tp
)
11745 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
11746 || DECL_HAS_VALUE_EXPR_P (*tp
)
11747 || DECL_THREAD_LOCAL_P (*tp
)
11748 || TREE_SIDE_EFFECTS (*tp
)
11749 || TREE_THIS_VOLATILE (*tp
))
/* Globals marked "omp declare target" (or the link variant) live on the
   device, so their host value is not usable here.  */
11751 if (is_global_var (*tp
)
11752 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
11753 || lookup_attribute ("omp declare target link",
11754 DECL_ATTRIBUTES (*tp
))))
11757 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
11758 && !is_global_var (*tp
)
11759 && decl_function_context (*tp
) == current_function_decl
)
/* Consult the gimplify context's per-variable data-sharing flags to see
   how the decl will be treated on the target region.  */
11761 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
11762 (splay_tree_key
) *tp
);
/* Implicit firstprivate for scalars via defaultmap -- the host value is
   then safe to use (hedged: conditional bodies were dropped by the
   extractor, so exact accepted/rejected outcomes are not visible here).  */
11765 if (gimplify_omp_ctxp
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
11769 else if (n
->value
& GOVD_LOCAL
)
11771 else if (n
->value
& GOVD_FIRSTPRIVATE
)
11773 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
11774 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
11778 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
/* TARGET_EXPR: only acceptable when it is a bare temporary slot; recurse
   on the slot itself.  */
11782 if (TARGET_EXPR_INITIAL (*tp
)
11783 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
11785 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
11786 walk_subtrees
, NULL
);
11787 /* Allow some reasonable subset of integral arithmetics. */
11791 case TRUNC_DIV_EXPR
:
11792 case CEIL_DIV_EXPR
:
11793 case FLOOR_DIV_EXPR
:
11794 case ROUND_DIV_EXPR
:
11795 case TRUNC_MOD_EXPR
:
11796 case CEIL_MOD_EXPR
:
11797 case FLOOR_MOD_EXPR
:
11798 case ROUND_MOD_EXPR
:
11800 case EXACT_DIV_EXPR
:
11811 case NON_LVALUE_EXPR
:
11813 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
11816 /* And disallow anything else, except for comparisons. */
11818 if (COMPARISON_CLASS_P (*tp
))
/* NOTE(review): line-mangled extraction (tokens split across lines,
   brace/label/return lines dropped).  Code kept byte-identical; comments
   only.  */
11824 /* Try to determine if the num_teams and/or thread_limit expressions
11825 can have their values determined already before entering the
11827 INTEGER_CSTs trivially are,
11828 integral decls that are firstprivate (explicitly or implicitly)
11829 or explicitly map(always, to:) or map(always, tofrom:) on the target
11830 region too, and expressions involving simple arithmetics on those
11831 too, function calls are not ok, dereferencing something neither etc.
11832 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
11833 EXPR based on what we find:
11834 0 stands for clause not specified at all, use implementation default
11835 -1 stands for value that can't be determined easily before entering
11836 the target construct.
11837 If teams construct is not present at all, use 1 for num_teams
11838 and 0 for thread_limit (only one team is involved, and the thread
11839 limit is implementation defined. */
11842 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
/* Locate a teams construct inside the target body, if any.  */
11844 tree body
= OMP_BODY (target
);
11845 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
/* 0 == "clause not specified"; see the function comment above.  */
11846 tree num_teams
= integer_zero_node
;
11847 tree thread_limit
= integer_zero_node
;
11848 location_t num_teams_loc
= EXPR_LOCATION (target
);
11849 location_t thread_limit_loc
= EXPR_LOCATION (target
);
/* Remember the target's gimplify context so it can be restored after
   temporarily gimplifying clause operands in the outer context.  */
11851 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
11853 if (teams
== NULL_TREE
)
11854 num_teams
= integer_one_node
;
/* Walk the teams clauses picking up NUM_TEAMS and THREAD_LIMIT.  */
11856 for (c
= OMP_TEAMS_CLAUSES (teams
); c
, c
; c
= OMP_CLAUSE_CHAIN (c
))
11858 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
11861 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
11863 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
11866 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
11870 expr
= OMP_CLAUSE_OPERAND (c
, 0);
11871 if (TREE_CODE (expr
) == INTEGER_CST
)
/* If any subexpression is not host-computable, record -1 (value cannot
   be determined before entering the construct).  */
11876 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
11878 *p
= integer_minus_one_node
;
/* Gimplify the clause operand in the OUTER context, then restore.  */
11882 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
11883 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
11886 gimplify_omp_ctxp
= target_ctx
;
11887 *p
= integer_minus_one_node
;
11890 gimplify_omp_ctxp
= target_ctx
;
11891 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
11892 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
/* Prepend the computed THREAD_LIMIT and NUM_TEAMS clauses to the target
   construct's clause chain.  */
11894 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
11895 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
11896 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
11897 OMP_TARGET_CLAUSES (target
) = c
;
11898 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
11899 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
11900 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
11901 OMP_TARGET_CLAUSES (target
) = c
;
/* NOTE(review): line-mangled extraction (tokens split across lines,
   brace/label/break lines dropped).  Code kept byte-identical; comments
   only.  */
11904 /* Gimplify the gross structure of several OMP constructs. */
11907 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
11909 tree expr
= *expr_p
;
11911 gimple_seq body
= NULL
;
11912 enum omp_region_type ort
;
/* Map the tree code of the construct to its omp region type.  Several
   case labels were dropped by the extractor; only the assignments
   remain visible.  */
11914 switch (TREE_CODE (expr
))
11918 ort
= ORT_WORKSHARE
;
11921 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
11924 ort
= ORT_ACC_KERNELS
;
11926 case OACC_PARALLEL
:
11927 ort
= ORT_ACC_PARALLEL
;
11930 ort
= ORT_ACC_DATA
;
11932 case OMP_TARGET_DATA
:
11933 ort
= ORT_TARGET_DATA
;
11936 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
/* Teams outside of any target (or inside a "declare target" routine's
   outermost target context) execute on the host.  */
11937 if (gimplify_omp_ctxp
== NULL
11938 || (gimplify_omp_ctxp
->region_type
== ORT_TARGET
11939 && gimplify_omp_ctxp
->outer_context
== NULL
11940 && lookup_attribute ("omp declare target",
11941 DECL_ATTRIBUTES (current_function_decl
))))
11942 ort
= (enum omp_region_type
) (ort
| ORT_HOST_TEAMS
);
11944 case OACC_HOST_DATA
:
11945 ort
= ORT_ACC_HOST_DATA
;
11948 gcc_unreachable ();
11950 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
/* For target regions, precompute num_teams/thread_limit if possible.  */
11952 if (TREE_CODE (expr
) == OMP_TARGET
)
11953 optimize_target_teams (expr
, pre_p
);
/* Target-ish and host-teams regions gimplify their body in a fresh
   gimplify context.  */
11954 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
11955 || (ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
11957 push_gimplify_context ();
11958 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
11959 if (gimple_code (g
) == GIMPLE_BIND
)
11960 pop_gimplify_context (g
);
11962 pop_gimplify_context (NULL
);
/* Data regions need an end-of-region runtime call wrapped in a
   TRY_FINALLY so it runs on every exit path.  */
11963 if ((ort
& ORT_TARGET_DATA
) != 0)
11965 enum built_in_function end_ix
;
11966 switch (TREE_CODE (expr
))
11969 case OACC_HOST_DATA
:
11970 end_ix
= BUILT_IN_GOACC_DATA_END
;
11972 case OMP_TARGET_DATA
:
11973 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
11976 gcc_unreachable ();
11978 tree fn
= builtin_decl_explicit (end_ix
);
11979 g
= gimple_build_call (fn
, 0);
11980 gimple_seq cleanup
= NULL
;
11981 gimple_seq_add_stmt (&cleanup
, g
);
11982 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
11984 gimple_seq_add_stmt (&body
, g
);
11988 gimplify_and_add (OMP_BODY (expr
), &body
);
11989 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
/* Build the corresponding GIMPLE statement for the construct.  */
11992 switch (TREE_CODE (expr
))
11995 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
11996 OMP_CLAUSES (expr
));
11999 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
12000 OMP_CLAUSES (expr
));
12002 case OACC_HOST_DATA
:
12003 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
12004 OMP_CLAUSES (expr
));
12006 case OACC_PARALLEL
:
12007 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
12008 OMP_CLAUSES (expr
));
12011 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
12014 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
12017 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
12018 OMP_CLAUSES (expr
));
12020 case OMP_TARGET_DATA
:
12021 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
12022 OMP_CLAUSES (expr
));
12025 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
12026 if ((ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
12027 gimple_omp_teams_set_host (as_a
<gomp_teams
*> (stmt
), true);
12030 gcc_unreachable ();
/* Emit the built statement and consume the tree expression.  */
12033 gimplify_seq_add_stmt (pre_p
, stmt
);
12034 *expr_p
= NULL_TREE
;
/* NOTE(review): line-mangled extraction (tokens split across lines,
   brace/label/break lines dropped).  Code kept byte-identical; comments
   only.  */
12037 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
12038 target update constructs. */
12041 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
12043 tree expr
= *expr_p
;
12046 enum omp_region_type ort
= ORT_WORKSHARE
;
/* Pick the GF_OMP_TARGET_KIND_* for this standalone construct.  */
12048 switch (TREE_CODE (expr
))
12050 case OACC_ENTER_DATA
:
12051 case OACC_EXIT_DATA
:
12052 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
;
12056 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
12059 case OMP_TARGET_UPDATE
:
12060 kind
= GF_OMP_TARGET_KIND_UPDATE
;
12062 case OMP_TARGET_ENTER_DATA
:
12063 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
12065 case OMP_TARGET_EXIT_DATA
:
12066 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
12069 gcc_unreachable ();
12071 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
12072 ort
, TREE_CODE (expr
))
;
12073 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
/* acc update with if_present: demote forced map kinds to plain ones so
   the runtime treats missing data as a no-op.  */
12075 if (TREE_CODE (expr
) == OACC_UPDATE
12076 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
12077 OMP_CLAUSE_IF_PRESENT
))
12079 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
12081 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12082 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
12083 switch (OMP_CLAUSE_MAP_KIND (c
))
12085 case GOMP_MAP_FORCE_TO
:
12086 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
12088 case GOMP_MAP_FORCE_FROM
:
12089 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FROM
);
/* acc exit data with finalize: promote map kinds so reference counts
   are forced to zero.  */
12095 else if (TREE_CODE (expr
) == OACC_EXIT_DATA
12096 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
12097 OMP_CLAUSE_FINALIZE
))
12099 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote that "finalize"
12100 semantics apply to all mappings of this OpenACC directive. */
12101 bool finalize_marked
= false;
12102 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12103 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
12104 switch (OMP_CLAUSE_MAP_KIND (c
))
12106 case GOMP_MAP_FROM
:
12107 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_FROM
);
12108 finalize_marked
= true;
12110 case GOMP_MAP_RELEASE
:
12111 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_DELETE
);
12112 finalize_marked
= true;
12115 /* Check consistency: libgomp relies on the very first data
12116 mapping clause being marked, so make sure we did that before
12117 any other mapping clauses. */
12118 gcc_assert (finalize_marked
);
/* A standalone construct has no body: build the target stmt with NULL
   body, emit it, and consume the tree expression.  */
12122 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
12124 gimplify_seq_add_stmt (pre_p
, stmt
);
12125 *expr_p
= NULL_TREE
;
/* NOTE(review): line-mangled extraction (tokens split across lines,
   brace/return lines dropped).  Code kept byte-identical; comments only.  */
12128 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
12129 stabilized the lhs of the atomic operation as *ADDR. Return true if
12130 EXPR is this stabilized form. */
12133 goa_lhs_expr_p (tree expr
, tree addr
)
12135 /* Also include casts to other type variants. The C front end is fond
12136 of adding these for e.g. volatile variables. This is like
12137 STRIP_TYPE_NOPS but includes the main variant lookup. */
12138 STRIP_USELESS_TYPE_CONVERSION (expr
);
/* Case 1: EXPR is *ADDR (possibly through matching conversions on both
   sides).  */
12140 if (TREE_CODE (expr
) == INDIRECT_REF
)
12142 expr
= TREE_OPERAND (expr
, 0);
/* Peel identical conversion layers off EXPR and ADDR in lockstep as long
   as codes and types keep matching.  */
12143 while (expr
!= addr
12144 && (CONVERT_EXPR_P (expr
)
12145 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
12146 && TREE_CODE (expr
) == TREE_CODE (addr
)
12147 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
12149 expr
= TREE_OPERAND (expr
, 0);
12150 addr
= TREE_OPERAND (addr
, 0);
/* After peeling, match either identical ADDR_EXPRs of the same object...  */
12154 return (TREE_CODE (addr
) == ADDR_EXPR
12155 && TREE_CODE (expr
) == ADDR_EXPR
12156 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
/* ...or case 2: EXPR is directly the object whose address ADDR takes.  */
12158 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
/* NOTE(review): line-mangled extraction (tokens split across lines,
   brace/label/break/return lines dropped).  Code kept byte-identical;
   comments only.  */
12163 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
12164 expression does not involve the lhs, evaluate it into a temporary.
12165 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
12166 or -1 if an error was encountered. */
12169 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
12172 tree expr
= *expr_p
;
/* If this subexpression IS the atomic lhs, replace it (the replacement
   with lhs_var happens in lines dropped by the extractor -- TODO confirm
   against the full file).  */
12175 if (goa_lhs_expr_p (expr
, lhs_addr
))
12180 if (is_gimple_val (expr
))
/* Recurse into operands by tree-code class.  */
12184 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
12187 case tcc_comparison
:
12188 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
12192 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
12195 case tcc_expression
:
12196 switch (TREE_CODE (expr
))
12198 case TRUTH_ANDIF_EXPR
:
12199 case TRUTH_ORIF_EXPR
:
12200 case TRUTH_AND_EXPR
:
12201 case TRUTH_OR_EXPR
:
12202 case TRUTH_XOR_EXPR
:
12203 case BIT_INSERT_EXPR
:
12204 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
12205 lhs_addr
, lhs_var
);
12207 case TRUTH_NOT_EXPR
:
12208 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
12209 lhs_addr
, lhs_var
);
12211 case COMPOUND_EXPR
:
12212 /* Break out any preevaluations from cp_build_modify_expr. */
12213 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
12214 expr
= TREE_OPERAND (expr
, 1))
12215 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
12217 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
);
12222 case tcc_reference
:
12223 if (TREE_CODE (expr
) == BIT_FIELD_REF
)
12224 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
12225 lhs_addr
, lhs_var
);
/* Subexpressions not involving the lhs are evaluated into temporaries
   here so the atomic region stays minimal.  */
12233 enum gimplify_status gs
;
12234 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
12235 if (gs
!= GS_ALL_DONE
)
/* NOTE(review): line-mangled extraction (tokens split across lines,
   brace/label/break/return lines dropped).  Code kept byte-identical;
   comments only.  */
12242 /* Gimplify an OMP_ATOMIC statement. */
12244 static enum gimplify_status
12245 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
/* Operand 0 is the stabilized address of the atomic location; operand 1
   (absent for OMP_ATOMIC_READ) is the stored value.  */
12247 tree addr
= TREE_OPERAND (*expr_p
, 0);
12248 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
12249 ? NULL
: TREE_OPERAND (*expr_p
, 1);
12250 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
12252 gomp_atomic_load
*loadstmt
;
12253 gomp_atomic_store
*storestmt
;
/* tmp_load receives the atomically loaded value; occurrences of the lhs
   inside RHS are rewritten to it by goa_stabilize_expr.  */
12255 tmp_load
= create_tmp_reg (type
);
12256 if (rhs
&& goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
) < 0)
12259 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
/* Emit the atomic load carrying the construct's memory order.  */
12263 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
,
12264 OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
12265 gimplify_seq_add_stmt (pre_p
, loadstmt
);
12268 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
12269 representatives. Use BIT_FIELD_REF on the lhs instead. */
12270 if (TREE_CODE (rhs
) == BIT_INSERT_EXPR
12271 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load
)))
12273 tree bitpos
= TREE_OPERAND (rhs
, 2);
12274 tree op1
= TREE_OPERAND (rhs
, 1);
/* capture-old must keep the pre-store value, so store into a copy.  */
12276 tree tmp_store
= tmp_load
;
12277 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_CAPTURE_OLD
)
12278 tmp_store
= get_initialized_tmp_var (tmp_load
, pre_p
, NULL
)
;
12279 if (INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
12280 bitsize
= bitsize_int (TYPE_PRECISION (TREE_TYPE (op1
)));
12282 bitsize
= TYPE_SIZE (TREE_TYPE (op1
));
12283 gcc_assert (TREE_OPERAND (rhs
, 0) == tmp_load
);
/* Rewrite the insert as "BIT_FIELD_REF (tmp_store, ...) = op1".  */
12284 tree t
= build2_loc (EXPR_LOCATION (rhs
),
12285 MODIFY_EXPR
, void_type_node
,
12286 build3_loc (EXPR_LOCATION (rhs
), BIT_FIELD_REF
,
12287 TREE_TYPE (op1
), tmp_store
, bitsize
,
12289 gimplify_and_add (t
, pre_p
);
12292 if (gimplify_expr (&rhs
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
12297 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
/* Emit the paired atomic store, same memory order as the load.  */
12300 = gimple_build_omp_atomic_store (rhs
, OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
12301 gimplify_seq_add_stmt (pre_p
, storestmt
);
/* Capture forms surface a value to the surrounding expression: the old
   value comes from the load, the new one from the store.  */
12302 switch (TREE_CODE (*expr_p
))
12304 case OMP_ATOMIC_READ
:
12305 case OMP_ATOMIC_CAPTURE_OLD
:
12306 *expr_p
= tmp_load
;
12307 gimple_omp_atomic_set_need_value (loadstmt
);
12309 case OMP_ATOMIC_CAPTURE_NEW
:
12311 gimple_omp_atomic_set_need_value (storestmt
);
12318 return GS_ALL_DONE
;
/* NOTE(review): line-mangled extraction (tokens split across lines,
   brace lines dropped).  Code kept byte-identical; comments only.  */
12321 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
12322 body, and adding some EH bits. */
12324 static enum gimplify_status
12325 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
12327 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
12329 gtransaction
*trans_stmt
;
12330 gimple_seq body
= NULL
;
12333 /* Wrap the transaction body in a BIND_EXPR so we have a context
12334 where to put decls for OMP. */
12335 if (TREE_CODE (tbody
) != BIND_EXPR
)
12337 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
12338 TREE_SIDE_EFFECTS (bind
) = 1;
12339 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
12340 TRANSACTION_EXPR_BODY (expr
) = bind
;
/* Gimplify the body in its own context; voidify_wrapper_expr makes room
   for a result temporary if the transaction yields a value.  */
12343 push_gimplify_context ();
12344 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
12346 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
12347 pop_gimplify_context (body_stmt
);
/* Build the GIMPLE_TRANSACTION and record outer/relaxed subcodes.  */
12349 trans_stmt
= gimple_build_transaction (body
);
12350 if (TRANSACTION_EXPR_OUTER (expr
))
12351 subcode
= GTMA_IS_OUTER
;
12352 else if (TRANSACTION_EXPR_RELAXED (expr
))
12353 subcode
= GTMA_IS_RELAXED
;
12354 gimple_transaction_set_subcode (trans_stmt
, subcode
);
12356 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
12364 *expr_p
= NULL_TREE
;
12365 return GS_ALL_DONE
;
/* NOTE(review): line-mangled extraction (tokens split across lines,
   brace/continue lines dropped).  Code kept byte-identical; comments
   only.  */
12368 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
12369 is the OMP_BODY of the original EXPR (which has already been
12370 gimplified so it's not present in the EXPR).
12372 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
12375 gimplify_omp_ordered (tree expr
, gimple_seq body
)
/* Track the first depend(source) clause and any depend(sink:) clause so
   their illegal combination can be diagnosed below.  */
12380 tree source_c
= NULL_TREE
;
12381 tree sink_c
= NULL_TREE
;
12383 if (gimplify_omp_ctxp
)
/* Validate each depend clause against the enclosing doacross loop's
   recorded iteration variables (loop_iter_var holds var/length pairs).  */
12385 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
/* depend(sink/source) requires an enclosing loop with ordered(n).  */
12386 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
12387 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ()
12388 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
12389 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
))
12391 error_at (OMP_CLAUSE_LOCATION (c
),
12392 "%<ordered%> construct with %<depend%> clause must be "
12393 "closely nested inside a loop with %<ordered%> clause "
12394 "with a parameter")
;
12397 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
12398 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
/* Each sink vector entry must name the iteration variable of the
   corresponding outermost loop, in order.  */
12401 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
12402 decls
&& TREE_CODE (decls
) == TREE_LIST
;
12403 decls
= TREE_CHAIN (decls
), ++i
)
12404 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
12406 else if (TREE_VALUE (decls
)
12407 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
12409 error_at (OMP_CLAUSE_LOCATION (c
),
12410 "variable %qE is not an iteration "
12411 "of outermost loop %d, expected %qE",
12412 TREE_VALUE (decls
), i
+ 1,
12413 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
12419 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
/* The sink vector length must match the collapse depth exactly.  */
12420 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
12422 error_at (OMP_CLAUSE_LOCATION (c
),
12423 "number of variables in %<depend%> clause with "
12424 "%<sink%> modifier does not match number of "
12425 "iteration variables")
;
12430 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
12431 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
12435 error_at (OMP_CLAUSE_LOCATION (c
),
12436 "more than one %<depend%> clause with %<source%> "
12437 "modifier on an %<ordered%> construct")
;
/* source and sink may not be mixed on one ordered construct.  */
12444 if (source_c
&& sink_c
)
12446 error_at (OMP_CLAUSE_LOCATION (source_c
),
12447 "%<depend%> clause with %<source%> modifier specified "
12448 "together with %<depend%> clauses with %<sink%> modifier "
12449 "on the same construct")
;
/* On error a no-op is returned (guard dropped by the extractor);
   otherwise build the GIMPLE_OMP_ORDERED.  */
12454 return gimple_build_nop ();
12455 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
12458 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
12459 expression produces a value to be used as an operand inside a GIMPLE
12460 statement, the value will be stored back in *EXPR_P. This value will
12461 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
12462 an SSA_NAME. The corresponding sequence of GIMPLE statements is
12463 emitted in PRE_P and POST_P.
12465 Additionally, this process may overwrite parts of the input
12466 expression during gimplification. Ideally, it should be
12467 possible to do non-destructive gimplification.
12469 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
12470 the expression needs to evaluate to a value to be used as
12471 an operand in a GIMPLE statement, this value will be stored in
12472 *EXPR_P on exit. This happens when the caller specifies one
12473 of fb_lvalue or fb_rvalue fallback flags.
12475 PRE_P will contain the sequence of GIMPLE statements corresponding
12476 to the evaluation of EXPR and all the side-effects that must
12477 be executed before the main expression. On exit, the last
12478 statement of PRE_P is the core statement being gimplified. For
12479 instance, when gimplifying 'if (++a)' the last statement in
12480 PRE_P will be 'if (t.1)' where t.1 is the result of
12481 pre-incrementing 'a'.
12483 POST_P will contain the sequence of GIMPLE statements corresponding
12484 to the evaluation of all the side-effects that must be executed
12485 after the main expression. If this is NULL, the post
12486 side-effects are stored at the end of PRE_P.
12488 The reason why the output is split in two is to handle post
12489 side-effects explicitly. In some cases, an expression may have
12490 inner and outer post side-effects which need to be emitted in
12491 an order different from the one given by the recursive
12492 traversal. For instance, for the expression (*p--)++ the post
12493 side-effects of '--' must actually occur *after* the post
12494 side-effects of '++'. However, gimplification will first visit
12495 the inner expression, so if a separate POST sequence was not
12496 used, the resulting sequence would be:
12503 However, the post-decrement operation in line #2 must not be
12504 evaluated until after the store to *p at line #4, so the
12505 correct sequence should be:
12512 So, by specifying a separate post queue, it is possible
12513 to emit the post side-effects in the correct order.
12514 If POST_P is NULL, an internal queue will be used. Before
12515 returning to the caller, the sequence POST_P is appended to
12516 the main output sequence PRE_P.
12518 GIMPLE_TEST_F points to a function that takes a tree T and
12519 returns nonzero if T is in the GIMPLE form requested by the
12520 caller. The GIMPLE predicates are in gimple.c.
12522 FALLBACK tells the function what sort of a temporary we want if
12523 gimplification cannot produce an expression that complies with
12526 fb_none means that no temporary should be generated
12527 fb_rvalue means that an rvalue is OK to generate
12528 fb_lvalue means that an lvalue is OK to generate
12529 fb_either means that either is OK, but an lvalue is preferable.
12530 fb_mayfail means that gimplification may fail (in which case
12531 GS_ERROR will be returned)
12533 The return value is either GS_ERROR or GS_ALL_DONE, since this
12534 function iterates until EXPR is completely gimplified or an error
12537 enum gimplify_status
12538 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
12539 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
12542 gimple_seq internal_pre
= NULL
;
12543 gimple_seq internal_post
= NULL
;
12546 location_t saved_location
;
12547 enum gimplify_status ret
;
12548 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
12551 save_expr
= *expr_p
;
12552 if (save_expr
== NULL_TREE
)
12553 return GS_ALL_DONE
;
12555 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
12556 is_statement
= gimple_test_f
== is_gimple_stmt
;
12558 gcc_assert (pre_p
);
12560 /* Consistency checks. */
12561 if (gimple_test_f
== is_gimple_reg
)
12562 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
12563 else if (gimple_test_f
== is_gimple_val
12564 || gimple_test_f
== is_gimple_call_addr
12565 || gimple_test_f
== is_gimple_condexpr
12566 || gimple_test_f
== is_gimple_mem_rhs
12567 || gimple_test_f
== is_gimple_mem_rhs_or_call
12568 || gimple_test_f
== is_gimple_reg_rhs
12569 || gimple_test_f
== is_gimple_reg_rhs_or_call
12570 || gimple_test_f
== is_gimple_asm_val
12571 || gimple_test_f
== is_gimple_mem_ref_addr
)
12572 gcc_assert (fallback
& fb_rvalue
);
12573 else if (gimple_test_f
== is_gimple_min_lval
12574 || gimple_test_f
== is_gimple_lvalue
)
12575 gcc_assert (fallback
& fb_lvalue
);
12576 else if (gimple_test_f
== is_gimple_addressable
)
12577 gcc_assert (fallback
& fb_either
);
12578 else if (gimple_test_f
== is_gimple_stmt
)
12579 gcc_assert (fallback
== fb_none
);
12582 /* We should have recognized the GIMPLE_TEST_F predicate to
12583 know what kind of fallback to use in case a temporary is
12584 needed to hold the value or address of *EXPR_P. */
12585 gcc_unreachable ();
12588 /* We used to check the predicate here and return immediately if it
12589 succeeds. This is wrong; the design is for gimplification to be
12590 idempotent, and for the predicates to only test for valid forms, not
12591 whether they are fully simplified. */
12593 pre_p
= &internal_pre
;
12595 if (post_p
== NULL
)
12596 post_p
= &internal_post
;
12598 /* Remember the last statements added to PRE_P and POST_P. Every
12599 new statement added by the gimplification helpers needs to be
12600 annotated with location information. To centralize the
12601 responsibility, we remember the last statement that had been
12602 added to both queues before gimplifying *EXPR_P. If
12603 gimplification produces new statements in PRE_P and POST_P, those
12604 statements will be annotated with the same location information
12606 pre_last_gsi
= gsi_last (*pre_p
);
12607 post_last_gsi
= gsi_last (*post_p
);
12609 saved_location
= input_location
;
12610 if (save_expr
!= error_mark_node
12611 && EXPR_HAS_LOCATION (*expr_p
))
12612 input_location
= EXPR_LOCATION (*expr_p
);
12614 /* Loop over the specific gimplifiers until the toplevel node
12615 remains the same. */
12618 /* Strip away as many useless type conversions as possible
12619 at the toplevel. */
12620 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
12622 /* Remember the expr. */
12623 save_expr
= *expr_p
;
12625 /* Die, die, die, my darling. */
12626 if (error_operand_p (save_expr
))
12632 /* Do any language-specific gimplification. */
12633 ret
= ((enum gimplify_status
)
12634 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
12637 if (*expr_p
== NULL_TREE
)
12639 if (*expr_p
!= save_expr
)
12642 else if (ret
!= GS_UNHANDLED
)
12645 /* Make sure that all the cases set 'ret' appropriately. */
12646 ret
= GS_UNHANDLED
;
12647 switch (TREE_CODE (*expr_p
))
12649 /* First deal with the special cases. */
12651 case POSTINCREMENT_EXPR
:
12652 case POSTDECREMENT_EXPR
:
12653 case PREINCREMENT_EXPR
:
12654 case PREDECREMENT_EXPR
:
12655 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
12656 fallback
!= fb_none
,
12657 TREE_TYPE (*expr_p
));
12660 case VIEW_CONVERT_EXPR
:
12661 if ((fallback
& fb_rvalue
)
12662 && is_gimple_reg_type (TREE_TYPE (*expr_p
))
12663 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
12665 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
12666 post_p
, is_gimple_val
, fb_rvalue
);
12667 recalculate_side_effects (*expr_p
);
12673 case ARRAY_RANGE_REF
:
12674 case REALPART_EXPR
:
12675 case IMAGPART_EXPR
:
12676 case COMPONENT_REF
:
12677 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
12678 fallback
? fallback
: fb_rvalue
);
12682 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
12684 /* C99 code may assign to an array in a structure value of a
12685 conditional expression, and this has undefined behavior
12686 only on execution, so create a temporary if an lvalue is
12688 if (fallback
== fb_lvalue
)
12690 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
12691 mark_addressable (*expr_p
);
12697 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
12699 /* C99 code may assign to an array in a structure returned
12700 from a function, and this has undefined behavior only on
12701 execution, so create a temporary if an lvalue is
12703 if (fallback
== fb_lvalue
)
12705 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
12706 mark_addressable (*expr_p
);
12712 gcc_unreachable ();
12714 case COMPOUND_EXPR
:
12715 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
12718 case COMPOUND_LITERAL_EXPR
:
12719 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
12720 gimple_test_f
, fallback
);
12725 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
12726 fallback
!= fb_none
);
12729 case TRUTH_ANDIF_EXPR
:
12730 case TRUTH_ORIF_EXPR
:
12732 /* Preserve the original type of the expression and the
12733 source location of the outer expression. */
12734 tree org_type
= TREE_TYPE (*expr_p
);
12735 *expr_p
= gimple_boolify (*expr_p
);
12736 *expr_p
= build3_loc (input_location
, COND_EXPR
,
12740 org_type
, boolean_true_node
),
12743 org_type
, boolean_false_node
));
12748 case TRUTH_NOT_EXPR
:
12750 tree type
= TREE_TYPE (*expr_p
);
12751 /* The parsers are careful to generate TRUTH_NOT_EXPR
12752 only with operands that are always zero or one.
12753 We do not fold here but handle the only interesting case
12754 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
12755 *expr_p
= gimple_boolify (*expr_p
);
12756 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
12757 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
12758 TREE_TYPE (*expr_p
),
12759 TREE_OPERAND (*expr_p
, 0));
12761 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
12762 TREE_TYPE (*expr_p
),
12763 TREE_OPERAND (*expr_p
, 0),
12764 build_int_cst (TREE_TYPE (*expr_p
), 1));
12765 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
12766 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
12772 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
12775 case ANNOTATE_EXPR
:
12777 tree cond
= TREE_OPERAND (*expr_p
, 0);
12778 tree kind
= TREE_OPERAND (*expr_p
, 1);
12779 tree data
= TREE_OPERAND (*expr_p
, 2);
12780 tree type
= TREE_TYPE (cond
);
12781 if (!INTEGRAL_TYPE_P (type
))
12787 tree tmp
= create_tmp_var (type
);
12788 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
12790 = gimple_build_call_internal (IFN_ANNOTATE
, 3, cond
, kind
, data
);
12791 gimple_call_set_lhs (call
, tmp
);
12792 gimplify_seq_add_stmt (pre_p
, call
);
12799 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
12803 if (IS_EMPTY_STMT (*expr_p
))
12809 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
12810 || fallback
== fb_none
)
12812 /* Just strip a conversion to void (or in void context) and
12814 *expr_p
= TREE_OPERAND (*expr_p
, 0);
12819 ret
= gimplify_conversion (expr_p
);
12820 if (ret
== GS_ERROR
)
12822 if (*expr_p
!= save_expr
)
12826 case FIX_TRUNC_EXPR
:
12827 /* unary_expr: ... | '(' cast ')' val | ... */
12828 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
12829 is_gimple_val
, fb_rvalue
);
12830 recalculate_side_effects (*expr_p
);
12835 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
12836 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
12837 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
12839 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
12840 if (*expr_p
!= save_expr
)
12846 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
12847 is_gimple_reg
, fb_rvalue
);
12848 if (ret
== GS_ERROR
)
12851 recalculate_side_effects (*expr_p
);
12852 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
12853 TREE_TYPE (*expr_p
),
12854 TREE_OPERAND (*expr_p
, 0),
12855 build_int_cst (saved_ptr_type
, 0));
12856 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
12857 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
12862 /* We arrive here through the various re-gimplifcation paths. */
12864 /* First try re-folding the whole thing. */
12865 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
12866 TREE_OPERAND (*expr_p
, 0),
12867 TREE_OPERAND (*expr_p
, 1));
12870 REF_REVERSE_STORAGE_ORDER (tmp
)
12871 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
12873 recalculate_side_effects (*expr_p
);
12877 /* Avoid re-gimplifying the address operand if it is already
12878 in suitable form. Re-gimplifying would mark the address
12879 operand addressable. Always gimplify when not in SSA form
12880 as we still may have to gimplify decls with value-exprs. */
12881 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
12882 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
12884 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
12885 is_gimple_mem_ref_addr
, fb_rvalue
);
12886 if (ret
== GS_ERROR
)
12889 recalculate_side_effects (*expr_p
);
12893 /* Constants need not be gimplified. */
12900 /* Drop the overflow flag on constants, we do not want
12901 that in the GIMPLE IL. */
12902 if (TREE_OVERFLOW_P (*expr_p
))
12903 *expr_p
= drop_tree_overflow (*expr_p
);
12908 /* If we require an lvalue, such as for ADDR_EXPR, retain the
12909 CONST_DECL node. Otherwise the decl is replaceable by its
12911 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
12912 if (fallback
& fb_lvalue
)
12916 *expr_p
= DECL_INITIAL (*expr_p
);
12922 ret
= gimplify_decl_expr (expr_p
, pre_p
);
12926 ret
= gimplify_bind_expr (expr_p
, pre_p
);
12930 ret
= gimplify_loop_expr (expr_p
, pre_p
);
12934 ret
= gimplify_switch_expr (expr_p
, pre_p
);
12938 ret
= gimplify_exit_expr (expr_p
);
12942 /* If the target is not LABEL, then it is a computed jump
12943 and the target needs to be gimplified. */
12944 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
12946 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
12947 NULL
, is_gimple_val
, fb_rvalue
);
12948 if (ret
== GS_ERROR
)
12951 gimplify_seq_add_stmt (pre_p
,
12952 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
12957 gimplify_seq_add_stmt (pre_p
,
12958 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
12959 PREDICT_EXPR_OUTCOME (*expr_p
)));
12964 ret
= gimplify_label_expr (expr_p
, pre_p
);
12965 label
= LABEL_EXPR_LABEL (*expr_p
);
12966 gcc_assert (decl_function_context (label
) == current_function_decl
);
12968 /* If the label is used in a goto statement, or address of the label
12969 is taken, we need to unpoison all variables that were seen so far.
12970 Doing so would prevent us from reporting a false positives. */
12971 if (asan_poisoned_variables
12972 && asan_used_labels
!= NULL
12973 && asan_used_labels
->contains (label
))
12974 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
12977 case CASE_LABEL_EXPR
:
12978 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
12980 if (gimplify_ctxp
->live_switch_vars
)
12981 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
12986 ret
= gimplify_return_expr (*expr_p
, pre_p
);
12990 /* Don't reduce this in place; let gimplify_init_constructor work its
12991 magic. Buf if we're just elaborating this for side effects, just
12992 gimplify any element that has side-effects. */
12993 if (fallback
== fb_none
)
12995 unsigned HOST_WIDE_INT ix
;
12997 tree temp
= NULL_TREE
;
12998 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
12999 if (TREE_SIDE_EFFECTS (val
))
13000 append_to_statement_list (val
, &temp
);
13003 ret
= temp
? GS_OK
: GS_ALL_DONE
;
13005 /* C99 code may assign to an array in a constructed
13006 structure or union, and this has undefined behavior only
13007 on execution, so create a temporary if an lvalue is
13009 else if (fallback
== fb_lvalue
)
13011 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
13012 mark_addressable (*expr_p
);
13019 /* The following are special cases that are not handled by the
13020 original GIMPLE grammar. */
13022 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
13025 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
13028 case BIT_FIELD_REF
:
13029 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13030 post_p
, is_gimple_lvalue
, fb_either
);
13031 recalculate_side_effects (*expr_p
);
13034 case TARGET_MEM_REF
:
13036 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
13038 if (TMR_BASE (*expr_p
))
13039 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
13040 post_p
, is_gimple_mem_ref_addr
, fb_either
);
13041 if (TMR_INDEX (*expr_p
))
13042 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
13043 post_p
, is_gimple_val
, fb_rvalue
);
13044 if (TMR_INDEX2 (*expr_p
))
13045 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
13046 post_p
, is_gimple_val
, fb_rvalue
);
13047 /* TMR_STEP and TMR_OFFSET are always integer constants. */
13048 ret
= MIN (r0
, r1
);
13052 case NON_LVALUE_EXPR
:
13053 /* This should have been stripped above. */
13054 gcc_unreachable ();
13057 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
13060 case TRY_FINALLY_EXPR
:
13061 case TRY_CATCH_EXPR
:
13063 gimple_seq eval
, cleanup
;
13066 /* Calls to destructors are generated automatically in FINALLY/CATCH
13067 block. They should have location as UNKNOWN_LOCATION. However,
13068 gimplify_call_expr will reset these call stmts to input_location
13069 if it finds stmt's location is unknown. To prevent resetting for
13070 destructors, we set the input_location to unknown.
13071 Note that this only affects the destructor calls in FINALLY/CATCH
13072 block, and will automatically reset to its original value by the
13073 end of gimplify_expr. */
13074 input_location
= UNKNOWN_LOCATION
;
13075 eval
= cleanup
= NULL
;
13076 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
13077 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
13078 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
13079 if (gimple_seq_empty_p (cleanup
))
13081 gimple_seq_add_seq (pre_p
, eval
);
13085 try_
= gimple_build_try (eval
, cleanup
,
13086 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
13087 ? GIMPLE_TRY_FINALLY
13088 : GIMPLE_TRY_CATCH
);
13089 if (EXPR_HAS_LOCATION (save_expr
))
13090 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
13091 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
13092 gimple_set_location (try_
, saved_location
);
13093 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
13094 gimple_try_set_catch_is_cleanup (try_
,
13095 TRY_CATCH_IS_CLEANUP (*expr_p
));
13096 gimplify_seq_add_stmt (pre_p
, try_
);
13101 case CLEANUP_POINT_EXPR
:
13102 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
13106 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
13112 gimple_seq handler
= NULL
;
13113 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
13114 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
13115 gimplify_seq_add_stmt (pre_p
, c
);
13120 case EH_FILTER_EXPR
:
13123 gimple_seq failure
= NULL
;
13125 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
13126 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
13127 gimple_set_no_warning (ehf
, TREE_NO_WARNING (*expr_p
));
13128 gimplify_seq_add_stmt (pre_p
, ehf
);
13135 enum gimplify_status r0
, r1
;
13136 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
13137 post_p
, is_gimple_val
, fb_rvalue
);
13138 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
13139 post_p
, is_gimple_val
, fb_rvalue
);
13140 TREE_SIDE_EFFECTS (*expr_p
) = 0;
13141 ret
= MIN (r0
, r1
);
13146 /* We get here when taking the address of a label. We mark
13147 the label as "forced"; meaning it can never be removed and
13148 it is a potential target for any computed goto. */
13149 FORCED_LABEL (*expr_p
) = 1;
13153 case STATEMENT_LIST
:
13154 ret
= gimplify_statement_list (expr_p
, pre_p
);
13157 case WITH_SIZE_EXPR
:
13159 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13160 post_p
== &internal_post
? NULL
: post_p
,
13161 gimple_test_f
, fallback
);
13162 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
13163 is_gimple_val
, fb_rvalue
);
13170 ret
= gimplify_var_or_parm_decl (expr_p
);
13174 /* When within an OMP context, notice uses of variables. */
13175 if (gimplify_omp_ctxp
)
13176 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
13180 case DEBUG_EXPR_DECL
:
13181 gcc_unreachable ();
13183 case DEBUG_BEGIN_STMT
:
13184 gimplify_seq_add_stmt (pre_p
,
13185 gimple_build_debug_begin_stmt
13186 (TREE_BLOCK (*expr_p
),
13187 EXPR_LOCATION (*expr_p
)));
13193 /* Allow callbacks into the gimplifier during optimization. */
13198 gimplify_omp_parallel (expr_p
, pre_p
);
13203 gimplify_omp_task (expr_p
, pre_p
);
13209 case OMP_DISTRIBUTE
:
13212 ret
= gimplify_omp_for (expr_p
, pre_p
);
13216 gimplify_oacc_cache (expr_p
, pre_p
);
13221 gimplify_oacc_declare (expr_p
, pre_p
);
13225 case OACC_HOST_DATA
:
13228 case OACC_PARALLEL
:
13232 case OMP_TARGET_DATA
:
13234 gimplify_omp_workshare (expr_p
, pre_p
);
13238 case OACC_ENTER_DATA
:
13239 case OACC_EXIT_DATA
:
13241 case OMP_TARGET_UPDATE
:
13242 case OMP_TARGET_ENTER_DATA
:
13243 case OMP_TARGET_EXIT_DATA
:
13244 gimplify_omp_target_update (expr_p
, pre_p
);
13254 gimple_seq body
= NULL
;
13257 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
13258 switch (TREE_CODE (*expr_p
))
13261 g
= gimple_build_omp_section (body
);
13264 g
= gimple_build_omp_master (body
);
13267 g
= gimplify_omp_ordered (*expr_p
, body
);
13270 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
13271 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
13272 gimplify_adjust_omp_clauses (pre_p
, body
,
13273 &OMP_CRITICAL_CLAUSES (*expr_p
),
13275 g
= gimple_build_omp_critical (body
,
13276 OMP_CRITICAL_NAME (*expr_p
),
13277 OMP_CRITICAL_CLAUSES (*expr_p
));
13280 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p
),
13281 pre_p
, ORT_WORKSHARE
, OMP_SCAN
);
13282 gimplify_adjust_omp_clauses (pre_p
, body
,
13283 &OMP_SCAN_CLAUSES (*expr_p
),
13285 g
= gimple_build_omp_scan (body
, OMP_SCAN_CLAUSES (*expr_p
));
13288 gcc_unreachable ();
13290 gimplify_seq_add_stmt (pre_p
, g
);
13295 case OMP_TASKGROUP
:
13297 gimple_seq body
= NULL
;
13299 tree
*pclauses
= &OMP_TASKGROUP_CLAUSES (*expr_p
);
13300 gimplify_scan_omp_clauses (pclauses
, pre_p
, ORT_TASKGROUP
,
13302 gimplify_adjust_omp_clauses (pre_p
, NULL
, pclauses
, OMP_TASKGROUP
);
13303 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
13304 gimple_seq cleanup
= NULL
;
13305 tree fn
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
13306 gimple
*g
= gimple_build_call (fn
, 0);
13307 gimple_seq_add_stmt (&cleanup
, g
);
13308 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
13310 gimple_seq_add_stmt (&body
, g
);
13311 g
= gimple_build_omp_taskgroup (body
, *pclauses
);
13312 gimplify_seq_add_stmt (pre_p
, g
);
13318 case OMP_ATOMIC_READ
:
13319 case OMP_ATOMIC_CAPTURE_OLD
:
13320 case OMP_ATOMIC_CAPTURE_NEW
:
13321 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
13324 case TRANSACTION_EXPR
:
13325 ret
= gimplify_transaction (expr_p
, pre_p
);
13328 case TRUTH_AND_EXPR
:
13329 case TRUTH_OR_EXPR
:
13330 case TRUTH_XOR_EXPR
:
13332 tree orig_type
= TREE_TYPE (*expr_p
);
13333 tree new_type
, xop0
, xop1
;
13334 *expr_p
= gimple_boolify (*expr_p
);
13335 new_type
= TREE_TYPE (*expr_p
);
13336 if (!useless_type_conversion_p (orig_type
, new_type
))
13338 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
13343 /* Boolified binary truth expressions are semantically equivalent
13344 to bitwise binary expressions. Canonicalize them to the
13345 bitwise variant. */
13346 switch (TREE_CODE (*expr_p
))
13348 case TRUTH_AND_EXPR
:
13349 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
13351 case TRUTH_OR_EXPR
:
13352 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
13354 case TRUTH_XOR_EXPR
:
13355 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
13360 /* Now make sure that operands have compatible type to
13361 expression's new_type. */
13362 xop0
= TREE_OPERAND (*expr_p
, 0);
13363 xop1
= TREE_OPERAND (*expr_p
, 1);
13364 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
13365 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
13368 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
13369 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
13372 /* Continue classified as tcc_binary. */
13376 case VEC_COND_EXPR
:
13378 enum gimplify_status r0
, r1
, r2
;
13380 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13381 post_p
, is_gimple_condexpr
, fb_rvalue
);
13382 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
13383 post_p
, is_gimple_val
, fb_rvalue
);
13384 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
13385 post_p
, is_gimple_val
, fb_rvalue
);
13387 ret
= MIN (MIN (r0
, r1
), r2
);
13388 recalculate_side_effects (*expr_p
);
13392 case VEC_PERM_EXPR
:
13393 /* Classified as tcc_expression. */
13396 case BIT_INSERT_EXPR
:
13397 /* Argument 3 is a constant. */
13400 case POINTER_PLUS_EXPR
:
13402 enum gimplify_status r0
, r1
;
13403 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13404 post_p
, is_gimple_val
, fb_rvalue
);
13405 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
13406 post_p
, is_gimple_val
, fb_rvalue
);
13407 recalculate_side_effects (*expr_p
);
13408 ret
= MIN (r0
, r1
);
13413 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
13415 case tcc_comparison
:
13416 /* Handle comparison of objects of non scalar mode aggregates
13417 with a call to memcmp. It would be nice to only have to do
13418 this for variable-sized objects, but then we'd have to allow
13419 the same nest of reference nodes we allow for MODIFY_EXPR and
13420 that's too complex.
13422 Compare scalar mode aggregates as scalar mode values. Using
13423 memcmp for them would be very inefficient at best, and is
13424 plain wrong if bitfields are involved. */
13426 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
13428 /* Vector comparisons need no boolification. */
13429 if (TREE_CODE (type
) == VECTOR_TYPE
)
13431 else if (!AGGREGATE_TYPE_P (type
))
13433 tree org_type
= TREE_TYPE (*expr_p
);
13434 *expr_p
= gimple_boolify (*expr_p
);
13435 if (!useless_type_conversion_p (org_type
,
13436 TREE_TYPE (*expr_p
)))
13438 *expr_p
= fold_convert_loc (input_location
,
13439 org_type
, *expr_p
);
13445 else if (TYPE_MODE (type
) != BLKmode
)
13446 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
13448 ret
= gimplify_variable_sized_compare (expr_p
);
13453 /* If *EXPR_P does not need to be special-cased, handle it
13454 according to its class. */
13456 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13457 post_p
, is_gimple_val
, fb_rvalue
);
13463 enum gimplify_status r0
, r1
;
13465 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13466 post_p
, is_gimple_val
, fb_rvalue
);
13467 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
13468 post_p
, is_gimple_val
, fb_rvalue
);
13470 ret
= MIN (r0
, r1
);
13476 enum gimplify_status r0
, r1
, r2
;
13478 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13479 post_p
, is_gimple_val
, fb_rvalue
);
13480 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
13481 post_p
, is_gimple_val
, fb_rvalue
);
13482 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
13483 post_p
, is_gimple_val
, fb_rvalue
);
13485 ret
= MIN (MIN (r0
, r1
), r2
);
13489 case tcc_declaration
:
13492 goto dont_recalculate
;
13495 gcc_unreachable ();
13498 recalculate_side_effects (*expr_p
);
13504 gcc_assert (*expr_p
|| ret
!= GS_OK
);
13506 while (ret
== GS_OK
);
13508 /* If we encountered an error_mark somewhere nested inside, either
13509 stub out the statement or propagate the error back out. */
13510 if (ret
== GS_ERROR
)
13517 /* This was only valid as a return value from the langhook, which
13518 we handled. Make sure it doesn't escape from any other context. */
13519 gcc_assert (ret
!= GS_UNHANDLED
);
13521 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
13523 /* We aren't looking for a value, and we don't have a valid
13524 statement. If it doesn't have side-effects, throw it away.
13525 We can also get here with code such as "*&&L;", where L is
13526 a LABEL_DECL that is marked as FORCED_LABEL. */
13527 if (TREE_CODE (*expr_p
) == LABEL_DECL
13528 || !TREE_SIDE_EFFECTS (*expr_p
))
13530 else if (!TREE_THIS_VOLATILE (*expr_p
))
13532 /* This is probably a _REF that contains something nested that
13533 has side effects. Recurse through the operands to find it. */
13534 enum tree_code code
= TREE_CODE (*expr_p
);
13538 case COMPONENT_REF
:
13539 case REALPART_EXPR
:
13540 case IMAGPART_EXPR
:
13541 case VIEW_CONVERT_EXPR
:
13542 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
13543 gimple_test_f
, fallback
);
13547 case ARRAY_RANGE_REF
:
13548 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
13549 gimple_test_f
, fallback
);
13550 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
13551 gimple_test_f
, fallback
);
13555 /* Anything else with side-effects must be converted to
13556 a valid statement before we get here. */
13557 gcc_unreachable ();
13562 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
13563 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
)
13565 /* Historically, the compiler has treated a bare reference
13566 to a non-BLKmode volatile lvalue as forcing a load. */
13567 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
13569 /* Normally, we do not want to create a temporary for a
13570 TREE_ADDRESSABLE type because such a type should not be
13571 copied by bitwise-assignment. However, we make an
13572 exception here, as all we are doing here is ensuring that
13573 we read the bytes that make up the type. We use
13574 create_tmp_var_raw because create_tmp_var will abort when
13575 given a TREE_ADDRESSABLE type. */
13576 tree tmp
= create_tmp_var_raw (type
, "vol");
13577 gimple_add_tmp_var (tmp
);
13578 gimplify_assign (tmp
, *expr_p
, pre_p
);
13582 /* We can't do anything useful with a volatile reference to
13583 an incomplete type, so just throw it away. Likewise for
13584 a BLKmode type, since any implicit inner load should
13585 already have been turned into an explicit one by the
13586 gimplification process. */
13590 /* If we are gimplifying at the statement level, we're done. Tack
13591 everything together and return. */
13592 if (fallback
== fb_none
|| is_statement
)
13594 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
13595 it out for GC to reclaim it. */
13596 *expr_p
= NULL_TREE
;
13598 if (!gimple_seq_empty_p (internal_pre
)
13599 || !gimple_seq_empty_p (internal_post
))
13601 gimplify_seq_add_seq (&internal_pre
, internal_post
);
13602 gimplify_seq_add_seq (pre_p
, internal_pre
);
13605 /* The result of gimplifying *EXPR_P is going to be the last few
13606 statements in *PRE_P and *POST_P. Add location information
13607 to all the statements that were added by the gimplification
13609 if (!gimple_seq_empty_p (*pre_p
))
13610 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
13612 if (!gimple_seq_empty_p (*post_p
))
13613 annotate_all_with_location_after (*post_p
, post_last_gsi
,
13619 #ifdef ENABLE_GIMPLE_CHECKING
13622 enum tree_code code
= TREE_CODE (*expr_p
);
13623 /* These expressions should already be in gimple IR form. */
13624 gcc_assert (code
!= MODIFY_EXPR
13625 && code
!= ASM_EXPR
13626 && code
!= BIND_EXPR
13627 && code
!= CATCH_EXPR
13628 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
13629 && code
!= EH_FILTER_EXPR
13630 && code
!= GOTO_EXPR
13631 && code
!= LABEL_EXPR
13632 && code
!= LOOP_EXPR
13633 && code
!= SWITCH_EXPR
13634 && code
!= TRY_FINALLY_EXPR
13635 && code
!= OACC_PARALLEL
13636 && code
!= OACC_KERNELS
13637 && code
!= OACC_DATA
13638 && code
!= OACC_HOST_DATA
13639 && code
!= OACC_DECLARE
13640 && code
!= OACC_UPDATE
13641 && code
!= OACC_ENTER_DATA
13642 && code
!= OACC_EXIT_DATA
13643 && code
!= OACC_CACHE
13644 && code
!= OMP_CRITICAL
13646 && code
!= OACC_LOOP
13647 && code
!= OMP_MASTER
13648 && code
!= OMP_TASKGROUP
13649 && code
!= OMP_ORDERED
13650 && code
!= OMP_PARALLEL
13651 && code
!= OMP_SCAN
13652 && code
!= OMP_SECTIONS
13653 && code
!= OMP_SECTION
13654 && code
!= OMP_SINGLE
);
13658 /* Otherwise we're gimplifying a subexpression, so the resulting
13659 value is interesting. If it's a valid operand that matches
13660 GIMPLE_TEST_F, we're done. Unless we are handling some
13661 post-effects internally; if that's the case, we need to copy into
13662 a temporary before adding the post-effects to POST_P. */
13663 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
13666 /* Otherwise, we need to create a new temporary for the gimplified
13669 /* We can't return an lvalue if we have an internal postqueue. The
13670 object the lvalue refers to would (probably) be modified by the
13671 postqueue; we need to copy the value out first, which means an
13673 if ((fallback
& fb_lvalue
)
13674 && gimple_seq_empty_p (internal_post
)
13675 && is_gimple_addressable (*expr_p
))
13677 /* An lvalue will do. Take the address of the expression, store it
13678 in a temporary, and replace the expression with an INDIRECT_REF of
13680 tree ref_alias_type
= reference_alias_ptr_type (*expr_p
);
13681 unsigned int ref_align
= get_object_alignment (*expr_p
);
13682 tree ref_type
= TREE_TYPE (*expr_p
);
13683 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
13684 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
13685 if (TYPE_ALIGN (ref_type
) != ref_align
)
13686 ref_type
= build_aligned_type (ref_type
, ref_align
);
13687 *expr_p
= build2 (MEM_REF
, ref_type
,
13688 tmp
, build_zero_cst (ref_alias_type
));
13690 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
13692 /* An rvalue will do. Assign the gimplified expression into a
13693 new temporary TMP and replace the original expression with
13694 TMP. First, make sure that the expression has a type so that
13695 it can be assigned into a temporary. */
13696 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
13697 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
13701 #ifdef ENABLE_GIMPLE_CHECKING
13702 if (!(fallback
& fb_mayfail
))
13704 fprintf (stderr
, "gimplification failed:\n");
13705 print_generic_expr (stderr
, *expr_p
);
13706 debug_tree (*expr_p
);
13707 internal_error ("gimplification failed");
13710 gcc_assert (fallback
& fb_mayfail
);
13712 /* If this is an asm statement, and the user asked for the
13713 impossible, don't die. Fail and let gimplify_asm_expr
13719 /* Make sure the temporary matches our predicate. */
13720 gcc_assert ((*gimple_test_f
) (*expr_p
));
13722 if (!gimple_seq_empty_p (internal_post
))
13724 annotate_all_with_location (internal_post
, input_location
);
13725 gimplify_seq_add_seq (pre_p
, internal_post
);
13729 input_location
= saved_location
;
13733 /* Like gimplify_expr but make sure the gimplified result is not itself
13734 a SSA name (but a decl if it were). Temporaries required by
13735 evaluating *EXPR_P may be still SSA names. */
13737 static enum gimplify_status
13738 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
13739 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
13742 bool was_ssa_name_p
= TREE_CODE (*expr_p
) == SSA_NAME
;
13743 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
13744 gimple_test_f
, fallback
);
13746 && TREE_CODE (*expr_p
) == SSA_NAME
)
13748 tree name
= *expr_p
;
13749 if (was_ssa_name_p
)
13750 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
13753 /* Avoid the extra copy if possible. */
13754 *expr_p
= create_tmp_reg (TREE_TYPE (name
));
13755 gimple_set_lhs (SSA_NAME_DEF_STMT (name
), *expr_p
);
13756 release_ssa_name (name
);
13762 /* Look through TYPE for variable-sized objects and gimplify each such
13763 size that we find. Add to LIST_P any statements generated. */
13766 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
13770 if (type
== NULL
|| type
== error_mark_node
)
13773 /* We first do the main variant, then copy into any other variants. */
13774 type
= TYPE_MAIN_VARIANT (type
);
13776 /* Avoid infinite recursion. */
13777 if (TYPE_SIZES_GIMPLIFIED (type
))
13780 TYPE_SIZES_GIMPLIFIED (type
) = 1;
13782 switch (TREE_CODE (type
))
13785 case ENUMERAL_TYPE
:
13788 case FIXED_POINT_TYPE
:
13789 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
13790 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
13792 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
13794 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
13795 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
13800 /* These types may not have declarations, so handle them here. */
13801 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
13802 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
13803 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
13804 with assigned stack slots, for -O1+ -g they should be tracked
13806 if (!(TYPE_NAME (type
)
13807 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
13808 && DECL_IGNORED_P (TYPE_NAME (type
)))
13809 && TYPE_DOMAIN (type
)
13810 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
13812 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
13813 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
13814 DECL_IGNORED_P (t
) = 0;
13815 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
13816 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
13817 DECL_IGNORED_P (t
) = 0;
13823 case QUAL_UNION_TYPE
:
13824 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
13825 if (TREE_CODE (field
) == FIELD_DECL
)
13827 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
13828 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
13829 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
13830 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
13835 case REFERENCE_TYPE
:
13836 /* We used to recurse on the pointed-to type here, which turned out to
13837 be incorrect because its definition might refer to variables not
13838 yet initialized at this point if a forward declaration is involved.
13840 It was actually useful for anonymous pointed-to types to ensure
13841 that the sizes evaluation dominates every possible later use of the
13842 values. Restricting to such types here would be safe since there
13843 is no possible forward declaration around, but would introduce an
13844 undesirable middle-end semantic to anonymity. We then defer to
13845 front-ends the responsibility of ensuring that the sizes are
13846 evaluated both early and late enough, e.g. by attaching artificial
13847 type declarations to the tree. */
13854 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
13855 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
13857 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
13859 TYPE_SIZE (t
) = TYPE_SIZE (type
);
13860 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
13861 TYPE_SIZES_GIMPLIFIED (t
) = 1;
13865 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
13866 a size or position, has had all of its SAVE_EXPRs evaluated.
13867 We add any required statements to *STMT_P. */
13870 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
13872 tree expr
= *expr_p
;
13874 /* We don't do anything if the value isn't there, is constant, or contains
13875 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
13876 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
13877 will want to replace it with a new variable, but that will cause problems
13878 if this type is from outside the function. It's OK to have that here. */
13879 if (expr
== NULL_TREE
13880 || is_gimple_constant (expr
)
13881 || TREE_CODE (expr
) == VAR_DECL
13882 || CONTAINS_PLACEHOLDER_P (expr
))
13885 *expr_p
= unshare_expr (expr
);
13887 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
13888 if the def vanishes. */
13889 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
13891 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
13892 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
13893 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
13894 if (is_gimple_constant (*expr_p
))
13895 *expr_p
= get_initialized_tmp_var (*expr_p
, stmt_p
, NULL
, false);
13898 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
13899 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
13900 is true, also gimplify the parameters. */
13903 gimplify_body (tree fndecl
, bool do_parms
)
13905 location_t saved_location
= input_location
;
13906 gimple_seq parm_stmts
, parm_cleanup
= NULL
, seq
;
13907 gimple
*outer_stmt
;
13910 timevar_push (TV_TREE_GIMPLIFY
);
13912 init_tree_ssa (cfun
);
13914 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
13916 default_rtl_profile ();
13918 gcc_assert (gimplify_ctxp
== NULL
);
13919 push_gimplify_context (true);
13921 if (flag_openacc
|| flag_openmp
)
13923 gcc_assert (gimplify_omp_ctxp
== NULL
);
13924 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
13925 gimplify_omp_ctxp
= new_omp_context (ORT_TARGET
);
13928 /* Unshare most shared trees in the body and in that of any nested functions.
13929 It would seem we don't have to do this for nested functions because
13930 they are supposed to be output and then the outer function gimplified
13931 first, but the g++ front end doesn't always do it that way. */
13932 unshare_body (fndecl
);
13933 unvisit_body (fndecl
);
13935 /* Make sure input_location isn't set to something weird. */
13936 input_location
= DECL_SOURCE_LOCATION (fndecl
);
13938 /* Resolve callee-copies. This has to be done before processing
13939 the body so that DECL_VALUE_EXPR gets processed correctly. */
13940 parm_stmts
= do_parms
? gimplify_parameters (&parm_cleanup
) : NULL
;
13942 /* Gimplify the function's body. */
13944 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
13945 outer_stmt
= gimple_seq_first_stmt (seq
);
13948 outer_stmt
= gimple_build_nop ();
13949 gimplify_seq_add_stmt (&seq
, outer_stmt
);
13952 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
13953 not the case, wrap everything in a GIMPLE_BIND to make it so. */
13954 if (gimple_code (outer_stmt
) == GIMPLE_BIND
13955 && gimple_seq_first (seq
) == gimple_seq_last (seq
))
13956 outer_bind
= as_a
<gbind
*> (outer_stmt
);
13958 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
13960 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
13962 /* If we had callee-copies statements, insert them at the beginning
13963 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
13964 if (!gimple_seq_empty_p (parm_stmts
))
13968 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
13971 gtry
*g
= gimple_build_try (parm_stmts
, parm_cleanup
,
13972 GIMPLE_TRY_FINALLY
);
13974 gimple_seq_add_stmt (&parm_stmts
, g
);
13976 gimple_bind_set_body (outer_bind
, parm_stmts
);
13978 for (parm
= DECL_ARGUMENTS (current_function_decl
);
13979 parm
; parm
= DECL_CHAIN (parm
))
13980 if (DECL_HAS_VALUE_EXPR_P (parm
))
13982 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
13983 DECL_IGNORED_P (parm
) = 0;
13987 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
13988 && gimplify_omp_ctxp
)
13990 delete_omp_context (gimplify_omp_ctxp
);
13991 gimplify_omp_ctxp
= NULL
;
13994 pop_gimplify_context (outer_bind
);
13995 gcc_assert (gimplify_ctxp
== NULL
);
13997 if (flag_checking
&& !seen_error ())
13998 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
14000 timevar_pop (TV_TREE_GIMPLIFY
);
14001 input_location
= saved_location
;
14006 typedef char *char_p
; /* For DEF_VEC_P. */
14008 /* Return whether we should exclude FNDECL from instrumentation. */
14011 flag_instrument_functions_exclude_p (tree fndecl
)
14015 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
14016 if (v
&& v
->length () > 0)
14022 name
= lang_hooks
.decl_printable_name (fndecl
, 0);
14023 FOR_EACH_VEC_ELT (*v
, i
, s
)
14024 if (strstr (name
, s
) != NULL
)
14028 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
14029 if (v
&& v
->length () > 0)
14035 name
= DECL_SOURCE_FILE (fndecl
);
14036 FOR_EACH_VEC_ELT (*v
, i
, s
)
14037 if (strstr (name
, s
) != NULL
)
14044 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
14045 node for the function we want to gimplify.
14047 Return the sequence of GIMPLE statements corresponding to the body
14051 gimplify_function_tree (tree fndecl
)
14057 gcc_assert (!gimple_body (fndecl
));
14059 if (DECL_STRUCT_FUNCTION (fndecl
))
14060 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
14062 push_struct_function (fndecl
);
14064 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
14066 cfun
->curr_properties
|= PROP_gimple_lva
;
14068 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
14070 /* Preliminarily mark non-addressed complex variables as eligible
14071 for promotion to gimple registers. We'll transform their uses
14072 as we find them. */
14073 if ((TREE_CODE (TREE_TYPE (parm
)) == COMPLEX_TYPE
14074 || TREE_CODE (TREE_TYPE (parm
)) == VECTOR_TYPE
)
14075 && !TREE_THIS_VOLATILE (parm
)
14076 && !needs_to_live_in_memory (parm
))
14077 DECL_GIMPLE_REG_P (parm
) = 1;
14080 ret
= DECL_RESULT (fndecl
);
14081 if ((TREE_CODE (TREE_TYPE (ret
)) == COMPLEX_TYPE
14082 || TREE_CODE (TREE_TYPE (ret
)) == VECTOR_TYPE
)
14083 && !needs_to_live_in_memory (ret
))
14084 DECL_GIMPLE_REG_P (ret
) = 1;
14086 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS
))
14087 asan_poisoned_variables
= new hash_set
<tree
> ();
14088 bind
= gimplify_body (fndecl
, true);
14089 if (asan_poisoned_variables
)
14091 delete asan_poisoned_variables
;
14092 asan_poisoned_variables
= NULL
;
14095 /* The tree body of the function is no longer needed, replace it
14096 with the new GIMPLE body. */
14098 gimple_seq_add_stmt (&seq
, bind
);
14099 gimple_set_body (fndecl
, seq
);
14101 /* If we're instrumenting function entry/exit, then prepend the call to
14102 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
14103 catch the exit hook. */
14104 /* ??? Add some way to ignore exceptions for this TFE. */
14105 if (flag_instrument_function_entry_exit
14106 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
14107 /* Do not instrument extern inline functions. */
14108 && !(DECL_DECLARED_INLINE_P (fndecl
)
14109 && DECL_EXTERNAL (fndecl
)
14110 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
14111 && !flag_instrument_functions_exclude_p (fndecl
))
14116 gimple_seq cleanup
= NULL
, body
= NULL
;
14117 tree tmp_var
, this_fn_addr
;
14120 /* The instrumentation hooks aren't going to call the instrumented
14121 function and the address they receive is expected to be matchable
14122 against symbol addresses. Make sure we don't create a trampoline,
14123 in case the current function is nested. */
14124 this_fn_addr
= build_fold_addr_expr (current_function_decl
);
14125 TREE_NO_TRAMPOLINE (this_fn_addr
) = 1;
14127 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
14128 call
= gimple_build_call (x
, 1, integer_zero_node
);
14129 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
14130 gimple_call_set_lhs (call
, tmp_var
);
14131 gimplify_seq_add_stmt (&cleanup
, call
);
14132 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT
);
14133 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
14134 gimplify_seq_add_stmt (&cleanup
, call
);
14135 tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
14137 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
14138 call
= gimple_build_call (x
, 1, integer_zero_node
);
14139 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
14140 gimple_call_set_lhs (call
, tmp_var
);
14141 gimplify_seq_add_stmt (&body
, call
);
14142 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER
);
14143 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
14144 gimplify_seq_add_stmt (&body
, call
);
14145 gimplify_seq_add_stmt (&body
, tf
);
14146 new_bind
= gimple_build_bind (NULL
, body
, NULL
);
14148 /* Replace the current function body with the body
14149 wrapped in the try/finally TF. */
14151 gimple_seq_add_stmt (&seq
, new_bind
);
14152 gimple_set_body (fndecl
, seq
);
14156 if (sanitize_flags_p (SANITIZE_THREAD
))
14158 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
14159 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
14160 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
14161 /* Replace the current function body with the body
14162 wrapped in the try/finally TF. */
14164 gimple_seq_add_stmt (&seq
, new_bind
);
14165 gimple_set_body (fndecl
, seq
);
14168 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
14169 cfun
->curr_properties
|= PROP_gimple_any
;
14173 dump_function (TDI_gimple
, fndecl
);
14176 /* Return a dummy expression of type TYPE in order to keep going after an
14180 dummy_object (tree type
)
14182 tree t
= build_int_cst (build_pointer_type (type
), 0);
14183 return build2 (MEM_REF
, type
, t
, t
);
14186 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
14187 builtin function, but a very special sort of operator. */
14189 enum gimplify_status
14190 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
,
14191 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
14193 tree promoted_type
, have_va_type
;
14194 tree valist
= TREE_OPERAND (*expr_p
, 0);
14195 tree type
= TREE_TYPE (*expr_p
);
14196 tree t
, tag
, aptag
;
14197 location_t loc
= EXPR_LOCATION (*expr_p
);
14199 /* Verify that valist is of the proper type. */
14200 have_va_type
= TREE_TYPE (valist
);
14201 if (have_va_type
== error_mark_node
)
14203 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
14204 if (have_va_type
== NULL_TREE
14205 && POINTER_TYPE_P (TREE_TYPE (valist
)))
14206 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
14208 = targetm
.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist
)));
14209 gcc_assert (have_va_type
!= NULL_TREE
);
14211 /* Generate a diagnostic for requesting data of a type that cannot
14212 be passed through `...' due to type promotion at the call site. */
14213 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
14216 static bool gave_help
;
14218 /* Use the expansion point to handle cases such as passing bool (defined
14219 in a system header) through `...'. */
14221 = expansion_point_location_if_in_system_header (loc
);
14223 /* Unfortunately, this is merely undefined, rather than a constraint
14224 violation, so we cannot make this an error. If this call is never
14225 executed, the program is still strictly conforming. */
14226 auto_diagnostic_group d
;
14227 warned
= warning_at (xloc
, 0,
14228 "%qT is promoted to %qT when passed through %<...%>",
14229 type
, promoted_type
);
14230 if (!gave_help
&& warned
)
14233 inform (xloc
, "(so you should pass %qT not %qT to %<va_arg%>)",
14234 promoted_type
, type
);
14237 /* We can, however, treat "undefined" any way we please.
14238 Call abort to encourage the user to fix the program. */
14240 inform (xloc
, "if this code is reached, the program will abort");
14241 /* Before the abort, allow the evaluation of the va_list
14242 expression to exit or longjmp. */
14243 gimplify_and_add (valist
, pre_p
);
14244 t
= build_call_expr_loc (loc
,
14245 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
14246 gimplify_and_add (t
, pre_p
);
14248 /* This is dead code, but go ahead and finish so that the
14249 mode of the result comes out right. */
14250 *expr_p
= dummy_object (type
);
14251 return GS_ALL_DONE
;
14254 tag
= build_int_cst (build_pointer_type (type
), 0);
14255 aptag
= build_int_cst (TREE_TYPE (valist
), 0);
14257 *expr_p
= build_call_expr_internal_loc (loc
, IFN_VA_ARG
, type
, 3,
14258 valist
, tag
, aptag
);
14260 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
14261 needs to be expanded. */
14262 cfun
->curr_properties
&= ~PROP_gimple_lva
;
14267 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
14269 DST/SRC are the destination and source respectively. You can pass
14270 ungimplified trees in DST or SRC, in which case they will be
14271 converted to a gimple operand if necessary.
14273 This function returns the newly created GIMPLE_ASSIGN tuple. */
14276 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
14278 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
14279 gimplify_and_add (t
, seq_p
);
14281 return gimple_seq_last_stmt (*seq_p
);
14285 gimplify_hasher::hash (const elt_t
*p
)
14288 return iterative_hash_expr (t
, 0);
14292 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
14296 enum tree_code code
= TREE_CODE (t1
);
14298 if (TREE_CODE (t2
) != code
14299 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
14302 if (!operand_equal_p (t1
, t2
, 0))
14305 /* Only allow them to compare equal if they also hash equal; otherwise
14306 results are nondeterminate, and we fail bootstrap comparison. */
14307 gcc_checking_assert (hash (p1
) == hash (p2
));