gcc/gimplify.c — tree lowering pass (GENERIC to GIMPLE), snapshot from thirdparty/gcc.git
(gitweb export; page navigation chrome removed).
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2017 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "gimple-predict.h"
32 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "tree-pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "alias.h"
38 #include "fold-const.h"
39 #include "calls.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "gimple-fold.h"
44 #include "tree-eh.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "stor-layout.h"
48 #include "print-tree.h"
49 #include "tree-iterator.h"
50 #include "tree-inline.h"
51 #include "langhooks.h"
52 #include "tree-cfg.h"
53 #include "tree-ssa.h"
54 #include "omp-general.h"
55 #include "omp-low.h"
56 #include "gimple-low.h"
57 #include "cilk.h"
58 #include "gomp-constants.h"
59 #include "splay-tree.h"
60 #include "gimple-walk.h"
61 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
62 #include "builtins.h"
63 #include "asan.h"
64 #include "dbgcnt.h"
65
66 /* Hash set of poisoned variables in a bind expr. */
67 static hash_set<tree> *asan_poisoned_variables = NULL;
68
/* Per-variable data-sharing flags recorded in the VARIABLES splay tree of a
   gimplify_omp_ctx (see omp_add_variable).  The low bits are mutually
   exclusive sharing classes; the higher bits are modifier flags that
   qualify one of those classes.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 524288,

  /* Mask selecting the data-sharing class bits out of a flag word.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
109
110
/* Kind of OMP/OpenACC region being gimplified.  The values are bit-encoded:
   combined variants OR an extra bit into the base region value (e.g.
   ORT_COMBINED_PARALLEL == ORT_PARALLEL | 1), and the OpenACC variants OR
   ORT_ACC into the corresponding OpenMP region value.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
143
144 /* Gimplify hashtable helper. */
145
/* Gimplify hashtable helper: hashes elt_t entries (val/temp pairs) for the
   formal-temporary table in gimplify_ctx; entries are freed with free ().  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
151
/* State for one gimplification context.  Contexts form a stack linked
   through PREV_CONTEXT; see push_gimplify_context/pop_gimplify_context.  */

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;	/* Enclosing context, or NULL.  */

  vec<gbind *> bind_expr_stack;		/* Open GIMPLE_BINDs, innermost last.  */
  tree temps;				/* Chain of temporaries created here.  */
  gimple_seq conditional_cleanups;	/* Cleanups seen under a COND_EXPR.  */
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;			/* Depth of enclosing COND_EXPRs.  */
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};
175
/* State for gimplifying one OMP/OpenACC region.  Regions nest through
   OUTER_CONTEXT; VARIABLES maps DECLs to gimplify_omp_var_data flags.  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;			/* DECL -> GOVD_* flag word.  */
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
};
192
193 static struct gimplify_ctx *gimplify_ctxp;
194 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
195
196 /* Forward declaration. */
197 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
198 static hash_map<tree, tree> *oacc_declare_returns;
199 static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
200 bool (*) (tree), fallback_t, bool);
201
202 /* Shorter alias name for the above function for use in gimplify.c
203 only. */
204
/* Append statement GS to sequence *SEQ_P without updating def/use
   operand vectors (they do not exist yet during gimplification).  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
210
211 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
212 NULL, a new sequence is allocated. This function is
213 similar to gimple_seq_add_seq, but does not scan the operands.
214 During gimplification, we need to manipulate statement sequences
215 before the def/use vectors have been constructed. */
216
217 static void
218 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
219 {
220 gimple_stmt_iterator si;
221
222 if (src == NULL)
223 return;
224
225 si = gsi_last (*dst_p);
226 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
227 }
228
229
230 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
231 and popping gimplify contexts. */
232
233 static struct gimplify_ctx *ctx_pool = NULL;
234
235 /* Return a gimplify context struct from the pool. */
236
237 static inline struct gimplify_ctx *
238 ctx_alloc (void)
239 {
240 struct gimplify_ctx * c = ctx_pool;
241
242 if (c)
243 ctx_pool = c->prev_context;
244 else
245 c = XNEW (struct gimplify_ctx);
246
247 memset (c, '\0', sizeof (*c));
248 return c;
249 }
250
251 /* Put gimplify context C back into the pool. */
252
/* Put gimplify context C back into the pool by pushing it onto the
   free list; the memory is released by free_gimplify_stack.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
259
260 /* Free allocated ctx stack memory. */
261
262 void
263 free_gimplify_stack (void)
264 {
265 struct gimplify_ctx *c;
266
267 while ((c = ctx_pool))
268 {
269 ctx_pool = c->prev_context;
270 free (c);
271 }
272 }
273
274
275 /* Set up a context for the gimplifier. */
276
277 void
278 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
279 {
280 struct gimplify_ctx *c = ctx_alloc ();
281
282 c->prev_context = gimplify_ctxp;
283 gimplify_ctxp = c;
284 gimplify_ctxp->into_ssa = in_ssa;
285 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
286 }
287
288 /* Tear down a context for the gimplifier. If BODY is non-null, then
289 put the temporaries into the outer BIND_EXPR. Otherwise, put them
290 in the local_decls.
291
292 BODY is not a sequence, but the first tuple in a sequence. */
293
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* All GIMPLE_BINDs opened in this context must have been closed.  */
  gcc_assert (c
              && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  /* Release the formal-temporary table before recycling C.  */
  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
314
315 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
316
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  /* Pre-reserve a few slots to limit reallocation churn.  */
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
323
324 /* Pop the first element off the stack of bindings. */
325
/* Pop the innermost element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
331
332 /* Return the first element of the stack of bindings. */
333
/* Return the innermost (most recently pushed) binding on the stack.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
339
340 /* Return the stack of bindings created during gimplification. */
341
/* Return the stack of bindings created during gimplification
   (by value; the caller must not release it).  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
347
348 /* Return true iff there is a COND_EXPR between us and the innermost
349 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
350
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
356
357 /* Note that we've entered a COND_EXPR. */
358
/* Note that we've entered a COND_EXPR by bumping the condition depth.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entering the outermost condition no stale conditional cleanups
     may be pending.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
368
369 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
370 now, add any conditional cleanups we've seen to the prequeue. */
371
/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue *PRE_P.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
384
385 /* A stable comparison routine for use with splay trees and DECLs. */
386
/* A stable comparison routine for use with splay trees and DECLs,
   ordering by DECL_UID.  NOTE(review): uses plain subtraction, which
   assumes the difference of any two uids fits in int — presumably safe
   because uids are small allocator-assigned values; confirm.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
395
396 /* Create a new omp construct that deals with variable remapping. */
397
398 static struct gimplify_omp_ctx *
399 new_omp_context (enum omp_region_type region_type)
400 {
401 struct gimplify_omp_ctx *c;
402
403 c = XCNEW (struct gimplify_omp_ctx);
404 c->outer_context = gimplify_omp_ctxp;
405 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
406 c->privatized_types = new hash_set<tree>;
407 c->location = input_location;
408 c->region_type = region_type;
409 if ((region_type & ORT_TASK) == 0)
410 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
411 else
412 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
413
414 return c;
415 }
416
417 /* Destroy an omp construct that deals with variable remapping. */
418
/* Destroy an omp construct C created by new_omp_context, releasing the
   splay tree, the privatized-types set and the loop-iter vector.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}
427
428 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
429 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
430
431 /* Both gimplify the statement T and append it to *SEQ_P. This function
432 behaves exactly as gimplify_stmt, but you don't have to pass T as a
433 reference. */
434
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
440
441 /* Gimplify statement T into sequence *SEQ_P, and return the first
442 tuple in the sequence of generated tuples for this statement.
443 Return NULL if gimplifying T produced no tuples. */
444
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember where the sequence ended before gimplifying T, so we can
     find the first newly added statement afterwards.  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* The sequence was non-empty: the first new tuple is the one
         right after the old tail.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* The sequence was empty before; everything in it is new.  */
    return gimple_seq_first_stmt (*seq_p);
}
460
461 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
462 LHS, or for a call argument. */
463
464 static bool
465 is_gimple_mem_rhs (tree t)
466 {
467 /* If we're dealing with a renamable type, either source or dest must be
468 a renamed variable. */
469 if (is_gimple_reg_type (TREE_TYPE (t)))
470 return is_gimple_val (t);
471 else
472 return is_gimple_val (t) || is_gimple_lvalue (t);
473 }
474
475 /* Return true if T is a CALL_EXPR or an expression that can be
476 assigned to a temporary. Note that this predicate should only be
477 used during gimplification. See the rationale for this in
478 gimplify_modify_expr. */
479
480 static bool
481 is_gimple_reg_rhs_or_call (tree t)
482 {
483 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
484 || TREE_CODE (t) == CALL_EXPR);
485 }
486
487 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
488 this predicate should only be used during gimplification. See the
489 rationale for this in gimplify_modify_expr. */
490
491 static bool
492 is_gimple_mem_rhs_or_call (tree t)
493 {
494 /* If we're dealing with a renamable type, either source or dest must be
495 a renamed variable. */
496 if (is_gimple_reg_type (TREE_TYPE (t)))
497 return is_gimple_val (t);
498 else
499 return (is_gimple_val (t)
500 || is_gimple_lvalue (t)
501 || TREE_CLOBBER_P (t)
502 || TREE_CODE (t) == CALL_EXPR);
503 }
504
505 /* Create a temporary with a name derived from VAL. Subroutine of
506 lookup_tmp_var; nobody else should call this function. */
507
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  /* Complex and vector temporaries are treated as gimple registers.  */
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}
519
520 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
521 an existing expression temporary. */
522
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary by consulting the context's
   formal-temporary hash table.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      /* Look VAL up in the table, creating it lazily on first use.  */
      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
        gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
        {
          /* First time we see VAL: record a fresh temporary for it.  */
          elt_p = XNEW (elt_t);
          elt_p->val = val;
          elt_p->temp = ret = create_tmp_from_val (val);
          *slot = elt_p;
        }
      else
        {
          /* Reuse the temporary made for an identical VAL earlier.  */
          elt_p = *slot;
          ret = elt_p->temp;
        }
    }

  return ret;
}
560
561 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
562
/* Helper for get_formal_tmp_var and get_initialized_tmp_var: gimplify
   VAL into *PRE_P/*POST_P, assign it to a temporary (an SSA name when
   ALLOW_SSA and we are gimplifying into SSA form, else a VAR_DECL via
   lookup_tmp_var) and return that temporary.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
                 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
        {
          /* Not yet in SSA proper: keep VAL's name on the SSA name for
             readable dumps.  */
          const char *name = get_name (val);
          if (name)
            SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
        }
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
599
600 /* Return a formal temporary variable initialized with VAL. PRE_P is as
601 in gimplify_expr. Only use this function if:
602
603 1) The value of the unfactored expression represented by VAL will not
604 change between the initialization and use of the temporary, and
605 2) The temporary will not be otherwise modified.
606
607 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
608 and #2 means it is inappropriate for && temps.
609
610 For other cases, use get_initialized_tmp_var instead. */
611
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}
617
618 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
619 are as in gimplify_expr. */
620
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr; ALLOW_SSA permits the temporary to be an
   SSA name when gimplifying into SSA form.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                         bool allow_ssa)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}
627
628 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
629 generate debug info for them; otherwise don't. */
630
/* Declare all the variables in VARS in SCOPE (GS must be a GIMPLE_BIND).
   If DEBUG_INFO is true, also attach them to the bind's BLOCK so debug
   info is generated for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      /* VARS arrives newest-first; reverse so temporaries are declared
         in creation order.  LAST now points at the chain's tail.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
        {
          /* Prepend TEMPS to the bind's existing variable chain.  */
          DECL_CHAIN (last) = gimple_bind_vars (scope);
          gimple_bind_set_vars (scope, temps);
        }
      else
        {
          /* We need to attach the nodes both to the BIND_EXPR and to its
             associated BLOCK for debugging purposes.  The key point here
             is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
             is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
          if (BLOCK_VARS (block))
            BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
          else
            {
              gimple_bind_set_vars (scope,
                                    chainon (gimple_bind_vars (scope), temps));
              BLOCK_VARS (block) = temps;
            }
        }
    }
}
667
668 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
669 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
670 no such upper bound can be obtained. */
671
672 static void
673 force_constant_size (tree var)
674 {
675 /* The only attempt we make is by querying the maximum size of objects
676 of the variable's type. */
677
678 HOST_WIDE_INT max_size;
679
680 gcc_assert (VAR_P (var));
681
682 max_size = max_int_size_in_bytes (TREE_TYPE (var));
683
684 gcc_assert (max_size >= 0);
685
686 DECL_SIZE_UNIT (var)
687 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
688 DECL_SIZE (var)
689 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
690 }
691
692 /* Push the temporary variable TMP into the current binding. */
693
/* Push the temporary variable TMP into the bindings of function FN
   (recording it in FN's local variables).  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  /* TMP must be a fresh decl: not chained anywhere, not yet seen in a
     BIND_EXPR.  */
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
710
711 /* Push the temporary variable TMP into the current binding. */
712
/* Push the temporary variable TMP into the current binding: the active
   gimplify context's temps if one exists, else CFUN's locals, else the
   body of the current (nested) function.  */

void
gimple_add_tmp_var (tree tmp)
{
  /* TMP must be a fresh decl: not chained anywhere, not yet seen in a
     BIND_EXPR.  */
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
        {
          /* Skip region kinds that don't own their own data environment
             for temporaries.  */
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
          while (ctx
                 && (ctx->region_type == ORT_WORKSHARE
                     || ctx->region_type == ORT_SIMD
                     || ctx->region_type == ORT_ACC))
            ctx = ctx->outer_context;
          if (ctx)
            omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
        }
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
         they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
757
758
759 \f
760 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
761 nodes that are referenced more than once in GENERIC functions. This is
762 necessary because gimplification (translation into GIMPLE) is performed
763 by modifying tree nodes in-place, so gimplication of a shared node in a
764 first context could generate an invalid GIMPLE form in a second context.
765
766 This is achieved with a simple mark/copy/unmark algorithm that walks the
767 GENERIC representation top-down, marks nodes with TREE_VISITED the first
768 time it encounters them, duplicates them if they already have TREE_VISITED
769 set, and finally removes the TREE_VISITED marks it has set.
770
771 The algorithm works only at the function level, i.e. it generates a GENERIC
772 representation of a function with no nodes shared within the function when
773 passed a GENERIC function (except for nodes that are allowed to be shared).
774
775 At the global level, it is also necessary to unshare tree nodes that are
776 referenced in more than one function, for the same aforementioned reason.
777 This requires some cooperation from the front-end. There are 2 strategies:
778
779 1. Manual unsharing. The front-end needs to call unshare_expr on every
780 expression that might end up being shared across functions.
781
782 2. Deep unsharing. This is an extension of regular unsharing. Instead
783 of calling unshare_expr on expressions that might be shared across
784 functions, the front-end pre-marks them with TREE_VISITED. This will
785 ensure that they are unshared on the first reference within functions
786 when the regular unsharing algorithm runs. The counterpart is that
787 this algorithm must look deeper than for manual unsharing, which is
788 specified by LANG_HOOKS_DEEP_UNSHARING.
789
790 If there are only few specific cases of node sharing across functions, it is
791 probably easier for a front-end to unshare the expressions manually. On the
792 contrary, if the expressions generated at the global level are as widespread
793 as expressions generated within functions, deep unsharing is very likely the
794 way to go. */
795
796 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
797 These nodes model computations that must be done once. If we were to
798 unshare something like SAVE_EXPR(i++), the gimplification process would
799 create wrong code. However, if DATA is non-null, it must hold a pointer
800 set that is used to unshare the subtrees of these nodes. */
801
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.
     hash_set::add returns false on first insertion, so the subtree walk
     happens only the first time T is encountered.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
        ;
      else
        *walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
           || TREE_CODE_CLASS (code) == tcc_declaration
           || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
834
835 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
836 If *TP has been visited already, then *TP is deeply copied by calling
837 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
838
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
        *walk_subtrees = 0;
      else
        TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
873
874 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
875 copy_if_shared_r callback unmodified. */
876
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
882
883 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
884 any nested functions. */
885
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  /* The result decl's size expressions may share trees with the body.  */
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}
905
906 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
907 Subtrees are walked until the first unvisited node is encountered. */
908
909 static tree
910 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
911 {
912 tree t = *tp;
913
914 /* If this node has been visited, unmark it and keep looking. */
915 if (TREE_VISITED (t))
916 TREE_VISITED (t) = 0;
917
918 /* Otherwise, don't look any deeper. */
919 else
920 *walk_subtrees = 0;
921
922 return NULL_TREE;
923 }
924
925 /* Unmark the visited trees rooted at *TP. */
926
/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
932
933 /* Likewise, but mark all trees as not visited. */
934
/* Likewise, but mark all trees as not visited: clear TREE_VISITED on the
   body of FNDECL (and the result decl's size trees), recursing into any
   nested functions.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}
948
949 /* Unconditionally make an unshared copy of EXPR. This is used when using
950 stored expressions which span multiple functions, such as BINFO_VTABLE,
951 as the normal unsharing process can't tell that they're shared. */
952
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
959
960 /* Worker for unshare_expr_without_location. */
961
962 static tree
963 prune_expr_location (tree *tp, int *walk_subtrees, void *)
964 {
965 if (EXPR_P (*tp))
966 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
967 else
968 *walk_subtrees = 0;
969 return NULL_TREE;
970 }
971
972 /* Similar to unshare_expr but also prune all expression locations
973 from EXPR. */
974
/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  /* Only expression nodes carry locations worth pruning.  */
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
983 \f
984 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
985 contain statements and have a value. Assign its value to a temporary
986 and give it void_type_node. Return the temporary, or NULL_TREE if
987 WRAPPER was already void. */
988
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   TEMP, if non-null, is an INIT_EXPR/MODIFY_EXPR whose RHS will receive
   the wrapper's value; otherwise a fresh "retval" temporary is created.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
         something that isn't a wrapper, voidifying each wrapper on the
         way down.  */
      for (p = &wrapper; p && *p; )
        {
          switch (TREE_CODE (*p))
            {
            case BIND_EXPR:
              TREE_SIDE_EFFECTS (*p) = 1;
              TREE_TYPE (*p) = void_type_node;
              /* For a BIND_EXPR, the body is operand 1.  */
              p = &BIND_EXPR_BODY (*p);
              break;

            case CLEANUP_POINT_EXPR:
            case TRY_FINALLY_EXPR:
            case TRY_CATCH_EXPR:
              TREE_SIDE_EFFECTS (*p) = 1;
              TREE_TYPE (*p) = void_type_node;
              p = &TREE_OPERAND (*p, 0);
              break;

            case STATEMENT_LIST:
              {
                /* The value of a statement list is its last statement.  */
                tree_stmt_iterator i = tsi_last (*p);
                TREE_SIDE_EFFECTS (*p) = 1;
                TREE_TYPE (*p) = void_type_node;
                p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
              }
              break;

            case COMPOUND_EXPR:
              /* Advance to the last statement.  Set all container types to
                 void.  */
              for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
                {
                  TREE_SIDE_EFFECTS (*p) = 1;
                  TREE_TYPE (*p) = void_type_node;
                }
              break;

            case TRANSACTION_EXPR:
              TREE_SIDE_EFFECTS (*p) = 1;
              TREE_TYPE (*p) = void_type_node;
              p = &TRANSACTION_EXPR_BODY (*p);
              break;

            default:
              /* Assume that any tree upon which voidify_wrapper_expr is
                 directly called is a wrapper, and that its body is op0.  */
              if (p == &wrapper)
                {
                  TREE_SIDE_EFFECTS (*p) = 1;
                  TREE_TYPE (*p) = void_type_node;
                  p = &TREE_OPERAND (*p, 0);
                  break;
                }
              goto out;
            }
        }

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
        temp = NULL_TREE;
      else if (temp)
        {
          /* The wrapper is on the RHS of an assignment that we're pushing
             down.  */
          gcc_assert (TREE_CODE (temp) == INIT_EXPR
                      || TREE_CODE (temp) == MODIFY_EXPR);
          TREE_OPERAND (temp, 1) = *p;
          *p = temp;
        }
      else
        {
          /* No assignment supplied: capture the value in a new temp.  */
          temp = create_tmp_var (type, "retval");
          *p = build2 (INIT_EXPR, type, temp, *p);
        }

      return temp;
    }

  return NULL_TREE;
}
1080
1081 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1082 a temporary through which they communicate. */
1083
1084 static void
1085 build_stack_save_restore (gcall **save, gcall **restore)
1086 {
1087 tree tmp_var;
1088
1089 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1090 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1091 gimple_call_set_lhs (*save, tmp_var);
1092
1093 *restore
1094 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1095 1, tmp_var);
1096 }
1097
/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   variable DECL.  */
1100 static tree
1101 build_asan_poison_call_expr (tree decl)
1102 {
1103 /* Do not poison variables that have size equal to zero. */
1104 tree unit_size = DECL_SIZE_UNIT (decl);
1105 if (zerop (unit_size))
1106 return NULL_TREE;
1107
1108 tree base = build_fold_addr_expr (decl);
1109
1110 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1111 void_type_node, 3,
1112 build_int_cst (integer_type_node,
1113 ASAN_MARK_POISON),
1114 base, unit_size);
1115 }
1116
1117 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1118 on POISON flag, shadow memory of a DECL variable. The call will be
1119 put on location identified by IT iterator, where BEFORE flag drives
1120 position where the stmt will be put. */
1121
static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  /* When within an OMP context, do not emit ASAN_MARK internal fns.  */
  if (gimplify_omp_ctxp)
    return;

  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  /* IFN_ASAN_MARK (flags, &decl, size-of-decl).  */
  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  /* BEFORE selects which side of the iterator's statement the new
     call lands on.  */
  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
1154
1155 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1156 either poisons or unpoisons a DECL. Created statement is appended
1157 to SEQ_P gimple sequence. */
1158
1159 static void
1160 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1161 {
1162 gimple_stmt_iterator it = gsi_last (*seq_p);
1163 bool before = false;
1164
1165 if (gsi_end_p (it))
1166 before = true;
1167
1168 asan_poison_variable (decl, poison, &it, before);
1169 }
1170
1171 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1172
1173 static int
1174 sort_by_decl_uid (const void *a, const void *b)
1175 {
1176 const tree *t1 = (const tree *)a;
1177 const tree *t2 = (const tree *)b;
1178
1179 int uid1 = DECL_UID (*t1);
1180 int uid2 = DECL_UID (*t2);
1181
1182 if (uid1 < uid2)
1183 return -1;
1184 else if (uid1 > uid2)
1185 return 1;
1186 else
1187 return 0;
1188 }
1189
1190 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1191 depending on POISON flag. Created statement is appended
1192 to SEQ_P gimple sequence. */
1193
1194 static void
1195 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1196 {
1197 unsigned c = variables->elements ();
1198 if (c == 0)
1199 return;
1200
1201 auto_vec<tree> sorted_variables (c);
1202
1203 for (hash_set<tree>::iterator it = variables->begin ();
1204 it != variables->end (); ++it)
1205 sorted_variables.safe_push (*it);
1206
1207 sorted_variables.qsort (sort_by_decl_uid);
1208
1209 unsigned i;
1210 tree var;
1211 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1212 {
1213 asan_poison_variable (var, poison, seq_p);
1214
1215 /* Add use_after_scope_memory attribute for the variable in order
1216 to prevent re-written into SSA. */
1217 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1218 DECL_ATTRIBUTES (var)))
1219 DECL_ATTRIBUTES (var)
1220 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1221 integer_one_node,
1222 DECL_ATTRIBUTES (var));
1223 }
1224 }
1225
1226 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1227
static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  /* Save the context's stack flags; they are restored (keep_stack is
     merged upward) before we return.  */
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  /* If the BIND_EXPR has a value, TEMP is a temporary that will hold it
     and the body is rewritten to compute into TEMP; otherwise NULL_TREE.  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset so we can detect save_stack/keep_stack requests made while
     gimplifying this bind's own body.  */
  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      /* A clobber is an empty constructor assigned with the
		 volatile bit set; it marks the variable dead for stack
		 slot sharing.  */
	      tree clobber = build_constructor (TREE_TYPE (t), NULL);
	      gimple *clobber_stmt;
	      TREE_THIS_VOLATILE (clobber) = 1;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  /* Collect OpenACC 'declare' return clauses for variables
	     leaving scope; the map is freed once emptied.  */
	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      /* Variables poisoned for use-after-scope get re-poisoned in the
	 cleanup when they leave this scope.  */
      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      /* Emit the OACC_DECLARE "returns" target ahead of the other
	 cleanup statements.  */
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      /* Wrap the body in a TRY_FINALLY so the cleanup runs on every
	 exit path, with stack_save (if any) preceding the try.  */
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1425
1426 /* Maybe add early return predict statement to PRE_P sequence. */
1427
static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* Emit the predictor only when the return is inside a conditional
     context: an unconditional return is not an "early" return worth
     predicting.  (The original comment said the opposite of what the
     code does.)  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
1439
1440 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1441 GIMPLE value, it is assigned to a new temporary and the statement is
1442 re-written to return the temporary.
1443
1444 PRE_P points to the sequence where side effects that must happen before
1445 STMT should be stored. */
1446
1447 static enum gimplify_status
1448 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1449 {
1450 greturn *ret;
1451 tree ret_expr = TREE_OPERAND (stmt, 0);
1452 tree result_decl, result;
1453
1454 if (ret_expr == error_mark_node)
1455 return GS_ERROR;
1456
1457 /* Implicit _Cilk_sync must be inserted right before any return statement
1458 if there is a _Cilk_spawn in the function. If the user has provided a
1459 _Cilk_sync, the optimizer should remove this duplicate one. */
1460 if (fn_contains_cilk_spawn_p (cfun))
1461 {
1462 tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1463 gimplify_and_add (impl_sync, pre_p);
1464 }
1465
1466 if (!ret_expr
1467 || TREE_CODE (ret_expr) == RESULT_DECL
1468 || ret_expr == error_mark_node)
1469 {
1470 maybe_add_early_return_predict_stmt (pre_p);
1471 greturn *ret = gimple_build_return (ret_expr);
1472 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1473 gimplify_seq_add_stmt (pre_p, ret);
1474 return GS_ALL_DONE;
1475 }
1476
1477 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1478 result_decl = NULL_TREE;
1479 else
1480 {
1481 result_decl = TREE_OPERAND (ret_expr, 0);
1482
1483 /* See through a return by reference. */
1484 if (TREE_CODE (result_decl) == INDIRECT_REF)
1485 result_decl = TREE_OPERAND (result_decl, 0);
1486
1487 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1488 || TREE_CODE (ret_expr) == INIT_EXPR)
1489 && TREE_CODE (result_decl) == RESULT_DECL);
1490 }
1491
1492 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1493 Recall that aggregate_value_p is FALSE for any aggregate type that is
1494 returned in registers. If we're returning values in registers, then
1495 we don't want to extend the lifetime of the RESULT_DECL, particularly
1496 across another call. In addition, for those aggregates for which
1497 hard_function_value generates a PARALLEL, we'll die during normal
1498 expansion of structure assignments; there's special code in expand_return
1499 to handle this case that does not exist in expand_expr. */
1500 if (!result_decl)
1501 result = NULL_TREE;
1502 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1503 {
1504 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1505 {
1506 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1507 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1508 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1509 should be effectively allocated by the caller, i.e. all calls to
1510 this function must be subject to the Return Slot Optimization. */
1511 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1512 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1513 }
1514 result = result_decl;
1515 }
1516 else if (gimplify_ctxp->return_temp)
1517 result = gimplify_ctxp->return_temp;
1518 else
1519 {
1520 result = create_tmp_reg (TREE_TYPE (result_decl));
1521
1522 /* ??? With complex control flow (usually involving abnormal edges),
1523 we can wind up warning about an uninitialized value for this. Due
1524 to how this variable is constructed and initialized, this is never
1525 true. Give up and never warn. */
1526 TREE_NO_WARNING (result) = 1;
1527
1528 gimplify_ctxp->return_temp = result;
1529 }
1530
1531 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1532 Then gimplify the whole thing. */
1533 if (result != result_decl)
1534 TREE_OPERAND (ret_expr, 0) = result;
1535
1536 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1537
1538 maybe_add_early_return_predict_stmt (pre_p);
1539 ret = gimple_build_return (result);
1540 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1541 gimplify_seq_add_stmt (pre_p, ret);
1542
1543 return GS_ALL_DONE;
1544 }
1545
1546 /* Gimplify a variable-length array DECL. */
1547
static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  /* Size expressions must be gimplified first, since they may be
     referenced by the allocation call built below.  */
  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The pointer always points at a valid allocation, so dereferencing
     it cannot trap.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* Allocate the backing storage: addr = __builtin_alloca_with_align
     (size, align).  */
  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}
1585
1586 /* A helper function to be called via walk_tree. Mark all labels under *TP
1587 as being forced. To be called for DECL_INITIAL of static variables. */
1588
1589 static tree
1590 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1591 {
1592 if (TYPE_P (*tp))
1593 *walk_subtrees = 0;
1594 if (TREE_CODE (*tp) == LABEL_DECL)
1595 {
1596 FORCED_LABEL (*tp) = 1;
1597 cfun->has_forced_label_in_static = 1;
1598 }
1599
1600 return NULL_TREE;
1601 }
1602
1603 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1604 and initialization explicit. */
1605
static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed; its effects are emitted into
     SEQ_P below.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      /* For references, the referenced type's sizes may also need
	 gimplification.  */
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      /* Treat as a VLA any decl with non-constant size, and also large
	 fixed-size decls under -fstack-check=generic (they get the same
	 deferred-allocation treatment).  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      /* Unpoison the variable at its declaration point and register it
	 for re-poisoning when it leaves scope (use-after-scope
	 sanitization).  */
      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && dbg_cnt (asan_use_after_scope))
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR; the
		 tree node for INIT is dead afterwards and can be freed.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1691
1692 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1693 and replacing the LOOP_EXPR with goto, but if the loop contains an
1694 EXIT_EXPR, we need to append a label for it to jump to. */
1695
1696 static enum gimplify_status
1697 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1698 {
1699 tree saved_label = gimplify_ctxp->exit_label;
1700 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1701
1702 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1703
1704 gimplify_ctxp->exit_label = NULL_TREE;
1705
1706 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1707
1708 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1709
1710 if (gimplify_ctxp->exit_label)
1711 gimplify_seq_add_stmt (pre_p,
1712 gimple_build_label (gimplify_ctxp->exit_label));
1713
1714 gimplify_ctxp->exit_label = saved_label;
1715
1716 *expr_p = NULL;
1717 return GS_ALL_DONE;
1718 }
1719
1720 /* Gimplify a statement list onto a sequence. These may be created either
1721 by an enlightened front-end, or by shortcut_cond_expr. */
1722
1723 static enum gimplify_status
1724 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1725 {
1726 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1727
1728 tree_stmt_iterator i = tsi_start (*expr_p);
1729
1730 while (!tsi_end_p (i))
1731 {
1732 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1733 tsi_delink (&i);
1734 }
1735
1736 if (temp)
1737 {
1738 *expr_p = temp;
1739 return GS_OK;
1740 }
1741
1742 return GS_ALL_DONE;
1743 }
1744
1745 /* Callback for walk_gimple_seq. */
1746
static tree
warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  /* Record the statement and return non-NULL to stop the walk.  */
	  wi->info = stmt;
	  return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  /* ASAN_MARK calls are sanitizer instrumentation, not user
	     code; keep walking past them.  */
	  *handled_ops_p = false;
	  break;
	}
      /* Fall through.  */
    default:
      /* Save the first "real" statement (not a decl/lexical scope/...).  */
      wi->info = stmt;
      return integer_zero_node;
    }
  return NULL_TREE;
}
1787
1788 /* Possibly warn about unreachable statements between switch's controlling
1789 expression and the first case. SEQ is the body of a switch expression. */
1790
1791 static void
1792 maybe_warn_switch_unreachable (gimple_seq seq)
1793 {
1794 if (!warn_switch_unreachable
1795 /* This warning doesn't play well with Fortran when optimizations
1796 are on. */
1797 || lang_GNU_Fortran ()
1798 || seq == NULL)
1799 return;
1800
1801 struct walk_stmt_info wi;
1802 memset (&wi, 0, sizeof (wi));
1803 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1804 gimple *stmt = (gimple *) wi.info;
1805
1806 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1807 {
1808 if (gimple_code (stmt) == GIMPLE_GOTO
1809 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1810 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1811 /* Don't warn for compiler-generated gotos. These occur
1812 in Duff's devices, for example. */;
1813 else
1814 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1815 "statement will never be executed");
1816 }
1817 }
1818
1819
1820 /* A label entry that pairs label and a location. */
struct label_entry
{
  tree label;		/* The LABEL_DECL itself.  */
  location_t loc;	/* Source location to report for this label.  */
};
1826
1827 /* Find LABEL in vector of label entries VEC. */
1828
1829 static struct label_entry *
1830 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1831 {
1832 unsigned int i;
1833 struct label_entry *l;
1834
1835 FOR_EACH_VEC_ELT (*vec, i, l)
1836 if (l->label == label)
1837 return l;
1838 return NULL;
1839 }
1840
1841 /* Return true if LABEL, a LABEL_DECL, represents a case label
1842 in a vector of labels CASES. */
1843
1844 static bool
1845 case_label_p (const vec<tree> *cases, tree label)
1846 {
1847 unsigned int i;
1848 tree l;
1849
1850 FOR_EACH_VEC_ELT (*cases, i, l)
1851 if (CASE_LABEL (l) == label)
1852 return true;
1853 return false;
1854 }
1855
1856 /* Find the last statement in a scope STMT. */
1857
static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	/* Recurse: the last statement of a bind body may itself open a
	   nested scope.  */
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	/* NOTE(review): gimple_stmt_may_fallthru is invoked before the
	   LAST_EVAL == NULL check that guards gimple_call_internal_p;
	   confirm it tolerates a NULL argument.  */
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    /* For a TRY_FINALLY whose eval part falls through, the last
	       executed statement lives in the cleanup sequence.  */
	    stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    default:
      /* Not a scope: STMT itself is the answer.  */
      return stmt;
    }
}
1894
1895 /* Collect interesting labels in LABELS and return the statement preceding
1896 another case label, or a user-defined label. */
1897
static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels)
{
  /* PREV tracks the last "interesting" statement seen, which is what
     the caller inspects for fallthrough.  */
  gimple *prev = NULL;

  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		gimple_set_location (prev, bind_loc);
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	     if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  /* The false label marks the fall-through point of the if.  */
	  struct label_entry l = { false_lab, if_loc };
	  labels->safe_push (l);

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	/* Sanitizer instrumentation is never the "previous statement".  */
	;
      else
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));

  return prev;
}
2004
/* Return true if the switch fallthrough warning should occur.  LABEL is
   the label statement that we're falling through to.  */
2007
static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
	 foo ();
       label:
	 bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      tree l;
      /* Skip the run of ordinary (non-case) labels; only warn if a case
	 label follows them.  Note the assignment inside the condition.  */
      while (!gsi_end_p (gsi)
	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
	     && !case_label_p (&gimplify_ctxp->case_labels, l))
	gsi_next (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
	return false;
    }

  /* Don't warn for terminated branches, i.e. when the subsequent case labels
     immediately breaks.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels.  */
  while (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
    gsi_next (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
	 { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}
2054
2055 /* Callback for walk_gimple_seq. */
2056
static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			     struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
	/* Found a label.  Skip all immediately following labels.  */
	while (!gsi_end_p (*gsi_p)
	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	  gsi_next (gsi_p);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	/* Vector of labels that fall through.  */
	auto_vec <struct label_entry> labels;
	gimple *prev = collect_fallthrough_labels (gsi_p, &labels);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	gimple *next = gsi_stmt (*gsi_p);
	tree label;
	/* If what follows is a label, then we may have a fallthrough.  */
	if (gimple_code (next) == GIMPLE_LABEL
	    && gimple_has_location (next)
	    && (label = gimple_label_label (as_a <glabel *> (next)))
	    && prev != NULL)
	  {
	    struct label_entry *l;
	    bool warned_p = false;
	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
	      /* Quiet.  */;
	    else if (gimple_code (prev) == GIMPLE_LABEL
		     && (label = gimple_label_label (as_a <glabel *> (prev)))
		     && (l = find_label_entry (&labels, label)))
	      /* The previous statement was itself a fallthrough label;
		 report at the location recorded for it.  */
	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
		     /* Try to be clever and don't warn when the statement
			can't actually fall through.  */
		     && gimple_stmt_may_fallthru (prev)
		     && gimple_has_location (prev))
	      warned_p = warning_at (gimple_location (prev),
				     OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    if (warned_p)
	      inform (gimple_location (next), "here");

	    /* Mark this label as processed so as to prevent multiple
	       warnings in nested switches.  */
	    FALLTHROUGH_LABEL_P (label) = true;

	    /* So that next warn_implicit_fallthrough_r will start looking for
	       a new sequence starting with this label.  */
	    gsi_prev (gsi_p);
	  }
      }
      break;
   default:
      break;
    }
  return NULL_TREE;
}
2145
2146 /* Warn when a switch case falls through. */
2147
2148 static void
2149 maybe_warn_implicit_fallthrough (gimple_seq seq)
2150 {
2151 if (!warn_implicit_fallthrough)
2152 return;
2153
2154 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2155 if (!(lang_GNU_C ()
2156 || lang_GNU_CXX ()
2157 || lang_GNU_OBJC ()))
2158 return;
2159
2160 struct walk_stmt_info wi;
2161 memset (&wi, 0, sizeof (wi));
2162 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2163 }
2164
/* Callback for walk_gimple_seq.  Expand a IFN_FALLTHROUGH internal call
   (generated for the fallthrough attribute): remove the call, then verify
   that what follows -- possibly through an artificial location-less goto
   -- is a case label or default label; if not, diagnose a misplaced
   attribute.  Returning a non-NULL tree terminates the walk.  */

static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
		      struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
	{
	  gsi_remove (gsi_p, true);
	  if (gsi_end_p (*gsi_p))
	    return integer_zero_node;

	  bool found = false;
	  location_t loc = gimple_location (stmt);

	  gimple_stmt_iterator gsi2 = *gsi_p;
	  stmt = gsi_stmt (gsi2);
	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
	    {
	      /* Go on until the artificial label.  */
	      tree goto_dest = gimple_goto_dest (stmt);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
			 == goto_dest)
		    break;
		}

	      /* Not found?  Stop.  */
	      if (gsi_end_p (gsi2))
		break;

	      /* Look one past it.  */
	      gsi_next (&gsi2);
	    }

	  /* We're looking for a case label or default label here.  */
	  while (!gsi_end_p (gsi2))
	    {
	      stmt = gsi_stmt (gsi2);
	      if (gimple_code (stmt) == GIMPLE_LABEL)
		{
		  tree label = gimple_label_label (as_a <glabel *> (stmt));
		  /* A user-visible case/default label is artificial at the
		     tree level but carries a source location.  */
		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
		    {
		      found = true;
		      break;
		    }
		}
	      else
		/* Something other than a label.  That's not expected.  */
		break;
	      gsi_next (&gsi2);
	    }
	  if (!found)
	    warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
			"a case label or default label");
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}
2244
2245 /* Expand all FALLTHROUGH () calls in SEQ. */
2246
2247 static void
2248 expand_FALLTHROUGH (gimple_seq *seq_p)
2249 {
2250 struct walk_stmt_info wi;
2251 memset (&wi, 0, sizeof (wi));
2252 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2253 }
2254
2255 \f
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  *EXPR_P is the SWITCH_EXPR; side effects are emitted to
   PRE_P.  Returns GS_ALL_DONE on success, or propagates the status of
   gimplifying the switch condition on failure.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
	{
	  /* All switch-local variables must have been popped by now.  */
	  gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      /* GIMPLE_SWITCH requires a default label; synthesize an empty one
	 at the end of the body if the source had none.  */
      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
2346
2347 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2348
2349 static enum gimplify_status
2350 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2351 {
2352 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2353 == current_function_decl);
2354
2355 tree label = LABEL_EXPR_LABEL (*expr_p);
2356 glabel *label_stmt = gimple_build_label (label);
2357 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2358 gimplify_seq_add_stmt (pre_p, label_stmt);
2359
2360 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2361 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2362 NOT_TAKEN));
2363 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2364 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2365 TAKEN));
2366
2367 return GS_ALL_DONE;
2368 }
2369
2370 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2371
2372 static enum gimplify_status
2373 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2374 {
2375 struct gimplify_ctx *ctxp;
2376 glabel *label_stmt;
2377
2378 /* Invalid programs can play Duff's Device type games with, for example,
2379 #pragma omp parallel. At least in the C front end, we don't
2380 detect such invalid branches until after gimplification, in the
2381 diagnose_omp_blocks pass. */
2382 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2383 if (ctxp->case_labels.exists ())
2384 break;
2385
2386 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2387 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2388 ctxp->case_labels.safe_push (*expr_p);
2389 gimplify_seq_add_stmt (pre_p, label_stmt);
2390
2391 return GS_ALL_DONE;
2392 }
2393
2394 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2395 if necessary. */
2396
2397 tree
2398 build_and_jump (tree *label_p)
2399 {
2400 if (label_p == NULL)
2401 /* If there's nowhere to jump, just fall through. */
2402 return NULL_TREE;
2403
2404 if (*label_p == NULL_TREE)
2405 {
2406 tree label = create_artificial_label (UNKNOWN_LOCATION);
2407 *label_p = label;
2408 }
2409
2410 return build1 (GOTO_EXPR, void_type_node, *label_p);
2411 }
2412
2413 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2414 This also involves building a label to jump to and communicating it to
2415 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2416
2417 static enum gimplify_status
2418 gimplify_exit_expr (tree *expr_p)
2419 {
2420 tree cond = TREE_OPERAND (*expr_p, 0);
2421 tree expr;
2422
2423 expr = build_and_jump (&gimplify_ctxp->exit_label);
2424 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2425 *expr_p = expr;
2426
2427 return GS_OK;
2428 }
2429
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* get_unwidened picks the smaller mode for a directly readable
     bit-field; otherwise the canonical type is the field's own type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
2480
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
2534
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  Always returns GS_OK; on return *EXPR_P
   holds the simplified (possibly conversion-free) expression.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
2581
2582 /* Nonlocal VLAs seen in the current function. */
2583 static hash_set<tree> *nonlocal_vlas;
2584
2585 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2586 static tree nonlocal_vla_vars;
2587
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  Returns GS_ERROR
   for a leaked undeclared local, GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (VAR_P (decl)
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip OMP contexts that don't isolate the VLA; only add the
	     debug copy when no enclosing context remains and the decl
	     has not been recorded yet (add returns true if present).  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      /* Chain the copy onto the per-function list of VLA debug
		 decls.  */
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
2655
/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  T's flag is
   recomputed from its volatility and its operands' flags; no recursion
   into operands takes place.  */

static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      /* Volatility of T itself forces the flag on; otherwise it is the
	 OR of the operands' flags.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
   }
}
2708
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     The original intent (a requirement inherited from the since-removed
     Java front end) was to elaborate nodes in source order.  That means
     we must gimplify the inner expression followed by each of the
     indices, in order.  But we can't gimplify the inner expression until
     we deal with any variable bounds, sizes, or positions in order to
     deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
2911
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Snapshot the pre-increment value into a temporary; that
	 temporary is the expression's result.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
3009
3010 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3011
3012 static void
3013 maybe_with_size_expr (tree *expr_p)
3014 {
3015 tree expr = *expr_p;
3016 tree type = TREE_TYPE (expr);
3017 tree size;
3018
3019 /* If we've already wrapped this or the type is error_mark_node, we can't do
3020 anything. */
3021 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3022 || type == error_mark_node)
3023 return;
3024
3025 /* If the size isn't known or is a constant, we have nothing to do. */
3026 size = TYPE_SIZE_UNIT (type);
3027 if (!size || TREE_CODE (size) == INTEGER_CST)
3028 return;
3029
3030 /* Otherwise, make a WITH_SIZE_EXPR. */
3031 size = unshare_expr (size);
3032 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3033 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3034 }
3035
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
   gimplified to an SSA name.  Returns the gimplification status.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
	      bool allow_ssa)
{
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.  */
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
    test = is_gimple_val, fb = fb_rvalue;
  else
    {
      test = is_gimple_lvalue, fb = fb_either;
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
	{
	  tree init = TARGET_EXPR_INITIAL (*arg_p);
	  /* Only usable when the initializer yields a value (a void
	     initializer means the TARGET_EXPR slot itself is needed).  */
	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    *arg_p = init;
	}
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (arg_p);

  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
  /* Make sure arguments have the same location as the function call
     itself.  */
  protected_set_expr_location (*arg_p, call_location);

  /* There is a sequence point before a function call.  Side effects in
     the argument list must occur before the actual call.  So, when
     gimplifying arguments, force gimplify_expr to use an internal
     post queue which is then appended to the end of PRE_P.  */
  return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
}
3082
3083 /* Don't fold inside offloading or taskreg regions: it can break code by
3084 adding decl references that weren't in the source. We'll do it during
3085 omplower pass instead. */
3086
3087 static bool
3088 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3089 {
3090 struct gimplify_omp_ctx *ctx;
3091 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3092 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3093 return false;
3094 return fold_stmt (gsi);
3095 }
3096
3097 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3098 with the pointer to the proper cilk frame. */
3099 static void
3100 gimplify_cilk_detach (gimple_seq *pre_p)
3101 {
3102 tree frame = cfun->cilk_frame_decl;
3103 tree ptrf = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl,
3104 frame);
3105 gcall *detach = gimple_build_call (cilk_detach_fndecl, 1,
3106 ptrf);
3107 gimplify_seq_add_stmt(pre_p, detach);
3108 }
3109
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.

   Returns GS_ALL_DONE when the call has been fully lowered (or left for
   gimplify_modify_expr), GS_OK when *EXPR_P was rewritten and should be
   gimplified again, and GS_ERROR on failure.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  Such calls have no
     CALL_EXPR_FN; they are identified by CALL_EXPR_IFN instead.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
	{
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}

      if (EXPR_CILK_SPAWN (*expr_p))
	gimplify_cilk_detach (pre_p);
      /* NOTE: this local deliberately shadows the outer `gcall *call'.  */
      gimple *call = gimple_build_call_internal_vec (ifn, vargs);
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	/* If the call has been built for a variable-sized object, then we
	   want to restore the stack level when the enclosing BIND_EXPR is
	   exited to reclaim the allocated space; otherwise, we precisely
	   need to do the opposite and preserve the latest stack level.  */
	if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
	  gimplify_ctxp->save_stack = true;
	else
	  gimplify_ctxp->keep_stack = true;
	break;

      case BUILT_IN_VA_START:
        {
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}

      default:
        ;
      }
  /* First folding attempt, before the operands are gimplified.  A
     second attempt is made after gimplification, below.  */
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Walk P alongside the actual arguments; if P runs out first the
     call has more arguments than named parameters.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     vars there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p), ! returns_twice);

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p), ! returns_twice);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      notice_special_calls (call);
      if (EXPR_CILK_SPAWN (*expr_p))
        gimplify_cilk_detach (pre_p);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
3394
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: an atomic predicate.  Emit a single conditional jump;
	 build_and_jump with a NULL label pointer means "fall through".  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If a recursive case above allocated a label of its own, it is the
     fall-through point — emit it at the end.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
3504
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.
   Returns either EXPR itself (possibly modified in place) or a new
   statement list implementing the same semantics with gotos.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  /* Assemble the result: condition jumps, then-arm, optional jump over
     the else-arm, else-arm, and any labels that we own.  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
3681
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.
   Truth expressions are retyped in place (and their operands boolified
   recursively); other expressions are wrapped in a conversion when
   needed.  Returns the boolified expression.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Look through a cast inserted around the argument.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      /* Operand 1 carries the annotation kind; boolify the annotated
	 condition for the loop-annotation kinds.  */
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
3768
3769 /* Given a conditional expression *EXPR_P without side effects, gimplify
3770 its operands. New statements are inserted to PRE_P. */
3771
3772 static enum gimplify_status
3773 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3774 {
3775 tree expr = *expr_p, cond;
3776 enum gimplify_status ret, tret;
3777 enum tree_code code;
3778
3779 cond = gimple_boolify (COND_EXPR_COND (expr));
3780
3781 /* We need to handle && and || specially, as their gimplification
3782 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3783 code = TREE_CODE (cond);
3784 if (code == TRUTH_ANDIF_EXPR)
3785 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3786 else if (code == TRUTH_ORIF_EXPR)
3787 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3788 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3789 COND_EXPR_COND (*expr_p) = cond;
3790
3791 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3792 is_gimple_val, fb_rvalue);
3793 ret = MIN (ret, tret);
3794 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3795 is_gimple_val, fb_rvalue);
3796
3797 return MIN (ret, tret);
3798 }
3799
3800 /* Return true if evaluating EXPR could trap.
3801 EXPR is GENERIC, while tree_could_trap_p can be called
3802 only on GIMPLE. */
3803
3804 static bool
3805 generic_expr_could_trap_p (tree expr)
3806 {
3807 unsigned i, n;
3808
3809 if (!expr || is_gimple_val (expr))
3810 return false;
3811
3812 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3813 return true;
3814
3815 n = TREE_OPERAND_LENGTH (expr);
3816 for (i = 0; i < n; i++)
3817 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3818 return true;
3819
3820 return false;
3821 }
3822
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else		or		else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  FALLBACK indicates which kinds of result
   (rvalue/lvalue) the caller can accept.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is already a goto to a local label, reuse that label as
     the branch target instead of creating an artificial one.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
4065
4066 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4067 to be marked addressable.
4068
4069 We cannot rely on such an expression being directly markable if a temporary
4070 has been created by the gimplification. In this case, we create another
4071 temporary and initialize it with a copy, which will become a store after we
4072 mark it addressable. This can happen if the front-end passed us something
4073 that it could not mark addressable yet, like a Fortran pass-by-reference
4074 parameter (int) floatvar. */
4075
4076 static void
4077 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4078 {
4079 while (handled_component_p (*expr_p))
4080 expr_p = &TREE_OPERAND (*expr_p, 0);
4081 if (is_gimple_reg (*expr_p))
4082 {
4083 /* Do not allow an SSA name as the temporary. */
4084 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4085 DECL_GIMPLE_REG_P (var) = 0;
4086 *expr_p = var;
4087 }
4088 }
4089
4090 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4091 a call to __builtin_memcpy. */
4092
4093 static enum gimplify_status
4094 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4095 gimple_seq *seq_p)
4096 {
4097 tree t, to, to_ptr, from, from_ptr;
4098 gcall *gs;
4099 location_t loc = EXPR_LOCATION (*expr_p);
4100
4101 to = TREE_OPERAND (*expr_p, 0);
4102 from = TREE_OPERAND (*expr_p, 1);
4103
4104 /* Mark the RHS addressable. Beware that it may not be possible to do so
4105 directly if a temporary has been created by the gimplification. */
4106 prepare_gimple_addressable (&from, seq_p);
4107
4108 mark_addressable (from);
4109 from_ptr = build_fold_addr_expr_loc (loc, from);
4110 gimplify_arg (&from_ptr, seq_p, loc);
4111
4112 mark_addressable (to);
4113 to_ptr = build_fold_addr_expr_loc (loc, to);
4114 gimplify_arg (&to_ptr, seq_p, loc);
4115
4116 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4117
4118 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4119
4120 if (want_value)
4121 {
4122 /* tmp = memcpy() */
4123 t = create_tmp_var (TREE_TYPE (to_ptr));
4124 gimple_call_set_lhs (gs, t);
4125 gimplify_seq_add_stmt (seq_p, gs);
4126
4127 *expr_p = build_simple_mem_ref (t);
4128 return GS_ALL_DONE;
4129 }
4130
4131 gimplify_seq_add_stmt (seq_p, gs);
4132 *expr_p = NULL;
4133 return GS_ALL_DONE;
4134 }
4135
4136 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4137 a call to __builtin_memset. In this case we know that the RHS is
4138 a CONSTRUCTOR with an empty element list. */
4139
4140 static enum gimplify_status
4141 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4142 gimple_seq *seq_p)
4143 {
4144 tree t, from, to, to_ptr;
4145 gcall *gs;
4146 location_t loc = EXPR_LOCATION (*expr_p);
4147
4148 /* Assert our assumptions, to abort instead of producing wrong code
4149 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4150 not be immediately exposed. */
4151 from = TREE_OPERAND (*expr_p, 1);
4152 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4153 from = TREE_OPERAND (from, 0);
4154
4155 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4156 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4157
4158 /* Now proceed. */
4159 to = TREE_OPERAND (*expr_p, 0);
4160
4161 to_ptr = build_fold_addr_expr_loc (loc, to);
4162 gimplify_arg (&to_ptr, seq_p, loc);
4163 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4164
4165 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4166
4167 if (want_value)
4168 {
4169 /* tmp = memset() */
4170 t = create_tmp_var (TREE_TYPE (to_ptr));
4171 gimple_call_set_lhs (gs, t);
4172 gimplify_seq_add_stmt (seq_p, gs);
4173
4174 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4175 return GS_ALL_DONE;
4176 }
4177
4178 gimplify_seq_add_stmt (seq_p, gs);
4179 *expr_p = NULL;
4180 return GS_ALL_DONE;
4181 }
4182
4183 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4184 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4185 assignment. Return non-null if we detect a potential overlap. */
4186
/* Data shared between gimplify_init_ctor_preeval and its walk_tree
   callback gimplify_init_ctor_preeval_1, describing the lhs of the
   assignment being analyzed for overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
4196
/* walk_tree callback: return non-NULL (the offending subtree) if *TP
   may overlap the lhs described by XDATA, which points to a
   gimplify_init_ctor_preeval_data.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Conflict if any pointer parameter's pointed-to type may alias
	 the lhs.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
					(TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  /* Types and decls have no interesting subtrees for this purpose.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
4238
4239 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4240 force values that overlap with the lhs (as described by *DATA)
4241 into temporaries. */
4242
static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
4310
4311 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4312 a RANGE_EXPR in a CONSTRUCTOR for an array.
4313
4314 var = lower;
4315 loop_entry:
4316 object[var] = value;
4317 if (var == upper)
4318 goto loop_exit;
4319 var = var + 1;
4320 goto loop_entry;
4321 loop_exit:
4322
4323 We increment var _after_ the loop exit check because we might otherwise
4324 fail if upper == TYPE_MAX_VALUE (type for upper).
4325
4326 Note that we never have to deal with SAVE_EXPRs here, because this has
4327 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4328
static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.
     Testing before the increment avoids wrap-around when UPPER is the
     maximum value of its type (see the block comment above).  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
4385
4386 /* Return true if FDECL is accessing a field that is zero sized. */
4387
4388 static bool
4389 zero_sized_field_decl (const_tree fdecl)
4390 {
4391 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4392 && integer_zerop (DECL_SIZE (fdecl)))
4393 return true;
4394 return false;
4395 }
4396
4397 /* Return true if TYPE is zero sized. */
4398
4399 static bool
4400 zero_sized_type (const_tree type)
4401 {
4402 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4403 && integer_zerop (TYPE_SIZE (type)))
4404 return true;
4405 return false;
4406 }
4407
4408 /* A subroutine of gimplify_init_constructor. Generate individual
4409 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4410 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4411 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4412 zeroed first. */
4413
static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, PURPOSE is an index (or RANGE_EXPR); remember the
     element type so we can build ARRAY_REFs below.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the whole object was already zeroed, zero elements need no
	 store.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested constructors; vector constructors are kept
	 whole as a general initializer.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
4496
4497 /* Return the appropriate RHS predicate for this LHS. */
4498
4499 gimple_predicate
4500 rhs_predicate_for (tree lhs)
4501 {
4502 if (is_gimple_reg (lhs))
4503 return is_gimple_reg_rhs_or_call;
4504 else
4505 return is_gimple_mem_rhs_or_call;
4506 }
4507
4508 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4509 before the LHS has been gimplified. */
4510
4511 static gimple_predicate
4512 initial_rhs_predicate_for (tree lhs)
4513 {
4514 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4515 return is_gimple_reg_rhs_or_call;
4516 else
4517 return is_gimple_mem_rhs_or_call;
4518 }
4519
4520 /* Gimplify a C99 compound literal expression. This just means adding
4521 the DECL_EXPR before the current statement and using its anonymous
4522 decl instead. */
4523
static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (which gimplifies the initializer) and use the
     anonymous decl in place of the literal.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
4575
4576 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4577 return a new CONSTRUCTOR if something changed. */
4578
static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* A non-addressable literal whose initializer is itself a
	     CONSTRUCTOR can be replaced by that (recursively optimized)
	     initializer.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* Copy-on-write: only clone the constructor (and its element
	 vector) the first time an element actually changes.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
4617
4618 /* A subroutine of gimplify_modify_expr. Break out elements of a
4619 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4620
4621 Note that we still need to clear any elements that don't have explicit
4622 initializers, so if not all elements are initialized we keep the
4623 original MODIFY_EXPR, we just remove all of the constructor elements.
4624
4625 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4626 GS_ERROR if we would have to create a temporary when gimplifying
4627 this constructor. Otherwise, return GS_OK.
4628
4629 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4630
static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  /* Dispatch on the constructed type: aggregates get element-wise
     stores or block operations; complex and vector types have their
     own representations.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && VAR_P (object)
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.

	   We cannot apply such transformation when compiling chkp static
	   initializer because creation of initializer image in the memory
	   will require static initialization of bounds for it.  It should
	   result in another gimplification of similar initializer and we
	   may fall into infinite loop.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type)
	    && (!current_function_decl
		|| !lookup_attribute ("chkp ctor",
				      DECL_ATTRIBUTES (current_function_decl))))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	bool ctor_has_side_effects_p
	  = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, or if the constructor has side effects,
	   add assignments to the individual scalar fields of the object.  */
	if (!cleared
	    || num_nonzero_elements > 0
	    || ctor_has_side_effects_p)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	    else if (TREE_STATIC (ctor)
		     && !initializer_constant_valid_p (ce->value,
						       TREE_TYPE (ce->value)))
	      TREE_STATIC (ctor) = 0;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p)
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
	lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
4991
4992 /* Given a pointer value OP0, return a simplified version of an
4993 indirection through OP0, or NULL_TREE if no simplification is
4994 possible. This may only be applied to a rhs of an expression.
4995 Note that the resulting type may be different from the type pointed
4996 to in the sense that it is still compatible from the langhooks
4997 point of view. */
4998
4999 static tree
5000 gimple_fold_indirect_ref_rhs (tree t)
5001 {
5002 return gimple_fold_indirect_ref (t);
5003 }
5004
5005 /* Subroutine of gimplify_modify_expr to do simplifications of
5006 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5007 something changes. */
5008
static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* Iterate to a fixpoint: each simplification may expose another
     (e.g. stripping a WITH_SIZE_EXPR may reveal a CALL_EXPR).  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  Note the probing call passes notify_temp_creation
		 (last argument) as true, so it has no side effects.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	       *(const A*)(A*)&x

	       where the type of "x" is a (possibly cv-qualified variant
	       of "A"), treat the entire expression as identical to "x".
	       This kind of code arises in C++ when an object is bound
	       to a const reference, and if "x" is a TARGET_EXPR we want
	       to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		/* Preserve the volatility of the original reference on
		   the folded replacement.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized. */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		       && (!is_gimple_variable (*to_p)
			   || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* voidify_wrapper_expr moves the assignment inside the
	       wrapper and returns the (possibly replaced) toplevel
	       expression; it must still be *expr_p here.  */
	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* Otherwise fall through to the default break.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
5295
5296
5297 /* Return true if T looks like a valid GIMPLE statement. */
5298
static bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  /* Classify by tree code: a GENERIC node is a valid statement if it is
     one of the void-typed statement-like constructs below, or a
     call/assignment/predict of any type.  */
  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OACC_PARALLEL:
    case OACC_KERNELS:
    case OACC_DATA:
    case OACC_HOST_DATA:
    case OACC_DECLARE:
    case OACC_UPDATE:
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
    case OACC_CACHE:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SIMD:
    case CILK_SIMD:
    case OMP_DISTRIBUTE:
    case OACC_LOOP:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_TASKGROUP:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
    case OMP_TARGET:
    case OMP_TARGET_DATA:
    case OMP_TARGET_UPDATE:
    case OMP_TARGET_ENTER_DATA:
    case OMP_TARGET_EXIT_DATA:
    case OMP_TASKLOOP:
    case OMP_TEAMS:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}
5369
5370
5371 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5372 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5373 DECL_GIMPLE_REG_P set.
5374
5375 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5376 other, unmodified part of the complex object just before the total store.
5377 As a consequence, if the object is still uninitialized, an undefined value
5378 will be loaded into a register, which may result in a spurious exception
5379 if the register is floating-point and the value happens to be a signaling
5380 NaN for example. Then the fully-fledged complex operations lowering pass
5381 followed by a DCE pass are necessary in order to fix things up. */
5382
5383 static enum gimplify_status
5384 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5385 bool want_value)
5386 {
5387 enum tree_code code, ocode;
5388 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5389
5390 lhs = TREE_OPERAND (*expr_p, 0);
5391 rhs = TREE_OPERAND (*expr_p, 1);
5392 code = TREE_CODE (lhs);
5393 lhs = TREE_OPERAND (lhs, 0);
5394
5395 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5396 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5397 TREE_NO_WARNING (other) = 1;
5398 other = get_formal_tmp_var (other, pre_p);
5399
5400 realpart = code == REALPART_EXPR ? rhs : other;
5401 imagpart = code == REALPART_EXPR ? other : rhs;
5402
5403 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5404 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5405 else
5406 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5407
5408 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5409 *expr_p = (want_value) ? rhs : NULL_TREE;
5410
5411 return GS_ALL_DONE;
5412 }
5413
5414 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5415
5416 modify_expr
5417 : varname '=' rhs
5418 | '*' ID '=' rhs
5419
5420 PRE_P points to the list where side effects that must happen before
5421 *EXPR_P should be stored.
5422
5423 POST_P points to the list where side effects that must happen after
5424 *EXPR_P should be stored.
5425
5426 WANT_VALUE is nonzero iff we want to use the value of this expression
5427 in another expression. */
5428
static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value
		  && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ???  Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  /* Rebuild the IFN_VA_ARG call with the size appended as an
	     extra trailing argument.  */
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* For a volatile LHS, read the value back through a temporary rather
     than through the volatile object itself.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* A three-argument __builtin_expect becomes the internal
	     IFN_BUILTIN_EXPECT function.  */
	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p);
	      gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
	    }
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ???  This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();

      if (EXPR_CILK_SPAWN (*from_p))
	gimplify_cilk_detach (pre_p);
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
5699
5700 /* Gimplify a comparison between two variable-sized objects. Do this
5701 with a call to BUILT_IN_MEMCMP. */
5702
5703 static enum gimplify_status
5704 gimplify_variable_sized_compare (tree *expr_p)
5705 {
5706 location_t loc = EXPR_LOCATION (*expr_p);
5707 tree op0 = TREE_OPERAND (*expr_p, 0);
5708 tree op1 = TREE_OPERAND (*expr_p, 1);
5709 tree t, arg, dest, src, expr;
5710
5711 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5712 arg = unshare_expr (arg);
5713 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5714 src = build_fold_addr_expr_loc (loc, op1);
5715 dest = build_fold_addr_expr_loc (loc, op0);
5716 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5717 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5718
5719 expr
5720 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5721 SET_EXPR_LOCATION (expr, loc);
5722 *expr_p = expr;
5723
5724 return GS_OK;
5725 }
5726
5727 /* Gimplify a comparison between two aggregate objects of integral scalar
5728 mode as a comparison between the bitwise equivalent scalar values. */
5729
5730 static enum gimplify_status
5731 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5732 {
5733 location_t loc = EXPR_LOCATION (*expr_p);
5734 tree op0 = TREE_OPERAND (*expr_p, 0);
5735 tree op1 = TREE_OPERAND (*expr_p, 1);
5736
5737 tree type = TREE_TYPE (op0);
5738 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5739
5740 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5741 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5742
5743 *expr_p
5744 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5745
5746 return GS_OK;
5747 }
5748
5749 /* Gimplify an expression sequence. This function gimplifies each
5750 expression and rewrites the original expression with the last
5751 expression of the sequence in GIMPLE form.
5752
5753 PRE_P points to the list where the side effects for all the
5754 expressions in the sequence will be emitted.
5755
5756 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5757
5758 static enum gimplify_status
5759 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5760 {
5761 tree t = *expr_p;
5762
5763 do
5764 {
5765 tree *sub_p = &TREE_OPERAND (t, 0);
5766
5767 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5768 gimplify_compound_expr (sub_p, pre_p, false);
5769 else
5770 gimplify_stmt (sub_p, pre_p);
5771
5772 t = TREE_OPERAND (t, 1);
5773 }
5774 while (TREE_CODE (t) == COMPOUND_EXPR);
5775
5776 *expr_p = t;
5777 if (want_value)
5778 return GS_OK;
5779 else
5780 {
5781 gimplify_stmt (expr_p, pre_p);
5782 return GS_ALL_DONE;
5783 }
5784 }
5785
5786 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5787 gimplify. After gimplification, EXPR_P will point to a new temporary
5788 that holds the original value of the SAVE_EXPR node.
5789
5790 PRE_P points to the list where side effects that must happen before
5791 *EXPR_P should be stored. */
5792
5793 static enum gimplify_status
5794 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5795 {
5796 enum gimplify_status ret = GS_ALL_DONE;
5797 tree val;
5798
5799 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5800 val = TREE_OPERAND (*expr_p, 0);
5801
5802 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5803 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5804 {
5805 gcc_assert (TREE_TYPE (val) != void_type_node);
5806 /* The temporary may not be an SSA name as later abnormal and EH
5807 control flow may invalidate use/def domination. */
5808 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5809
5810 TREE_OPERAND (*expr_p, 0) = val;
5811 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5812 }
5813
5814 *expr_p = val;
5815
5816 return ret;
5817 }
5818
5819 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5820
5821 unary_expr
5822 : ...
5823 | '&' varname
5824 ...
5825
5826 PRE_P points to the list where side effects that must happen before
5827 *EXPR_P should be stored.
5828
5829 POST_P points to the list where side effects that must happen after
5830 *EXPR_P should be stored. */
5831
static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	/* &*ptr collapses to ptr, converted to the ADDR_EXPR's type
	   if the pointer types differ non-uselessly.  */
        if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* &MEM[p, 0] is equivalent to &*p; reuse the INDIRECT_REF path.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
         if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
5942
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.

   *EXPR_P is the ASM_EXPR being lowered.  Statements that must run before
   the asm are appended to *PRE_P; statements that must run after it
   (copies out of register temporaries created for outputs) go to *POST_P.
   On success a GIMPLE_ASM is appended to *PRE_P and GS_ALL_DONE is
   returned; if any operand or constraint is invalid, GS_ERROR is returned
   and no asm statement is added to the IL.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  /* Output constraint strings indexed by operand number; consulted when
     parsing input constraints that refer back to outputs.  */
  const char **oconstraints;
  /* Running operand number; NOT reset between the output and input loops,
     so diagnostics for inputs use the overall operand index.  */
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      /* Save the chain now: the link is unchained below before being
	 pushed into the OUTPUTS vector.  */
      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  /* Record the error but keep going so further diagnostics for
	     the remaining operands are still emitted.  */
	  ret = GS_ERROR;
	  is_inout = false;
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      /* "+" (in/out) operands must be gimple min lvalues so the same
	 expression can serve both directions.  */
      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      /* If the constraint does not allow memory make sure we gimplify
	 it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (! is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      /* Route the output through a fresh register temporary:
		 copy in before the asm for in/out operands, and always
		 copy the result back out after the asm.  */
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      if (is_inout)
		{
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass: compute an upper bound on the rebuilt
		     constraint's length so it can be alloca'd.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  str = (char *) alloca (len);
		  /* Second pass: rewrite each alternative, substituting
		     the operand number for register alternatives.  */
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Temporarily prefix the alternative with '=' so
			 parse_output_constraint accepts it; BEG points one
			 past the previous separator so beg[-1] is safely
			 inside P.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* Memory-only in/out: reuse the constraint sans the '+'.  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  /* Chain the synthesized input onto the asm so the input loop
	     below picks it up.  */
	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Side-effecting expressions are never valid memory inputs;
	     poison them so gimplification reports the error below.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			"memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Clobbers and labels need no gimplification; just unchain each link
     and collect it.  (The operand counter keeps advancing but is unused
     from here on.)  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      /* An asm with no outputs is implicitly volatile: it is kept for
	 its side effects alone.  */
      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
6243
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   The gimplified body (with embedded GIMPLE_WITH_CLEANUP_EXPR markers) is
   rewritten in place: each marker either has its cleanup inlined (when it
   is the last statement) or becomes a GIMPLE_TRY wrapping the statements
   that follow it.  The result is appended to *PRE_P.  Returns GS_OK with
   *EXPR_P set to the wrapper temporary when voidify_wrapper_expr created
   one, otherwise GS_ALL_DONE with *EXPR_P cleared.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  /* Gimplify the body into a private sequence; gimple_push_cleanup will
     have planted GIMPLE_WITH_CLEANUP_EXPR markers in it.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The marker is the last statement: no protected region
		 remains, so just inline the cleanup (unless it is
		 EH-only, in which case it is simply dropped).  */
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      /* Replace the marker with a GIMPLE_TRY protecting the rest
		 of the sequence, with the cleanup as its handler.  */
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the new try body: nested
		 markers there still need conversion.  */
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
6334
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   If FORCE_UNCOND is true perform the cleanup unconditionally; this is
   only valid for clobbers.

   VAR is the temporary the cleanup protects; it is only used to suppress
   uninitialized-use warnings in the conditional case.  Outside a
   conditional context, a GIMPLE_WITH_CLEANUP_EXPR marker is appended to
   *PRE_P; inside one, the marker goes to the enclosing context's
   conditional_cleanups sequence instead.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  /* Clobbers may run unconditionally: no flag needed, just queue
	     the cleanup on the enclosing conditional context.  */
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  /* Guard the cleanup with the flag.  */
	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);

	  /* flag = false before the conditional region; flag = true on
	     the path where the initialization actually ran (*PRE_P).  */
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  TREE_NO_WARNING (var) = 1;
	}
    }
  else
    {
      /* Unconditional context: a plain marker suffices.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
6409
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.

   *EXPR_P is the TARGET_EXPR.  The initializer (if any) is gimplified
   into *PRE_P, cleanups are registered via gimple_push_cleanup, and
   *EXPR_P is replaced by the slot temporary.  Returns GS_OK, or GS_ERROR
   if gimplifying the initializer failed.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  /* Where to insert the asan unpoison call, captured before the
     initializer adds statements to *PRE_P.  */
  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	{
	  /* Save location where we need to place unpoisoning.  It's possible
	     that a variable will be converted to needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise initialize the slot with an explicit INIT_EXPR.
	     gimplify_expr consumes the INIT_EXPR wrapper, which can then
	     be returned to the GC pool.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once.  */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    /* Push the normal cleanup last, below, after any clobber or
	       asan cleanups.  */
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      /* An empty volatile CONSTRUCTOR assignment is GIMPLE's
		 "clobber" marker, ending the temporary's lifetime.  */
	      tree clobber = build_constructor (TREE_TYPE (temp),
						NULL);
	      TREE_THIS_VOLATILE (clobber) = true;
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables && dbg_cnt (asan_use_after_scope))
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  /* Unpoison before first use; re-poison as a cleanup.  */
		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}
      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
6517
6518 /* Gimplification of expression trees. */
6519
6520 /* Gimplify an expression which appears at statement context. The
6521 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6522 NULL, a new sequence is allocated.
6523
6524 Return true if we actually added a statement to the queue. */
6525
6526 bool
6527 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6528 {
6529 gimple_seq_node last;
6530
6531 last = gimple_seq_last (*seq_p);
6532 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6533 return last != gimple_seq_last (*seq_p);
6534 }
6535
6536 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6537 to CTX. If entries already exist, force them to be some flavor of private.
6538 If there is no enclosing parallel, do nothing. */
6539
6540 void
6541 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6542 {
6543 splay_tree_node n;
6544
6545 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6546 return;
6547
6548 do
6549 {
6550 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6551 if (n != NULL)
6552 {
6553 if (n->value & GOVD_SHARED)
6554 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6555 else if (n->value & GOVD_MAP)
6556 n->value |= GOVD_MAP_TO_ONLY;
6557 else
6558 return;
6559 }
6560 else if ((ctx->region_type & ORT_TARGET) != 0)
6561 {
6562 if (ctx->target_map_scalars_firstprivate)
6563 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6564 else
6565 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6566 }
6567 else if (ctx->region_type != ORT_WORKSHARE
6568 && ctx->region_type != ORT_SIMD
6569 && ctx->region_type != ORT_ACC
6570 && !(ctx->region_type & ORT_TARGET_DATA))
6571 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6572
6573 ctx = ctx->outer_context;
6574 }
6575 while (ctx);
6576 }
6577
6578 /* Similarly for each of the type sizes of TYPE. */
6579
6580 static void
6581 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6582 {
6583 if (type == NULL || type == error_mark_node)
6584 return;
6585 type = TYPE_MAIN_VARIANT (type);
6586
6587 if (ctx->privatized_types->add (type))
6588 return;
6589
6590 switch (TREE_CODE (type))
6591 {
6592 case INTEGER_TYPE:
6593 case ENUMERAL_TYPE:
6594 case BOOLEAN_TYPE:
6595 case REAL_TYPE:
6596 case FIXED_POINT_TYPE:
6597 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6598 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6599 break;
6600
6601 case ARRAY_TYPE:
6602 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6603 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6604 break;
6605
6606 case RECORD_TYPE:
6607 case UNION_TYPE:
6608 case QUAL_UNION_TYPE:
6609 {
6610 tree field;
6611 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6612 if (TREE_CODE (field) == FIELD_DECL)
6613 {
6614 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6615 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6616 }
6617 }
6618 break;
6619
6620 case POINTER_TYPE:
6621 case REFERENCE_TYPE:
6622 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6623 break;
6624
6625 default:
6626 break;
6627 }
6628
6629 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6630 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6631 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6632 }
6633
/* Add an entry for DECL in the OMP context CTX with FLAGS.

   Handles variable-sized DECLs (registering the pointer replacement
   variable and the size expressions as well), by-reference privatization,
   and, for OpenACC reductions, propagation of a copy map to the enclosing
   parallel construct.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  Exception is a shared clause,
     there is nothing privatized in that case.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
	  || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		   && (flags & GOVD_FIRSTPRIVATE))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The replacement pointer lives in the DECL_VALUE_EXPR, which
	     must be an INDIRECT_REF of the pointer decl.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reductions clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
	{
	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
	  if (n != NULL)
	    {
	      /* Ignore local variables and explicitly declared clauses.  */
	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
		break;
	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
		{
		  /* According to the OpenACC spec, such a reduction variable
		     should already have a copy map on a kernels construct,
		     verify that here.  */
		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
			      && (n->value & GOVD_MAP));
		}
	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
		{
		  /* Remove firstprivate and make it a copy map.  */
		  n->value &= ~GOVD_FIRSTPRIVATE;
		  n->value |= GOVD_MAP;
		}
	    }
	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
				 GOVD_MAP | GOVD_SEEN);
	      break;
	    }
	  outer_ctx = outer_ctx->outer_context;
	}
    }
}
6783
6784 /* Notice a threadprivate variable DECL used in OMP context CTX.
6785 This just prints out diagnostics about threadprivate variable uses
6786 in untied tasks. If DECL2 is non-NULL, prevent this warning
6787 on that variable. */
6788
6789 static bool
6790 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6791 tree decl2)
6792 {
6793 splay_tree_node n;
6794 struct gimplify_omp_ctx *octx;
6795
6796 for (octx = ctx; octx; octx = octx->outer_context)
6797 if ((octx->region_type & ORT_TARGET) != 0)
6798 {
6799 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6800 if (n == NULL)
6801 {
6802 error ("threadprivate variable %qE used in target region",
6803 DECL_NAME (decl));
6804 error_at (octx->location, "enclosing target region");
6805 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6806 }
6807 if (decl2)
6808 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6809 }
6810
6811 if (ctx->region_type != ORT_UNTIED_TASK)
6812 return false;
6813 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6814 if (n == NULL)
6815 {
6816 error ("threadprivate variable %qE used in untied task",
6817 DECL_NAME (decl));
6818 error_at (ctx->location, "enclosing task");
6819 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6820 }
6821 if (decl2)
6822 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6823 return false;
6824 }
6825
6826 /* Return true if global var DECL is device resident. */
6827
6828 static bool
6829 device_resident_p (tree decl)
6830 {
6831 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6832
6833 if (!attr)
6834 return false;
6835
6836 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6837 {
6838 tree c = TREE_VALUE (t);
6839 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6840 return true;
6841 }
6842
6843 return false;
6844 }
6845
6846 /* Return true if DECL has an ACC DECLARE attribute. */
6847
6848 static bool
6849 is_oacc_declared (tree decl)
6850 {
6851 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6852 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6853 return declared != NULL_TREE;
6854 }
6855
/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   CTX is the innermost context, IN_CODE is passed through to
   omp_notice_variable, and FLAGS accumulates the GOVD_* bits; the
   augmented flags are returned.  For default(none) an error is also
   emitted.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not
   effectively.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  /* A language-predetermined sharing overrides the region's default
     clause.  */
  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	error_at (ctx->location, "enclosing %qs", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  /* Walk the outer contexts: the first one that determines the
	     decl's sharing decides between firstprivate and shared.  */
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      /* Target regions without a data-sharing entry for the decl
		 don't decide anything; keep looking outward.  */
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      /* No outer context decided: locals and parameters default to
	 firstprivate, globals to shared.  */
      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
6946
6947
6948 /* Determine outer default flags for DECL mentioned in an OACC region
6949 but not declared in an enclosing clause. */
6950
6951 static unsigned
6952 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6953 {
6954 const char *rkind;
6955 bool on_device = false;
6956 bool declared = is_oacc_declared (decl);
6957 tree type = TREE_TYPE (decl);
6958
6959 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6960 type = TREE_TYPE (type);
6961
6962 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6963 && is_global_var (decl)
6964 && device_resident_p (decl))
6965 {
6966 on_device = true;
6967 flags |= GOVD_MAP_TO_ONLY;
6968 }
6969
6970 switch (ctx->region_type)
6971 {
6972 case ORT_ACC_KERNELS:
6973 rkind = "kernels";
6974
6975 if (AGGREGATE_TYPE_P (type))
6976 {
6977 /* Aggregates default to 'present_or_copy', or 'present'. */
6978 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
6979 flags |= GOVD_MAP;
6980 else
6981 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
6982 }
6983 else
6984 /* Scalars default to 'copy'. */
6985 flags |= GOVD_MAP | GOVD_MAP_FORCE;
6986
6987 break;
6988
6989 case ORT_ACC_PARALLEL:
6990 rkind = "parallel";
6991
6992 if (on_device || declared)
6993 flags |= GOVD_MAP;
6994 else if (AGGREGATE_TYPE_P (type))
6995 {
6996 /* Aggregates default to 'present_or_copy', or 'present'. */
6997 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
6998 flags |= GOVD_MAP;
6999 else
7000 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7001 }
7002 else
7003 /* Scalars default to 'firstprivate'. */
7004 flags |= GOVD_FIRSTPRIVATE;
7005
7006 break;
7007
7008 default:
7009 gcc_unreachable ();
7010 }
7011
7012 if (DECL_ARTIFICIAL (decl))
7013 ; /* We can get compiler-generated decls, and should not complain
7014 about them. */
7015 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7016 {
7017 error ("%qE not specified in enclosing OpenACC %qs construct",
7018 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7019 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7020 }
7021 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7022 ; /* Handled above. */
7023 else
7024 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7025
7026 return flags;
7027 }
7028
7029 /* Record the fact that DECL was used within the OMP context CTX.
7030 IN_CODE is true when real code uses DECL, and false when we should
7031 merely emit default(none) errors. Return true if DECL is going to
7032 be remapped and thus DECL shouldn't be gimplified into its
7033 DECL_VALUE_EXPR (if any). */
7034
static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  /* ORT_NONE contexts only exist to decide whether DECL_VALUE_EXPRs
     should be used; no sharing bookkeeping happens there.  */
  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      /* A global whose value expression is rooted in a thread-local
	 base is treated as threadprivate as well.  */
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}

      /* Inside an OpenACC 'routine' function, globals may only be used
	 if they carry a 'declare' directive (and not 'link').  */
      if (gimplify_omp_ctxp->outer_context == NULL
	  && VAR_P (decl)
	  && oacc_get_fn_attrib (current_function_decl))
	{
	  location_t loc = DECL_SOURCE_LOCATION (decl);

	  if (lookup_attribute ("omp declare target link",
				DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE with %<link%> clause used in %<routine%> function",
			DECL_NAME (decl));
	      return false;
	    }
	  else if (!lookup_attribute ("omp declare target",
				      DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE requires a %<declare%> directive for use "
			"in a %<routine%> function", DECL_NAME (decl));
	      return false;
	    }
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  /* First time DECL is seen in this target region: compute the
	     implicit data-mapping flags in NFLAGS.  */
	  unsigned nflags = flags;
	  if (ctx->target_map_pointers_as_0len_arrays
	      || ctx->target_map_scalars_firstprivate)
	    {
	      bool is_declare_target = false;
	      bool is_scalar = false;
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  /* An offloadable global with no enclosing explicit
		     data-sharing is a 'declare target' variable; leave
		     its flags alone.  */
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target && ctx->target_map_scalars_firstprivate)
		is_scalar = lang_hooks.decls.omp_scalar_p (decl);
	      if (is_declare_target)
		;
	      else if (ctx->target_map_pointers_as_0len_arrays
		       && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			       && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				  == POINTER_TYPE)))
		/* Pointers (possibly behind a reference) are mapped as
		   zero-length array sections.  */
		nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
	      else if (is_scalar)
		nflags |= GOVD_FIRSTPRIVATE;
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
		        error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      if (octx->region_type == ORT_ACC_DATA
			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
			nflags |= GOVD_MAP_0LEN_ARRAY;
		      goto found_outer;
		    }
		}
	    }

	  {
	    tree type = TREE_TYPE (decl);

	    /* NFLAGS == FLAGS means none of the special cases above
	       applied; fall back to the default mapping, diagnosing
	       unmappable types.  */
	    if (nflags == flags
		&& gimplify_omp_ctxp->target_firstprivatize_array_bases
		&& lang_hooks.decls.omp_privatize_by_reference (decl))
	      type = TREE_TYPE (type);
	    if (nflags == flags
		&& !lang_hooks.types.omp_mappable_type (type))
	      {
		error ("%qD referenced in target region does not have "
		       "a mappable type", decl);
		nflags |= GOVD_MAP | GOVD_EXPLICIT;
	      }
	    else if (nflags == flags)
	      {
		if ((ctx->region_type & ORT_ACC) != 0)
		  nflags = oacc_default_clause (ctx, decl, flags);
		else
		  nflags |= GOVD_MAP;
	      }
	  }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      /* Regions without their own data-sharing semantics just forward
	 the query to the enclosing context.  */
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* The first real (GOVD_SEEN) use of a variable with non-constant size
     must also mark the decls its size depends on as seen, so their
     values are available for the privatized copy.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  splay_tree_node n2;
	  tree t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  n2->value |= GOVD_SEEN;
	}
      else if (lang_hooks.decls.omp_privatize_by_reference (decl)
	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
		   != INTEGER_CST))
	{
	  splay_tree_node n2;
	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  if (n2)
	    omp_notice_variable (ctx, t, true);
	}
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  /* Recurse outward; a "remap" answer from any enclosing context wins.  */
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
7268
7269 /* Verify that DECL is private within CTX. If there's specific information
7270 to the contrary in the innermost scope, generate an error. */
7271
static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  /* SIMD is a tri-state selecting which diagnostics apply; from the uses
     below, 0 appears to mean a non-simd construct and non-zero a simd
     one, with 1 and 2 distinguishing two simd variants — NOTE(review):
     confirm the exact encoding against the callers.  */
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  /* A shared iteration variable is only an error in the
	     innermost context; force it private for error recovery.  */
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  /* DECL has an explicit clause in the relevant context; check
	     that the chosen sharing is legal for an iteration variable.  */
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
	    error ("iteration variable %qE should not be lastprivate",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
	    error ("iteration variable %qE should not be private",
		   DECL_NAME (decl));
	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE is predetermined linear",
		   DECL_NAME (decl));
	}
      /* Treat DECL as private exactly when the entry belongs to the
	 innermost context (or the combined parallel just outside it).  */
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* No entry here: only transparent region kinds forward the question
     to the enclosing context.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
7333
7334 /* Return true if DECL is private within a parallel region
7335 that binds to the current construct's context or in parallel
7336 region's REDUCTION clause. */
7337
7338 static bool
7339 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7340 {
7341 splay_tree_node n;
7342
7343 do
7344 {
7345 ctx = ctx->outer_context;
7346 if (ctx == NULL)
7347 {
7348 if (is_global_var (decl))
7349 return false;
7350
7351 /* References might be private, but might be shared too,
7352 when checking for copyprivate, assume they might be
7353 private, otherwise assume they might be shared. */
7354 if (copyprivate)
7355 return true;
7356
7357 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7358 return false;
7359
7360 /* Treat C++ privatized non-static data members outside
7361 of the privatization the same. */
7362 if (omp_member_access_dummy_var (decl))
7363 return false;
7364
7365 return true;
7366 }
7367
7368 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7369
7370 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7371 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7372 continue;
7373
7374 if (n != NULL)
7375 {
7376 if ((n->value & GOVD_LOCAL) != 0
7377 && omp_member_access_dummy_var (decl))
7378 return false;
7379 return (n->value & GOVD_SHARED) == 0;
7380 }
7381 }
7382 while (ctx->region_type == ORT_WORKSHARE
7383 || ctx->region_type == ORT_SIMD
7384 || ctx->region_type == ORT_ACC);
7385 return false;
7386 }
7387
7388 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7389
7390 static tree
7391 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7392 {
7393 tree t = *tp;
7394
7395 /* If this node has been visited, unmark it and keep looking. */
7396 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7397 return t;
7398
7399 if (IS_TYPE_OR_DECL_P (t))
7400 *walk_subtrees = 0;
7401 return NULL_TREE;
7402 }
7403
7404 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7405 and previous omp contexts. */
7406
7407 static void
7408 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7409 enum omp_region_type region_type,
7410 enum tree_code code)
7411 {
7412 struct gimplify_omp_ctx *ctx, *outer_ctx;
7413 tree c;
7414 hash_map<tree, tree> *struct_map_to_clause = NULL;
7415 tree *prev_list_p = NULL;
7416
7417 ctx = new_omp_context (region_type);
7418 outer_ctx = ctx->outer_context;
7419 if (code == OMP_TARGET)
7420 {
7421 if (!lang_GNU_Fortran ())
7422 ctx->target_map_pointers_as_0len_arrays = true;
7423 ctx->target_map_scalars_firstprivate = true;
7424 }
7425 if (!lang_GNU_Fortran ())
7426 switch (code)
7427 {
7428 case OMP_TARGET:
7429 case OMP_TARGET_DATA:
7430 case OMP_TARGET_ENTER_DATA:
7431 case OMP_TARGET_EXIT_DATA:
7432 case OACC_DECLARE:
7433 case OACC_HOST_DATA:
7434 ctx->target_firstprivatize_array_bases = true;
7435 default:
7436 break;
7437 }
7438
7439 while ((c = *list_p) != NULL)
7440 {
7441 bool remove = false;
7442 bool notice_outer = true;
7443 const char *check_non_private = NULL;
7444 unsigned int flags;
7445 tree decl;
7446
7447 switch (OMP_CLAUSE_CODE (c))
7448 {
7449 case OMP_CLAUSE_PRIVATE:
7450 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7451 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7452 {
7453 flags |= GOVD_PRIVATE_OUTER_REF;
7454 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7455 }
7456 else
7457 notice_outer = false;
7458 goto do_add;
7459 case OMP_CLAUSE_SHARED:
7460 flags = GOVD_SHARED | GOVD_EXPLICIT;
7461 goto do_add;
7462 case OMP_CLAUSE_FIRSTPRIVATE:
7463 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7464 check_non_private = "firstprivate";
7465 goto do_add;
7466 case OMP_CLAUSE_LASTPRIVATE:
7467 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7468 check_non_private = "lastprivate";
7469 decl = OMP_CLAUSE_DECL (c);
7470 if (error_operand_p (decl))
7471 goto do_add;
7472 else if (outer_ctx
7473 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7474 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7475 && splay_tree_lookup (outer_ctx->variables,
7476 (splay_tree_key) decl) == NULL)
7477 {
7478 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7479 if (outer_ctx->outer_context)
7480 omp_notice_variable (outer_ctx->outer_context, decl, true);
7481 }
7482 else if (outer_ctx
7483 && (outer_ctx->region_type & ORT_TASK) != 0
7484 && outer_ctx->combined_loop
7485 && splay_tree_lookup (outer_ctx->variables,
7486 (splay_tree_key) decl) == NULL)
7487 {
7488 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7489 if (outer_ctx->outer_context)
7490 omp_notice_variable (outer_ctx->outer_context, decl, true);
7491 }
7492 else if (outer_ctx
7493 && (outer_ctx->region_type == ORT_WORKSHARE
7494 || outer_ctx->region_type == ORT_ACC)
7495 && outer_ctx->combined_loop
7496 && splay_tree_lookup (outer_ctx->variables,
7497 (splay_tree_key) decl) == NULL
7498 && !omp_check_private (outer_ctx, decl, false))
7499 {
7500 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7501 if (outer_ctx->outer_context
7502 && (outer_ctx->outer_context->region_type
7503 == ORT_COMBINED_PARALLEL)
7504 && splay_tree_lookup (outer_ctx->outer_context->variables,
7505 (splay_tree_key) decl) == NULL)
7506 {
7507 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7508 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7509 if (octx->outer_context)
7510 {
7511 octx = octx->outer_context;
7512 if (octx->region_type == ORT_WORKSHARE
7513 && octx->combined_loop
7514 && splay_tree_lookup (octx->variables,
7515 (splay_tree_key) decl) == NULL
7516 && !omp_check_private (octx, decl, false))
7517 {
7518 omp_add_variable (octx, decl,
7519 GOVD_LASTPRIVATE | GOVD_SEEN);
7520 octx = octx->outer_context;
7521 if (octx
7522 && octx->region_type == ORT_COMBINED_TEAMS
7523 && (splay_tree_lookup (octx->variables,
7524 (splay_tree_key) decl)
7525 == NULL))
7526 {
7527 omp_add_variable (octx, decl,
7528 GOVD_SHARED | GOVD_SEEN);
7529 octx = octx->outer_context;
7530 }
7531 }
7532 if (octx)
7533 omp_notice_variable (octx, decl, true);
7534 }
7535 }
7536 else if (outer_ctx->outer_context)
7537 omp_notice_variable (outer_ctx->outer_context, decl, true);
7538 }
7539 goto do_add;
7540 case OMP_CLAUSE_REDUCTION:
7541 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7542 /* OpenACC permits reductions on private variables. */
7543 if (!(region_type & ORT_ACC))
7544 check_non_private = "reduction";
7545 decl = OMP_CLAUSE_DECL (c);
7546 if (TREE_CODE (decl) == MEM_REF)
7547 {
7548 tree type = TREE_TYPE (decl);
7549 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7550 NULL, is_gimple_val, fb_rvalue, false)
7551 == GS_ERROR)
7552 {
7553 remove = true;
7554 break;
7555 }
7556 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7557 if (DECL_P (v))
7558 {
7559 omp_firstprivatize_variable (ctx, v);
7560 omp_notice_variable (ctx, v, true);
7561 }
7562 decl = TREE_OPERAND (decl, 0);
7563 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7564 {
7565 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7566 NULL, is_gimple_val, fb_rvalue, false)
7567 == GS_ERROR)
7568 {
7569 remove = true;
7570 break;
7571 }
7572 v = TREE_OPERAND (decl, 1);
7573 if (DECL_P (v))
7574 {
7575 omp_firstprivatize_variable (ctx, v);
7576 omp_notice_variable (ctx, v, true);
7577 }
7578 decl = TREE_OPERAND (decl, 0);
7579 }
7580 if (TREE_CODE (decl) == ADDR_EXPR
7581 || TREE_CODE (decl) == INDIRECT_REF)
7582 decl = TREE_OPERAND (decl, 0);
7583 }
7584 goto do_add_decl;
7585 case OMP_CLAUSE_LINEAR:
7586 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7587 is_gimple_val, fb_rvalue) == GS_ERROR)
7588 {
7589 remove = true;
7590 break;
7591 }
7592 else
7593 {
7594 if (code == OMP_SIMD
7595 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7596 {
7597 struct gimplify_omp_ctx *octx = outer_ctx;
7598 if (octx
7599 && octx->region_type == ORT_WORKSHARE
7600 && octx->combined_loop
7601 && !octx->distribute)
7602 {
7603 if (octx->outer_context
7604 && (octx->outer_context->region_type
7605 == ORT_COMBINED_PARALLEL))
7606 octx = octx->outer_context->outer_context;
7607 else
7608 octx = octx->outer_context;
7609 }
7610 if (octx
7611 && octx->region_type == ORT_WORKSHARE
7612 && octx->combined_loop
7613 && octx->distribute)
7614 {
7615 error_at (OMP_CLAUSE_LOCATION (c),
7616 "%<linear%> clause for variable other than "
7617 "loop iterator specified on construct "
7618 "combined with %<distribute%>");
7619 remove = true;
7620 break;
7621 }
7622 }
7623 /* For combined #pragma omp parallel for simd, need to put
7624 lastprivate and perhaps firstprivate too on the
7625 parallel. Similarly for #pragma omp for simd. */
7626 struct gimplify_omp_ctx *octx = outer_ctx;
7627 decl = NULL_TREE;
7628 do
7629 {
7630 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7631 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7632 break;
7633 decl = OMP_CLAUSE_DECL (c);
7634 if (error_operand_p (decl))
7635 {
7636 decl = NULL_TREE;
7637 break;
7638 }
7639 flags = GOVD_SEEN;
7640 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7641 flags |= GOVD_FIRSTPRIVATE;
7642 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7643 flags |= GOVD_LASTPRIVATE;
7644 if (octx
7645 && octx->region_type == ORT_WORKSHARE
7646 && octx->combined_loop)
7647 {
7648 if (octx->outer_context
7649 && (octx->outer_context->region_type
7650 == ORT_COMBINED_PARALLEL))
7651 octx = octx->outer_context;
7652 else if (omp_check_private (octx, decl, false))
7653 break;
7654 }
7655 else if (octx
7656 && (octx->region_type & ORT_TASK) != 0
7657 && octx->combined_loop)
7658 ;
7659 else if (octx
7660 && octx->region_type == ORT_COMBINED_PARALLEL
7661 && ctx->region_type == ORT_WORKSHARE
7662 && octx == outer_ctx)
7663 flags = GOVD_SEEN | GOVD_SHARED;
7664 else if (octx
7665 && octx->region_type == ORT_COMBINED_TEAMS)
7666 flags = GOVD_SEEN | GOVD_SHARED;
7667 else if (octx
7668 && octx->region_type == ORT_COMBINED_TARGET)
7669 {
7670 flags &= ~GOVD_LASTPRIVATE;
7671 if (flags == GOVD_SEEN)
7672 break;
7673 }
7674 else
7675 break;
7676 splay_tree_node on
7677 = splay_tree_lookup (octx->variables,
7678 (splay_tree_key) decl);
7679 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7680 {
7681 octx = NULL;
7682 break;
7683 }
7684 omp_add_variable (octx, decl, flags);
7685 if (octx->outer_context == NULL)
7686 break;
7687 octx = octx->outer_context;
7688 }
7689 while (1);
7690 if (octx
7691 && decl
7692 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7693 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7694 omp_notice_variable (octx, decl, true);
7695 }
7696 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7697 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7698 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7699 {
7700 notice_outer = false;
7701 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7702 }
7703 goto do_add;
7704
7705 case OMP_CLAUSE_MAP:
7706 decl = OMP_CLAUSE_DECL (c);
7707 if (error_operand_p (decl))
7708 remove = true;
7709 switch (code)
7710 {
7711 case OMP_TARGET:
7712 break;
7713 case OACC_DATA:
7714 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7715 break;
7716 /* FALLTHRU */
7717 case OMP_TARGET_DATA:
7718 case OMP_TARGET_ENTER_DATA:
7719 case OMP_TARGET_EXIT_DATA:
7720 case OACC_ENTER_DATA:
7721 case OACC_EXIT_DATA:
7722 case OACC_HOST_DATA:
7723 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7724 || (OMP_CLAUSE_MAP_KIND (c)
7725 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7726 /* For target {,enter ,exit }data only the array slice is
7727 mapped, but not the pointer to it. */
7728 remove = true;
7729 break;
7730 default:
7731 break;
7732 }
7733 if (remove)
7734 break;
7735 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7736 {
7737 struct gimplify_omp_ctx *octx;
7738 for (octx = outer_ctx; octx; octx = octx->outer_context)
7739 {
7740 if (octx->region_type != ORT_ACC_HOST_DATA)
7741 break;
7742 splay_tree_node n2
7743 = splay_tree_lookup (octx->variables,
7744 (splay_tree_key) decl);
7745 if (n2)
7746 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7747 "declared in enclosing %<host_data%> region",
7748 DECL_NAME (decl));
7749 }
7750 }
7751 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7752 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7753 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7754 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7755 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7756 {
7757 remove = true;
7758 break;
7759 }
7760 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7761 || (OMP_CLAUSE_MAP_KIND (c)
7762 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7763 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7764 {
7765 OMP_CLAUSE_SIZE (c)
7766 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7767 false);
7768 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7769 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7770 }
7771 if (!DECL_P (decl))
7772 {
7773 tree d = decl, *pd;
7774 if (TREE_CODE (d) == ARRAY_REF)
7775 {
7776 while (TREE_CODE (d) == ARRAY_REF)
7777 d = TREE_OPERAND (d, 0);
7778 if (TREE_CODE (d) == COMPONENT_REF
7779 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7780 decl = d;
7781 }
7782 pd = &OMP_CLAUSE_DECL (c);
7783 if (d == decl
7784 && TREE_CODE (decl) == INDIRECT_REF
7785 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7786 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7787 == REFERENCE_TYPE))
7788 {
7789 pd = &TREE_OPERAND (decl, 0);
7790 decl = TREE_OPERAND (decl, 0);
7791 }
7792 if (TREE_CODE (decl) == COMPONENT_REF)
7793 {
7794 while (TREE_CODE (decl) == COMPONENT_REF)
7795 decl = TREE_OPERAND (decl, 0);
7796 if (TREE_CODE (decl) == INDIRECT_REF
7797 && DECL_P (TREE_OPERAND (decl, 0))
7798 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7799 == REFERENCE_TYPE))
7800 decl = TREE_OPERAND (decl, 0);
7801 }
7802 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7803 == GS_ERROR)
7804 {
7805 remove = true;
7806 break;
7807 }
7808 if (DECL_P (decl))
7809 {
7810 if (error_operand_p (decl))
7811 {
7812 remove = true;
7813 break;
7814 }
7815
7816 tree stype = TREE_TYPE (decl);
7817 if (TREE_CODE (stype) == REFERENCE_TYPE)
7818 stype = TREE_TYPE (stype);
7819 if (TYPE_SIZE_UNIT (stype) == NULL
7820 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7821 {
7822 error_at (OMP_CLAUSE_LOCATION (c),
7823 "mapping field %qE of variable length "
7824 "structure", OMP_CLAUSE_DECL (c));
7825 remove = true;
7826 break;
7827 }
7828
7829 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7830 {
7831 /* Error recovery. */
7832 if (prev_list_p == NULL)
7833 {
7834 remove = true;
7835 break;
7836 }
7837 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7838 {
7839 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7840 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7841 {
7842 remove = true;
7843 break;
7844 }
7845 }
7846 }
7847
7848 tree offset;
7849 HOST_WIDE_INT bitsize, bitpos;
7850 machine_mode mode;
7851 int unsignedp, reversep, volatilep = 0;
7852 tree base = OMP_CLAUSE_DECL (c);
7853 while (TREE_CODE (base) == ARRAY_REF)
7854 base = TREE_OPERAND (base, 0);
7855 if (TREE_CODE (base) == INDIRECT_REF)
7856 base = TREE_OPERAND (base, 0);
7857 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7858 &mode, &unsignedp, &reversep,
7859 &volatilep);
7860 tree orig_base = base;
7861 if ((TREE_CODE (base) == INDIRECT_REF
7862 || (TREE_CODE (base) == MEM_REF
7863 && integer_zerop (TREE_OPERAND (base, 1))))
7864 && DECL_P (TREE_OPERAND (base, 0))
7865 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7866 == REFERENCE_TYPE))
7867 base = TREE_OPERAND (base, 0);
7868 gcc_assert (base == decl
7869 && (offset == NULL_TREE
7870 || TREE_CODE (offset) == INTEGER_CST));
7871
7872 splay_tree_node n
7873 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7874 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7875 == GOMP_MAP_ALWAYS_POINTER);
7876 if (n == NULL || (n->value & GOVD_MAP) == 0)
7877 {
7878 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7879 OMP_CLAUSE_MAP);
7880 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7881 if (orig_base != base)
7882 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7883 else
7884 OMP_CLAUSE_DECL (l) = decl;
7885 OMP_CLAUSE_SIZE (l) = size_int (1);
7886 if (struct_map_to_clause == NULL)
7887 struct_map_to_clause = new hash_map<tree, tree>;
7888 struct_map_to_clause->put (decl, l);
7889 if (ptr)
7890 {
7891 enum gomp_map_kind mkind
7892 = code == OMP_TARGET_EXIT_DATA
7893 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7894 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7895 OMP_CLAUSE_MAP);
7896 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7897 OMP_CLAUSE_DECL (c2)
7898 = unshare_expr (OMP_CLAUSE_DECL (c));
7899 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7900 OMP_CLAUSE_SIZE (c2)
7901 = TYPE_SIZE_UNIT (ptr_type_node);
7902 OMP_CLAUSE_CHAIN (l) = c2;
7903 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7904 {
7905 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7906 tree c3
7907 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7908 OMP_CLAUSE_MAP);
7909 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7910 OMP_CLAUSE_DECL (c3)
7911 = unshare_expr (OMP_CLAUSE_DECL (c4));
7912 OMP_CLAUSE_SIZE (c3)
7913 = TYPE_SIZE_UNIT (ptr_type_node);
7914 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7915 OMP_CLAUSE_CHAIN (c2) = c3;
7916 }
7917 *prev_list_p = l;
7918 prev_list_p = NULL;
7919 }
7920 else
7921 {
7922 OMP_CLAUSE_CHAIN (l) = c;
7923 *list_p = l;
7924 list_p = &OMP_CLAUSE_CHAIN (l);
7925 }
7926 if (orig_base != base && code == OMP_TARGET)
7927 {
7928 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7929 OMP_CLAUSE_MAP);
7930 enum gomp_map_kind mkind
7931 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7932 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7933 OMP_CLAUSE_DECL (c2) = decl;
7934 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7935 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7936 OMP_CLAUSE_CHAIN (l) = c2;
7937 }
7938 flags = GOVD_MAP | GOVD_EXPLICIT;
7939 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7940 flags |= GOVD_SEEN;
7941 goto do_add_decl;
7942 }
7943 else
7944 {
7945 tree *osc = struct_map_to_clause->get (decl);
7946 tree *sc = NULL, *scp = NULL;
7947 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7948 n->value |= GOVD_SEEN;
7949 offset_int o1, o2;
7950 if (offset)
7951 o1 = wi::to_offset (offset);
7952 else
7953 o1 = 0;
7954 if (bitpos)
7955 o1 = o1 + bitpos / BITS_PER_UNIT;
7956 sc = &OMP_CLAUSE_CHAIN (*osc);
7957 if (*sc != c
7958 && (OMP_CLAUSE_MAP_KIND (*sc)
7959 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7960 sc = &OMP_CLAUSE_CHAIN (*sc);
7961 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7962 if (ptr && sc == prev_list_p)
7963 break;
7964 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7965 != COMPONENT_REF
7966 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7967 != INDIRECT_REF)
7968 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7969 != ARRAY_REF))
7970 break;
7971 else
7972 {
7973 tree offset2;
7974 HOST_WIDE_INT bitsize2, bitpos2;
7975 base = OMP_CLAUSE_DECL (*sc);
7976 if (TREE_CODE (base) == ARRAY_REF)
7977 {
7978 while (TREE_CODE (base) == ARRAY_REF)
7979 base = TREE_OPERAND (base, 0);
7980 if (TREE_CODE (base) != COMPONENT_REF
7981 || (TREE_CODE (TREE_TYPE (base))
7982 != ARRAY_TYPE))
7983 break;
7984 }
7985 else if (TREE_CODE (base) == INDIRECT_REF
7986 && (TREE_CODE (TREE_OPERAND (base, 0))
7987 == COMPONENT_REF)
7988 && (TREE_CODE (TREE_TYPE
7989 (TREE_OPERAND (base, 0)))
7990 == REFERENCE_TYPE))
7991 base = TREE_OPERAND (base, 0);
7992 base = get_inner_reference (base, &bitsize2,
7993 &bitpos2, &offset2,
7994 &mode, &unsignedp,
7995 &reversep, &volatilep);
7996 if ((TREE_CODE (base) == INDIRECT_REF
7997 || (TREE_CODE (base) == MEM_REF
7998 && integer_zerop (TREE_OPERAND (base,
7999 1))))
8000 && DECL_P (TREE_OPERAND (base, 0))
8001 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8002 0)))
8003 == REFERENCE_TYPE))
8004 base = TREE_OPERAND (base, 0);
8005 if (base != decl)
8006 break;
8007 if (scp)
8008 continue;
8009 gcc_assert (offset == NULL_TREE
8010 || TREE_CODE (offset) == INTEGER_CST);
8011 tree d1 = OMP_CLAUSE_DECL (*sc);
8012 tree d2 = OMP_CLAUSE_DECL (c);
8013 while (TREE_CODE (d1) == ARRAY_REF)
8014 d1 = TREE_OPERAND (d1, 0);
8015 while (TREE_CODE (d2) == ARRAY_REF)
8016 d2 = TREE_OPERAND (d2, 0);
8017 if (TREE_CODE (d1) == INDIRECT_REF)
8018 d1 = TREE_OPERAND (d1, 0);
8019 if (TREE_CODE (d2) == INDIRECT_REF)
8020 d2 = TREE_OPERAND (d2, 0);
8021 while (TREE_CODE (d1) == COMPONENT_REF)
8022 if (TREE_CODE (d2) == COMPONENT_REF
8023 && TREE_OPERAND (d1, 1)
8024 == TREE_OPERAND (d2, 1))
8025 {
8026 d1 = TREE_OPERAND (d1, 0);
8027 d2 = TREE_OPERAND (d2, 0);
8028 }
8029 else
8030 break;
8031 if (d1 == d2)
8032 {
8033 error_at (OMP_CLAUSE_LOCATION (c),
8034 "%qE appears more than once in map "
8035 "clauses", OMP_CLAUSE_DECL (c));
8036 remove = true;
8037 break;
8038 }
8039 if (offset2)
8040 o2 = wi::to_offset (offset2);
8041 else
8042 o2 = 0;
8043 if (bitpos2)
8044 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8045 if (wi::ltu_p (o1, o2)
8046 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
8047 {
8048 if (ptr)
8049 scp = sc;
8050 else
8051 break;
8052 }
8053 }
8054 if (remove)
8055 break;
8056 OMP_CLAUSE_SIZE (*osc)
8057 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8058 size_one_node);
8059 if (ptr)
8060 {
8061 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8062 OMP_CLAUSE_MAP);
8063 tree cl = NULL_TREE;
8064 enum gomp_map_kind mkind
8065 = code == OMP_TARGET_EXIT_DATA
8066 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8067 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8068 OMP_CLAUSE_DECL (c2)
8069 = unshare_expr (OMP_CLAUSE_DECL (c));
8070 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8071 OMP_CLAUSE_SIZE (c2)
8072 = TYPE_SIZE_UNIT (ptr_type_node);
8073 cl = scp ? *prev_list_p : c2;
8074 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8075 {
8076 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8077 tree c3
8078 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8079 OMP_CLAUSE_MAP);
8080 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8081 OMP_CLAUSE_DECL (c3)
8082 = unshare_expr (OMP_CLAUSE_DECL (c4));
8083 OMP_CLAUSE_SIZE (c3)
8084 = TYPE_SIZE_UNIT (ptr_type_node);
8085 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8086 if (!scp)
8087 OMP_CLAUSE_CHAIN (c2) = c3;
8088 else
8089 cl = c3;
8090 }
8091 if (scp)
8092 *scp = c2;
8093 if (sc == prev_list_p)
8094 {
8095 *sc = cl;
8096 prev_list_p = NULL;
8097 }
8098 else
8099 {
8100 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8101 list_p = prev_list_p;
8102 prev_list_p = NULL;
8103 OMP_CLAUSE_CHAIN (c) = *sc;
8104 *sc = cl;
8105 continue;
8106 }
8107 }
8108 else if (*sc != c)
8109 {
8110 *list_p = OMP_CLAUSE_CHAIN (c);
8111 OMP_CLAUSE_CHAIN (c) = *sc;
8112 *sc = c;
8113 continue;
8114 }
8115 }
8116 }
8117 if (!remove
8118 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8119 && OMP_CLAUSE_CHAIN (c)
8120 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8121 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8122 == GOMP_MAP_ALWAYS_POINTER))
8123 prev_list_p = list_p;
8124 break;
8125 }
8126 flags = GOVD_MAP | GOVD_EXPLICIT;
8127 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8128 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8129 flags |= GOVD_MAP_ALWAYS_TO;
8130 goto do_add;
8131
8132 case OMP_CLAUSE_DEPEND:
8133 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8134 {
8135 tree deps = OMP_CLAUSE_DECL (c);
8136 while (deps && TREE_CODE (deps) == TREE_LIST)
8137 {
8138 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8139 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8140 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8141 pre_p, NULL, is_gimple_val, fb_rvalue);
8142 deps = TREE_CHAIN (deps);
8143 }
8144 break;
8145 }
8146 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8147 break;
8148 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8149 {
8150 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8151 NULL, is_gimple_val, fb_rvalue);
8152 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8153 }
8154 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8155 {
8156 remove = true;
8157 break;
8158 }
8159 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8160 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8161 is_gimple_val, fb_rvalue) == GS_ERROR)
8162 {
8163 remove = true;
8164 break;
8165 }
8166 break;
8167
8168 case OMP_CLAUSE_TO:
8169 case OMP_CLAUSE_FROM:
8170 case OMP_CLAUSE__CACHE_:
8171 decl = OMP_CLAUSE_DECL (c);
8172 if (error_operand_p (decl))
8173 {
8174 remove = true;
8175 break;
8176 }
8177 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8178 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8179 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8180 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8181 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8182 {
8183 remove = true;
8184 break;
8185 }
8186 if (!DECL_P (decl))
8187 {
8188 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8189 NULL, is_gimple_lvalue, fb_lvalue)
8190 == GS_ERROR)
8191 {
8192 remove = true;
8193 break;
8194 }
8195 break;
8196 }
8197 goto do_notice;
8198
8199 case OMP_CLAUSE_USE_DEVICE_PTR:
8200 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8201 goto do_add;
8202 case OMP_CLAUSE_IS_DEVICE_PTR:
8203 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8204 goto do_add;
8205
8206 do_add:
8207 decl = OMP_CLAUSE_DECL (c);
8208 do_add_decl:
8209 if (error_operand_p (decl))
8210 {
8211 remove = true;
8212 break;
8213 }
8214 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8215 {
8216 tree t = omp_member_access_dummy_var (decl);
8217 if (t)
8218 {
8219 tree v = DECL_VALUE_EXPR (decl);
8220 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8221 if (outer_ctx)
8222 omp_notice_variable (outer_ctx, t, true);
8223 }
8224 }
8225 if (code == OACC_DATA
8226 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8227 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8228 flags |= GOVD_MAP_0LEN_ARRAY;
8229 omp_add_variable (ctx, decl, flags);
8230 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8231 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8232 {
8233 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8234 GOVD_LOCAL | GOVD_SEEN);
8235 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8236 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8237 find_decl_expr,
8238 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8239 NULL) == NULL_TREE)
8240 omp_add_variable (ctx,
8241 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8242 GOVD_LOCAL | GOVD_SEEN);
8243 gimplify_omp_ctxp = ctx;
8244 push_gimplify_context ();
8245
8246 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8247 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8248
8249 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8250 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8251 pop_gimplify_context
8252 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8253 push_gimplify_context ();
8254 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8255 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8256 pop_gimplify_context
8257 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8258 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8259 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8260
8261 gimplify_omp_ctxp = outer_ctx;
8262 }
8263 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8264 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8265 {
8266 gimplify_omp_ctxp = ctx;
8267 push_gimplify_context ();
8268 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8269 {
8270 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8271 NULL, NULL);
8272 TREE_SIDE_EFFECTS (bind) = 1;
8273 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8274 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8275 }
8276 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8277 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8278 pop_gimplify_context
8279 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8280 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8281
8282 gimplify_omp_ctxp = outer_ctx;
8283 }
8284 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8285 && OMP_CLAUSE_LINEAR_STMT (c))
8286 {
8287 gimplify_omp_ctxp = ctx;
8288 push_gimplify_context ();
8289 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8290 {
8291 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8292 NULL, NULL);
8293 TREE_SIDE_EFFECTS (bind) = 1;
8294 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8295 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8296 }
8297 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8298 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8299 pop_gimplify_context
8300 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8301 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8302
8303 gimplify_omp_ctxp = outer_ctx;
8304 }
8305 if (notice_outer)
8306 goto do_notice;
8307 break;
8308
8309 case OMP_CLAUSE_COPYIN:
8310 case OMP_CLAUSE_COPYPRIVATE:
8311 decl = OMP_CLAUSE_DECL (c);
8312 if (error_operand_p (decl))
8313 {
8314 remove = true;
8315 break;
8316 }
8317 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8318 && !remove
8319 && !omp_check_private (ctx, decl, true))
8320 {
8321 remove = true;
8322 if (is_global_var (decl))
8323 {
8324 if (DECL_THREAD_LOCAL_P (decl))
8325 remove = false;
8326 else if (DECL_HAS_VALUE_EXPR_P (decl))
8327 {
8328 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8329
8330 if (value
8331 && DECL_P (value)
8332 && DECL_THREAD_LOCAL_P (value))
8333 remove = false;
8334 }
8335 }
8336 if (remove)
8337 error_at (OMP_CLAUSE_LOCATION (c),
8338 "copyprivate variable %qE is not threadprivate"
8339 " or private in outer context", DECL_NAME (decl));
8340 }
8341 do_notice:
8342 if (outer_ctx)
8343 omp_notice_variable (outer_ctx, decl, true);
8344 if (check_non_private
8345 && region_type == ORT_WORKSHARE
8346 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8347 || decl == OMP_CLAUSE_DECL (c)
8348 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8349 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8350 == ADDR_EXPR
8351 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8352 == POINTER_PLUS_EXPR
8353 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8354 (OMP_CLAUSE_DECL (c), 0), 0))
8355 == ADDR_EXPR)))))
8356 && omp_check_private (ctx, decl, false))
8357 {
8358 error ("%s variable %qE is private in outer context",
8359 check_non_private, DECL_NAME (decl));
8360 remove = true;
8361 }
8362 break;
8363
8364 case OMP_CLAUSE_IF:
8365 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8366 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8367 {
8368 const char *p[2];
8369 for (int i = 0; i < 2; i++)
8370 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8371 {
8372 case OMP_PARALLEL: p[i] = "parallel"; break;
8373 case OMP_TASK: p[i] = "task"; break;
8374 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8375 case OMP_TARGET_DATA: p[i] = "target data"; break;
8376 case OMP_TARGET: p[i] = "target"; break;
8377 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8378 case OMP_TARGET_ENTER_DATA:
8379 p[i] = "target enter data"; break;
8380 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8381 default: gcc_unreachable ();
8382 }
8383 error_at (OMP_CLAUSE_LOCATION (c),
8384 "expected %qs %<if%> clause modifier rather than %qs",
8385 p[0], p[1]);
8386 remove = true;
8387 }
8388 /* Fall through. */
8389
8390 case OMP_CLAUSE_FINAL:
8391 OMP_CLAUSE_OPERAND (c, 0)
8392 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8393 /* Fall through. */
8394
8395 case OMP_CLAUSE_SCHEDULE:
8396 case OMP_CLAUSE_NUM_THREADS:
8397 case OMP_CLAUSE_NUM_TEAMS:
8398 case OMP_CLAUSE_THREAD_LIMIT:
8399 case OMP_CLAUSE_DIST_SCHEDULE:
8400 case OMP_CLAUSE_DEVICE:
8401 case OMP_CLAUSE_PRIORITY:
8402 case OMP_CLAUSE_GRAINSIZE:
8403 case OMP_CLAUSE_NUM_TASKS:
8404 case OMP_CLAUSE_HINT:
8405 case OMP_CLAUSE__CILK_FOR_COUNT_:
8406 case OMP_CLAUSE_ASYNC:
8407 case OMP_CLAUSE_WAIT:
8408 case OMP_CLAUSE_NUM_GANGS:
8409 case OMP_CLAUSE_NUM_WORKERS:
8410 case OMP_CLAUSE_VECTOR_LENGTH:
8411 case OMP_CLAUSE_WORKER:
8412 case OMP_CLAUSE_VECTOR:
8413 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8414 is_gimple_val, fb_rvalue) == GS_ERROR)
8415 remove = true;
8416 break;
8417
8418 case OMP_CLAUSE_GANG:
8419 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8420 is_gimple_val, fb_rvalue) == GS_ERROR)
8421 remove = true;
8422 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8423 is_gimple_val, fb_rvalue) == GS_ERROR)
8424 remove = true;
8425 break;
8426
8427 case OMP_CLAUSE_NOWAIT:
8428 case OMP_CLAUSE_ORDERED:
8429 case OMP_CLAUSE_UNTIED:
8430 case OMP_CLAUSE_COLLAPSE:
8431 case OMP_CLAUSE_TILE:
8432 case OMP_CLAUSE_AUTO:
8433 case OMP_CLAUSE_SEQ:
8434 case OMP_CLAUSE_INDEPENDENT:
8435 case OMP_CLAUSE_MERGEABLE:
8436 case OMP_CLAUSE_PROC_BIND:
8437 case OMP_CLAUSE_SAFELEN:
8438 case OMP_CLAUSE_SIMDLEN:
8439 case OMP_CLAUSE_NOGROUP:
8440 case OMP_CLAUSE_THREADS:
8441 case OMP_CLAUSE_SIMD:
8442 break;
8443
8444 case OMP_CLAUSE_DEFAULTMAP:
8445 ctx->target_map_scalars_firstprivate = false;
8446 break;
8447
8448 case OMP_CLAUSE_ALIGNED:
8449 decl = OMP_CLAUSE_DECL (c);
8450 if (error_operand_p (decl))
8451 {
8452 remove = true;
8453 break;
8454 }
8455 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8456 is_gimple_val, fb_rvalue) == GS_ERROR)
8457 {
8458 remove = true;
8459 break;
8460 }
8461 if (!is_global_var (decl)
8462 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8463 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8464 break;
8465
8466 case OMP_CLAUSE_DEFAULT:
8467 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8468 break;
8469
8470 default:
8471 gcc_unreachable ();
8472 }
8473
8474 if (code == OACC_DATA
8475 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8476 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8477 remove = true;
8478 if (remove)
8479 *list_p = OMP_CLAUSE_CHAIN (c);
8480 else
8481 list_p = &OMP_CLAUSE_CHAIN (c);
8482 }
8483
8484 gimplify_omp_ctxp = ctx;
8485 if (struct_map_to_clause)
8486 delete struct_map_to_clause;
8487 }
8488
8489 /* Return true if DECL is a candidate for shared to firstprivate
8490 optimization. We only consider non-addressable scalars, not
8491 too big, and not references. */
8492
8493 static bool
8494 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8495 {
8496 if (TREE_ADDRESSABLE (decl))
8497 return false;
8498 tree type = TREE_TYPE (decl);
8499 if (!is_gimple_reg_type (type)
8500 || TREE_CODE (type) == REFERENCE_TYPE
8501 || TREE_ADDRESSABLE (type))
8502 return false;
8503 /* Don't optimize too large decls, as each thread/task will have
8504 its own. */
8505 HOST_WIDE_INT len = int_size_in_bytes (type);
8506 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8507 return false;
8508 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8509 return false;
8510 return true;
8511 }
8512
8513 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8514 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8515 GOVD_WRITTEN in outer contexts. */
8516
8517 static void
8518 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8519 {
8520 for (; ctx; ctx = ctx->outer_context)
8521 {
8522 splay_tree_node n = splay_tree_lookup (ctx->variables,
8523 (splay_tree_key) decl);
8524 if (n == NULL)
8525 continue;
8526 else if (n->value & GOVD_SHARED)
8527 {
8528 n->value |= GOVD_WRITTEN;
8529 return;
8530 }
8531 else if (n->value & GOVD_DATA_SHARE_CLASS)
8532 return;
8533 }
8534 }
8535
8536 /* Helper callback for walk_gimple_seq to discover possible stores
8537 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8538 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8539 for those. */
8540
8541 static tree
8542 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8543 {
8544 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8545
8546 *walk_subtrees = 0;
8547 if (!wi->is_lhs)
8548 return NULL_TREE;
8549
8550 tree op = *tp;
8551 do
8552 {
8553 if (handled_component_p (op))
8554 op = TREE_OPERAND (op, 0);
8555 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8556 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8557 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8558 else
8559 break;
8560 }
8561 while (1);
8562 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8563 return NULL_TREE;
8564
8565 omp_mark_stores (gimplify_omp_ctxp, op);
8566 return NULL_TREE;
8567 }
8568
8569 /* Helper callback for walk_gimple_seq to discover possible stores
8570 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8571 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8572 for those. */
8573
8574 static tree
8575 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8576 bool *handled_ops_p,
8577 struct walk_stmt_info *wi)
8578 {
8579 gimple *stmt = gsi_stmt (*gsi_p);
8580 switch (gimple_code (stmt))
8581 {
8582 /* Don't recurse on OpenMP constructs for which
8583 gimplify_adjust_omp_clauses already handled the bodies,
8584 except handle gimple_omp_for_pre_body. */
8585 case GIMPLE_OMP_FOR:
8586 *handled_ops_p = true;
8587 if (gimple_omp_for_pre_body (stmt))
8588 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8589 omp_find_stores_stmt, omp_find_stores_op, wi);
8590 break;
8591 case GIMPLE_OMP_PARALLEL:
8592 case GIMPLE_OMP_TASK:
8593 case GIMPLE_OMP_SECTIONS:
8594 case GIMPLE_OMP_SINGLE:
8595 case GIMPLE_OMP_TARGET:
8596 case GIMPLE_OMP_TEAMS:
8597 case GIMPLE_OMP_CRITICAL:
8598 *handled_ops_p = true;
8599 break;
8600 default:
8601 break;
8602 }
8603 return NULL_TREE;
8604 }
8605
/* Argument bundle passed through the opaque DATA pointer when
   gimplify_adjust_omp_clauses_1 is invoked as a splay-tree callback.  */

struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;		/* Head of the clause chain being built.  */
  gimple_seq *pre_p;	/* Sequence receiving any pre-statements.  */
};
8611
/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.

   Splay-tree callback: N maps a decl to its accumulated GOVD_* flags,
   DATA is a gimplify_adjust_omp_clauses_data.  For each implicitly
   referenced decl this synthesizes the corresponding explicit clause
   (MAP, SHARED, PRIVATE, FIRSTPRIVATE or LASTPRIVATE) and prepends it
   to the clause list.  Always returns 0 so the splay-tree walk
   continues.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses were already emitted by the user; local decls need
     no clause at all.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Decls never referenced in the region get no implicit clause.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    /* Let the frontend decide whether a shared decl should be shown as
       private for debugging purposes.  */
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Translate the GOVD_* data-sharing class into a clause code.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      /* _Atomic types cannot be implicitly mapped (OpenMP, not OpenACC).  */
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global is only worth a SHARED clause if some enclosing
	     context privatizes or maps it; otherwise it is shared
	     anyway and no clause is needed.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      /* _Atomic types cannot be implicitly firstprivatized on target
	 constructs (OpenMP, not OpenACC).  */
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    /* ALIGNED decls need no implicit data-sharing clause.  */
    return 0;
  else
    gcc_unreachable ();

  /* Propagate the write into outer shared contexts so the
     shared-to-firstprivate optimization stays correct there.  */
  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  /* Build the new clause and prepend it to the list.  */
  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    /* Never written inside the region: mark as read-only shared so
       later passes may turn it into firstprivate.  */
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      /* Zero-length array section: emit a GOMP_MAP_ALLOC of the
	 (possibly dereferenced) pointer plus a trailing
	 GOMP_MAP_FIRSTPRIVATE_POINTER clause for the base decl.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Gimplify the address in the outer context, not the one the
	 clause belongs to.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind;
      /* Not all combinations of these GOVD_MAP flags are actually valid.  */
      switch (flags & (GOVD_MAP_TO_ONLY
		       | GOVD_MAP_FORCE
		       | GOVD_MAP_FORCE_PRESENT))
	{
	case 0:
	  kind = GOMP_MAP_TOFROM;
	  break;
	case GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_TO_ONLY:
	  kind = GOMP_MAP_TO;
	  break;
	case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_FORCE_PRESENT:
	  kind = GOMP_MAP_FORCE_PRESENT;
	  break;
	default:
	  gcc_unreachable ();
	}
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: map the underlying storage through its
	     DECL_VALUE_EXPR and add a pointer clause for the decl.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && lang_hooks.decls.omp_privatize_by_reference (decl))
	{
	  /* Reference-typed decl: map the referenced object and add a
	     GOMP_MAP_FIRSTPRIVATE_REFERENCE clause for the reference.  */
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  /* Gimplify the size in the outer context.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* Both firstprivate and lastprivate: add a companion
	 LASTPRIVATE clause marked as firstprivate.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Let the frontend finalize the clause in the outer context.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Finalize the main clause in the outer context as well, and notice
     any size decls a MAP clause introduced so outer contexts handle
     them.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
8839
8840 static void
8841 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
8842 enum tree_code code)
8843 {
8844 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8845 tree c, decl;
8846
8847 if (body)
8848 {
8849 struct gimplify_omp_ctx *octx;
8850 for (octx = ctx; octx; octx = octx->outer_context)
8851 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8852 break;
8853 if (octx)
8854 {
8855 struct walk_stmt_info wi;
8856 memset (&wi, 0, sizeof (wi));
8857 walk_gimple_seq (body, omp_find_stores_stmt,
8858 omp_find_stores_op, &wi);
8859 }
8860 }
8861 while ((c = *list_p) != NULL)
8862 {
8863 splay_tree_node n;
8864 bool remove = false;
8865
8866 switch (OMP_CLAUSE_CODE (c))
8867 {
8868 case OMP_CLAUSE_FIRSTPRIVATE:
8869 if ((ctx->region_type & ORT_TARGET)
8870 && (ctx->region_type & ORT_ACC) == 0
8871 && TYPE_ATOMIC (strip_array_types
8872 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
8873 {
8874 error_at (OMP_CLAUSE_LOCATION (c),
8875 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8876 "%<target%> construct", OMP_CLAUSE_DECL (c));
8877 remove = true;
8878 break;
8879 }
8880 /* FALLTHRU */
8881 case OMP_CLAUSE_PRIVATE:
8882 case OMP_CLAUSE_SHARED:
8883 case OMP_CLAUSE_LINEAR:
8884 decl = OMP_CLAUSE_DECL (c);
8885 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8886 remove = !(n->value & GOVD_SEEN);
8887 if (! remove)
8888 {
8889 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8890 if ((n->value & GOVD_DEBUG_PRIVATE)
8891 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8892 {
8893 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8894 || ((n->value & GOVD_DATA_SHARE_CLASS)
8895 == GOVD_SHARED));
8896 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8897 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8898 }
8899 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8900 && (n->value & GOVD_WRITTEN) == 0
8901 && DECL_P (decl)
8902 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8903 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8904 else if (DECL_P (decl)
8905 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8906 && (n->value & GOVD_WRITTEN) != 1)
8907 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8908 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8909 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8910 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8911 }
8912 break;
8913
8914 case OMP_CLAUSE_LASTPRIVATE:
8915 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8916 accurately reflect the presence of a FIRSTPRIVATE clause. */
8917 decl = OMP_CLAUSE_DECL (c);
8918 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8919 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8920 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8921 if (code == OMP_DISTRIBUTE
8922 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8923 {
8924 remove = true;
8925 error_at (OMP_CLAUSE_LOCATION (c),
8926 "same variable used in %<firstprivate%> and "
8927 "%<lastprivate%> clauses on %<distribute%> "
8928 "construct");
8929 }
8930 if (!remove
8931 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8932 && DECL_P (decl)
8933 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8934 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8935 break;
8936
8937 case OMP_CLAUSE_ALIGNED:
8938 decl = OMP_CLAUSE_DECL (c);
8939 if (!is_global_var (decl))
8940 {
8941 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8942 remove = n == NULL || !(n->value & GOVD_SEEN);
8943 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8944 {
8945 struct gimplify_omp_ctx *octx;
8946 if (n != NULL
8947 && (n->value & (GOVD_DATA_SHARE_CLASS
8948 & ~GOVD_FIRSTPRIVATE)))
8949 remove = true;
8950 else
8951 for (octx = ctx->outer_context; octx;
8952 octx = octx->outer_context)
8953 {
8954 n = splay_tree_lookup (octx->variables,
8955 (splay_tree_key) decl);
8956 if (n == NULL)
8957 continue;
8958 if (n->value & GOVD_LOCAL)
8959 break;
8960 /* We have to avoid assigning a shared variable
8961 to itself when trying to add
8962 __builtin_assume_aligned. */
8963 if (n->value & GOVD_SHARED)
8964 {
8965 remove = true;
8966 break;
8967 }
8968 }
8969 }
8970 }
8971 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8972 {
8973 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8974 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8975 remove = true;
8976 }
8977 break;
8978
8979 case OMP_CLAUSE_MAP:
8980 if (code == OMP_TARGET_EXIT_DATA
8981 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8982 {
8983 remove = true;
8984 break;
8985 }
8986 decl = OMP_CLAUSE_DECL (c);
8987 /* Data clauses associated with acc parallel reductions must be
8988 compatible with present_or_copy. Warn and adjust the clause
8989 if that is not the case. */
8990 if (ctx->region_type == ORT_ACC_PARALLEL)
8991 {
8992 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
8993 n = NULL;
8994
8995 if (DECL_P (t))
8996 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
8997
8998 if (n && (n->value & GOVD_REDUCTION))
8999 {
9000 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9001
9002 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9003 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9004 && kind != GOMP_MAP_FORCE_PRESENT
9005 && kind != GOMP_MAP_POINTER)
9006 {
9007 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9008 "incompatible data clause with reduction "
9009 "on %qE; promoting to present_or_copy",
9010 DECL_NAME (t));
9011 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9012 }
9013 }
9014 }
9015 if (!DECL_P (decl))
9016 {
9017 if ((ctx->region_type & ORT_TARGET) != 0
9018 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9019 {
9020 if (TREE_CODE (decl) == INDIRECT_REF
9021 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9022 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9023 == REFERENCE_TYPE))
9024 decl = TREE_OPERAND (decl, 0);
9025 if (TREE_CODE (decl) == COMPONENT_REF)
9026 {
9027 while (TREE_CODE (decl) == COMPONENT_REF)
9028 decl = TREE_OPERAND (decl, 0);
9029 if (DECL_P (decl))
9030 {
9031 n = splay_tree_lookup (ctx->variables,
9032 (splay_tree_key) decl);
9033 if (!(n->value & GOVD_SEEN))
9034 remove = true;
9035 }
9036 }
9037 }
9038 break;
9039 }
9040 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9041 if ((ctx->region_type & ORT_TARGET) != 0
9042 && !(n->value & GOVD_SEEN)
9043 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9044 && (!is_global_var (decl)
9045 || !lookup_attribute ("omp declare target link",
9046 DECL_ATTRIBUTES (decl))))
9047 {
9048 remove = true;
9049 /* For struct element mapping, if struct is never referenced
9050 in target block and none of the mapping has always modifier,
9051 remove all the struct element mappings, which immediately
9052 follow the GOMP_MAP_STRUCT map clause. */
9053 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9054 {
9055 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9056 while (cnt--)
9057 OMP_CLAUSE_CHAIN (c)
9058 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9059 }
9060 }
9061 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9062 && code == OMP_TARGET_EXIT_DATA)
9063 remove = true;
9064 else if (DECL_SIZE (decl)
9065 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9066 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9067 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9068 && (OMP_CLAUSE_MAP_KIND (c)
9069 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9070 {
9071 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9072 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9073 INTEGER_CST. */
9074 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9075
9076 tree decl2 = DECL_VALUE_EXPR (decl);
9077 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9078 decl2 = TREE_OPERAND (decl2, 0);
9079 gcc_assert (DECL_P (decl2));
9080 tree mem = build_simple_mem_ref (decl2);
9081 OMP_CLAUSE_DECL (c) = mem;
9082 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9083 if (ctx->outer_context)
9084 {
9085 omp_notice_variable (ctx->outer_context, decl2, true);
9086 omp_notice_variable (ctx->outer_context,
9087 OMP_CLAUSE_SIZE (c), true);
9088 }
9089 if (((ctx->region_type & ORT_TARGET) != 0
9090 || !ctx->target_firstprivatize_array_bases)
9091 && ((n->value & GOVD_SEEN) == 0
9092 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9093 {
9094 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9095 OMP_CLAUSE_MAP);
9096 OMP_CLAUSE_DECL (nc) = decl;
9097 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9098 if (ctx->target_firstprivatize_array_bases)
9099 OMP_CLAUSE_SET_MAP_KIND (nc,
9100 GOMP_MAP_FIRSTPRIVATE_POINTER);
9101 else
9102 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9103 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9104 OMP_CLAUSE_CHAIN (c) = nc;
9105 c = nc;
9106 }
9107 }
9108 else
9109 {
9110 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9111 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9112 gcc_assert ((n->value & GOVD_SEEN) == 0
9113 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9114 == 0));
9115 }
9116 break;
9117
9118 case OMP_CLAUSE_TO:
9119 case OMP_CLAUSE_FROM:
9120 case OMP_CLAUSE__CACHE_:
9121 decl = OMP_CLAUSE_DECL (c);
9122 if (!DECL_P (decl))
9123 break;
9124 if (DECL_SIZE (decl)
9125 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9126 {
9127 tree decl2 = DECL_VALUE_EXPR (decl);
9128 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9129 decl2 = TREE_OPERAND (decl2, 0);
9130 gcc_assert (DECL_P (decl2));
9131 tree mem = build_simple_mem_ref (decl2);
9132 OMP_CLAUSE_DECL (c) = mem;
9133 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9134 if (ctx->outer_context)
9135 {
9136 omp_notice_variable (ctx->outer_context, decl2, true);
9137 omp_notice_variable (ctx->outer_context,
9138 OMP_CLAUSE_SIZE (c), true);
9139 }
9140 }
9141 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9142 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9143 break;
9144
9145 case OMP_CLAUSE_REDUCTION:
9146 decl = OMP_CLAUSE_DECL (c);
9147 /* OpenACC reductions need a present_or_copy data clause.
9148 Add one if necessary. Error is the reduction is private. */
9149 if (ctx->region_type == ORT_ACC_PARALLEL)
9150 {
9151 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9152 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9153 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9154 "reduction on %qE", DECL_NAME (decl));
9155 else if ((n->value & GOVD_MAP) == 0)
9156 {
9157 tree next = OMP_CLAUSE_CHAIN (c);
9158 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9159 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9160 OMP_CLAUSE_DECL (nc) = decl;
9161 OMP_CLAUSE_CHAIN (c) = nc;
9162 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9163 while (1)
9164 {
9165 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9166 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9167 break;
9168 nc = OMP_CLAUSE_CHAIN (nc);
9169 }
9170 OMP_CLAUSE_CHAIN (nc) = next;
9171 n->value |= GOVD_MAP;
9172 }
9173 }
9174 if (DECL_P (decl)
9175 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9176 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9177 break;
9178 case OMP_CLAUSE_COPYIN:
9179 case OMP_CLAUSE_COPYPRIVATE:
9180 case OMP_CLAUSE_IF:
9181 case OMP_CLAUSE_NUM_THREADS:
9182 case OMP_CLAUSE_NUM_TEAMS:
9183 case OMP_CLAUSE_THREAD_LIMIT:
9184 case OMP_CLAUSE_DIST_SCHEDULE:
9185 case OMP_CLAUSE_DEVICE:
9186 case OMP_CLAUSE_SCHEDULE:
9187 case OMP_CLAUSE_NOWAIT:
9188 case OMP_CLAUSE_ORDERED:
9189 case OMP_CLAUSE_DEFAULT:
9190 case OMP_CLAUSE_UNTIED:
9191 case OMP_CLAUSE_COLLAPSE:
9192 case OMP_CLAUSE_FINAL:
9193 case OMP_CLAUSE_MERGEABLE:
9194 case OMP_CLAUSE_PROC_BIND:
9195 case OMP_CLAUSE_SAFELEN:
9196 case OMP_CLAUSE_SIMDLEN:
9197 case OMP_CLAUSE_DEPEND:
9198 case OMP_CLAUSE_PRIORITY:
9199 case OMP_CLAUSE_GRAINSIZE:
9200 case OMP_CLAUSE_NUM_TASKS:
9201 case OMP_CLAUSE_NOGROUP:
9202 case OMP_CLAUSE_THREADS:
9203 case OMP_CLAUSE_SIMD:
9204 case OMP_CLAUSE_HINT:
9205 case OMP_CLAUSE_DEFAULTMAP:
9206 case OMP_CLAUSE_USE_DEVICE_PTR:
9207 case OMP_CLAUSE_IS_DEVICE_PTR:
9208 case OMP_CLAUSE__CILK_FOR_COUNT_:
9209 case OMP_CLAUSE_ASYNC:
9210 case OMP_CLAUSE_WAIT:
9211 case OMP_CLAUSE_INDEPENDENT:
9212 case OMP_CLAUSE_NUM_GANGS:
9213 case OMP_CLAUSE_NUM_WORKERS:
9214 case OMP_CLAUSE_VECTOR_LENGTH:
9215 case OMP_CLAUSE_GANG:
9216 case OMP_CLAUSE_WORKER:
9217 case OMP_CLAUSE_VECTOR:
9218 case OMP_CLAUSE_AUTO:
9219 case OMP_CLAUSE_SEQ:
9220 case OMP_CLAUSE_TILE:
9221 break;
9222
9223 default:
9224 gcc_unreachable ();
9225 }
9226
9227 if (remove)
9228 *list_p = OMP_CLAUSE_CHAIN (c);
9229 else
9230 list_p = &OMP_CLAUSE_CHAIN (c);
9231 }
9232
9233 /* Add in any implicit data sharing. */
9234 struct gimplify_adjust_omp_clauses_data data;
9235 data.list_p = list_p;
9236 data.pre_p = pre_p;
9237 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
9238
9239 gimplify_omp_ctxp = ctx->outer_context;
9240 delete_omp_context (ctx);
9241 }
9242
9243 /* Gimplify OACC_CACHE. */
9244
9245 static void
9246 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9247 {
9248 tree expr = *expr_p;
9249
9250 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9251 OACC_CACHE);
9252 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9253 OACC_CACHE);
9254
9255 /* TODO: Do something sensible with this information. */
9256
9257 *expr_p = NULL_TREE;
9258 }
9259
9260 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9261 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9262 kind. The entry kind will replace the one in CLAUSE, while the exit
9263 kind will be used in a new omp_clause and returned to the caller. */
9264
9265 static tree
9266 gimplify_oacc_declare_1 (tree clause)
9267 {
9268 HOST_WIDE_INT kind, new_op;
9269 bool ret = false;
9270 tree c = NULL;
9271
9272 kind = OMP_CLAUSE_MAP_KIND (clause);
9273
9274 switch (kind)
9275 {
9276 case GOMP_MAP_ALLOC:
9277 case GOMP_MAP_FORCE_ALLOC:
9278 case GOMP_MAP_FORCE_TO:
9279 new_op = GOMP_MAP_DELETE;
9280 ret = true;
9281 break;
9282
9283 case GOMP_MAP_FORCE_FROM:
9284 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9285 new_op = GOMP_MAP_FORCE_FROM;
9286 ret = true;
9287 break;
9288
9289 case GOMP_MAP_FORCE_TOFROM:
9290 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9291 new_op = GOMP_MAP_FORCE_FROM;
9292 ret = true;
9293 break;
9294
9295 case GOMP_MAP_FROM:
9296 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9297 new_op = GOMP_MAP_FROM;
9298 ret = true;
9299 break;
9300
9301 case GOMP_MAP_TOFROM:
9302 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9303 new_op = GOMP_MAP_FROM;
9304 ret = true;
9305 break;
9306
9307 case GOMP_MAP_DEVICE_RESIDENT:
9308 case GOMP_MAP_FORCE_DEVICEPTR:
9309 case GOMP_MAP_FORCE_PRESENT:
9310 case GOMP_MAP_LINK:
9311 case GOMP_MAP_POINTER:
9312 case GOMP_MAP_TO:
9313 break;
9314
9315 default:
9316 gcc_unreachable ();
9317 break;
9318 }
9319
9320 if (ret)
9321 {
9322 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9323 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9324 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9325 }
9326
9327 return c;
9328 }
9329
9330 /* Gimplify OACC_DECLARE. */
9331
9332 static void
9333 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9334 {
9335 tree expr = *expr_p;
9336 gomp_target *stmt;
9337 tree clauses, t, decl;
9338
9339 clauses = OACC_DECLARE_CLAUSES (expr);
9340
9341 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9342 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
9343
9344 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9345 {
9346 decl = OMP_CLAUSE_DECL (t);
9347
9348 if (TREE_CODE (decl) == MEM_REF)
9349 decl = TREE_OPERAND (decl, 0);
9350
9351 if (VAR_P (decl) && !is_oacc_declared (decl))
9352 {
9353 tree attr = get_identifier ("oacc declare target");
9354 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9355 DECL_ATTRIBUTES (decl));
9356 }
9357
9358 if (VAR_P (decl)
9359 && !is_global_var (decl)
9360 && DECL_CONTEXT (decl) == current_function_decl)
9361 {
9362 tree c = gimplify_oacc_declare_1 (t);
9363 if (c)
9364 {
9365 if (oacc_declare_returns == NULL)
9366 oacc_declare_returns = new hash_map<tree, tree>;
9367
9368 oacc_declare_returns->put (decl, c);
9369 }
9370 }
9371
9372 if (gimplify_omp_ctxp)
9373 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9374 }
9375
9376 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9377 clauses);
9378
9379 gimplify_seq_add_stmt (pre_p, stmt);
9380
9381 *expr_p = NULL_TREE;
9382 }
9383
9384 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9385 gimplification of the body, as well as scanning the body for used
9386 variables. We need to do this scan now, because variable-sized
9387 decls will be decomposed during gimplification. */
9388
9389 static void
9390 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9391 {
9392 tree expr = *expr_p;
9393 gimple *g;
9394 gimple_seq body = NULL;
9395
9396 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9397 OMP_PARALLEL_COMBINED (expr)
9398 ? ORT_COMBINED_PARALLEL
9399 : ORT_PARALLEL, OMP_PARALLEL);
9400
9401 push_gimplify_context ();
9402
9403 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9404 if (gimple_code (g) == GIMPLE_BIND)
9405 pop_gimplify_context (g);
9406 else
9407 pop_gimplify_context (NULL);
9408
9409 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9410 OMP_PARALLEL);
9411
9412 g = gimple_build_omp_parallel (body,
9413 OMP_PARALLEL_CLAUSES (expr),
9414 NULL_TREE, NULL_TREE);
9415 if (OMP_PARALLEL_COMBINED (expr))
9416 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9417 gimplify_seq_add_stmt (pre_p, g);
9418 *expr_p = NULL_TREE;
9419 }
9420
9421 /* Gimplify the contents of an OMP_TASK statement. This involves
9422 gimplification of the body, as well as scanning the body for used
9423 variables. We need to do this scan now, because variable-sized
9424 decls will be decomposed during gimplification. */
9425
9426 static void
9427 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9428 {
9429 tree expr = *expr_p;
9430 gimple *g;
9431 gimple_seq body = NULL;
9432
9433 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9434 omp_find_clause (OMP_TASK_CLAUSES (expr),
9435 OMP_CLAUSE_UNTIED)
9436 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9437
9438 push_gimplify_context ();
9439
9440 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9441 if (gimple_code (g) == GIMPLE_BIND)
9442 pop_gimplify_context (g);
9443 else
9444 pop_gimplify_context (NULL);
9445
9446 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9447 OMP_TASK);
9448
9449 g = gimple_build_omp_task (body,
9450 OMP_TASK_CLAUSES (expr),
9451 NULL_TREE, NULL_TREE,
9452 NULL_TREE, NULL_TREE, NULL_TREE);
9453 gimplify_seq_add_stmt (pre_p, g);
9454 *expr_p = NULL_TREE;
9455 }
9456
9457 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9458 with non-NULL OMP_FOR_INIT. */
9459
9460 static tree
9461 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9462 {
9463 *walk_subtrees = 0;
9464 switch (TREE_CODE (*tp))
9465 {
9466 case OMP_FOR:
9467 *walk_subtrees = 1;
9468 /* FALLTHRU */
9469 case OMP_SIMD:
9470 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9471 return *tp;
9472 break;
9473 case BIND_EXPR:
9474 case STATEMENT_LIST:
9475 case OMP_PARALLEL:
9476 *walk_subtrees = 1;
9477 break;
9478 default:
9479 break;
9480 }
9481 return NULL_TREE;
9482 }
9483
9484 /* Gimplify the gross structure of an OMP_FOR statement. */
9485
9486 static enum gimplify_status
9487 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9488 {
9489 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9490 enum gimplify_status ret = GS_ALL_DONE;
9491 enum gimplify_status tret;
9492 gomp_for *gfor;
9493 gimple_seq for_body, for_pre_body;
9494 int i;
9495 bitmap has_decl_expr = NULL;
9496 enum omp_region_type ort = ORT_WORKSHARE;
9497
9498 orig_for_stmt = for_stmt = *expr_p;
9499
9500 switch (TREE_CODE (for_stmt))
9501 {
9502 case OMP_FOR:
9503 case CILK_FOR:
9504 case OMP_DISTRIBUTE:
9505 break;
9506 case OACC_LOOP:
9507 ort = ORT_ACC;
9508 break;
9509 case OMP_TASKLOOP:
9510 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9511 ort = ORT_UNTIED_TASK;
9512 else
9513 ort = ORT_TASK;
9514 break;
9515 case OMP_SIMD:
9516 case CILK_SIMD:
9517 ort = ORT_SIMD;
9518 break;
9519 default:
9520 gcc_unreachable ();
9521 }
9522
9523 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9524 clause for the IV. */
9525 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9526 {
9527 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9528 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9529 decl = TREE_OPERAND (t, 0);
9530 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9531 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9532 && OMP_CLAUSE_DECL (c) == decl)
9533 {
9534 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9535 break;
9536 }
9537 }
9538
9539 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9540 {
9541 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9542 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9543 find_combined_omp_for, NULL, NULL);
9544 if (inner_for_stmt == NULL_TREE)
9545 {
9546 gcc_assert (seen_error ());
9547 *expr_p = NULL_TREE;
9548 return GS_ERROR;
9549 }
9550 }
9551
9552 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9553 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9554 TREE_CODE (for_stmt));
9555
9556 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9557 gimplify_omp_ctxp->distribute = true;
9558
9559 /* Handle OMP_FOR_INIT. */
9560 for_pre_body = NULL;
9561 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9562 {
9563 has_decl_expr = BITMAP_ALLOC (NULL);
9564 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9565 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9566 == VAR_DECL)
9567 {
9568 t = OMP_FOR_PRE_BODY (for_stmt);
9569 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9570 }
9571 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9572 {
9573 tree_stmt_iterator si;
9574 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9575 tsi_next (&si))
9576 {
9577 t = tsi_stmt (si);
9578 if (TREE_CODE (t) == DECL_EXPR
9579 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9580 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9581 }
9582 }
9583 }
9584 if (OMP_FOR_PRE_BODY (for_stmt))
9585 {
9586 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9587 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9588 else
9589 {
9590 struct gimplify_omp_ctx ctx;
9591 memset (&ctx, 0, sizeof (ctx));
9592 ctx.region_type = ORT_NONE;
9593 gimplify_omp_ctxp = &ctx;
9594 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9595 gimplify_omp_ctxp = NULL;
9596 }
9597 }
9598 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9599
9600 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9601 for_stmt = inner_for_stmt;
9602
9603 /* For taskloop, need to gimplify the start, end and step before the
9604 taskloop, outside of the taskloop omp context. */
9605 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9606 {
9607 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9608 {
9609 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9610 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9611 {
9612 TREE_OPERAND (t, 1)
9613 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9614 pre_p, NULL, false);
9615 tree c = build_omp_clause (input_location,
9616 OMP_CLAUSE_FIRSTPRIVATE);
9617 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9618 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9619 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9620 }
9621
9622 /* Handle OMP_FOR_COND. */
9623 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9624 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9625 {
9626 TREE_OPERAND (t, 1)
9627 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9628 gimple_seq_empty_p (for_pre_body)
9629 ? pre_p : &for_pre_body, NULL,
9630 false);
9631 tree c = build_omp_clause (input_location,
9632 OMP_CLAUSE_FIRSTPRIVATE);
9633 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9634 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9635 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9636 }
9637
9638 /* Handle OMP_FOR_INCR. */
9639 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9640 if (TREE_CODE (t) == MODIFY_EXPR)
9641 {
9642 decl = TREE_OPERAND (t, 0);
9643 t = TREE_OPERAND (t, 1);
9644 tree *tp = &TREE_OPERAND (t, 1);
9645 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9646 tp = &TREE_OPERAND (t, 0);
9647
9648 if (!is_gimple_constant (*tp))
9649 {
9650 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9651 ? pre_p : &for_pre_body;
9652 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9653 tree c = build_omp_clause (input_location,
9654 OMP_CLAUSE_FIRSTPRIVATE);
9655 OMP_CLAUSE_DECL (c) = *tp;
9656 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9657 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9658 }
9659 }
9660 }
9661
9662 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9663 OMP_TASKLOOP);
9664 }
9665
9666 if (orig_for_stmt != for_stmt)
9667 gimplify_omp_ctxp->combined_loop = true;
9668
9669 for_body = NULL;
9670 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9671 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9672 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9673 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9674
9675 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9676 bool is_doacross = false;
9677 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9678 {
9679 is_doacross = true;
9680 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9681 (OMP_FOR_INIT (for_stmt))
9682 * 2);
9683 }
9684 int collapse = 1, tile = 0;
9685 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9686 if (c)
9687 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9688 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9689 if (c)
9690 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9691 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9692 {
9693 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9694 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9695 decl = TREE_OPERAND (t, 0);
9696 gcc_assert (DECL_P (decl));
9697 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9698 || POINTER_TYPE_P (TREE_TYPE (decl)));
9699 if (is_doacross)
9700 {
9701 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9702 gimplify_omp_ctxp->loop_iter_var.quick_push
9703 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9704 else
9705 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9706 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9707 }
9708
9709 /* Make sure the iteration variable is private. */
9710 tree c = NULL_TREE;
9711 tree c2 = NULL_TREE;
9712 if (orig_for_stmt != for_stmt)
9713 /* Do this only on innermost construct for combined ones. */;
9714 else if (ort == ORT_SIMD)
9715 {
9716 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9717 (splay_tree_key) decl);
9718 omp_is_private (gimplify_omp_ctxp, decl,
9719 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9720 != 1));
9721 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9722 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9723 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9724 {
9725 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9726 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9727 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9728 if (has_decl_expr
9729 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9730 {
9731 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9732 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9733 }
9734 struct gimplify_omp_ctx *outer
9735 = gimplify_omp_ctxp->outer_context;
9736 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9737 {
9738 if (outer->region_type == ORT_WORKSHARE
9739 && outer->combined_loop)
9740 {
9741 n = splay_tree_lookup (outer->variables,
9742 (splay_tree_key)decl);
9743 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9744 {
9745 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9746 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9747 }
9748 else
9749 {
9750 struct gimplify_omp_ctx *octx = outer->outer_context;
9751 if (octx
9752 && octx->region_type == ORT_COMBINED_PARALLEL
9753 && octx->outer_context
9754 && (octx->outer_context->region_type
9755 == ORT_WORKSHARE)
9756 && octx->outer_context->combined_loop)
9757 {
9758 octx = octx->outer_context;
9759 n = splay_tree_lookup (octx->variables,
9760 (splay_tree_key)decl);
9761 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9762 {
9763 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9764 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9765 }
9766 }
9767 }
9768 }
9769 }
9770
9771 OMP_CLAUSE_DECL (c) = decl;
9772 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9773 OMP_FOR_CLAUSES (for_stmt) = c;
9774 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9775 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9776 {
9777 if (outer->region_type == ORT_WORKSHARE
9778 && outer->combined_loop)
9779 {
9780 if (outer->outer_context
9781 && (outer->outer_context->region_type
9782 == ORT_COMBINED_PARALLEL))
9783 outer = outer->outer_context;
9784 else if (omp_check_private (outer, decl, false))
9785 outer = NULL;
9786 }
9787 else if (((outer->region_type & ORT_TASK) != 0)
9788 && outer->combined_loop
9789 && !omp_check_private (gimplify_omp_ctxp,
9790 decl, false))
9791 ;
9792 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9793 {
9794 omp_notice_variable (outer, decl, true);
9795 outer = NULL;
9796 }
9797 if (outer)
9798 {
9799 n = splay_tree_lookup (outer->variables,
9800 (splay_tree_key)decl);
9801 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9802 {
9803 omp_add_variable (outer, decl,
9804 GOVD_LASTPRIVATE | GOVD_SEEN);
9805 if (outer->region_type == ORT_COMBINED_PARALLEL
9806 && outer->outer_context
9807 && (outer->outer_context->region_type
9808 == ORT_WORKSHARE)
9809 && outer->outer_context->combined_loop)
9810 {
9811 outer = outer->outer_context;
9812 n = splay_tree_lookup (outer->variables,
9813 (splay_tree_key)decl);
9814 if (omp_check_private (outer, decl, false))
9815 outer = NULL;
9816 else if (n == NULL
9817 || ((n->value & GOVD_DATA_SHARE_CLASS)
9818 == 0))
9819 omp_add_variable (outer, decl,
9820 GOVD_LASTPRIVATE
9821 | GOVD_SEEN);
9822 else
9823 outer = NULL;
9824 }
9825 if (outer && outer->outer_context
9826 && (outer->outer_context->region_type
9827 == ORT_COMBINED_TEAMS))
9828 {
9829 outer = outer->outer_context;
9830 n = splay_tree_lookup (outer->variables,
9831 (splay_tree_key)decl);
9832 if (n == NULL
9833 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9834 omp_add_variable (outer, decl,
9835 GOVD_SHARED | GOVD_SEEN);
9836 else
9837 outer = NULL;
9838 }
9839 if (outer && outer->outer_context)
9840 omp_notice_variable (outer->outer_context, decl,
9841 true);
9842 }
9843 }
9844 }
9845 }
9846 else
9847 {
9848 bool lastprivate
9849 = (!has_decl_expr
9850 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9851 struct gimplify_omp_ctx *outer
9852 = gimplify_omp_ctxp->outer_context;
9853 if (outer && lastprivate)
9854 {
9855 if (outer->region_type == ORT_WORKSHARE
9856 && outer->combined_loop)
9857 {
9858 n = splay_tree_lookup (outer->variables,
9859 (splay_tree_key)decl);
9860 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9861 {
9862 lastprivate = false;
9863 outer = NULL;
9864 }
9865 else if (outer->outer_context
9866 && (outer->outer_context->region_type
9867 == ORT_COMBINED_PARALLEL))
9868 outer = outer->outer_context;
9869 else if (omp_check_private (outer, decl, false))
9870 outer = NULL;
9871 }
9872 else if (((outer->region_type & ORT_TASK) != 0)
9873 && outer->combined_loop
9874 && !omp_check_private (gimplify_omp_ctxp,
9875 decl, false))
9876 ;
9877 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9878 {
9879 omp_notice_variable (outer, decl, true);
9880 outer = NULL;
9881 }
9882 if (outer)
9883 {
9884 n = splay_tree_lookup (outer->variables,
9885 (splay_tree_key)decl);
9886 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9887 {
9888 omp_add_variable (outer, decl,
9889 GOVD_LASTPRIVATE | GOVD_SEEN);
9890 if (outer->region_type == ORT_COMBINED_PARALLEL
9891 && outer->outer_context
9892 && (outer->outer_context->region_type
9893 == ORT_WORKSHARE)
9894 && outer->outer_context->combined_loop)
9895 {
9896 outer = outer->outer_context;
9897 n = splay_tree_lookup (outer->variables,
9898 (splay_tree_key)decl);
9899 if (omp_check_private (outer, decl, false))
9900 outer = NULL;
9901 else if (n == NULL
9902 || ((n->value & GOVD_DATA_SHARE_CLASS)
9903 == 0))
9904 omp_add_variable (outer, decl,
9905 GOVD_LASTPRIVATE
9906 | GOVD_SEEN);
9907 else
9908 outer = NULL;
9909 }
9910 if (outer && outer->outer_context
9911 && (outer->outer_context->region_type
9912 == ORT_COMBINED_TEAMS))
9913 {
9914 outer = outer->outer_context;
9915 n = splay_tree_lookup (outer->variables,
9916 (splay_tree_key)decl);
9917 if (n == NULL
9918 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9919 omp_add_variable (outer, decl,
9920 GOVD_SHARED | GOVD_SEEN);
9921 else
9922 outer = NULL;
9923 }
9924 if (outer && outer->outer_context)
9925 omp_notice_variable (outer->outer_context, decl,
9926 true);
9927 }
9928 }
9929 }
9930
9931 c = build_omp_clause (input_location,
9932 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9933 : OMP_CLAUSE_PRIVATE);
9934 OMP_CLAUSE_DECL (c) = decl;
9935 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9936 OMP_FOR_CLAUSES (for_stmt) = c;
9937 omp_add_variable (gimplify_omp_ctxp, decl,
9938 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9939 | GOVD_EXPLICIT | GOVD_SEEN);
9940 c = NULL_TREE;
9941 }
9942 }
9943 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9944 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9945 else
9946 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9947
9948 /* If DECL is not a gimple register, create a temporary variable to act
9949 as an iteration counter. This is valid, since DECL cannot be
9950 modified in the body of the loop. Similarly for any iteration vars
9951 in simd with collapse > 1 where the iterator vars must be
9952 lastprivate. */
9953 if (orig_for_stmt != for_stmt)
9954 var = decl;
9955 else if (!is_gimple_reg (decl)
9956 || (ort == ORT_SIMD
9957 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9958 {
9959 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9960 /* Make sure omp_add_variable is not called on it prematurely.
9961 We call it ourselves a few lines later. */
9962 gimplify_omp_ctxp = NULL;
9963 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9964 gimplify_omp_ctxp = ctx;
9965 TREE_OPERAND (t, 0) = var;
9966
9967 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9968
9969 if (ort == ORT_SIMD
9970 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9971 {
9972 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9973 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9974 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9975 OMP_CLAUSE_DECL (c2) = var;
9976 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9977 OMP_FOR_CLAUSES (for_stmt) = c2;
9978 omp_add_variable (gimplify_omp_ctxp, var,
9979 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9980 if (c == NULL_TREE)
9981 {
9982 c = c2;
9983 c2 = NULL_TREE;
9984 }
9985 }
9986 else
9987 omp_add_variable (gimplify_omp_ctxp, var,
9988 GOVD_PRIVATE | GOVD_SEEN);
9989 }
9990 else
9991 var = decl;
9992
9993 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
9994 is_gimple_val, fb_rvalue, false);
9995 ret = MIN (ret, tret);
9996 if (ret == GS_ERROR)
9997 return ret;
9998
9999 /* Handle OMP_FOR_COND. */
10000 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10001 gcc_assert (COMPARISON_CLASS_P (t));
10002 gcc_assert (TREE_OPERAND (t, 0) == decl);
10003
10004 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10005 is_gimple_val, fb_rvalue, false);
10006 ret = MIN (ret, tret);
10007
10008 /* Handle OMP_FOR_INCR. */
10009 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10010 switch (TREE_CODE (t))
10011 {
10012 case PREINCREMENT_EXPR:
10013 case POSTINCREMENT_EXPR:
10014 {
10015 tree decl = TREE_OPERAND (t, 0);
10016 /* c_omp_for_incr_canonicalize_ptr() should have been
10017 called to massage things appropriately. */
10018 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10019
10020 if (orig_for_stmt != for_stmt)
10021 break;
10022 t = build_int_cst (TREE_TYPE (decl), 1);
10023 if (c)
10024 OMP_CLAUSE_LINEAR_STEP (c) = t;
10025 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10026 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10027 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10028 break;
10029 }
10030
10031 case PREDECREMENT_EXPR:
10032 case POSTDECREMENT_EXPR:
10033 /* c_omp_for_incr_canonicalize_ptr() should have been
10034 called to massage things appropriately. */
10035 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10036 if (orig_for_stmt != for_stmt)
10037 break;
10038 t = build_int_cst (TREE_TYPE (decl), -1);
10039 if (c)
10040 OMP_CLAUSE_LINEAR_STEP (c) = t;
10041 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10042 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10043 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10044 break;
10045
10046 case MODIFY_EXPR:
10047 gcc_assert (TREE_OPERAND (t, 0) == decl);
10048 TREE_OPERAND (t, 0) = var;
10049
10050 t = TREE_OPERAND (t, 1);
10051 switch (TREE_CODE (t))
10052 {
10053 case PLUS_EXPR:
10054 if (TREE_OPERAND (t, 1) == decl)
10055 {
10056 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10057 TREE_OPERAND (t, 0) = var;
10058 break;
10059 }
10060
10061 /* Fallthru. */
10062 case MINUS_EXPR:
10063 case POINTER_PLUS_EXPR:
10064 gcc_assert (TREE_OPERAND (t, 0) == decl);
10065 TREE_OPERAND (t, 0) = var;
10066 break;
10067 default:
10068 gcc_unreachable ();
10069 }
10070
10071 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10072 is_gimple_val, fb_rvalue, false);
10073 ret = MIN (ret, tret);
10074 if (c)
10075 {
10076 tree step = TREE_OPERAND (t, 1);
10077 tree stept = TREE_TYPE (decl);
10078 if (POINTER_TYPE_P (stept))
10079 stept = sizetype;
10080 step = fold_convert (stept, step);
10081 if (TREE_CODE (t) == MINUS_EXPR)
10082 step = fold_build1 (NEGATE_EXPR, stept, step);
10083 OMP_CLAUSE_LINEAR_STEP (c) = step;
10084 if (step != TREE_OPERAND (t, 1))
10085 {
10086 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10087 &for_pre_body, NULL,
10088 is_gimple_val, fb_rvalue, false);
10089 ret = MIN (ret, tret);
10090 }
10091 }
10092 break;
10093
10094 default:
10095 gcc_unreachable ();
10096 }
10097
10098 if (c2)
10099 {
10100 gcc_assert (c);
10101 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10102 }
10103
10104 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10105 {
10106 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10107 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10108 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10109 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10110 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10111 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10112 && OMP_CLAUSE_DECL (c) == decl)
10113 {
10114 if (is_doacross && (collapse == 1 || i >= collapse))
10115 t = var;
10116 else
10117 {
10118 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10119 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10120 gcc_assert (TREE_OPERAND (t, 0) == var);
10121 t = TREE_OPERAND (t, 1);
10122 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10123 || TREE_CODE (t) == MINUS_EXPR
10124 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10125 gcc_assert (TREE_OPERAND (t, 0) == var);
10126 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10127 is_doacross ? var : decl,
10128 TREE_OPERAND (t, 1));
10129 }
10130 gimple_seq *seq;
10131 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10132 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10133 else
10134 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10135 gimplify_assign (decl, t, seq);
10136 }
10137 }
10138 }
10139
10140 BITMAP_FREE (has_decl_expr);
10141
10142 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10143 {
10144 push_gimplify_context ();
10145 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10146 {
10147 OMP_FOR_BODY (orig_for_stmt)
10148 = build3 (BIND_EXPR, void_type_node, NULL,
10149 OMP_FOR_BODY (orig_for_stmt), NULL);
10150 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10151 }
10152 }
10153
10154 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10155 &for_body);
10156
10157 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10158 {
10159 if (gimple_code (g) == GIMPLE_BIND)
10160 pop_gimplify_context (g);
10161 else
10162 pop_gimplify_context (NULL);
10163 }
10164
10165 if (orig_for_stmt != for_stmt)
10166 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10167 {
10168 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10169 decl = TREE_OPERAND (t, 0);
10170 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10171 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10172 gimplify_omp_ctxp = ctx->outer_context;
10173 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10174 gimplify_omp_ctxp = ctx;
10175 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10176 TREE_OPERAND (t, 0) = var;
10177 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10178 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10179 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10180 }
10181
10182 gimplify_adjust_omp_clauses (pre_p, for_body,
10183 &OMP_FOR_CLAUSES (orig_for_stmt),
10184 TREE_CODE (orig_for_stmt));
10185
10186 int kind;
10187 switch (TREE_CODE (orig_for_stmt))
10188 {
10189 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10190 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10191 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
10192 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
10193 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10194 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10195 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10196 default:
10197 gcc_unreachable ();
10198 }
10199 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10200 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10201 for_pre_body);
10202 if (orig_for_stmt != for_stmt)
10203 gimple_omp_for_set_combined_p (gfor, true);
10204 if (gimplify_omp_ctxp
10205 && (gimplify_omp_ctxp->combined_loop
10206 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10207 && gimplify_omp_ctxp->outer_context
10208 && gimplify_omp_ctxp->outer_context->combined_loop)))
10209 {
10210 gimple_omp_for_set_combined_into_p (gfor, true);
10211 if (gimplify_omp_ctxp->combined_loop)
10212 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10213 else
10214 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10215 }
10216
10217 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10218 {
10219 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10220 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10221 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10222 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10223 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10224 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10225 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10226 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10227 }
10228
10229 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10230 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10231 The outer taskloop stands for computing the number of iterations,
10232 counts for collapsed loops and holding taskloop specific clauses.
10233 The task construct stands for the effect of data sharing on the
10234 explicit task it creates and the inner taskloop stands for expansion
10235 of the static loop inside of the explicit task construct. */
10236 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10237 {
10238 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10239 tree task_clauses = NULL_TREE;
10240 tree c = *gfor_clauses_ptr;
10241 tree *gtask_clauses_ptr = &task_clauses;
10242 tree outer_for_clauses = NULL_TREE;
10243 tree *gforo_clauses_ptr = &outer_for_clauses;
10244 for (; c; c = OMP_CLAUSE_CHAIN (c))
10245 switch (OMP_CLAUSE_CODE (c))
10246 {
10247 /* These clauses are allowed on task, move them there. */
10248 case OMP_CLAUSE_SHARED:
10249 case OMP_CLAUSE_FIRSTPRIVATE:
10250 case OMP_CLAUSE_DEFAULT:
10251 case OMP_CLAUSE_IF:
10252 case OMP_CLAUSE_UNTIED:
10253 case OMP_CLAUSE_FINAL:
10254 case OMP_CLAUSE_MERGEABLE:
10255 case OMP_CLAUSE_PRIORITY:
10256 *gtask_clauses_ptr = c;
10257 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10258 break;
10259 case OMP_CLAUSE_PRIVATE:
10260 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10261 {
10262 /* We want private on outer for and firstprivate
10263 on task. */
10264 *gtask_clauses_ptr
10265 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10266 OMP_CLAUSE_FIRSTPRIVATE);
10267 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10268 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10269 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10270 *gforo_clauses_ptr = c;
10271 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10272 }
10273 else
10274 {
10275 *gtask_clauses_ptr = c;
10276 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10277 }
10278 break;
10279 /* These clauses go into outer taskloop clauses. */
10280 case OMP_CLAUSE_GRAINSIZE:
10281 case OMP_CLAUSE_NUM_TASKS:
10282 case OMP_CLAUSE_NOGROUP:
10283 *gforo_clauses_ptr = c;
10284 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10285 break;
10286 /* Taskloop clause we duplicate on both taskloops. */
10287 case OMP_CLAUSE_COLLAPSE:
10288 *gfor_clauses_ptr = c;
10289 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10290 *gforo_clauses_ptr = copy_node (c);
10291 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10292 break;
10293 /* For lastprivate, keep the clause on inner taskloop, and add
10294 a shared clause on task. If the same decl is also firstprivate,
10295 add also firstprivate clause on the inner taskloop. */
10296 case OMP_CLAUSE_LASTPRIVATE:
10297 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10298 {
10299 /* For taskloop C++ lastprivate IVs, we want:
10300 1) private on outer taskloop
10301 2) firstprivate and shared on task
10302 3) lastprivate on inner taskloop */
10303 *gtask_clauses_ptr
10304 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10305 OMP_CLAUSE_FIRSTPRIVATE);
10306 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10307 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10308 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10309 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10310 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10311 OMP_CLAUSE_PRIVATE);
10312 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10313 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10314 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10315 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10316 }
10317 *gfor_clauses_ptr = c;
10318 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10319 *gtask_clauses_ptr
10320 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10321 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10322 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10323 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10324 gtask_clauses_ptr
10325 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10326 break;
10327 default:
10328 gcc_unreachable ();
10329 }
10330 *gfor_clauses_ptr = NULL_TREE;
10331 *gtask_clauses_ptr = NULL_TREE;
10332 *gforo_clauses_ptr = NULL_TREE;
10333 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10334 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10335 NULL_TREE, NULL_TREE, NULL_TREE);
10336 gimple_omp_task_set_taskloop_p (g, true);
10337 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10338 gomp_for *gforo
10339 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10340 gimple_omp_for_collapse (gfor),
10341 gimple_omp_for_pre_body (gfor));
10342 gimple_omp_for_set_pre_body (gfor, NULL);
10343 gimple_omp_for_set_combined_p (gforo, true);
10344 gimple_omp_for_set_combined_into_p (gfor, true);
10345 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10346 {
10347 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10348 tree v = create_tmp_var (type);
10349 gimple_omp_for_set_index (gforo, i, v);
10350 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10351 gimple_omp_for_set_initial (gforo, i, t);
10352 gimple_omp_for_set_cond (gforo, i,
10353 gimple_omp_for_cond (gfor, i));
10354 t = unshare_expr (gimple_omp_for_final (gfor, i));
10355 gimple_omp_for_set_final (gforo, i, t);
10356 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10357 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10358 TREE_OPERAND (t, 0) = v;
10359 gimple_omp_for_set_incr (gforo, i, t);
10360 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10361 OMP_CLAUSE_DECL (t) = v;
10362 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10363 gimple_omp_for_set_clauses (gforo, t);
10364 }
10365 gimplify_seq_add_stmt (pre_p, gforo);
10366 }
10367 else
10368 gimplify_seq_add_stmt (pre_p, gfor);
10369 if (ret != GS_ALL_DONE)
10370 return GS_ERROR;
10371 *expr_p = NULL_TREE;
10372 return GS_ALL_DONE;
10373 }
10374
10375 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10376 of OMP_TARGET's body. */
10377
10378 static tree
10379 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10380 {
10381 *walk_subtrees = 0;
10382 switch (TREE_CODE (*tp))
10383 {
10384 case OMP_TEAMS:
10385 return *tp;
10386 case BIND_EXPR:
10387 case STATEMENT_LIST:
10388 *walk_subtrees = 1;
10389 break;
10390 default:
10391 break;
10392 }
10393 return NULL_TREE;
10394 }
10395
10396 /* Helper function of optimize_target_teams, determine if the expression
10397 can be computed safely before the target construct on the host. */
10398
static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  /* Types carry no runtime computation; don't walk into them.  */
  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Reject decls whose host-side value can't be relied on before
	 entering the target region: erroneous, non-integral, with a
	 value-expr, thread-local, volatile or with side-effects.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* "omp declare target" globals have a device copy; the host
	 value need not be the one the region will see.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* Function-local temporaries never seen in a BIND_EXPR can't be
	 evaluated on the host ahead of the construct.  */
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  /* Not mentioned in any clause; acceptable only if scalars
	     are implicitly firstprivatized on this target.  */
	  if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	/* map(always, to:) guarantees the host value is copied in.  */
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      /* Judge the TARGET_EXPR by its temporary slot.  */
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
10494
10495 /* Try to determine if the num_teams and/or thread_limit expressions
10496 can have their values determined already before entering the
10497 target construct.
10498 INTEGER_CSTs trivially are,
10499 integral decls that are firstprivate (explicitly or implicitly)
10500 or explicitly map(always, to:) or map(always, tofrom:) on the target
10501 region too, and expressions involving simple arithmetics on those
10502 too, function calls are not ok, dereferencing something neither etc.
10503 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10504 EXPR based on what we find:
10505 0 stands for clause not specified at all, use implementation default
10506 -1 stands for value that can't be determined easily before entering
10507 the target construct.
10508 If teams construct is not present at all, use 1 for num_teams
10509 and 0 for thread_limit (only one team is involved, and the thread
10510 limit is implementation defined).  */
10511
static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    /* No teams construct at all: one team, default thread limit.  */
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	/* P points at whichever of the two values this clause sets.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    /* Not computable before the target construct.  */
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	/* Gimplify the expression in the enclosing (host) context so
	   any temporaries are evaluated before entering the target.  */
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Record the results as clauses on the OMP_TARGET itself.  */
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
10574
10575 /* Gimplify the gross structure of several OMP constructs. */
10576
static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Map the construct to the kind of OMP region it opens; this
     drives clause scanning below.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
    {
      /* Target regions get their own gimplification context so that
	 temporaries created while gimplifying the body end up inside
	 the region.  */
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  /* Data regions must run their "end data" library call even
	     on abnormal exits, so wrap the body in a try/finally.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));

  /* Build the GIMPLE statement matching the construct.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
10698
10699 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10700 target update constructs. */
10701
10702 static void
10703 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10704 {
10705 tree expr = *expr_p;
10706 int kind;
10707 gomp_target *stmt;
10708 enum omp_region_type ort = ORT_WORKSHARE;
10709
10710 switch (TREE_CODE (expr))
10711 {
10712 case OACC_ENTER_DATA:
10713 case OACC_EXIT_DATA:
10714 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10715 ort = ORT_ACC;
10716 break;
10717 case OACC_UPDATE:
10718 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10719 ort = ORT_ACC;
10720 break;
10721 case OMP_TARGET_UPDATE:
10722 kind = GF_OMP_TARGET_KIND_UPDATE;
10723 break;
10724 case OMP_TARGET_ENTER_DATA:
10725 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10726 break;
10727 case OMP_TARGET_EXIT_DATA:
10728 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10729 break;
10730 default:
10731 gcc_unreachable ();
10732 }
10733 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10734 ort, TREE_CODE (expr));
10735 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10736 TREE_CODE (expr));
10737 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10738
10739 gimplify_seq_add_stmt (pre_p, stmt);
10740 *expr_p = NULL_TREE;
10741 }
10742
10743 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10744 stabilized the lhs of the atomic operation as *ADDR. Return true if
10745 EXPR is this stabilized form. */
10746
10747 static bool
10748 goa_lhs_expr_p (tree expr, tree addr)
10749 {
10750 /* Also include casts to other type variants. The C front end is fond
10751 of adding these for e.g. volatile variables. This is like
10752 STRIP_TYPE_NOPS but includes the main variant lookup. */
10753 STRIP_USELESS_TYPE_CONVERSION (expr);
10754
10755 if (TREE_CODE (expr) == INDIRECT_REF)
10756 {
10757 expr = TREE_OPERAND (expr, 0);
10758 while (expr != addr
10759 && (CONVERT_EXPR_P (expr)
10760 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10761 && TREE_CODE (expr) == TREE_CODE (addr)
10762 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10763 {
10764 expr = TREE_OPERAND (expr, 0);
10765 addr = TREE_OPERAND (addr, 0);
10766 }
10767 if (expr == addr)
10768 return true;
10769 return (TREE_CODE (addr) == ADDR_EXPR
10770 && TREE_CODE (expr) == ADDR_EXPR
10771 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10772 }
10773 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10774 return true;
10775 return false;
10776 }
10777
10778 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10779 expression does not involve the lhs, evaluate it into a temporary.
10780 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10781 or -1 if an error was encountered. */
10782
static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* If the whole expression is the atomic lhs, substitute the
     preloaded temporary for it.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    return 0;

  /* Recurse into operands, accumulating whether any of them
     mentioned the lhs.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* A subexpression that doesn't involve the lhs can be evaluated
     ahead of the atomic region; gimplify it into PRE_P.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
10850
10851 /* Gimplify an OMP_ATOMIC statement. */
10852
static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* OMP_ATOMIC_READ has no rhs operand.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* Temporary that receives the value loaded from *ADDR.  */
  tmp_load = create_tmp_reg (type);
  /* Replace uses of the lhs inside RHS with TMP_LOAD and hoist
     unrelated subexpressions out of the atomic region.  */
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* The atomic is represented as a GIMPLE_OMP_ATOMIC_LOAD/STORE
     pair.  */
  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  /* For the read/capture forms the construct itself yields a value;
     expose it through *EXPR_P.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
10905
10906 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10907 body, and adding some EH bits. */
10908
10909 static enum gimplify_status
10910 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10911 {
10912 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10913 gimple *body_stmt;
10914 gtransaction *trans_stmt;
10915 gimple_seq body = NULL;
10916 int subcode = 0;
10917
10918 /* Wrap the transaction body in a BIND_EXPR so we have a context
10919 where to put decls for OMP. */
10920 if (TREE_CODE (tbody) != BIND_EXPR)
10921 {
10922 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10923 TREE_SIDE_EFFECTS (bind) = 1;
10924 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10925 TRANSACTION_EXPR_BODY (expr) = bind;
10926 }
10927
10928 push_gimplify_context ();
10929 temp = voidify_wrapper_expr (*expr_p, NULL);
10930
10931 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10932 pop_gimplify_context (body_stmt);
10933
10934 trans_stmt = gimple_build_transaction (body);
10935 if (TRANSACTION_EXPR_OUTER (expr))
10936 subcode = GTMA_IS_OUTER;
10937 else if (TRANSACTION_EXPR_RELAXED (expr))
10938 subcode = GTMA_IS_RELAXED;
10939 gimple_transaction_set_subcode (trans_stmt, subcode);
10940
10941 gimplify_seq_add_stmt (pre_p, trans_stmt);
10942
10943 if (temp)
10944 {
10945 *expr_p = temp;
10946 return GS_OK;
10947 }
10948
10949 *expr_p = NULL_TREE;
10950 return GS_ALL_DONE;
10951 }
10952
10953 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10954 is the OMP_BODY of the original EXPR (which has already been
10955 gimplified so it's not present in the EXPR).
10956
10957 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
10958
static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ()
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
	  {
	    /* depend clauses only make sense inside a loop with an
	       ordered(n) clause, which populates loop_iter_var.  */
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %<depend%> clause must be "
		      "closely nested inside a loop with %<ordered%> clause "
		      "with a parameter");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	  {
	    bool fail = false;
	    /* Check each element of the sink vector against the
	       recorded iteration variables, stored pairwise: the
	       original decl at index 2*i and its gimplified
	       replacement at 2*i+1.  */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		/* Rewrite to the gimplified iteration variable.  */
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %<depend(sink)%> "
			  "clause does not match number of "
			  "iteration variables");
		failures++;
	      }
	    sink_c = c;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	  {
	    /* At most one depend(source) clause is allowed.  */
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %<depend(source)%> clause on an "
			  "%<ordered%> construct");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  /* source and sink are mutually exclusive on one construct.  */
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%<depend(source)%> clause specified together with "
		"%<depend(sink:)%> clauses on the same construct");
      failures++;
    }

  /* On any diagnostic, replace the construct with a no-op.  */
  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
11041
11042 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11043 expression produces a value to be used as an operand inside a GIMPLE
11044 statement, the value will be stored back in *EXPR_P. This value will
11045 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11046 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11047 emitted in PRE_P and POST_P.
11048
11049 Additionally, this process may overwrite parts of the input
11050 expression during gimplification. Ideally, it should be
11051 possible to do non-destructive gimplification.
11052
11053 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11054 the expression needs to evaluate to a value to be used as
11055 an operand in a GIMPLE statement, this value will be stored in
11056 *EXPR_P on exit. This happens when the caller specifies one
11057 of fb_lvalue or fb_rvalue fallback flags.
11058
11059 PRE_P will contain the sequence of GIMPLE statements corresponding
11060 to the evaluation of EXPR and all the side-effects that must
11061 be executed before the main expression. On exit, the last
11062 statement of PRE_P is the core statement being gimplified. For
11063 instance, when gimplifying 'if (++a)' the last statement in
11064 PRE_P will be 'if (t.1)' where t.1 is the result of
11065 pre-incrementing 'a'.
11066
11067 POST_P will contain the sequence of GIMPLE statements corresponding
11068 to the evaluation of all the side-effects that must be executed
11069 after the main expression. If this is NULL, the post
11070 side-effects are stored at the end of PRE_P.
11071
11072 The reason why the output is split in two is to handle post
11073 side-effects explicitly. In some cases, an expression may have
11074 inner and outer post side-effects which need to be emitted in
11075 an order different from the one given by the recursive
11076 traversal. For instance, for the expression (*p--)++ the post
11077 side-effects of '--' must actually occur *after* the post
11078 side-effects of '++'. However, gimplification will first visit
11079 the inner expression, so if a separate POST sequence was not
11080 used, the resulting sequence would be:
11081
11082 1 t.1 = *p
11083 2 p = p - 1
11084 3 t.2 = t.1 + 1
11085 4 *p = t.2
11086
11087 However, the post-decrement operation in line #2 must not be
11088 evaluated until after the store to *p at line #4, so the
11089 correct sequence should be:
11090
11091 1 t.1 = *p
11092 2 t.2 = t.1 + 1
11093 3 *p = t.2
11094 4 p = p - 1
11095
11096 So, by specifying a separate post queue, it is possible
11097 to emit the post side-effects in the correct order.
11098 If POST_P is NULL, an internal queue will be used. Before
11099 returning to the caller, the sequence POST_P is appended to
11100 the main output sequence PRE_P.
11101
11102 GIMPLE_TEST_F points to a function that takes a tree T and
11103 returns nonzero if T is in the GIMPLE form requested by the
11104 caller. The GIMPLE predicates are in gimple.c.
11105
11106 FALLBACK tells the function what sort of a temporary we want if
11107 gimplification cannot produce an expression that complies with
11108 GIMPLE_TEST_F.
11109
11110 fb_none means that no temporary should be generated
11111 fb_rvalue means that an rvalue is OK to generate
11112 fb_lvalue means that an lvalue is OK to generate
11113 fb_either means that either is OK, but an lvalue is preferable.
11114 fb_mayfail means that gimplification may fail (in which case
11115 GS_ERROR will be returned)
11116
11117 The return value is either GS_ERROR or GS_ALL_DONE, since this
11118 function iterates until EXPR is completely gimplified or an error
11119 occurs. */
11120
11121 enum gimplify_status
11122 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11123 bool (*gimple_test_f) (tree), fallback_t fallback)
11124 {
11125 tree tmp;
11126 gimple_seq internal_pre = NULL;
11127 gimple_seq internal_post = NULL;
11128 tree save_expr;
11129 bool is_statement;
11130 location_t saved_location;
11131 enum gimplify_status ret;
11132 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11133 tree label;
11134
11135 save_expr = *expr_p;
11136 if (save_expr == NULL_TREE)
11137 return GS_ALL_DONE;
11138
11139 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11140 is_statement = gimple_test_f == is_gimple_stmt;
11141 if (is_statement)
11142 gcc_assert (pre_p);
11143
11144 /* Consistency checks. */
11145 if (gimple_test_f == is_gimple_reg)
11146 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11147 else if (gimple_test_f == is_gimple_val
11148 || gimple_test_f == is_gimple_call_addr
11149 || gimple_test_f == is_gimple_condexpr
11150 || gimple_test_f == is_gimple_mem_rhs
11151 || gimple_test_f == is_gimple_mem_rhs_or_call
11152 || gimple_test_f == is_gimple_reg_rhs
11153 || gimple_test_f == is_gimple_reg_rhs_or_call
11154 || gimple_test_f == is_gimple_asm_val
11155 || gimple_test_f == is_gimple_mem_ref_addr)
11156 gcc_assert (fallback & fb_rvalue);
11157 else if (gimple_test_f == is_gimple_min_lval
11158 || gimple_test_f == is_gimple_lvalue)
11159 gcc_assert (fallback & fb_lvalue);
11160 else if (gimple_test_f == is_gimple_addressable)
11161 gcc_assert (fallback & fb_either);
11162 else if (gimple_test_f == is_gimple_stmt)
11163 gcc_assert (fallback == fb_none);
11164 else
11165 {
11166 /* We should have recognized the GIMPLE_TEST_F predicate to
11167 know what kind of fallback to use in case a temporary is
11168 needed to hold the value or address of *EXPR_P. */
11169 gcc_unreachable ();
11170 }
11171
11172 /* We used to check the predicate here and return immediately if it
11173 succeeds. This is wrong; the design is for gimplification to be
11174 idempotent, and for the predicates to only test for valid forms, not
11175 whether they are fully simplified. */
11176 if (pre_p == NULL)
11177 pre_p = &internal_pre;
11178
11179 if (post_p == NULL)
11180 post_p = &internal_post;
11181
11182 /* Remember the last statements added to PRE_P and POST_P. Every
11183 new statement added by the gimplification helpers needs to be
11184 annotated with location information. To centralize the
11185 responsibility, we remember the last statement that had been
11186 added to both queues before gimplifying *EXPR_P. If
11187 gimplification produces new statements in PRE_P and POST_P, those
11188 statements will be annotated with the same location information
11189 as *EXPR_P. */
11190 pre_last_gsi = gsi_last (*pre_p);
11191 post_last_gsi = gsi_last (*post_p);
11192
11193 saved_location = input_location;
11194 if (save_expr != error_mark_node
11195 && EXPR_HAS_LOCATION (*expr_p))
11196 input_location = EXPR_LOCATION (*expr_p);
11197
11198 /* Loop over the specific gimplifiers until the toplevel node
11199 remains the same. */
11200 do
11201 {
11202 /* Strip away as many useless type conversions as possible
11203 at the toplevel. */
11204 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11205
11206 /* Remember the expr. */
11207 save_expr = *expr_p;
11208
11209 /* Die, die, die, my darling. */
11210 if (save_expr == error_mark_node
11211 || (TREE_TYPE (save_expr)
11212 && TREE_TYPE (save_expr) == error_mark_node))
11213 {
11214 ret = GS_ERROR;
11215 break;
11216 }
11217
11218 /* Do any language-specific gimplification. */
11219 ret = ((enum gimplify_status)
11220 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11221 if (ret == GS_OK)
11222 {
11223 if (*expr_p == NULL_TREE)
11224 break;
11225 if (*expr_p != save_expr)
11226 continue;
11227 }
11228 else if (ret != GS_UNHANDLED)
11229 break;
11230
11231 /* Make sure that all the cases set 'ret' appropriately. */
11232 ret = GS_UNHANDLED;
11233 switch (TREE_CODE (*expr_p))
11234 {
11235 /* First deal with the special cases. */
11236
11237 case POSTINCREMENT_EXPR:
11238 case POSTDECREMENT_EXPR:
11239 case PREINCREMENT_EXPR:
11240 case PREDECREMENT_EXPR:
11241 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11242 fallback != fb_none,
11243 TREE_TYPE (*expr_p));
11244 break;
11245
11246 case VIEW_CONVERT_EXPR:
11247 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11248 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11249 {
11250 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11251 post_p, is_gimple_val, fb_rvalue);
11252 recalculate_side_effects (*expr_p);
11253 break;
11254 }
11255 /* Fallthru. */
11256
11257 case ARRAY_REF:
11258 case ARRAY_RANGE_REF:
11259 case REALPART_EXPR:
11260 case IMAGPART_EXPR:
11261 case COMPONENT_REF:
11262 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11263 fallback ? fallback : fb_rvalue);
11264 break;
11265
11266 case COND_EXPR:
11267 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11268
11269 /* C99 code may assign to an array in a structure value of a
11270 conditional expression, and this has undefined behavior
11271 only on execution, so create a temporary if an lvalue is
11272 required. */
11273 if (fallback == fb_lvalue)
11274 {
11275 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11276 mark_addressable (*expr_p);
11277 ret = GS_OK;
11278 }
11279 break;
11280
11281 case CALL_EXPR:
11282 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11283
11284 /* C99 code may assign to an array in a structure returned
11285 from a function, and this has undefined behavior only on
11286 execution, so create a temporary if an lvalue is
11287 required. */
11288 if (fallback == fb_lvalue)
11289 {
11290 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11291 mark_addressable (*expr_p);
11292 ret = GS_OK;
11293 }
11294 break;
11295
11296 case TREE_LIST:
11297 gcc_unreachable ();
11298
11299 case COMPOUND_EXPR:
11300 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11301 break;
11302
11303 case COMPOUND_LITERAL_EXPR:
11304 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11305 gimple_test_f, fallback);
11306 break;
11307
11308 case MODIFY_EXPR:
11309 case INIT_EXPR:
11310 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11311 fallback != fb_none);
11312 break;
11313
11314 case TRUTH_ANDIF_EXPR:
11315 case TRUTH_ORIF_EXPR:
11316 {
11317 /* Preserve the original type of the expression and the
11318 source location of the outer expression. */
11319 tree org_type = TREE_TYPE (*expr_p);
11320 *expr_p = gimple_boolify (*expr_p);
11321 *expr_p = build3_loc (input_location, COND_EXPR,
11322 org_type, *expr_p,
11323 fold_convert_loc
11324 (input_location,
11325 org_type, boolean_true_node),
11326 fold_convert_loc
11327 (input_location,
11328 org_type, boolean_false_node));
11329 ret = GS_OK;
11330 break;
11331 }
11332
11333 case TRUTH_NOT_EXPR:
11334 {
11335 tree type = TREE_TYPE (*expr_p);
11336 /* The parsers are careful to generate TRUTH_NOT_EXPR
11337 only with operands that are always zero or one.
11338 We do not fold here but handle the only interesting case
11339 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11340 *expr_p = gimple_boolify (*expr_p);
11341 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11342 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11343 TREE_TYPE (*expr_p),
11344 TREE_OPERAND (*expr_p, 0));
11345 else
11346 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11347 TREE_TYPE (*expr_p),
11348 TREE_OPERAND (*expr_p, 0),
11349 build_int_cst (TREE_TYPE (*expr_p), 1));
11350 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11351 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11352 ret = GS_OK;
11353 break;
11354 }
11355
11356 case ADDR_EXPR:
11357 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11358 break;
11359
11360 case ANNOTATE_EXPR:
11361 {
11362 tree cond = TREE_OPERAND (*expr_p, 0);
11363 tree kind = TREE_OPERAND (*expr_p, 1);
11364 tree type = TREE_TYPE (cond);
11365 if (!INTEGRAL_TYPE_P (type))
11366 {
11367 *expr_p = cond;
11368 ret = GS_OK;
11369 break;
11370 }
11371 tree tmp = create_tmp_var (type);
11372 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11373 gcall *call
11374 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11375 gimple_call_set_lhs (call, tmp);
11376 gimplify_seq_add_stmt (pre_p, call);
11377 *expr_p = tmp;
11378 ret = GS_ALL_DONE;
11379 break;
11380 }
11381
11382 case VA_ARG_EXPR:
11383 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11384 break;
11385
11386 CASE_CONVERT:
11387 if (IS_EMPTY_STMT (*expr_p))
11388 {
11389 ret = GS_ALL_DONE;
11390 break;
11391 }
11392
11393 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11394 || fallback == fb_none)
11395 {
11396 /* Just strip a conversion to void (or in void context) and
11397 try again. */
11398 *expr_p = TREE_OPERAND (*expr_p, 0);
11399 ret = GS_OK;
11400 break;
11401 }
11402
11403 ret = gimplify_conversion (expr_p);
11404 if (ret == GS_ERROR)
11405 break;
11406 if (*expr_p != save_expr)
11407 break;
11408 /* FALLTHRU */
11409
11410 case FIX_TRUNC_EXPR:
11411 /* unary_expr: ... | '(' cast ')' val | ... */
11412 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11413 is_gimple_val, fb_rvalue);
11414 recalculate_side_effects (*expr_p);
11415 break;
11416
11417 case INDIRECT_REF:
11418 {
11419 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11420 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11421 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11422
11423 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11424 if (*expr_p != save_expr)
11425 {
11426 ret = GS_OK;
11427 break;
11428 }
11429
11430 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11431 is_gimple_reg, fb_rvalue);
11432 if (ret == GS_ERROR)
11433 break;
11434
11435 recalculate_side_effects (*expr_p);
11436 *expr_p = fold_build2_loc (input_location, MEM_REF,
11437 TREE_TYPE (*expr_p),
11438 TREE_OPERAND (*expr_p, 0),
11439 build_int_cst (saved_ptr_type, 0));
11440 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11441 TREE_THIS_NOTRAP (*expr_p) = notrap;
11442 ret = GS_OK;
11443 break;
11444 }
11445
11446 /* We arrive here through the various re-gimplification paths. */
11447 case MEM_REF:
11448 /* First try re-folding the whole thing. */
11449 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11450 TREE_OPERAND (*expr_p, 0),
11451 TREE_OPERAND (*expr_p, 1));
11452 if (tmp)
11453 {
11454 REF_REVERSE_STORAGE_ORDER (tmp)
11455 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11456 *expr_p = tmp;
11457 recalculate_side_effects (*expr_p);
11458 ret = GS_OK;
11459 break;
11460 }
11461 /* Avoid re-gimplifying the address operand if it is already
11462 in suitable form. Re-gimplifying would mark the address
11463 operand addressable. Always gimplify when not in SSA form
11464 as we still may have to gimplify decls with value-exprs. */
11465 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11466 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11467 {
11468 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11469 is_gimple_mem_ref_addr, fb_rvalue);
11470 if (ret == GS_ERROR)
11471 break;
11472 }
11473 recalculate_side_effects (*expr_p);
11474 ret = GS_ALL_DONE;
11475 break;
11476
11477 /* Constants need not be gimplified. */
11478 case INTEGER_CST:
11479 case REAL_CST:
11480 case FIXED_CST:
11481 case STRING_CST:
11482 case COMPLEX_CST:
11483 case VECTOR_CST:
11484 /* Drop the overflow flag on constants, we do not want
11485 that in the GIMPLE IL. */
11486 if (TREE_OVERFLOW_P (*expr_p))
11487 *expr_p = drop_tree_overflow (*expr_p);
11488 ret = GS_ALL_DONE;
11489 break;
11490
11491 case CONST_DECL:
11492 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11493 CONST_DECL node. Otherwise the decl is replaceable by its
11494 value. */
11495 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11496 if (fallback & fb_lvalue)
11497 ret = GS_ALL_DONE;
11498 else
11499 {
11500 *expr_p = DECL_INITIAL (*expr_p);
11501 ret = GS_OK;
11502 }
11503 break;
11504
11505 case DECL_EXPR:
11506 ret = gimplify_decl_expr (expr_p, pre_p);
11507 break;
11508
11509 case BIND_EXPR:
11510 ret = gimplify_bind_expr (expr_p, pre_p);
11511 break;
11512
11513 case LOOP_EXPR:
11514 ret = gimplify_loop_expr (expr_p, pre_p);
11515 break;
11516
11517 case SWITCH_EXPR:
11518 ret = gimplify_switch_expr (expr_p, pre_p);
11519 break;
11520
11521 case EXIT_EXPR:
11522 ret = gimplify_exit_expr (expr_p);
11523 break;
11524
11525 case GOTO_EXPR:
11526 /* If the target is not LABEL, then it is a computed jump
11527 and the target needs to be gimplified. */
11528 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11529 {
11530 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11531 NULL, is_gimple_val, fb_rvalue);
11532 if (ret == GS_ERROR)
11533 break;
11534 }
11535 gimplify_seq_add_stmt (pre_p,
11536 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11537 ret = GS_ALL_DONE;
11538 break;
11539
11540 case PREDICT_EXPR:
11541 gimplify_seq_add_stmt (pre_p,
11542 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11543 PREDICT_EXPR_OUTCOME (*expr_p)));
11544 ret = GS_ALL_DONE;
11545 break;
11546
11547 case LABEL_EXPR:
11548 ret = gimplify_label_expr (expr_p, pre_p);
11549 label = LABEL_EXPR_LABEL (*expr_p);
11550 gcc_assert (decl_function_context (label) == current_function_decl);
11551
11552 /* If the label is used in a goto statement, or address of the label
11553 is taken, we need to unpoison all variables that were seen so far.
11554 Doing so would prevent us from reporting false positives. */
11555 if (asan_poisoned_variables
11556 && asan_used_labels != NULL
11557 && asan_used_labels->contains (label))
11558 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11559 break;
11560
11561 case CASE_LABEL_EXPR:
11562 ret = gimplify_case_label_expr (expr_p, pre_p);
11563
11564 if (gimplify_ctxp->live_switch_vars)
11565 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11566 pre_p);
11567 break;
11568
11569 case RETURN_EXPR:
11570 ret = gimplify_return_expr (*expr_p, pre_p);
11571 break;
11572
11573 case CONSTRUCTOR:
11574 /* Don't reduce this in place; let gimplify_init_constructor work its
11575 magic. But if we're just elaborating this for side effects, just
11576 gimplify any element that has side-effects. */
11577 if (fallback == fb_none)
11578 {
11579 unsigned HOST_WIDE_INT ix;
11580 tree val;
11581 tree temp = NULL_TREE;
11582 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11583 if (TREE_SIDE_EFFECTS (val))
11584 append_to_statement_list (val, &temp);
11585
11586 *expr_p = temp;
11587 ret = temp ? GS_OK : GS_ALL_DONE;
11588 }
11589 /* C99 code may assign to an array in a constructed
11590 structure or union, and this has undefined behavior only
11591 on execution, so create a temporary if an lvalue is
11592 required. */
11593 else if (fallback == fb_lvalue)
11594 {
11595 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11596 mark_addressable (*expr_p);
11597 ret = GS_OK;
11598 }
11599 else
11600 ret = GS_ALL_DONE;
11601 break;
11602
11603 /* The following are special cases that are not handled by the
11604 original GIMPLE grammar. */
11605
11606 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11607 eliminated. */
11608 case SAVE_EXPR:
11609 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11610 break;
11611
11612 case BIT_FIELD_REF:
11613 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11614 post_p, is_gimple_lvalue, fb_either);
11615 recalculate_side_effects (*expr_p);
11616 break;
11617
11618 case TARGET_MEM_REF:
11619 {
11620 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11621
11622 if (TMR_BASE (*expr_p))
11623 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11624 post_p, is_gimple_mem_ref_addr, fb_either);
11625 if (TMR_INDEX (*expr_p))
11626 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11627 post_p, is_gimple_val, fb_rvalue);
11628 if (TMR_INDEX2 (*expr_p))
11629 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11630 post_p, is_gimple_val, fb_rvalue);
11631 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11632 ret = MIN (r0, r1);
11633 }
11634 break;
11635
11636 case NON_LVALUE_EXPR:
11637 /* This should have been stripped above. */
11638 gcc_unreachable ();
11639
11640 case ASM_EXPR:
11641 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11642 break;
11643
11644 case TRY_FINALLY_EXPR:
11645 case TRY_CATCH_EXPR:
11646 {
11647 gimple_seq eval, cleanup;
11648 gtry *try_;
11649
11650 /* Calls to destructors are generated automatically in FINALLY/CATCH
11651 block. They should have location as UNKNOWN_LOCATION. However,
11652 gimplify_call_expr will reset these call stmts to input_location
11653 if it finds stmt's location is unknown. To prevent resetting for
11654 destructors, we set the input_location to unknown.
11655 Note that this only affects the destructor calls in FINALLY/CATCH
11656 block, and will automatically reset to its original value by the
11657 end of gimplify_expr. */
11658 input_location = UNKNOWN_LOCATION;
11659 eval = cleanup = NULL;
11660 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11661 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11662 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11663 if (gimple_seq_empty_p (cleanup))
11664 {
11665 gimple_seq_add_seq (pre_p, eval);
11666 ret = GS_ALL_DONE;
11667 break;
11668 }
11669 try_ = gimple_build_try (eval, cleanup,
11670 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11671 ? GIMPLE_TRY_FINALLY
11672 : GIMPLE_TRY_CATCH);
11673 if (EXPR_HAS_LOCATION (save_expr))
11674 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11675 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11676 gimple_set_location (try_, saved_location);
11677 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11678 gimple_try_set_catch_is_cleanup (try_,
11679 TRY_CATCH_IS_CLEANUP (*expr_p));
11680 gimplify_seq_add_stmt (pre_p, try_);
11681 ret = GS_ALL_DONE;
11682 break;
11683 }
11684
11685 case CLEANUP_POINT_EXPR:
11686 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11687 break;
11688
11689 case TARGET_EXPR:
11690 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11691 break;
11692
11693 case CATCH_EXPR:
11694 {
11695 gimple *c;
11696 gimple_seq handler = NULL;
11697 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11698 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11699 gimplify_seq_add_stmt (pre_p, c);
11700 ret = GS_ALL_DONE;
11701 break;
11702 }
11703
11704 case EH_FILTER_EXPR:
11705 {
11706 gimple *ehf;
11707 gimple_seq failure = NULL;
11708
11709 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11710 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11711 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11712 gimplify_seq_add_stmt (pre_p, ehf);
11713 ret = GS_ALL_DONE;
11714 break;
11715 }
11716
11717 case OBJ_TYPE_REF:
11718 {
11719 enum gimplify_status r0, r1;
11720 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11721 post_p, is_gimple_val, fb_rvalue);
11722 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11723 post_p, is_gimple_val, fb_rvalue);
11724 TREE_SIDE_EFFECTS (*expr_p) = 0;
11725 ret = MIN (r0, r1);
11726 }
11727 break;
11728
11729 case LABEL_DECL:
11730 /* We get here when taking the address of a label. We mark
11731 the label as "forced"; meaning it can never be removed and
11732 it is a potential target for any computed goto. */
11733 FORCED_LABEL (*expr_p) = 1;
11734 ret = GS_ALL_DONE;
11735 break;
11736
11737 case STATEMENT_LIST:
11738 ret = gimplify_statement_list (expr_p, pre_p);
11739 break;
11740
11741 case WITH_SIZE_EXPR:
11742 {
11743 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11744 post_p == &internal_post ? NULL : post_p,
11745 gimple_test_f, fallback);
11746 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11747 is_gimple_val, fb_rvalue);
11748 ret = GS_ALL_DONE;
11749 }
11750 break;
11751
11752 case VAR_DECL:
11753 case PARM_DECL:
11754 ret = gimplify_var_or_parm_decl (expr_p);
11755 break;
11756
11757 case RESULT_DECL:
11758 /* When within an OMP context, notice uses of variables. */
11759 if (gimplify_omp_ctxp)
11760 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11761 ret = GS_ALL_DONE;
11762 break;
11763
11764 case SSA_NAME:
11765 /* Allow callbacks into the gimplifier during optimization. */
11766 ret = GS_ALL_DONE;
11767 break;
11768
11769 case OMP_PARALLEL:
11770 gimplify_omp_parallel (expr_p, pre_p);
11771 ret = GS_ALL_DONE;
11772 break;
11773
11774 case OMP_TASK:
11775 gimplify_omp_task (expr_p, pre_p);
11776 ret = GS_ALL_DONE;
11777 break;
11778
11779 case OMP_FOR:
11780 case OMP_SIMD:
11781 case CILK_SIMD:
11782 case CILK_FOR:
11783 case OMP_DISTRIBUTE:
11784 case OMP_TASKLOOP:
11785 case OACC_LOOP:
11786 ret = gimplify_omp_for (expr_p, pre_p);
11787 break;
11788
11789 case OACC_CACHE:
11790 gimplify_oacc_cache (expr_p, pre_p);
11791 ret = GS_ALL_DONE;
11792 break;
11793
11794 case OACC_DECLARE:
11795 gimplify_oacc_declare (expr_p, pre_p);
11796 ret = GS_ALL_DONE;
11797 break;
11798
11799 case OACC_HOST_DATA:
11800 case OACC_DATA:
11801 case OACC_KERNELS:
11802 case OACC_PARALLEL:
11803 case OMP_SECTIONS:
11804 case OMP_SINGLE:
11805 case OMP_TARGET:
11806 case OMP_TARGET_DATA:
11807 case OMP_TEAMS:
11808 gimplify_omp_workshare (expr_p, pre_p);
11809 ret = GS_ALL_DONE;
11810 break;
11811
11812 case OACC_ENTER_DATA:
11813 case OACC_EXIT_DATA:
11814 case OACC_UPDATE:
11815 case OMP_TARGET_UPDATE:
11816 case OMP_TARGET_ENTER_DATA:
11817 case OMP_TARGET_EXIT_DATA:
11818 gimplify_omp_target_update (expr_p, pre_p);
11819 ret = GS_ALL_DONE;
11820 break;
11821
11822 case OMP_SECTION:
11823 case OMP_MASTER:
11824 case OMP_TASKGROUP:
11825 case OMP_ORDERED:
11826 case OMP_CRITICAL:
11827 {
11828 gimple_seq body = NULL;
11829 gimple *g;
11830
11831 gimplify_and_add (OMP_BODY (*expr_p), &body);
11832 switch (TREE_CODE (*expr_p))
11833 {
11834 case OMP_SECTION:
11835 g = gimple_build_omp_section (body);
11836 break;
11837 case OMP_MASTER:
11838 g = gimple_build_omp_master (body);
11839 break;
11840 case OMP_TASKGROUP:
11841 {
11842 gimple_seq cleanup = NULL;
11843 tree fn
11844 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11845 g = gimple_build_call (fn, 0);
11846 gimple_seq_add_stmt (&cleanup, g);
11847 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11848 body = NULL;
11849 gimple_seq_add_stmt (&body, g);
11850 g = gimple_build_omp_taskgroup (body);
11851 }
11852 break;
11853 case OMP_ORDERED:
11854 g = gimplify_omp_ordered (*expr_p, body);
11855 break;
11856 case OMP_CRITICAL:
11857 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11858 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11859 gimplify_adjust_omp_clauses (pre_p, body,
11860 &OMP_CRITICAL_CLAUSES (*expr_p),
11861 OMP_CRITICAL);
11862 g = gimple_build_omp_critical (body,
11863 OMP_CRITICAL_NAME (*expr_p),
11864 OMP_CRITICAL_CLAUSES (*expr_p));
11865 break;
11866 default:
11867 gcc_unreachable ();
11868 }
11869 gimplify_seq_add_stmt (pre_p, g);
11870 ret = GS_ALL_DONE;
11871 break;
11872 }
11873
11874 case OMP_ATOMIC:
11875 case OMP_ATOMIC_READ:
11876 case OMP_ATOMIC_CAPTURE_OLD:
11877 case OMP_ATOMIC_CAPTURE_NEW:
11878 ret = gimplify_omp_atomic (expr_p, pre_p);
11879 break;
11880
11881 case TRANSACTION_EXPR:
11882 ret = gimplify_transaction (expr_p, pre_p);
11883 break;
11884
11885 case TRUTH_AND_EXPR:
11886 case TRUTH_OR_EXPR:
11887 case TRUTH_XOR_EXPR:
11888 {
11889 tree orig_type = TREE_TYPE (*expr_p);
11890 tree new_type, xop0, xop1;
11891 *expr_p = gimple_boolify (*expr_p);
11892 new_type = TREE_TYPE (*expr_p);
11893 if (!useless_type_conversion_p (orig_type, new_type))
11894 {
11895 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11896 ret = GS_OK;
11897 break;
11898 }
11899
11900 /* Boolified binary truth expressions are semantically equivalent
11901 to bitwise binary expressions. Canonicalize them to the
11902 bitwise variant. */
11903 switch (TREE_CODE (*expr_p))
11904 {
11905 case TRUTH_AND_EXPR:
11906 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11907 break;
11908 case TRUTH_OR_EXPR:
11909 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11910 break;
11911 case TRUTH_XOR_EXPR:
11912 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11913 break;
11914 default:
11915 break;
11916 }
11917 /* Now make sure that operands have compatible type to
11918 expression's new_type. */
11919 xop0 = TREE_OPERAND (*expr_p, 0);
11920 xop1 = TREE_OPERAND (*expr_p, 1);
11921 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11922 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11923 new_type,
11924 xop0);
11925 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11926 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11927 new_type,
11928 xop1);
11929 /* Continue classified as tcc_binary. */
11930 goto expr_2;
11931 }
11932
11933 case VEC_COND_EXPR:
11934 {
11935 enum gimplify_status r0, r1, r2;
11936
11937 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11938 post_p, is_gimple_condexpr, fb_rvalue);
11939 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11940 post_p, is_gimple_val, fb_rvalue);
11941 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11942 post_p, is_gimple_val, fb_rvalue);
11943
11944 ret = MIN (MIN (r0, r1), r2);
11945 recalculate_side_effects (*expr_p);
11946 }
11947 break;
11948
11949 case FMA_EXPR:
11950 case VEC_PERM_EXPR:
11951 /* Classified as tcc_expression. */
11952 goto expr_3;
11953
11954 case BIT_INSERT_EXPR:
11955 /* Argument 3 is a constant. */
11956 goto expr_2;
11957
11958 case POINTER_PLUS_EXPR:
11959 {
11960 enum gimplify_status r0, r1;
11961 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11962 post_p, is_gimple_val, fb_rvalue);
11963 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11964 post_p, is_gimple_val, fb_rvalue);
11965 recalculate_side_effects (*expr_p);
11966 ret = MIN (r0, r1);
11967 break;
11968 }
11969
11970 case CILK_SYNC_STMT:
11971 {
11972 if (!fn_contains_cilk_spawn_p (cfun))
11973 {
11974 error_at (EXPR_LOCATION (*expr_p),
11975 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
11976 ret = GS_ERROR;
11977 }
11978 else
11979 {
11980 gimplify_cilk_sync (expr_p, pre_p);
11981 ret = GS_ALL_DONE;
11982 }
11983 break;
11984 }
11985
11986 default:
11987 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
11988 {
11989 case tcc_comparison:
11990 /* Handle comparison of objects of non scalar mode aggregates
11991 with a call to memcmp. It would be nice to only have to do
11992 this for variable-sized objects, but then we'd have to allow
11993 the same nest of reference nodes we allow for MODIFY_EXPR and
11994 that's too complex.
11995
11996 Compare scalar mode aggregates as scalar mode values. Using
11997 memcmp for them would be very inefficient at best, and is
11998 plain wrong if bitfields are involved. */
11999 {
12000 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
12001
12002 /* Vector comparisons need no boolification. */
12003 if (TREE_CODE (type) == VECTOR_TYPE)
12004 goto expr_2;
12005 else if (!AGGREGATE_TYPE_P (type))
12006 {
12007 tree org_type = TREE_TYPE (*expr_p);
12008 *expr_p = gimple_boolify (*expr_p);
12009 if (!useless_type_conversion_p (org_type,
12010 TREE_TYPE (*expr_p)))
12011 {
12012 *expr_p = fold_convert_loc (input_location,
12013 org_type, *expr_p);
12014 ret = GS_OK;
12015 }
12016 else
12017 goto expr_2;
12018 }
12019 else if (TYPE_MODE (type) != BLKmode)
12020 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12021 else
12022 ret = gimplify_variable_sized_compare (expr_p);
12023
12024 break;
12025 }
12026
12027 /* If *EXPR_P does not need to be special-cased, handle it
12028 according to its class. */
12029 case tcc_unary:
12030 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12031 post_p, is_gimple_val, fb_rvalue);
12032 break;
12033
12034 case tcc_binary:
12035 expr_2:
12036 {
12037 enum gimplify_status r0, r1;
12038
12039 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12040 post_p, is_gimple_val, fb_rvalue);
12041 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12042 post_p, is_gimple_val, fb_rvalue);
12043
12044 ret = MIN (r0, r1);
12045 break;
12046 }
12047
12048 expr_3:
12049 {
12050 enum gimplify_status r0, r1, r2;
12051
12052 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12053 post_p, is_gimple_val, fb_rvalue);
12054 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12055 post_p, is_gimple_val, fb_rvalue);
12056 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12057 post_p, is_gimple_val, fb_rvalue);
12058
12059 ret = MIN (MIN (r0, r1), r2);
12060 break;
12061 }
12062
12063 case tcc_declaration:
12064 case tcc_constant:
12065 ret = GS_ALL_DONE;
12066 goto dont_recalculate;
12067
12068 default:
12069 gcc_unreachable ();
12070 }
12071
12072 recalculate_side_effects (*expr_p);
12073
12074 dont_recalculate:
12075 break;
12076 }
12077
12078 gcc_assert (*expr_p || ret != GS_OK);
12079 }
12080 while (ret == GS_OK);
12081
12082 /* If we encountered an error_mark somewhere nested inside, either
12083 stub out the statement or propagate the error back out. */
12084 if (ret == GS_ERROR)
12085 {
12086 if (is_statement)
12087 *expr_p = NULL;
12088 goto out;
12089 }
12090
12091 /* This was only valid as a return value from the langhook, which
12092 we handled. Make sure it doesn't escape from any other context. */
12093 gcc_assert (ret != GS_UNHANDLED);
12094
12095 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12096 {
12097 /* We aren't looking for a value, and we don't have a valid
12098 statement. If it doesn't have side-effects, throw it away.
12099 We can also get here with code such as "*&&L;", where L is
12100 a LABEL_DECL that is marked as FORCED_LABEL. */
12101 if (TREE_CODE (*expr_p) == LABEL_DECL
12102 || !TREE_SIDE_EFFECTS (*expr_p))
12103 *expr_p = NULL;
12104 else if (!TREE_THIS_VOLATILE (*expr_p))
12105 {
12106 /* This is probably a _REF that contains something nested that
12107 has side effects. Recurse through the operands to find it. */
12108 enum tree_code code = TREE_CODE (*expr_p);
12109
12110 switch (code)
12111 {
12112 case COMPONENT_REF:
12113 case REALPART_EXPR:
12114 case IMAGPART_EXPR:
12115 case VIEW_CONVERT_EXPR:
12116 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12117 gimple_test_f, fallback);
12118 break;
12119
12120 case ARRAY_REF:
12121 case ARRAY_RANGE_REF:
12122 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12123 gimple_test_f, fallback);
12124 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12125 gimple_test_f, fallback);
12126 break;
12127
12128 default:
12129 /* Anything else with side-effects must be converted to
12130 a valid statement before we get here. */
12131 gcc_unreachable ();
12132 }
12133
12134 *expr_p = NULL;
12135 }
12136 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12137 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12138 {
12139 /* Historically, the compiler has treated a bare reference
12140 to a non-BLKmode volatile lvalue as forcing a load. */
12141 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12142
12143 /* Normally, we do not want to create a temporary for a
12144 TREE_ADDRESSABLE type because such a type should not be
12145 copied by bitwise-assignment. However, we make an
12146 exception here, as all we are doing here is ensuring that
12147 we read the bytes that make up the type. We use
12148 create_tmp_var_raw because create_tmp_var will abort when
12149 given a TREE_ADDRESSABLE type. */
12150 tree tmp = create_tmp_var_raw (type, "vol");
12151 gimple_add_tmp_var (tmp);
12152 gimplify_assign (tmp, *expr_p, pre_p);
12153 *expr_p = NULL;
12154 }
12155 else
12156 /* We can't do anything useful with a volatile reference to
12157 an incomplete type, so just throw it away. Likewise for
12158 a BLKmode type, since any implicit inner load should
12159 already have been turned into an explicit one by the
12160 gimplification process. */
12161 *expr_p = NULL;
12162 }
12163
12164 /* If we are gimplifying at the statement level, we're done. Tack
12165 everything together and return. */
12166 if (fallback == fb_none || is_statement)
12167 {
12168 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12169 it out for GC to reclaim it. */
12170 *expr_p = NULL_TREE;
12171
12172 if (!gimple_seq_empty_p (internal_pre)
12173 || !gimple_seq_empty_p (internal_post))
12174 {
12175 gimplify_seq_add_seq (&internal_pre, internal_post);
12176 gimplify_seq_add_seq (pre_p, internal_pre);
12177 }
12178
12179 /* The result of gimplifying *EXPR_P is going to be the last few
12180 statements in *PRE_P and *POST_P. Add location information
12181 to all the statements that were added by the gimplification
12182 helpers. */
12183 if (!gimple_seq_empty_p (*pre_p))
12184 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12185
12186 if (!gimple_seq_empty_p (*post_p))
12187 annotate_all_with_location_after (*post_p, post_last_gsi,
12188 input_location);
12189
12190 goto out;
12191 }
12192
12193 #ifdef ENABLE_GIMPLE_CHECKING
12194 if (*expr_p)
12195 {
12196 enum tree_code code = TREE_CODE (*expr_p);
12197 /* These expressions should already be in gimple IR form. */
12198 gcc_assert (code != MODIFY_EXPR
12199 && code != ASM_EXPR
12200 && code != BIND_EXPR
12201 && code != CATCH_EXPR
12202 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12203 && code != EH_FILTER_EXPR
12204 && code != GOTO_EXPR
12205 && code != LABEL_EXPR
12206 && code != LOOP_EXPR
12207 && code != SWITCH_EXPR
12208 && code != TRY_FINALLY_EXPR
12209 && code != OACC_PARALLEL
12210 && code != OACC_KERNELS
12211 && code != OACC_DATA
12212 && code != OACC_HOST_DATA
12213 && code != OACC_DECLARE
12214 && code != OACC_UPDATE
12215 && code != OACC_ENTER_DATA
12216 && code != OACC_EXIT_DATA
12217 && code != OACC_CACHE
12218 && code != OMP_CRITICAL
12219 && code != OMP_FOR
12220 && code != OACC_LOOP
12221 && code != OMP_MASTER
12222 && code != OMP_TASKGROUP
12223 && code != OMP_ORDERED
12224 && code != OMP_PARALLEL
12225 && code != OMP_SECTIONS
12226 && code != OMP_SECTION
12227 && code != OMP_SINGLE);
12228 }
12229 #endif
12230
12231 /* Otherwise we're gimplifying a subexpression, so the resulting
12232 value is interesting. If it's a valid operand that matches
12233 GIMPLE_TEST_F, we're done. Unless we are handling some
12234 post-effects internally; if that's the case, we need to copy into
12235 a temporary before adding the post-effects to POST_P. */
12236 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12237 goto out;
12238
12239 /* Otherwise, we need to create a new temporary for the gimplified
12240 expression. */
12241
12242 /* We can't return an lvalue if we have an internal postqueue. The
12243 object the lvalue refers to would (probably) be modified by the
12244 postqueue; we need to copy the value out first, which means an
12245 rvalue. */
12246 if ((fallback & fb_lvalue)
12247 && gimple_seq_empty_p (internal_post)
12248 && is_gimple_addressable (*expr_p))
12249 {
12250 /* An lvalue will do. Take the address of the expression, store it
12251 in a temporary, and replace the expression with an INDIRECT_REF of
12252 that temporary. */
12253 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12254 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12255 *expr_p = build_simple_mem_ref (tmp);
12256 }
12257 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12258 {
12259 /* An rvalue will do. Assign the gimplified expression into a
12260 new temporary TMP and replace the original expression with
12261 TMP. First, make sure that the expression has a type so that
12262 it can be assigned into a temporary. */
12263 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12264 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12265 }
12266 else
12267 {
12268 #ifdef ENABLE_GIMPLE_CHECKING
12269 if (!(fallback & fb_mayfail))
12270 {
12271 fprintf (stderr, "gimplification failed:\n");
12272 print_generic_expr (stderr, *expr_p);
12273 debug_tree (*expr_p);
12274 internal_error ("gimplification failed");
12275 }
12276 #endif
12277 gcc_assert (fallback & fb_mayfail);
12278
12279 /* If this is an asm statement, and the user asked for the
12280 impossible, don't die. Fail and let gimplify_asm_expr
12281 issue an error. */
12282 ret = GS_ERROR;
12283 goto out;
12284 }
12285
12286 /* Make sure the temporary matches our predicate. */
12287 gcc_assert ((*gimple_test_f) (*expr_p));
12288
12289 if (!gimple_seq_empty_p (internal_post))
12290 {
12291 annotate_all_with_location (internal_post, input_location);
12292 gimplify_seq_add_seq (pre_p, internal_post);
12293 }
12294
12295 out:
12296 input_location = saved_location;
12297 return ret;
12298 }
12299
12300 /* Like gimplify_expr but make sure the gimplified result is not itself
12301 a SSA name (but a decl if it were). Temporaries required by
12302 evaluating *EXPR_P may be still SSA names. */
12303
12304 static enum gimplify_status
12305 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12306 bool (*gimple_test_f) (tree), fallback_t fallback,
12307 bool allow_ssa)
12308 {
12309 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12310 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12311 gimple_test_f, fallback);
12312 if (! allow_ssa
12313 && TREE_CODE (*expr_p) == SSA_NAME)
12314 {
12315 tree name = *expr_p;
12316 if (was_ssa_name_p)
12317 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12318 else
12319 {
12320 /* Avoid the extra copy if possible. */
12321 *expr_p = create_tmp_reg (TREE_TYPE (name));
12322 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12323 release_ssa_name (name);
12324 }
12325 }
12326 return ret;
12327 }
12328
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  /* Nothing to do for a missing or erroneous type.  */
  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  /* Mark the type up front so recursive calls below terminate on
     self-referential types.  */
  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: the bounds themselves may be variable-sized
	 expressions (e.g. a variant range type).  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      /* Propagate the gimplified bounds to every other variant.  */
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  /* Clear DECL_IGNORED_P on the artificial bound variables so
	     they survive for debug information.  */
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Aggregates: gimplify the offset and size of each field, and
	 recurse into the field types themselves.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* In all cases, gimplify the overall size and unit size of the type.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  /* Copy the gimplified sizes to every other variant and mark them done.  */
  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
12431
12432 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12433 a size or position, has had all of its SAVE_EXPRs evaluated.
12434 We add any required statements to *STMT_P. */
12435
12436 void
12437 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12438 {
12439 tree expr = *expr_p;
12440
12441 /* We don't do anything if the value isn't there, is constant, or contains
12442 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12443 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12444 will want to replace it with a new variable, but that will cause problems
12445 if this type is from outside the function. It's OK to have that here. */
12446 if (is_gimple_sizepos (expr))
12447 return;
12448
12449 *expr_p = unshare_expr (expr);
12450
12451 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12452 if the def vanishes. */
12453 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12454 }
12455
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple *outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* There must be no gimplification context active on entry; push a
     fresh one (in SSA mode) for this body.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      /* Functions marked "omp declare target" get an implicit target
	 region context for the whole body.  */
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* For nested functions, track VLAs of the enclosing function that are
     referenced non-locally.  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs one statement to hang the bind on.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been consumed; drop it.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Declare any variables created for non-locally referenced VLAs and
     tear down the tracking set.  */
  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  /* Release any OMP context opened above (or left over from the body).  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
12582
12583 typedef char *char_p; /* For DEF_VEC_P. */
12584
12585 /* Return whether we should exclude FNDECL from instrumentation. */
12586
12587 static bool
12588 flag_instrument_functions_exclude_p (tree fndecl)
12589 {
12590 vec<char_p> *v;
12591
12592 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12593 if (v && v->length () > 0)
12594 {
12595 const char *name;
12596 int i;
12597 char *s;
12598
12599 name = lang_hooks.decl_printable_name (fndecl, 0);
12600 FOR_EACH_VEC_ELT (*v, i, s)
12601 if (strstr (name, s) != NULL)
12602 return true;
12603 }
12604
12605 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12606 if (v && v->length () > 0)
12607 {
12608 const char *name;
12609 int i;
12610 char *s;
12611
12612 name = DECL_SOURCE_FILE (fndecl);
12613 FOR_EACH_VEC_ELT (*v, i, s)
12614 if (strstr (name, s) != NULL)
12615 return true;
12616 }
12617
12618 return false;
12619 }
12620
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  /* The function must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the result decl.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  /* Track poisoned variables for use-after-scope ASan instrumentation
     while gimplifying the body.  */
  if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      /* Build the cleanup (exit-hook) sequence:
	   return_addr = __builtin_return_address (0);
	   __cyg_profile_func_exit (current_function, return_addr);  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry-hook sequence, then the try/finally wrapping
	 the original body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* For -fsanitize=thread, wrap the body so TSAN_FUNC_EXIT runs on every
     exit path.  */
  if (sanitize_flags_p (SANITIZE_THREAD))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
12749
12750 /* Return a dummy expression of type TYPE in order to keep going after an
12751 error. */
12752
12753 static tree
12754 dummy_object (tree type)
12755 {
12756 tree t = build_int_cst (build_pointer_type (type), 0);
12757 return build2 (MEM_REF, type, t, t);
12758 }
12759
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the help note only once per compilation.  */
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      source_location xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* Lower VA_ARG_EXPR to the internal function IFN_VA_ARG; it is expanded
     to the target-specific form later.  TAG carries the requested type,
     APTAG the va_list type.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
12839
12840 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
12841
12842 DST/SRC are the destination and source respectively. You can pass
12843 ungimplified trees in DST or SRC, in which case they will be
12844 converted to a gimple operand if necessary.
12845
12846 This function returns the newly created GIMPLE_ASSIGN tuple. */
12847
12848 gimple *
12849 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
12850 {
12851 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12852 gimplify_and_add (t, seq_p);
12853 ggc_free (t);
12854 return gimple_seq_last_stmt (*seq_p);
12855 }
12856
12857 inline hashval_t
12858 gimplify_hasher::hash (const elt_t *p)
12859 {
12860 tree t = p->val;
12861 return iterative_hash_expr (t, 0);
12862 }
12863
12864 inline bool
12865 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
12866 {
12867 tree t1 = p1->val;
12868 tree t2 = p2->val;
12869 enum tree_code code = TREE_CODE (t1);
12870
12871 if (TREE_CODE (t2) != code
12872 || TREE_TYPE (t1) != TREE_TYPE (t2))
12873 return false;
12874
12875 if (!operand_equal_p (t1, t2, 0))
12876 return false;
12877
12878 /* Only allow them to compare equal if they also hash equal; otherwise
12879 results are nondeterminate, and we fail bootstrap comparison. */
12880 gcc_checking_assert (hash (p1) == hash (p2));
12881
12882 return true;
12883 }