/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "cilk.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP: the mapping is an "always, to" or "always, tofrom"
     mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 524288,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
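
/* These flags are OR'ed together per variable.  For example, a variable
   named in an explicit OpenMP 'shared' clause is entered in the context's
   splay tree as roughly GOVD_SHARED | GOVD_EXPLICIT, with GOVD_SEEN OR'ed
   in once a use of the variable is noticed inside the region (a sketch of
   the encoding, not an exhaustive description of every flag involved).  */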

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
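
/* For the parallel, teams and target regions, setting the low-order bit
   marks a combined construct: e.g. the body of '#pragma omp parallel for'
   is gimplified in an ORT_COMBINED_PARALLEL (ORT_PARALLEL | 1) region,
   while a stand-alone '#pragma omp parallel' uses plain ORT_PARALLEL
   (a rough reading of the encoding above, not a normative rule).  */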

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);

/* Shorter alias name for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
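
/* A typical caller pairs these as, e.g. (a sketch of the usage pattern
   found elsewhere in GCC, not a complete excerpt):

     push_gimplify_context ();
     ... gimplify a body, producing an outer GIMPLE_BIND 'bind' ...
     pop_gimplify_context (bind);

   so that any temporaries created in between are declared in BIND.  */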

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the topmost element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the topmost element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
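
/* In other words, when optimizing, two formal-temporary requests for equal
   VAL expressions within one gimplification context hit the same hash-table
   slot and hand back the same temporary, so the value is computed once and
   reused (a sketch of the intent; the table is keyed on the value
   expression).  */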

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
			 bool allow_ssa)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}

\f
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
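
/* For instance, if the GENERIC tree for 'x + y' were referenced from two
   MODIFY_EXPRs, the first walk over it sets TREE_VISITED on the PLUS_EXPR;
   reaching it again through the second reference finds the mark and replaces
   that reference with a fresh copy (an illustrative sketch of the
   mark/copy/unmark scheme described above).  */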

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
\f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
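
/* As an illustration, for a GNU statement expression used as a value,
   e.g. 'x = ({ foo (); 42; });', the wrappers are given void type and the
   trailing expression becomes 'retval = 42', with 'retval' handed back to
   the caller (a sketch; the actual temporary is a compiler-generated
   name).  */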

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
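
/* The resulting pair appears in GIMPLE dumps roughly as:

     saved_stack.N = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.N);

   (illustrative dump syntax; N is a compiler-assigned suffix).  */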

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}
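
/* In a GIMPLE dump this shows up roughly as:

     ASAN_MARK (POISON, &x, 4);

   for a 4-byte variable 'x' (illustrative; the exact rendering of the
   internal function may differ between dump formats).  */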

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
   the POISON flag, the shadow memory of variable DECL.  The call is
   inserted at the position identified by iterator IT; BEFORE selects
   whether it goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  /* When within an OMP context, do not emit ASAN_MARK internal fns.  */
  if (gimplify_omp_ctxp)
    return;

  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
   flag, either poisons or unpoisons DECL.  The created statement is
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal calls for all VARIABLES, poisoning or
   unpoisoning depending on the POISON flag.  The created statements are
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in order
	 to prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_constructor (TREE_TYPE (t), NULL);
	      gimple *clobber_stmt;
	      TREE_THIS_VOLATILE (clobber) = 1;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
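
/* For a block whose body allocated VLAs, the resulting GIMPLE_BIND body
   therefore looks roughly like:

     saved_stack.N = __builtin_stack_save ();
     try
       {
	 ... original body ...
       }
     finally
       {
	 __builtin_stack_restore (saved_stack.N);
	 v = {CLOBBER};
       }

   (an illustrative sketch of the shape built above, not verbatim dump
   output).  */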

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are within a conditional context, add a PREDICT statement
     marking the early return as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Implicit _Cilk_sync must be inserted right before any return statement
     if there is a _Cilk_spawn in the function.  If the user has provided a
     _Cilk_sync, the optimizer should remove this duplicate one.  */
  if (fn_contains_cilk_spawn_p (cfun))
    {
      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
      gimplify_and_add (impl_sync, pre_p);
    }

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
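
/* So for a function returning a scalar, 'return x + 1;' gimplifies to
   something like:

     D.N = x + 1;
     return D.N;

   with D.N shared by all return statements in the function via
   gimplify_ctxp->return_temp (a sketch; D.N stands for a compiler-generated
   temporary).  */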

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}
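
/* Concretely, for 'int a[n];' this emits roughly:

     a.N = __builtin_alloca_with_align (D.size, align);

   and rewrites every later use of 'a' as '*a.N' through DECL_VALUE_EXPR
   (a sketch; the exact alloca builtin chosen depends on configuration).  */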

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope))
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
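
/* The emitted shape is roughly:

     <start>:
     ... body ...
     goto <start>;
     <exit>:    (only if the body contained an EXIT_EXPR)

   (a sketch of the sequence built above; label names are artificial).  */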

/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}

/* Callback for walk_gimple_seq.  */

static tree
warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      /* Fall through.  */
    default:
      /* Save the first "real" statement (not a decl/lexical scope/...).  */
      wi->info = stmt;
      return integer_zero_node;
    }
  return NULL_TREE;
}

/* Possibly warn about unreachable statements between switch's controlling
   expression and the first case.  SEQ is the body of a switch expression.  */

static void
maybe_warn_switch_unreachable (gimple_seq seq)
{
  if (!warn_switch_unreachable
      /* This warning doesn't play well with Fortran when optimizations
	 are on.  */
      || lang_GNU_Fortran ()
      || seq == NULL)
    return;

  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
  gimple *stmt = (gimple *) wi.info;

  if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
    {
      if (gimple_code (stmt) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
	  && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
	/* Don't warn for compiler-generated gotos.  These occur
	   in Duff's devices, for example.  */;
      else
	warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
		    "statement will never be executed");
    }
}


/* A label entry that pairs label and a location.  */
struct label_entry
{
  tree label;
  location_t loc;
};

/* Find LABEL in vector of label entries VEC.  */

static struct label_entry *
find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
{
  unsigned int i;
  struct label_entry *l;

  FOR_EACH_VEC_ELT (*vec, i, l)
    if (l->label == label)
      return l;
  return NULL;
}

/* Return true if LABEL, a LABEL_DECL, represents a case label
   in a vector of labels CASES.  */

static bool
case_label_p (const vec<tree> *cases, tree label)
{
  unsigned int i;
  tree l;

  FOR_EACH_VEC_ELT (*cases, i, l)
    if (CASE_LABEL (l) == label)
      return true;
  return false;
}

/* Find the last statement in a scope STMT.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    default:
      return stmt;
    }
}
1896
1897 /* Collect interesting labels in LABELS and return the statement preceding
1898 another case label, or a user-defined label. */
1899
1900 static gimple *
1901 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
1902 auto_vec <struct label_entry> *labels)
1903 {
1904 gimple *prev = NULL;
1905
1906 do
1907 {
1908 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
1909 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
1910 {
1911 /* Nested scope. Only look at the last statement of
1912 the innermost scope. */
1913 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
1914 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
1915 if (last)
1916 {
1917 prev = last;
1918 /* It might be a label without a location. In that
1919 case, use the location of the scope. */
1920 if (!gimple_has_location (prev))
1921 gimple_set_location (prev, bind_loc);
1922 }
1923 gsi_next (gsi_p);
1924 continue;
1925 }
1926
1927 /* Ifs are tricky. */
1928 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
1929 {
1930 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
1931 tree false_lab = gimple_cond_false_label (cond_stmt);
1932 location_t if_loc = gimple_location (cond_stmt);
1933
1934 /* If we have e.g.
1935 if (i > 1) goto <D.2259>; else goto D;
1936 we can't do much with the else-branch. */
1937 if (!DECL_ARTIFICIAL (false_lab))
1938 break;
1939
1940 /* Go on until the false label, then one step back. */
1941 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
1942 {
1943 gimple *stmt = gsi_stmt (*gsi_p);
1944 if (gimple_code (stmt) == GIMPLE_LABEL
1945 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
1946 break;
1947 }
1948
1949 /* Not found? Oops. */
1950 if (gsi_end_p (*gsi_p))
1951 break;
1952
1953 struct label_entry l = { false_lab, if_loc };
1954 labels->safe_push (l);
1955
1956 /* Go to the last statement of the then branch. */
1957 gsi_prev (gsi_p);
1958
1959 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
1960 <D.1759>:
1961 <stmt>;
1962 goto <D.1761>;
1963 <D.1760>:
1964 */
1965 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
1966 && !gimple_has_location (gsi_stmt (*gsi_p)))
1967 {
1968 /* Look at the statement before; it might be an
1969 attribute fallthrough, in which case don't warn. */
1970 gsi_prev (gsi_p);
1971 bool fallthru_before_dest
1972 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
1973 gsi_next (gsi_p);
1974 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
1975 if (!fallthru_before_dest)
1976 {
1977 struct label_entry l = { goto_dest, if_loc };
1978 labels->safe_push (l);
1979 }
1980 }
1981 /* And move back. */
1982 gsi_next (gsi_p);
1983 }
1984
1985 /* Remember the last statement. Skip labels that are of no interest
1986 to us. */
1987 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
1988 {
1989 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
1990 if (find_label_entry (labels, label))
1991 prev = gsi_stmt (*gsi_p);
1992 }
1993 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
1994 ;
1995 else
1996 prev = gsi_stmt (*gsi_p);
1997 gsi_next (gsi_p);
1998 }
1999 while (!gsi_end_p (*gsi_p)
2000 /* Stop if we find a case or a user-defined label. */
2001 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2002 || !gimple_has_location (gsi_stmt (*gsi_p))));
2003
2004 return prev;
2005 }
2006
2007 /* Return true if the switch fallthrough warning should occur. LABEL is
2008 the label statement that we're falling through to. */
2009
2010 static bool
2011 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2012 {
2013 gimple_stmt_iterator gsi = *gsi_p;
2014
2015 /* Don't warn if the label is marked with a "falls through" comment. */
2016 if (FALLTHROUGH_LABEL_P (label))
2017 return false;
2018
2019 /* Don't warn for non-case labels followed by a statement:
2020 case 0:
2021 foo ();
2022 label:
2023 bar ();
2024 as these are likely intentional. */
2025 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2026 {
2027 tree l;
2028 while (!gsi_end_p (gsi)
2029 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2030 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2031 && !case_label_p (&gimplify_ctxp->case_labels, l))
2032 gsi_next (&gsi);
2033 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2034 return false;
2035 }
2036
2037 /* Don't warn for terminated branches, i.e. when the subsequent case label
2038 immediately breaks. */
2039 gsi = *gsi_p;
2040
2041 /* Skip all immediately following labels. */
2042 while (!gsi_end_p (gsi)
2043 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2044 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2045 gsi_next (&gsi);
2046
2047 /* { ... something; default:; } */
2048 if (gsi_end_p (gsi)
2049 /* { ... something; default: break; } or
2050 { ... something; default: goto L; } */
2051 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2052 /* { ... something; default: return; } */
2053 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2054 return false;
2055
2056 return true;
2057 }
2058
2059 /* Callback for walk_gimple_seq. */
2060
2061 static tree
2062 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2063 struct walk_stmt_info *)
2064 {
2065 gimple *stmt = gsi_stmt (*gsi_p);
2066
2067 *handled_ops_p = true;
2068 switch (gimple_code (stmt))
2069 {
2070 case GIMPLE_TRY:
2071 case GIMPLE_BIND:
2072 case GIMPLE_CATCH:
2073 case GIMPLE_EH_FILTER:
2074 case GIMPLE_TRANSACTION:
2075 /* Walk the sub-statements. */
2076 *handled_ops_p = false;
2077 break;
2078
2079 /* Find a sequence of the form:
2080
2081 GIMPLE_LABEL
2082 [...]
2083 <may fallthru stmt>
2084 GIMPLE_LABEL
2085
2086 and possibly warn. */
2087 case GIMPLE_LABEL:
2088 {
2089 /* Found a label. Skip all immediately following labels. */
2090 while (!gsi_end_p (*gsi_p)
2091 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2092 gsi_next (gsi_p);
2093
2094 /* There might be no more statements. */
2095 if (gsi_end_p (*gsi_p))
2096 return integer_zero_node;
2097
2098 /* Vector of labels that fall through. */
2099 auto_vec <struct label_entry> labels;
2100 gimple *prev = collect_fallthrough_labels (gsi_p, &labels);
2101
2102 /* There might be no more statements. */
2103 if (gsi_end_p (*gsi_p))
2104 return integer_zero_node;
2105
2106 gimple *next = gsi_stmt (*gsi_p);
2107 tree label;
2108 /* If what follows is a label, then we may have a fallthrough. */
2109 if (gimple_code (next) == GIMPLE_LABEL
2110 && gimple_has_location (next)
2111 && (label = gimple_label_label (as_a <glabel *> (next)))
2112 && prev != NULL)
2113 {
2114 struct label_entry *l;
2115 bool warned_p = false;
2116 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2117 /* Quiet. */;
2118 else if (gimple_code (prev) == GIMPLE_LABEL
2119 && (label = gimple_label_label (as_a <glabel *> (prev)))
2120 && (l = find_label_entry (&labels, label)))
2121 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2122 "this statement may fall through");
2123 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2124 /* Try to be clever and don't warn when the statement
2125 can't actually fall through. */
2126 && gimple_stmt_may_fallthru (prev)
2127 && gimple_has_location (prev))
2128 warned_p = warning_at (gimple_location (prev),
2129 OPT_Wimplicit_fallthrough_,
2130 "this statement may fall through");
2131 if (warned_p)
2132 inform (gimple_location (next), "here");
2133
2134 /* Mark this label as processed so as to prevent multiple
2135 warnings in nested switches. */
2136 FALLTHROUGH_LABEL_P (label) = true;
2137
2138 /* So that the next warn_implicit_fallthrough_r will start looking for
2139 a new sequence starting with this label. */
2140 gsi_prev (gsi_p);
2141 }
2142 }
2143 break;
2144 default:
2145 break;
2146 }
2147 return NULL_TREE;
2148 }
2149
2150 /* Warn when a switch case falls through. */
2151
2152 static void
2153 maybe_warn_implicit_fallthrough (gimple_seq seq)
2154 {
2155 if (!warn_implicit_fallthrough)
2156 return;
2157
2158 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2159 if (!(lang_GNU_C ()
2160 || lang_GNU_CXX ()
2161 || lang_GNU_OBJC ()))
2162 return;
2163
2164 struct walk_stmt_info wi;
2165 memset (&wi, 0, sizeof (wi));
2166 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2167 }
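
/* A sketch of the diagnostic (illustrative only):

     switch (c)
       {
       case 0:
         x = 1;      <-- warning: this statement may fall through
       case 1:       <-- note: here
         x = 2;
         break;
       }

   The fallthrough can be made explicit, silencing the warning, with a
   "falls through" comment or with __attribute__((fallthrough)), which
   the front end lowers to the IFN_FALLTHROUGH call recognized above.  */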
2168
2169 /* Callback for walk_gimple_seq. */
2170
2171 static tree
2172 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2173 struct walk_stmt_info *)
2174 {
2175 gimple *stmt = gsi_stmt (*gsi_p);
2176
2177 *handled_ops_p = true;
2178 switch (gimple_code (stmt))
2179 {
2180 case GIMPLE_TRY:
2181 case GIMPLE_BIND:
2182 case GIMPLE_CATCH:
2183 case GIMPLE_EH_FILTER:
2184 case GIMPLE_TRANSACTION:
2185 /* Walk the sub-statements. */
2186 *handled_ops_p = false;
2187 break;
2188 case GIMPLE_CALL:
2189 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2190 {
2191 gsi_remove (gsi_p, true);
2192 if (gsi_end_p (*gsi_p))
2193 return integer_zero_node;
2194
2195 bool found = false;
2196 location_t loc = gimple_location (stmt);
2197
2198 gimple_stmt_iterator gsi2 = *gsi_p;
2199 stmt = gsi_stmt (gsi2);
2200 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2201 {
2202 /* Go on until the artificial label. */
2203 tree goto_dest = gimple_goto_dest (stmt);
2204 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2205 {
2206 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2207 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2208 == goto_dest)
2209 break;
2210 }
2211
2212 /* Not found? Stop. */
2213 if (gsi_end_p (gsi2))
2214 break;
2215
2216 /* Look one past it. */
2217 gsi_next (&gsi2);
2218 }
2219
2220 /* We're looking for a case label or default label here. */
2221 while (!gsi_end_p (gsi2))
2222 {
2223 stmt = gsi_stmt (gsi2);
2224 if (gimple_code (stmt) == GIMPLE_LABEL)
2225 {
2226 tree label = gimple_label_label (as_a <glabel *> (stmt));
2227 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2228 {
2229 found = true;
2230 break;
2231 }
2232 }
2233 else
2234 /* Something other than a label. That's not expected. */
2235 break;
2236 gsi_next (&gsi2);
2237 }
2238 if (!found)
2239 warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
2240 "a case label or default label");
2241 }
2242 break;
2243 default:
2244 break;
2245 }
2246 return NULL_TREE;
2247 }
2248
2249 /* Expand all FALLTHROUGH () calls in SEQ. */
2250
2251 static void
2252 expand_FALLTHROUGH (gimple_seq *seq_p)
2253 {
2254 struct walk_stmt_info wi;
2255 memset (&wi, 0, sizeof (wi));
2256 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2257 }
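
/* For example (a sketch): the attribute is only meaningful immediately
   before a case or default label, so

     case 0:
       __attribute__((fallthrough));
       x = 1;

   triggers the "not preceding a case label or default label" warning
   above, whereas

     case 0:
       x = 1;
       __attribute__((fallthrough));
     case 1:

   is accepted and merely suppresses -Wimplicit-fallthrough.  */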
2258
2259 \f
2260 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2261 branch to. */
2262
2263 static enum gimplify_status
2264 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2265 {
2266 tree switch_expr = *expr_p;
2267 gimple_seq switch_body_seq = NULL;
2268 enum gimplify_status ret;
2269 tree index_type = TREE_TYPE (switch_expr);
2270 if (index_type == NULL_TREE)
2271 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2272
2273 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2274 fb_rvalue);
2275 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2276 return ret;
2277
2278 if (SWITCH_BODY (switch_expr))
2279 {
2280 vec<tree> labels;
2281 vec<tree> saved_labels;
2282 hash_set<tree> *saved_live_switch_vars = NULL;
2283 tree default_case = NULL_TREE;
2284 gswitch *switch_stmt;
2285
2286 /* If someone can be bothered to fill in the labels, they can
2287 be bothered to null out the body too. */
2288 gcc_assert (!SWITCH_LABELS (switch_expr));
2289
2290 /* Save old labels, get new ones from body, then restore the old
2291 labels. Collect everything from the switch body so it can be appended afterwards. */
2292 saved_labels = gimplify_ctxp->case_labels;
2293 gimplify_ctxp->case_labels.create (8);
2294
2295 /* Only create live_switch_vars if SWITCH_BODY is a BIND_EXPR or a STATEMENT_LIST. */
2296 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2297 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2298 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2299 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2300 else
2301 gimplify_ctxp->live_switch_vars = NULL;
2302
2303 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2304 gimplify_ctxp->in_switch_expr = true;
2305
2306 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2307
2308 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2309 maybe_warn_switch_unreachable (switch_body_seq);
2310 maybe_warn_implicit_fallthrough (switch_body_seq);
2311 /* Only do this for the outermost GIMPLE_SWITCH. */
2312 if (!gimplify_ctxp->in_switch_expr)
2313 expand_FALLTHROUGH (&switch_body_seq);
2314
2315 labels = gimplify_ctxp->case_labels;
2316 gimplify_ctxp->case_labels = saved_labels;
2317
2318 if (gimplify_ctxp->live_switch_vars)
2319 {
2320 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
2321 delete gimplify_ctxp->live_switch_vars;
2322 }
2323 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2324
2325 preprocess_case_label_vec_for_gimple (labels, index_type,
2326 &default_case);
2327
2328 if (!default_case)
2329 {
2330 glabel *new_default;
2331
2332 default_case
2333 = build_case_label (NULL_TREE, NULL_TREE,
2334 create_artificial_label (UNKNOWN_LOCATION));
2335 new_default = gimple_build_label (CASE_LABEL (default_case));
2336 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2337 }
2338
2339 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2340 default_case, labels);
2341 gimplify_seq_add_stmt (pre_p, switch_stmt);
2342 gimplify_seq_add_seq (pre_p, switch_body_seq);
2343 labels.release ();
2344 }
2345 else
2346 gcc_assert (SWITCH_LABELS (switch_expr));
2347
2348 return GS_ALL_DONE;
2349 }
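
/* Roughly (a sketch; the dump details vary), this turns

     switch (i) { case 1: a (); break; default: b (); }

   into a GIMPLE_SWITCH followed by its flattened body:

     switch (i) <default: <D.3>, case 1: <D.1>>
     <D.1>:
     a ();
     goto <D.2>;
     <D.3>:
     b ();
     <D.2>:

   where the case-label vector was collected by gimplify_case_label_expr
   below and canonicalized by preprocess_case_label_vec_for_gimple.  */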
2350
2351 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2352
2353 static enum gimplify_status
2354 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2355 {
2356 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2357 == current_function_decl);
2358
2359 tree label = LABEL_EXPR_LABEL (*expr_p);
2360 glabel *label_stmt = gimple_build_label (label);
2361 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2362 gimplify_seq_add_stmt (pre_p, label_stmt);
2363
2364 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2365 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2366 NOT_TAKEN));
2367 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2368 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2369 TAKEN));
2370
2371 return GS_ALL_DONE;
2372 }
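
/* For illustration (a sketch): a label annotated as

     err: __attribute__((cold));

   gets a GIMPLE_PREDICT (PRED_COLD_LABEL, NOT_TAKEN) queued right after
   its GIMPLE_LABEL, which later biases branch probabilities away from
   the label's block.  */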
2373
2374 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2375
2376 static enum gimplify_status
2377 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2378 {
2379 struct gimplify_ctx *ctxp;
2380 glabel *label_stmt;
2381
2382 /* Invalid programs can play Duff's Device type games with, for example,
2383 #pragma omp parallel. At least in the C front end, we don't
2384 detect such invalid branches until after gimplification, in the
2385 diagnose_omp_blocks pass. */
2386 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2387 if (ctxp->case_labels.exists ())
2388 break;
2389
2390 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2391 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2392 ctxp->case_labels.safe_push (*expr_p);
2393 gimplify_seq_add_stmt (pre_p, label_stmt);
2394
2395 return GS_ALL_DONE;
2396 }
2397
2398 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2399 if necessary. */
2400
2401 tree
2402 build_and_jump (tree *label_p)
2403 {
2404 if (label_p == NULL)
2405 /* If there's nowhere to jump, just fall through. */
2406 return NULL_TREE;
2407
2408 if (*label_p == NULL_TREE)
2409 {
2410 tree label = create_artificial_label (UNKNOWN_LOCATION);
2411 *label_p = label;
2412 }
2413
2414 return build1 (GOTO_EXPR, void_type_node, *label_p);
2415 }
2416
2417 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2418 This also involves building a label to jump to and communicating it to
2419 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2420
2421 static enum gimplify_status
2422 gimplify_exit_expr (tree *expr_p)
2423 {
2424 tree cond = TREE_OPERAND (*expr_p, 0);
2425 tree expr;
2426
2427 expr = build_and_jump (&gimplify_ctxp->exit_label);
2428 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2429 *expr_p = expr;
2430
2431 return GS_OK;
2432 }
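
/* E.g. (a sketch): an EXIT_EXPR <cond> inside a LOOP_EXPR becomes

     if (cond) goto <exit_label>;

   where <exit_label> is created on demand here and emitted after the
   loop body by gimplify_loop_expr.  */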
2433
2434 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2435 different from its canonical type, wrap the whole thing inside a
2436 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2437 type.
2438
2439 The canonical type of a COMPONENT_REF is the type of the field being
2440 referenced--unless the field is a bit-field which can be read directly
2441 in a smaller mode, in which case the canonical type is the
2442 sign-appropriate type corresponding to that mode. */
2443
2444 static void
2445 canonicalize_component_ref (tree *expr_p)
2446 {
2447 tree expr = *expr_p;
2448 tree type;
2449
2450 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2451
2452 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2453 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2454 else
2455 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2456
2457 /* One could argue that all the handling below is unnecessary for
2458 the non-bitfield case, and that it should be a front-end error
2459 if type adjustment would be needed. */
2460 if (TREE_TYPE (expr) != type)
2461 {
2462 #ifdef ENABLE_TYPES_CHECKING
2463 tree old_type = TREE_TYPE (expr);
2464 #endif
2465 int type_quals;
2466
2467 /* We need to preserve qualifiers and propagate them from
2468 operand 0. */
2469 type_quals = TYPE_QUALS (type)
2470 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2471 if (TYPE_QUALS (type) != type_quals)
2472 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2473
2474 /* Set the type of the COMPONENT_REF to the underlying type. */
2475 TREE_TYPE (expr) = type;
2476
2477 #ifdef ENABLE_TYPES_CHECKING
2478 /* It is now a FE error if the conversion from the canonical
2479 type to the original expression type is not useless. */
2480 gcc_assert (useless_type_conversion_p (old_type, type));
2481 #endif
2482 }
2483 }
2484
2485 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2486 to foo, embed that change in the ADDR_EXPR by converting
2487 T array[U];
2488 (T *)&array
2489 ==>
2490 &array[L]
2491 where L is the lower bound. For simplicity, only do this for constant
2492 lower bound.
2493 The constraint is that the type of &array[L] is trivially convertible
2494 to T *. */
2495
2496 static void
2497 canonicalize_addr_expr (tree *expr_p)
2498 {
2499 tree expr = *expr_p;
2500 tree addr_expr = TREE_OPERAND (expr, 0);
2501 tree datype, ddatype, pddatype;
2502
2503 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2504 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2505 || TREE_CODE (addr_expr) != ADDR_EXPR)
2506 return;
2507
2508 /* The addr_expr type should be a pointer to an array. */
2509 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2510 if (TREE_CODE (datype) != ARRAY_TYPE)
2511 return;
2512
2513 /* The pointer to element type shall be trivially convertible to
2514 the expression pointer type. */
2515 ddatype = TREE_TYPE (datype);
2516 pddatype = build_pointer_type (ddatype);
2517 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2518 pddatype))
2519 return;
2520
2521 /* The lower bound and element sizes must be constant. */
2522 if (!TYPE_SIZE_UNIT (ddatype)
2523 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2524 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2525 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2526 return;
2527
2528 /* All checks succeeded. Build a new node to merge the cast. */
2529 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2530 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2531 NULL_TREE, NULL_TREE);
2532 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2533
2534 /* We may have stripped a required restrict qualifier above. */
2535 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2536 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2537 }
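
/* A sketch of the transformation above:

     int a[10];
     ... (int *) &a ...

   becomes ... &a[0] ..., exposing the array element access to later
   folding. For an array type with a nonzero constant lower bound L,
   the result is &a[L] instead.  */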
2538
2539 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2540 underneath as appropriate. */
2541
2542 static enum gimplify_status
2543 gimplify_conversion (tree *expr_p)
2544 {
2545 location_t loc = EXPR_LOCATION (*expr_p);
2546 gcc_assert (CONVERT_EXPR_P (*expr_p));
2547
2548 /* Strip away all but the outermost conversion. */
2549 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2550
2551 /* And remove the outermost conversion if it's useless. */
2552 if (tree_ssa_useless_type_conversion (*expr_p))
2553 *expr_p = TREE_OPERAND (*expr_p, 0);
2554
2555 /* If we still have a conversion at the toplevel,
2556 then canonicalize some constructs. */
2557 if (CONVERT_EXPR_P (*expr_p))
2558 {
2559 tree sub = TREE_OPERAND (*expr_p, 0);
2560
2561 /* If a NOP conversion is changing the type of a COMPONENT_REF
2562 expression, then canonicalize its type now in order to expose more
2563 redundant conversions. */
2564 if (TREE_CODE (sub) == COMPONENT_REF)
2565 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2566
2567 /* If a NOP conversion is changing a pointer to array of foo
2568 to a pointer to foo, embed that change in the ADDR_EXPR. */
2569 else if (TREE_CODE (sub) == ADDR_EXPR)
2570 canonicalize_addr_expr (expr_p);
2571 }
2572
2573 /* If we have a conversion to a non-register type force the
2574 use of a VIEW_CONVERT_EXPR instead. */
2575 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2576 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2577 TREE_OPERAND (*expr_p, 0));
2578
2579 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2580 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2581 TREE_SET_CODE (*expr_p, NOP_EXPR);
2582
2583 return GS_OK;
2584 }
2585
2586 /* Nonlocal VLAs seen in the current function. */
2587 static hash_set<tree> *nonlocal_vlas;
2588
2589 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
2590 static tree nonlocal_vla_vars;
2591
2592 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2593 DECL_VALUE_EXPR, and it's worth re-examining things. */
2594
2595 static enum gimplify_status
2596 gimplify_var_or_parm_decl (tree *expr_p)
2597 {
2598 tree decl = *expr_p;
2599
2600 /* ??? If this is a local variable, and it has not been seen in any
2601 outer BIND_EXPR, then it's probably the result of a duplicate
2602 declaration, for which we've already issued an error. It would
2603 be really nice if the front end wouldn't leak these at all.
2604 Currently the only known culprit is C++ destructors, as seen
2605 in g++.old-deja/g++.jason/binding.C. */
2606 if (VAR_P (decl)
2607 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2608 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2609 && decl_function_context (decl) == current_function_decl)
2610 {
2611 gcc_assert (seen_error ());
2612 return GS_ERROR;
2613 }
2614
2615 /* When within an OMP context, notice uses of variables. */
2616 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2617 return GS_ALL_DONE;
2618
2619 /* If the decl is an alias for another expression, substitute it now. */
2620 if (DECL_HAS_VALUE_EXPR_P (decl))
2621 {
2622 tree value_expr = DECL_VALUE_EXPR (decl);
2623
2624 /* For referenced nonlocal VLAs add a decl for debugging purposes
2625 to the current function. */
2626 if (VAR_P (decl)
2627 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2628 && nonlocal_vlas != NULL
2629 && TREE_CODE (value_expr) == INDIRECT_REF
2630 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
2631 && decl_function_context (decl) != current_function_decl)
2632 {
2633 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
2634 while (ctx
2635 && (ctx->region_type == ORT_WORKSHARE
2636 || ctx->region_type == ORT_SIMD
2637 || ctx->region_type == ORT_ACC))
2638 ctx = ctx->outer_context;
2639 if (!ctx && !nonlocal_vlas->add (decl))
2640 {
2641 tree copy = copy_node (decl);
2642
2643 lang_hooks.dup_lang_specific_decl (copy);
2644 SET_DECL_RTL (copy, 0);
2645 TREE_USED (copy) = 1;
2646 DECL_CHAIN (copy) = nonlocal_vla_vars;
2647 nonlocal_vla_vars = copy;
2648 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
2649 DECL_HAS_VALUE_EXPR_P (copy) = 1;
2650 }
2651 }
2652
2653 *expr_p = unshare_expr (value_expr);
2654 return GS_OK;
2655 }
2656
2657 return GS_ALL_DONE;
2658 }
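
/* Sketch: for a nested function referencing a VLA of its parent,

     void f (int n)
     {
       char buf[n];
       void g (void) { buf[0] = 0; }
       ...
     }

   BUF as seen in G has a DECL_VALUE_EXPR that is an INDIRECT_REF of the
   replacement pointer variable (shown here, hypothetically, as *buf.N);
   that expression is substituted above, and the copy pushed onto
   nonlocal_vla_vars exists only so that debug info for G still shows a
   variable named "buf".  */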
2659
2660 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2661
2662 static void
2663 recalculate_side_effects (tree t)
2664 {
2665 enum tree_code code = TREE_CODE (t);
2666 int len = TREE_OPERAND_LENGTH (t);
2667 int i;
2668
2669 switch (TREE_CODE_CLASS (code))
2670 {
2671 case tcc_expression:
2672 switch (code)
2673 {
2674 case INIT_EXPR:
2675 case MODIFY_EXPR:
2676 case VA_ARG_EXPR:
2677 case PREDECREMENT_EXPR:
2678 case PREINCREMENT_EXPR:
2679 case POSTDECREMENT_EXPR:
2680 case POSTINCREMENT_EXPR:
2681 /* All of these have side-effects, no matter what their
2682 operands are. */
2683 return;
2684
2685 default:
2686 break;
2687 }
2688 /* Fall through. */
2689
2690 case tcc_comparison: /* a comparison expression */
2691 case tcc_unary: /* a unary arithmetic expression */
2692 case tcc_binary: /* a binary arithmetic expression */
2693 case tcc_reference: /* a reference */
2694 case tcc_vl_exp: /* a function call */
2695 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2696 for (i = 0; i < len; ++i)
2697 {
2698 tree op = TREE_OPERAND (t, i);
2699 if (op && TREE_SIDE_EFFECTS (op))
2700 TREE_SIDE_EFFECTS (t) = 1;
2701 }
2702 break;
2703
2704 case tcc_constant:
2705 /* No side-effects. */
2706 return;
2707
2708 default:
2709 gcc_unreachable ();
2710 }
2711 }
2712
2713 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2714 node *EXPR_P.
2715
2716 compound_lval
2717 : min_lval '[' val ']'
2718 | min_lval '.' ID
2719 | compound_lval '[' val ']'
2720 | compound_lval '.' ID
2721
2722 This is not part of the original SIMPLE definition, which separates
2723 array and member references, but it seems reasonable to handle them
2724 together. Also, this way we don't run into problems with union
2725 aliasing; gcc requires that for accesses through a union to alias, the
2726 union reference must be explicit, which was not always the case when we
2727 were splitting up array and member refs.
2728
2729 PRE_P points to the sequence where side effects that must happen before
2730 *EXPR_P should be stored.
2731
2732 POST_P points to the sequence where side effects that must happen after
2733 *EXPR_P should be stored. */
2734
2735 static enum gimplify_status
2736 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2737 fallback_t fallback)
2738 {
2739 tree *p;
2740 enum gimplify_status ret = GS_ALL_DONE, tret;
2741 int i;
2742 location_t loc = EXPR_LOCATION (*expr_p);
2743 tree expr = *expr_p;
2744
2745 /* Create a stack of the subexpressions so later we can walk them in
2746 order from inner to outer. */
2747 auto_vec<tree, 10> expr_stack;
2748
2749 /* We can handle anything that get_inner_reference can deal with. */
2750 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2751 {
2752 restart:
2753 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2754 if (TREE_CODE (*p) == INDIRECT_REF)
2755 *p = fold_indirect_ref_loc (loc, *p);
2756
2757 if (handled_component_p (*p))
2758 ;
2759 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2760 additional COMPONENT_REFs. */
2761 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2762 && gimplify_var_or_parm_decl (p) == GS_OK)
2763 goto restart;
2764 else
2765 break;
2766
2767 expr_stack.safe_push (*p);
2768 }
2769
2770 gcc_assert (expr_stack.length ());
2771
2772 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2773 walked through and P points to the innermost expression.
2774
2775 Java requires that we elaborate nodes in source order. That
2776 means we must gimplify the inner expression followed by each of
2777 the indices, in order. But we can't gimplify the inner
2778 expression until we deal with any variable bounds, sizes, or
2779 positions in order to deal with PLACEHOLDER_EXPRs.
2780
2781 So we do this in three steps. First we deal with the annotations
2782 for any variables in the components, then we gimplify the base,
2783 then we gimplify any indices, from left to right. */
2784 for (i = expr_stack.length () - 1; i >= 0; i--)
2785 {
2786 tree t = expr_stack[i];
2787
2788 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2789 {
2790 /* Gimplify the low bound and element type size and put them into
2791 the ARRAY_REF. If these values are set, they have already been
2792 gimplified. */
2793 if (TREE_OPERAND (t, 2) == NULL_TREE)
2794 {
2795 tree low = unshare_expr (array_ref_low_bound (t));
2796 if (!is_gimple_min_invariant (low))
2797 {
2798 TREE_OPERAND (t, 2) = low;
2799 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2800 post_p, is_gimple_reg,
2801 fb_rvalue);
2802 ret = MIN (ret, tret);
2803 }
2804 }
2805 else
2806 {
2807 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2808 is_gimple_reg, fb_rvalue);
2809 ret = MIN (ret, tret);
2810 }
2811
2812 if (TREE_OPERAND (t, 3) == NULL_TREE)
2813 {
2814 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2815 tree elmt_size = unshare_expr (array_ref_element_size (t));
2816 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2817
2818 /* Divide the element size by the alignment of the element
2819 type (above). */
2820 elmt_size
2821 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2822
2823 if (!is_gimple_min_invariant (elmt_size))
2824 {
2825 TREE_OPERAND (t, 3) = elmt_size;
2826 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2827 post_p, is_gimple_reg,
2828 fb_rvalue);
2829 ret = MIN (ret, tret);
2830 }
2831 }
2832 else
2833 {
2834 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2835 is_gimple_reg, fb_rvalue);
2836 ret = MIN (ret, tret);
2837 }
2838 }
2839 else if (TREE_CODE (t) == COMPONENT_REF)
2840 {
2841 /* Set the field offset into T and gimplify it. */
2842 if (TREE_OPERAND (t, 2) == NULL_TREE)
2843 {
2844 tree offset = unshare_expr (component_ref_field_offset (t));
2845 tree field = TREE_OPERAND (t, 1);
2846 tree factor
2847 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2848
2849 /* Divide the offset by its alignment. */
2850 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2851
2852 if (!is_gimple_min_invariant (offset))
2853 {
2854 TREE_OPERAND (t, 2) = offset;
2855 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2856 post_p, is_gimple_reg,
2857 fb_rvalue);
2858 ret = MIN (ret, tret);
2859 }
2860 }
2861 else
2862 {
2863 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2864 is_gimple_reg, fb_rvalue);
2865 ret = MIN (ret, tret);
2866 }
2867 }
2868 }
2869
2870 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2871 so as to match the min_lval predicate. Failure to do so may result
2872 in the creation of large aggregate temporaries. */
2873 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2874 fallback | fb_lvalue);
2875 ret = MIN (ret, tret);
2876
2877 /* And finally, the indices and operands of ARRAY_REF. During this
2878 loop we also remove any useless conversions. */
2879 for (; expr_stack.length () > 0; )
2880 {
2881 tree t = expr_stack.pop ();
2882
2883 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2884 {
2885 /* Gimplify the dimension. */
2886 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2887 {
2888 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2889 is_gimple_val, fb_rvalue);
2890 ret = MIN (ret, tret);
2891 }
2892 }
2893
2894 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2895
2896 /* The innermost expression P may have originally had
2897 TREE_SIDE_EFFECTS set which would have caused all the outer
2898 expressions in *EXPR_P leading to P to also have had
2899 TREE_SIDE_EFFECTS set. */
2900 recalculate_side_effects (t);
2901 }
2902
2903 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2904 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2905 {
2906 canonicalize_component_ref (expr_p);
2907 }
2908
2909 expr_stack.release ();
2910
2911 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2912
2913 return ret;
2914 }
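
/* For instance (a rough sketch): gimplifying the rvalue

     a[i].f[j]

   with variable I and J proceeds base first, then indices left to
   right, yielding something like

     i.0 = i;
     j.1 = j;
     D.2 = a[i.0].f[j.1];

   after any variable bounds, sizes or offsets have been gimplified
   into the extra operands of the ARRAY_REFs and COMPONENT_REFs in
   step one.  */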
2915
2916 /* Gimplify the self modifying expression pointed to by EXPR_P
2917 (++, --, +=, -=).
2918
2919 PRE_P points to the list where side effects that must happen before
2920 *EXPR_P should be stored.
2921
2922 POST_P points to the list where side effects that must happen after
2923 *EXPR_P should be stored.
2924
2925 WANT_VALUE is nonzero iff we want to use the value of this expression
2926 in another expression.
2927
2928 ARITH_TYPE is the type the computation should be performed in. */
2929
2930 enum gimplify_status
2931 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2932 bool want_value, tree arith_type)
2933 {
2934 enum tree_code code;
2935 tree lhs, lvalue, rhs, t1;
2936 gimple_seq post = NULL, *orig_post_p = post_p;
2937 bool postfix;
2938 enum tree_code arith_code;
2939 enum gimplify_status ret;
2940 location_t loc = EXPR_LOCATION (*expr_p);
2941
2942 code = TREE_CODE (*expr_p);
2943
2944 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2945 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2946
2947 /* Prefix or postfix? */
2948 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2949 /* Faster to treat as prefix if result is not used. */
2950 postfix = want_value;
2951 else
2952 postfix = false;
2953
2954 /* For postfix, make sure the inner expression's post side effects
2955 are executed after side effects from this expression. */
2956 if (postfix)
2957 post_p = &post;
2958
2959 /* Add or subtract? */
2960 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2961 arith_code = PLUS_EXPR;
2962 else
2963 arith_code = MINUS_EXPR;
2964
2965 /* Gimplify the LHS into a GIMPLE lvalue. */
2966 lvalue = TREE_OPERAND (*expr_p, 0);
2967 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2968 if (ret == GS_ERROR)
2969 return ret;
2970
2971 /* Extract the operands to the arithmetic operation. */
2972 lhs = lvalue;
2973 rhs = TREE_OPERAND (*expr_p, 1);
2974
2975 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2976 that as the result value and in the postqueue operation. */
2977 if (postfix)
2978 {
2979 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2980 if (ret == GS_ERROR)
2981 return ret;
2982
2983 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2984 }
2985
2986 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
2987 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2988 {
2989 rhs = convert_to_ptrofftype_loc (loc, rhs);
2990 if (arith_code == MINUS_EXPR)
2991 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2992 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2993 }
2994 else
2995 t1 = fold_convert (TREE_TYPE (*expr_p),
2996 fold_build2 (arith_code, arith_type,
2997 fold_convert (arith_type, lhs),
2998 fold_convert (arith_type, rhs)));
2999
3000 if (postfix)
3001 {
3002 gimplify_assign (lvalue, t1, pre_p);
3003 gimplify_seq_add_seq (orig_post_p, post);
3004 *expr_p = lhs;
3005 return GS_ALL_DONE;
3006 }
3007 else
3008 {
3009 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3010 return GS_OK;
3011 }
3012 }
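
/* A sketch of the postfix case when the value is wanted:

     y = x++;

   gimplifies along the lines of

     x.0 = x;
     x = x.0 + 1;
     y = x.0;

   i.e. the old value is saved in a temporary, the updating store is
   emitted to PRE_P, and the temporary replaces the expression.  */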
3013
3014 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3015
3016 static void
3017 maybe_with_size_expr (tree *expr_p)
3018 {
3019 tree expr = *expr_p;
3020 tree type = TREE_TYPE (expr);
3021 tree size;
3022
3023 /* If we've already wrapped this or the type is error_mark_node, we can't do
3024 anything. */
3025 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3026 || type == error_mark_node)
3027 return;
3028
3029 /* If the size isn't known or is a constant, we have nothing to do. */
3030 size = TYPE_SIZE_UNIT (type);
3031 if (!size || TREE_CODE (size) == INTEGER_CST)
3032 return;
3033
3034 /* Otherwise, make a WITH_SIZE_EXPR. */
3035 size = unshare_expr (size);
3036 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3037 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3038 }
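
/* E.g. (a sketch, using a GNU C variably modified type):

     void f (int n)
     {
       struct s { char c[n]; } a, b;
       a = b;
     }

   The RHS of the assignment is wrapped as WITH_SIZE_EXPR <b, SIZE> so
   that later passes know how many bytes the variable-sized copy
   moves.  */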
3039
3040 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3041 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3042 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3043 gimplified to an SSA name. */
3044
3045 enum gimplify_status
3046 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3047 bool allow_ssa)
3048 {
3049 bool (*test) (tree);
3050 fallback_t fb;
3051
3052 /* In general, we allow lvalues for function arguments to avoid
3053 extra overhead of copying large aggregates out of even larger
3054 aggregates into temporaries only to copy the temporaries to
3055 the argument list. Make optimizers happy by pulling out to
3056 temporaries those types that fit in registers. */
3057 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3058 test = is_gimple_val, fb = fb_rvalue;
3059 else
3060 {
3061 test = is_gimple_lvalue, fb = fb_either;
3062 /* Also strip a TARGET_EXPR that would force an extra copy. */
3063 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3064 {
3065 tree init = TARGET_EXPR_INITIAL (*arg_p);
3066 if (init
3067 && !VOID_TYPE_P (TREE_TYPE (init)))
3068 *arg_p = init;
3069 }
3070 }
3071
3072 /* If this is a variable sized type, we must remember the size. */
3073 maybe_with_size_expr (arg_p);
3074
3075 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3076 /* Make sure arguments have the same location as the function call
3077 itself. */
3078 protected_set_expr_location (*arg_p, call_location);
3079
3080 /* There is a sequence point before a function call. Side effects in
3081 the argument list must occur before the actual call. So, when
3082 gimplifying arguments, force gimplify_expr to use an internal
3083 post queue which is then appended to the end of PRE_P. */
3084 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3085 }
3086
3087 /* Don't fold inside offloading or taskreg regions: it can break code by
3088 adding decl references that weren't in the source. We'll do it during
3089 the omplower pass instead. */
3090
3091 static bool
3092 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3093 {
3094 struct gimplify_omp_ctx *ctx;
3095 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3096 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3097 return false;
3098 return fold_stmt (gsi);
3099 }
3100
3101 /* Add a gimple call to __builtin_cilk_detach to GIMPLE sequence PRE_P,
3102 with the pointer to the proper cilk frame. */
3103 static void
3104 gimplify_cilk_detach (gimple_seq *pre_p)
3105 {
3106 tree frame = cfun->cilk_frame_decl;
3107 tree ptrf = build1 (ADDR_EXPR, cilk_frame_ptr_type_decl,
3108 frame);
3109 gcall *detach = gimple_build_call (cilk_detach_fndecl, 1,
3110 ptrf);
3111 gimplify_seq_add_stmt (pre_p, detach);
3112 }
3113
3114 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3115 WANT_VALUE is true if the result of the call is desired. */
3116
3117 static enum gimplify_status
3118 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3119 {
3120 tree fndecl, parms, p, fnptrtype;
3121 enum gimplify_status ret;
3122 int i, nargs;
3123 gcall *call;
3124 bool builtin_va_start_p = false;
3125 location_t loc = EXPR_LOCATION (*expr_p);
3126
3127 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3128
3129 /* For reliable diagnostics during inlining, it is necessary that
3130 every call_expr be annotated with file and line. */
3131 if (! EXPR_HAS_LOCATION (*expr_p))
3132 SET_EXPR_LOCATION (*expr_p, input_location);
3133
3134 /* Gimplify internal functions created in the FEs. */
3135 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3136 {
3137 if (want_value)
3138 return GS_ALL_DONE;
3139
3140 nargs = call_expr_nargs (*expr_p);
3141 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3142 auto_vec<tree> vargs (nargs);
3143
3144 for (i = 0; i < nargs; i++)
3145 {
3146 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3147 EXPR_LOCATION (*expr_p));
3148 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3149 }
3150
3151 if (EXPR_CILK_SPAWN (*expr_p))
3152 gimplify_cilk_detach (pre_p);
3153 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3154 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3155 gimplify_seq_add_stmt (pre_p, call);
3156 return GS_ALL_DONE;
3157 }
3158
3159 /* This may be a call to a builtin function.
3160
3161 Builtin function calls may be transformed into different
3162 (and more efficient) builtin function calls under certain
3163 circumstances. Unfortunately, gimplification can muck things
3164 up enough that the builtin expanders are not aware that certain
3165 transformations are still valid.
3166
3167 So we attempt transformation/gimplification of the call before
3168 we gimplify the CALL_EXPR. At this time we do not manage to
3169 transform all calls in the same manner as the expanders do, but
3170 we do transform most of them. */
3171 fndecl = get_callee_fndecl (*expr_p);
3172 if (fndecl
3173 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3174 switch (DECL_FUNCTION_CODE (fndecl))
3175 {
3176 CASE_BUILT_IN_ALLOCA:
3177 /* If the call has been built for a variable-sized object, then we
3178 want to restore the stack level when the enclosing BIND_EXPR is
3179 exited to reclaim the allocated space; otherwise, we precisely
3180 need to do the opposite and preserve the latest stack level. */
3181 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3182 gimplify_ctxp->save_stack = true;
3183 else
3184 gimplify_ctxp->keep_stack = true;
3185 break;
3186
3187 case BUILT_IN_VA_START:
3188 {
3189 builtin_va_start_p = true;
3190 if (call_expr_nargs (*expr_p) < 2)
3191 {
3192 error ("too few arguments to function %<va_start%>");
3193 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3194 return GS_OK;
3195 }
3196
3197 if (fold_builtin_next_arg (*expr_p, true))
3198 {
3199 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3200 return GS_OK;
3201 }
3202 break;
3203 }
3204
3205 default:
3206 ;
3207 }
3208 if (fndecl && DECL_BUILT_IN (fndecl))
3209 {
3210 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3211 if (new_tree && new_tree != *expr_p)
3212 {
3213 /* There was a transformation of this call which computes the
3214 same value, but in a more efficient way. Return and try
3215 again. */
3216 *expr_p = new_tree;
3217 return GS_OK;
3218 }
3219 }
3220
3221 /* Remember the original function pointer type. */
3222 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3223
3224 /* There is a sequence point before the call, so any side effects in
3225 the calling expression must occur before the actual call. Force
3226 gimplify_expr to use an internal post queue. */
3227 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3228 is_gimple_call_addr, fb_rvalue);
3229
3230 nargs = call_expr_nargs (*expr_p);
3231
3232 /* Get argument types for verification. */
3233 fndecl = get_callee_fndecl (*expr_p);
3234 parms = NULL_TREE;
3235 if (fndecl)
3236 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3237 else
3238 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3239
3240 if (fndecl && DECL_ARGUMENTS (fndecl))
3241 p = DECL_ARGUMENTS (fndecl);
3242 else if (parms)
3243 p = parms;
3244 else
3245 p = NULL_TREE;
3246 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3247 ;
3248
3249 /* If the last argument is __builtin_va_arg_pack () and it is not
3250 passed as a named argument, decrease the number of CALL_EXPR
3251 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3252 if (!p
3253 && i < nargs
3254 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3255 {
3256 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3257 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3258
3259 if (last_arg_fndecl
3260 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3261 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3262 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3263 {
3264 tree call = *expr_p;
3265
3266 --nargs;
3267 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3268 CALL_EXPR_FN (call),
3269 nargs, CALL_EXPR_ARGP (call));
3270
3271 /* Copy all CALL_EXPR flags, location and block, except
3272 CALL_EXPR_VA_ARG_PACK flag. */
3273 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3274 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3275 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3276 = CALL_EXPR_RETURN_SLOT_OPT (call);
3277 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3278 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3279
3280 /* Set CALL_EXPR_VA_ARG_PACK. */
3281 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3282 }
3283 }
3284
3285 /* If the call returns twice then after building the CFG the call
3286 argument computations will no longer dominate the call because
3287 we add an abnormal incoming edge to the call. So do not use SSA
3288 vars there. */
3289 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3290
3291 /* Gimplify the function arguments. */
3292 if (nargs > 0)
3293 {
3294 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3295 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3296 PUSH_ARGS_REVERSED ? i-- : i++)
3297 {
3298 enum gimplify_status t;
3299
3300 /* Avoid gimplifying the second argument to va_start, which needs to
3301 be the plain PARM_DECL. */
3302 if ((i != 1) || !builtin_va_start_p)
3303 {
3304 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3305 EXPR_LOCATION (*expr_p), ! returns_twice);
3306
3307 if (t == GS_ERROR)
3308 ret = GS_ERROR;
3309 }
3310 }
3311 }
3312
3313 /* Gimplify the static chain. */
3314 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3315 {
3316 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3317 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3318 else
3319 {
3320 enum gimplify_status t;
3321 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3322 EXPR_LOCATION (*expr_p), ! returns_twice);
3323 if (t == GS_ERROR)
3324 ret = GS_ERROR;
3325 }
3326 }
3327
3328 /* Verify the function result. */
3329 if (want_value && fndecl
3330 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3331 {
3332 error_at (loc, "using result of function returning %<void%>");
3333 ret = GS_ERROR;
3334 }
3335
3336 /* Try this again in case gimplification exposed something. */
3337 if (ret != GS_ERROR)
3338 {
3339 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3340
3341 if (new_tree && new_tree != *expr_p)
3342 {
3343 /* There was a transformation of this call which computes the
3344 same value, but in a more efficient way. Return and try
3345 again. */
3346 *expr_p = new_tree;
3347 return GS_OK;
3348 }
3349 }
3350 else
3351 {
3352 *expr_p = error_mark_node;
3353 return GS_ERROR;
3354 }
3355
3356 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
3357 call expression. This allows us to eliminate redundant or useless
3358 calls to "const" functions. */
3359 if (TREE_CODE (*expr_p) == CALL_EXPR)
3360 {
3361 int flags = call_expr_flags (*expr_p);
3362 if (flags & (ECF_CONST | ECF_PURE)
3363 /* An infinite loop is considered a side effect. */
3364 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3365 TREE_SIDE_EFFECTS (*expr_p) = 0;
3366 }
3367
3368 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3369 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3370 form and delegate the creation of a GIMPLE_CALL to
3371 gimplify_modify_expr. This is always possible because when
3372 WANT_VALUE is true, the caller wants the result of this call into
3373 a temporary, which means that we will emit an INIT_EXPR in
3374 internal_get_tmp_var which will then be handled by
3375 gimplify_modify_expr. */
3376 if (!want_value)
3377 {
3378 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3379 have to do is replicate it as a GIMPLE_CALL tuple. */
3380 gimple_stmt_iterator gsi;
3381 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3382 notice_special_calls (call);
3383 if (EXPR_CILK_SPAWN (*expr_p))
3384 gimplify_cilk_detach (pre_p);
3385 gimplify_seq_add_stmt (pre_p, call);
3386 gsi = gsi_last (*pre_p);
3387 maybe_fold_stmt (&gsi);
3388 *expr_p = NULL_TREE;
3389 }
3390 else
3391 /* Remember the original function type. */
3392 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3393 CALL_EXPR_FN (*expr_p));
3394
3395 return ret;
3396 }
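
/* Putting the above together (an illustrative sketch): for

     x = f (g (), h ());

   gimplification of the CALL_EXPR leaves

     D.1 = g ();
     D.2 = h ();
     x = f (D.1, D.2);

   with the argument calls emitted to PRE_P (there is a sequence point
   before the call) and, since WANT_VALUE is true there, the final
   GIMPLE_CALL created later by gimplify_modify_expr.  */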
3397
3398 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3399 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3400
3401 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3402 condition is true or false, respectively. If null, we should generate
3403 our own to skip over the evaluation of this specific expression.
3404
3405 LOCUS is the source location of the COND_EXPR.
3406
3407 This function is the tree equivalent of do_jump.
3408
3409 shortcut_cond_r should only be called by shortcut_cond_expr. */
3410
3411 static tree
3412 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3413 location_t locus)
3414 {
3415 tree local_label = NULL_TREE;
3416 tree t, expr = NULL;
3417
3418 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3419 retain the shortcut semantics. Just insert the gotos here;
3420 shortcut_cond_expr will append the real blocks later. */
3421 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3422 {
3423 location_t new_locus;
3424
3425 /* Turn if (a && b) into
3426
3427 if (a); else goto no;
3428 if (b) goto yes; else goto no;
3429 (no:) */
3430
3431 if (false_label_p == NULL)
3432 false_label_p = &local_label;
3433
3434 /* Keep the original source location on the first 'if'. */
3435 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3436 append_to_statement_list (t, &expr);
3437
3438 /* Set the source location of the && on the second 'if'. */
3439 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3440 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3441 new_locus);
3442 append_to_statement_list (t, &expr);
3443 }
3444 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3445 {
3446 location_t new_locus;
3447
3448 /* Turn if (a || b) into
3449
3450 if (a) goto yes;
3451 if (b) goto yes; else goto no;
3452 (yes:) */
3453
3454 if (true_label_p == NULL)
3455 true_label_p = &local_label;
3456
3457 /* Keep the original source location on the first 'if'. */
3458 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3459 append_to_statement_list (t, &expr);
3460
3461 /* Set the source location of the || on the second 'if'. */
3462 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3463 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3464 new_locus);
3465 append_to_statement_list (t, &expr);
3466 }
3467 else if (TREE_CODE (pred) == COND_EXPR
3468 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3469 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3470 {
3471 location_t new_locus;
3472
3473 /* As long as we're messing with gotos, turn if (a ? b : c) into
3474 if (a)
3475 if (b) goto yes; else goto no;
3476 else
3477 if (c) goto yes; else goto no;
3478
3479 Don't do this if one of the arms has void type, which can happen
3480 in C++ when the arm is throw. */
3481
3482 /* Keep the original source location on the first 'if'. Set the source
3483 location of the ? on the second 'if'. */
3484 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
3485 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3486 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3487 false_label_p, locus),
3488 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3489 false_label_p, new_locus));
3490 }
3491 else
3492 {
3493 expr = build3 (COND_EXPR, void_type_node, pred,
3494 build_and_jump (true_label_p),
3495 build_and_jump (false_label_p));
3496 SET_EXPR_LOCATION (expr, locus);
3497 }
3498
3499 if (local_label)
3500 {
3501 t = build1 (LABEL_EXPR, void_type_node, local_label);
3502 append_to_statement_list (t, &expr);
3503 }
3504
3505 return expr;
3506 }
3507
3508 /* Given a conditional expression EXPR with short-circuit boolean
3509 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3510 predicate apart into the equivalent sequence of conditionals. */
3511
3512 static tree
3513 shortcut_cond_expr (tree expr)
3514 {
3515 tree pred = TREE_OPERAND (expr, 0);
3516 tree then_ = TREE_OPERAND (expr, 1);
3517 tree else_ = TREE_OPERAND (expr, 2);
3518 tree true_label, false_label, end_label, t;
3519 tree *true_label_p;
3520 tree *false_label_p;
3521 bool emit_end, emit_false, jump_over_else;
3522 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3523 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3524
3525 /* First do simple transformations. */
3526 if (!else_se)
3527 {
3528 /* If there is no 'else', turn
3529 if (a && b) then c
3530 into
3531 if (a) if (b) then c. */
3532 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3533 {
3534 /* Keep the original source location on the first 'if'. */
3535 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3536 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3537 /* Set the source location of the && on the second 'if'. */
3538 if (EXPR_HAS_LOCATION (pred))
3539 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3540 then_ = shortcut_cond_expr (expr);
3541 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3542 pred = TREE_OPERAND (pred, 0);
3543 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3544 SET_EXPR_LOCATION (expr, locus);
3545 }
3546 }
3547
3548 if (!then_se)
3549 {
3550 /* If there is no 'then', turn
3551 if (a || b); else d
3552 into
3553 if (a); else if (b); else d. */
3554 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3555 {
3556 /* Keep the original source location on the first 'if'. */
3557 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3558 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3559 /* Set the source location of the || on the second 'if'. */
3560 if (EXPR_HAS_LOCATION (pred))
3561 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
3562 else_ = shortcut_cond_expr (expr);
3563 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3564 pred = TREE_OPERAND (pred, 0);
3565 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3566 SET_EXPR_LOCATION (expr, locus);
3567 }
3568 }
3569
3570 /* If we're done, great. */
3571 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3572 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3573 return expr;
3574
3575 /* Otherwise we need to mess with gotos. Change
3576 if (a) c; else d;
3577 to
3578 if (a); else goto no;
3579 c; goto end;
3580 no: d; end:
3581 and recursively gimplify the condition. */
3582
3583 true_label = false_label = end_label = NULL_TREE;
3584
3585 /* If our arms just jump somewhere, hijack those labels so we don't
3586 generate jumps to jumps. */
3587
3588 if (then_
3589 && TREE_CODE (then_) == GOTO_EXPR
3590 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
3591 {
3592 true_label = GOTO_DESTINATION (then_);
3593 then_ = NULL;
3594 then_se = false;
3595 }
3596
3597 if (else_
3598 && TREE_CODE (else_) == GOTO_EXPR
3599 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
3600 {
3601 false_label = GOTO_DESTINATION (else_);
3602 else_ = NULL;
3603 else_se = false;
3604 }
3605
3606 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3607 if (true_label)
3608 true_label_p = &true_label;
3609 else
3610 true_label_p = NULL;
3611
3612 /* The 'else' branch also needs a label if it contains interesting code. */
3613 if (false_label || else_se)
3614 false_label_p = &false_label;
3615 else
3616 false_label_p = NULL;
3617
3618 /* If there was nothing else in our arms, just forward the label(s). */
3619 if (!then_se && !else_se)
3620 return shortcut_cond_r (pred, true_label_p, false_label_p,
3621 EXPR_LOC_OR_LOC (expr, input_location));
3622
3623 /* If our last subexpression already has a terminal label, reuse it. */
3624 if (else_se)
3625 t = expr_last (else_);
3626 else if (then_se)
3627 t = expr_last (then_);
3628 else
3629 t = NULL;
3630 if (t && TREE_CODE (t) == LABEL_EXPR)
3631 end_label = LABEL_EXPR_LABEL (t);
3632
3633 /* If we don't care about jumping to the 'else' branch, jump to the end
3634 if the condition is false. */
3635 if (!false_label_p)
3636 false_label_p = &end_label;
3637
3638 /* We only want to emit these labels if we aren't hijacking them. */
3639 emit_end = (end_label == NULL_TREE);
3640 emit_false = (false_label == NULL_TREE);
3641
3642 /* We only emit the jump over the else clause if we have to--if the
3643 then clause may fall through. Otherwise we can wind up with a
3644 useless jump and a useless label at the end of gimplified code,
3645 which will cause us to think that this conditional as a whole
3646 falls through even if it doesn't. If we then inline a function
3647 which ends with such a condition, that can cause us to issue an
3648 inappropriate warning about control reaching the end of a
3649 non-void function. */
3650 jump_over_else = block_may_fallthru (then_);
3651
3652 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3653 EXPR_LOC_OR_LOC (expr, input_location));
3654
3655 expr = NULL;
3656 append_to_statement_list (pred, &expr);
3657
3658 append_to_statement_list (then_, &expr);
3659 if (else_se)
3660 {
3661 if (jump_over_else)
3662 {
3663 tree last = expr_last (expr);
3664 t = build_and_jump (&end_label);
3665 if (EXPR_HAS_LOCATION (last))
3666 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
3667 append_to_statement_list (t, &expr);
3668 }
3669 if (emit_false)
3670 {
3671 t = build1 (LABEL_EXPR, void_type_node, false_label);
3672 append_to_statement_list (t, &expr);
3673 }
3674 append_to_statement_list (else_, &expr);
3675 }
3676 if (emit_end && end_label)
3677 {
3678 t = build1 (LABEL_EXPR, void_type_node, end_label);
3679 append_to_statement_list (t, &expr);
3680 }
3681
3682 return expr;
3683 }
3684
3685 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
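/* For instance, a condition of integer type such as 'a & 4' is wrapped in
   a conversion to boolean_type_node, so it effectively becomes
   '(a & 4) != 0' (a sketch; the exact tree depends on how fold_convert
   simplifies the conversion).  */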
3686
3687 tree
3688 gimple_boolify (tree expr)
3689 {
3690 tree type = TREE_TYPE (expr);
3691 location_t loc = EXPR_LOCATION (expr);
3692
3693 if (TREE_CODE (expr) == NE_EXPR
3694 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3695 && integer_zerop (TREE_OPERAND (expr, 1)))
3696 {
3697 tree call = TREE_OPERAND (expr, 0);
3698 tree fn = get_callee_fndecl (call);
3699
3700 /* For __builtin_expect ((long) (x), y) recurse into x as well
3701 if x is truth_value_p. */
3702 if (fn
3703 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
3704 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
3705 && call_expr_nargs (call) == 2)
3706 {
3707 tree arg = CALL_EXPR_ARG (call, 0);
3708 if (arg)
3709 {
3710 if (TREE_CODE (arg) == NOP_EXPR
3711 && TREE_TYPE (arg) == TREE_TYPE (call))
3712 arg = TREE_OPERAND (arg, 0);
3713 if (truth_value_p (TREE_CODE (arg)))
3714 {
3715 arg = gimple_boolify (arg);
3716 CALL_EXPR_ARG (call, 0)
3717 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3718 }
3719 }
3720 }
3721 }
3722
3723 switch (TREE_CODE (expr))
3724 {
3725 case TRUTH_AND_EXPR:
3726 case TRUTH_OR_EXPR:
3727 case TRUTH_XOR_EXPR:
3728 case TRUTH_ANDIF_EXPR:
3729 case TRUTH_ORIF_EXPR:
3730 /* Also boolify the arguments of truth exprs. */
3731 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3732 /* FALLTHRU */
3733
3734 case TRUTH_NOT_EXPR:
3735 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3736
3737 /* These expressions always produce boolean results. */
3738 if (TREE_CODE (type) != BOOLEAN_TYPE)
3739 TREE_TYPE (expr) = boolean_type_node;
3740 return expr;
3741
3742 case ANNOTATE_EXPR:
3743 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3744 {
3745 case annot_expr_ivdep_kind:
3746 case annot_expr_no_vector_kind:
3747 case annot_expr_vector_kind:
3748 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3749 if (TREE_CODE (type) != BOOLEAN_TYPE)
3750 TREE_TYPE (expr) = boolean_type_node;
3751 return expr;
3752 default:
3753 gcc_unreachable ();
3754 }
3755
3756 default:
3757 if (COMPARISON_CLASS_P (expr))
3758 {
3759 /* These expressions always produce boolean results. */
3760 if (TREE_CODE (type) != BOOLEAN_TYPE)
3761 TREE_TYPE (expr) = boolean_type_node;
3762 return expr;
3763 }
3764 /* Other expressions that get here must have boolean values, but
3765 might need to be converted to the appropriate mode. */
3766 if (TREE_CODE (type) == BOOLEAN_TYPE)
3767 return expr;
3768 return fold_convert_loc (loc, boolean_type_node, expr);
3769 }
3770 }
3771
3772 /* Given a conditional expression *EXPR_P without side effects, gimplify
3773 its operands. New statements are inserted to PRE_P. */
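/* A sketch of the effect: for 'x = (p && q) ? a : b;' with side-effect-free,
   trap-free arms, the condition is rewritten from the short-circuit
   TRUTH_ANDIF_EXPR to the unconditional TRUTH_AND_EXPR, so both p and q are
   evaluated and the whole expression can remain a single COND_EXPR on the
   right-hand side.  */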
3774
3775 static enum gimplify_status
3776 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3777 {
3778 tree expr = *expr_p, cond;
3779 enum gimplify_status ret, tret;
3780 enum tree_code code;
3781
3782 cond = gimple_boolify (COND_EXPR_COND (expr));
3783
3784 /* We need to handle && and || specially, as their gimplification
3785 creates a pure COND_EXPR, which would otherwise lead to an infinite cycle. */
3786 code = TREE_CODE (cond);
3787 if (code == TRUTH_ANDIF_EXPR)
3788 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3789 else if (code == TRUTH_ORIF_EXPR)
3790 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3791 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3792 COND_EXPR_COND (*expr_p) = cond;
3793
3794 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3795 is_gimple_val, fb_rvalue);
3796 ret = MIN (ret, tret);
3797 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3798 is_gimple_val, fb_rvalue);
3799
3800 return MIN (ret, tret);
3801 }
3802
3803 /* Return true if evaluating EXPR could trap.
3804 EXPR is GENERIC, while tree_could_trap_p can be called
3805 only on GIMPLE. */
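/* E.g. 'b / c' might trap on division by zero and '*p' might trap on an
   invalid pointer, so an arm containing such a subexpression must not be
   evaluated unconditionally.  */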
3806
3807 static bool
3808 generic_expr_could_trap_p (tree expr)
3809 {
3810 unsigned i, n;
3811
3812 if (!expr || is_gimple_val (expr))
3813 return false;
3814
3815 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3816 return true;
3817
3818 n = TREE_OPERAND_LENGTH (expr);
3819 for (i = 0; i < n; i++)
3820 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3821 return true;
3822
3823 return false;
3824 }
3825
3826 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3827 into
3828
3829 if (p) if (p)
3830 t1 = a; a;
3831 else or else
3832 t1 = b; b;
3833 t1;
3834
3835 The second form is used when *EXPR_P is of type void.
3836
3837 PRE_P points to the list where side effects that must happen before
3838 *EXPR_P should be stored. */
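/* As an illustrative sketch, the first form

	x = p ? f () : g ();

   gimplifies to something like

	if (p) iftmp.0 = f (); else iftmp.0 = g ();
	x = iftmp.0;

   where iftmp.0 stands for the temporary created below (the exact name
   here is made up).  */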
3839
3840 static enum gimplify_status
3841 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3842 {
3843 tree expr = *expr_p;
3844 tree type = TREE_TYPE (expr);
3845 location_t loc = EXPR_LOCATION (expr);
3846 tree tmp, arm1, arm2;
3847 enum gimplify_status ret;
3848 tree label_true, label_false, label_cont;
3849 bool have_then_clause_p, have_else_clause_p;
3850 gcond *cond_stmt;
3851 enum tree_code pred_code;
3852 gimple_seq seq = NULL;
3853
3854 /* If this COND_EXPR has a value, copy the values into a temporary within
3855 the arms. */
3856 if (!VOID_TYPE_P (type))
3857 {
3858 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3859 tree result;
3860
3861 /* If either an rvalue is ok or we do not require an lvalue, create the
3862 temporary. But we cannot do that if the type is addressable. */
3863 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3864 && !TREE_ADDRESSABLE (type))
3865 {
3866 if (gimplify_ctxp->allow_rhs_cond_expr
3867 /* If either branch has side effects or could trap, it can't be
3868 evaluated unconditionally. */
3869 && !TREE_SIDE_EFFECTS (then_)
3870 && !generic_expr_could_trap_p (then_)
3871 && !TREE_SIDE_EFFECTS (else_)
3872 && !generic_expr_could_trap_p (else_))
3873 return gimplify_pure_cond_expr (expr_p, pre_p);
3874
3875 tmp = create_tmp_var (type, "iftmp");
3876 result = tmp;
3877 }
3878
3879 /* Otherwise, only create and copy references to the values. */
3880 else
3881 {
3882 type = build_pointer_type (type);
3883
3884 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3885 then_ = build_fold_addr_expr_loc (loc, then_);
3886
3887 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3888 else_ = build_fold_addr_expr_loc (loc, else_);
3889
3890 expr
3891 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3892
3893 tmp = create_tmp_var (type, "iftmp");
3894 result = build_simple_mem_ref_loc (loc, tmp);
3895 }
3896
3897 /* Build the new then clause, `tmp = then_;'. But don't build the
3898 assignment if the value is void; in C++ it can be void, as for a throw. */
3899 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3900 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3901
3902 /* Similarly, build the new else clause, `tmp = else_;'. */
3903 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3904 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3905
3906 TREE_TYPE (expr) = void_type_node;
3907 recalculate_side_effects (expr);
3908
3909 /* Move the COND_EXPR to the prequeue. */
3910 gimplify_stmt (&expr, pre_p);
3911
3912 *expr_p = result;
3913 return GS_ALL_DONE;
3914 }
3915
3916 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3917 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3918 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3919 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3920
3921 /* Make sure the condition has BOOLEAN_TYPE. */
3922 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3923
3924 /* Break apart && and || conditions. */
3925 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3926 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3927 {
3928 expr = shortcut_cond_expr (expr);
3929
3930 if (expr != *expr_p)
3931 {
3932 *expr_p = expr;
3933
3934 /* We can't rely on gimplify_expr to re-gimplify the expanded
3935 form properly, as cleanups might cause the target labels to be
3936 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3937 set up a conditional context. */
3938 gimple_push_condition ();
3939 gimplify_stmt (expr_p, &seq);
3940 gimple_pop_condition (pre_p);
3941 gimple_seq_add_seq (pre_p, seq);
3942
3943 return GS_ALL_DONE;
3944 }
3945 }
3946
3947 /* Now do the normal gimplification. */
3948
3949 /* Gimplify condition. */
3950 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3951 fb_rvalue);
3952 if (ret == GS_ERROR)
3953 return GS_ERROR;
3954 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3955
3956 gimple_push_condition ();
3957
3958 have_then_clause_p = have_else_clause_p = false;
3959 if (TREE_OPERAND (expr, 1) != NULL
3960 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3961 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3962 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3963 == current_function_decl)
3964 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3965 have different locations, otherwise we end up with incorrect
3966 location information on the branches. */
3967 && (optimize
3968 || !EXPR_HAS_LOCATION (expr)
3969 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3970 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3971 {
3972 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3973 have_then_clause_p = true;
3974 }
3975 else
3976 label_true = create_artificial_label (UNKNOWN_LOCATION);
3977 if (TREE_OPERAND (expr, 2) != NULL
3978 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3979 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3980 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3981 == current_function_decl)
3982 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3983 have different locations, otherwise we end up with incorrect
3984 location information on the branches. */
3985 && (optimize
3986 || !EXPR_HAS_LOCATION (expr)
3987 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3988 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3989 {
3990 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3991 have_else_clause_p = true;
3992 }
3993 else
3994 label_false = create_artificial_label (UNKNOWN_LOCATION);
3995
3996 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3997 &arm2);
3998 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
3999 label_false);
4000 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4001 gimplify_seq_add_stmt (&seq, cond_stmt);
4002 gimple_stmt_iterator gsi = gsi_last (seq);
4003 maybe_fold_stmt (&gsi);
4004
4005 label_cont = NULL_TREE;
4006 if (!have_then_clause_p)
4007 {
4008 /* For if (...) {} else { code; } put label_true after
4009 the else block. */
4010 if (TREE_OPERAND (expr, 1) == NULL_TREE
4011 && !have_else_clause_p
4012 && TREE_OPERAND (expr, 2) != NULL_TREE)
4013 label_cont = label_true;
4014 else
4015 {
4016 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4017 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4018 /* For if (...) { code; } else {} or
4019 if (...) { code; } else goto label; or
4020 if (...) { code; return; } else { ... }
4021 label_cont isn't needed. */
4022 if (!have_else_clause_p
4023 && TREE_OPERAND (expr, 2) != NULL_TREE
4024 && gimple_seq_may_fallthru (seq))
4025 {
4026 gimple *g;
4027 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4028
4029 g = gimple_build_goto (label_cont);
4030
4031 /* GIMPLE_COND's are very low level; they have embedded
4032 gotos. This particular embedded goto should not be marked
4033 with the location of the original COND_EXPR, as it would
4034 correspond to the COND_EXPR's condition, not the ELSE or the
4035 THEN arms. To avoid marking it with the wrong location, flag
4036 it as "no location". */
4037 gimple_set_do_not_emit_location (g);
4038
4039 gimplify_seq_add_stmt (&seq, g);
4040 }
4041 }
4042 }
4043 if (!have_else_clause_p)
4044 {
4045 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4046 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4047 }
4048 if (label_cont)
4049 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4050
4051 gimple_pop_condition (pre_p);
4052 gimple_seq_add_seq (pre_p, seq);
4053
4054 if (ret == GS_ERROR)
4055 ; /* Do nothing. */
4056 else if (have_then_clause_p || have_else_clause_p)
4057 ret = GS_ALL_DONE;
4058 else
4059 {
4060 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4061 expr = TREE_OPERAND (expr, 0);
4062 gimplify_stmt (&expr, pre_p);
4063 }
4064
4065 *expr_p = NULL;
4066 return ret;
4067 }
4068
4069 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4070 to be marked addressable.
4071
4072 We cannot rely on such an expression being directly markable if a temporary
4073 has been created by the gimplification. In this case, we create another
4074 temporary and initialize it with a copy, which will become a store after we
4075 mark it addressable. This can happen if the front-end passed us something
4076 that it could not mark addressable yet, like a Fortran pass-by-reference
4077 parameter (int) floatvar. */
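/* A sketch: if gimplification has already reduced the operand to a
   register temporary _1, we emit

	tmp = _1;

   with tmp a non-register variable, and mark tmp addressable instead
   (names illustrative).  */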
4078
4079 static void
4080 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4081 {
4082 while (handled_component_p (*expr_p))
4083 expr_p = &TREE_OPERAND (*expr_p, 0);
4084 if (is_gimple_reg (*expr_p))
4085 {
4086 /* Do not allow an SSA name as the temporary. */
4087 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4088 DECL_GIMPLE_REG_P (var) = 0;
4089 *expr_p = var;
4090 }
4091 }
4092
4093 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4094 a call to __builtin_memcpy. */
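/* A sketch of the transformation, for an aggregate assignment lowered
   this way:

	a = b;

   becomes

	__builtin_memcpy (&a, &b, SIZE);

   where SIZE is the size argument passed in by the caller.  */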
4095
4096 static enum gimplify_status
4097 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4098 gimple_seq *seq_p)
4099 {
4100 tree t, to, to_ptr, from, from_ptr;
4101 gcall *gs;
4102 location_t loc = EXPR_LOCATION (*expr_p);
4103
4104 to = TREE_OPERAND (*expr_p, 0);
4105 from = TREE_OPERAND (*expr_p, 1);
4106
4107 /* Mark the RHS addressable. Beware that it may not be possible to do so
4108 directly if a temporary has been created by the gimplification. */
4109 prepare_gimple_addressable (&from, seq_p);
4110
4111 mark_addressable (from);
4112 from_ptr = build_fold_addr_expr_loc (loc, from);
4113 gimplify_arg (&from_ptr, seq_p, loc);
4114
4115 mark_addressable (to);
4116 to_ptr = build_fold_addr_expr_loc (loc, to);
4117 gimplify_arg (&to_ptr, seq_p, loc);
4118
4119 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4120
4121 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4122
4123 if (want_value)
4124 {
4125 /* tmp = memcpy() */
4126 t = create_tmp_var (TREE_TYPE (to_ptr));
4127 gimple_call_set_lhs (gs, t);
4128 gimplify_seq_add_stmt (seq_p, gs);
4129
4130 *expr_p = build_simple_mem_ref (t);
4131 return GS_ALL_DONE;
4132 }
4133
4134 gimplify_seq_add_stmt (seq_p, gs);
4135 *expr_p = NULL;
4136 return GS_ALL_DONE;
4137 }
4138
4139 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4140 a call to __builtin_memset. In this case we know that the RHS is
4141 a CONSTRUCTOR with an empty element list. */
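/* A sketch of the transformation:

	s = (struct S){};

   i.e. an assignment from an empty CONSTRUCTOR, becomes

	__builtin_memset (&s, 0, SIZE);

   with SIZE supplied by the caller.  */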
4142
4143 static enum gimplify_status
4144 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4145 gimple_seq *seq_p)
4146 {
4147 tree t, from, to, to_ptr;
4148 gcall *gs;
4149 location_t loc = EXPR_LOCATION (*expr_p);
4150
4151 /* Assert our assumptions, to abort instead of producing wrong code
4152 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4153 not be immediately exposed. */
4154 from = TREE_OPERAND (*expr_p, 1);
4155 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4156 from = TREE_OPERAND (from, 0);
4157
4158 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4159 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4160
4161 /* Now proceed. */
4162 to = TREE_OPERAND (*expr_p, 0);
4163
4164 to_ptr = build_fold_addr_expr_loc (loc, to);
4165 gimplify_arg (&to_ptr, seq_p, loc);
4166 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4167
4168 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4169
4170 if (want_value)
4171 {
4172 /* tmp = memset() */
4173 t = create_tmp_var (TREE_TYPE (to_ptr));
4174 gimple_call_set_lhs (gs, t);
4175 gimplify_seq_add_stmt (seq_p, gs);
4176
4177 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4178 return GS_ALL_DONE;
4179 }
4180
4181 gimplify_seq_add_stmt (seq_p, gs);
4182 *expr_p = NULL;
4183 return GS_ALL_DONE;
4184 }
4185
4186 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4187 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4188 assignment. Return non-null if we detect a potential overlap. */
4189
4190 struct gimplify_init_ctor_preeval_data
4191 {
4192 /* The base decl of the lhs object. May be NULL, in which case we
4193 have to assume the lhs is indirect. */
4194 tree lhs_base_decl;
4195
4196 /* The alias set of the lhs object. */
4197 alias_set_type lhs_alias_set;
4198 };
4199
4200 static tree
4201 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4202 {
4203 struct gimplify_init_ctor_preeval_data *data
4204 = (struct gimplify_init_ctor_preeval_data *) xdata;
4205 tree t = *tp;
4206
4207 /* If we find the base object, obviously we have overlap. */
4208 if (data->lhs_base_decl == t)
4209 return t;
4210
4211 /* If the constructor component is indirect, determine if we have a
4212 potential overlap with the lhs. The only bits of information we
4213 have to go on at this point are addressability and alias sets. */
4214 if ((INDIRECT_REF_P (t)
4215 || TREE_CODE (t) == MEM_REF)
4216 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4217 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4218 return t;
4219
4220 /* If the constructor component is a call, determine if it can hide a
4221 potential overlap with the lhs through an INDIRECT_REF like above.
4222 ??? Ugh - this is completely broken. In fact this whole analysis
4223 doesn't look conservative. */
4224 if (TREE_CODE (t) == CALL_EXPR)
4225 {
4226 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4227
4228 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4229 if (POINTER_TYPE_P (TREE_VALUE (type))
4230 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4231 && alias_sets_conflict_p (data->lhs_alias_set,
4232 get_alias_set
4233 (TREE_TYPE (TREE_VALUE (type)))))
4234 return t;
4235 }
4236
4237 if (IS_TYPE_OR_DECL_P (t))
4238 *walk_subtrees = 0;
4239 return NULL;
4240 }
4241
4242 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4243 force values that overlap with the lhs (as described by *DATA)
4244 into temporaries. */
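/* For example (a sketch), in

	a = (struct S){ .x = a.y, .y = a.x };

   the reads of a.y and a.x overlap the lhs, so they are forced into
   temporaries before any field of 'a' is stored to; otherwise the first
   store could clobber a value still needed by a later one.  */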
4245
4246 static void
4247 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4248 struct gimplify_init_ctor_preeval_data *data)
4249 {
4250 enum gimplify_status one;
4251
4252 /* If the value is constant, then there's nothing to pre-evaluate. */
4253 if (TREE_CONSTANT (*expr_p))
4254 {
4255 /* Ensure it does not have side effects, it might contain a reference to
4256 the object we're initializing. */
4257 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4258 return;
4259 }
4260
4261 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4262 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4263 return;
4264
4265 /* Recurse for nested constructors. */
4266 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4267 {
4268 unsigned HOST_WIDE_INT ix;
4269 constructor_elt *ce;
4270 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4271
4272 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4273 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4274
4275 return;
4276 }
4277
4278 /* If this is a variable sized type, we must remember the size. */
4279 maybe_with_size_expr (expr_p);
4280
4281 /* Gimplify the constructor element to something appropriate for the rhs
4282 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4283 the gimplifier will consider this a store to memory. Doing this
4284 gimplification now means that we won't have to deal with complicated
4285 language-specific trees, nor trees like SAVE_EXPR that can induce
4286 exponential search behavior. */
4287 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4288 if (one == GS_ERROR)
4289 {
4290 *expr_p = NULL;
4291 return;
4292 }
4293
4294 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4295 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4296 always be true for all scalars, since is_gimple_mem_rhs insists on a
4297 temporary variable for them. */
4298 if (DECL_P (*expr_p))
4299 return;
4300
4301 /* If this is of variable size, we have no choice but to assume it doesn't
4302 overlap since we can't make a temporary for it. */
4303 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4304 return;
4305
4306 /* Otherwise, we must search for overlap ... */
4307 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4308 return;
4309
4310 /* ... and if found, force the value into a temporary. */
4311 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4312 }
4313
4314 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4315 a RANGE_EXPR in a CONSTRUCTOR for an array.
4316
4317 var = lower;
4318 loop_entry:
4319 object[var] = value;
4320 if (var == upper)
4321 goto loop_exit;
4322 var = var + 1;
4323 goto loop_entry;
4324 loop_exit:
4325
4326 We increment var _after_ the loop exit check because we might otherwise
4327 fail if upper == TYPE_MAX_VALUE (type for upper).
4328
4329 Note that we never have to deal with SAVE_EXPRs here, because this has
4330 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4331
4332 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4333 gimple_seq *, bool);
4334
4335 static void
4336 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4337 tree value, tree array_elt_type,
4338 gimple_seq *pre_p, bool cleared)
4339 {
4340 tree loop_entry_label, loop_exit_label, fall_thru_label;
4341 tree var, var_type, cref, tmp;
4342
4343 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4344 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4345 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4346
4347 /* Create and initialize the index variable. */
4348 var_type = TREE_TYPE (upper);
4349 var = create_tmp_var (var_type);
4350 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4351
4352 /* Add the loop entry label. */
4353 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4354
4355 /* Build the reference. */
4356 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4357 var, NULL_TREE, NULL_TREE);
4358
4359 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4360 the store. Otherwise just assign value to the reference. */
4361
4362 if (TREE_CODE (value) == CONSTRUCTOR)
4363 /* NB we might have to call ourselves recursively through
4364 gimplify_init_ctor_eval if the value is a constructor. */
4365 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4366 pre_p, cleared);
4367 else
4368 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4369
4370 /* We exit the loop when the index var is equal to the upper bound. */
4371 gimplify_seq_add_stmt (pre_p,
4372 gimple_build_cond (EQ_EXPR, var, upper,
4373 loop_exit_label, fall_thru_label));
4374
4375 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4376
4377 /* Otherwise, increment the index var... */
4378 tmp = build2 (PLUS_EXPR, var_type, var,
4379 fold_convert (var_type, integer_one_node));
4380 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4381
4382 /* ...and jump back to the loop entry. */
4383 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4384
4385 /* Add the loop exit label. */
4386 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4387 }
4388
4389 /* Return true if FDECL is accessing a field that is zero sized. */
4390
4391 static bool
4392 zero_sized_field_decl (const_tree fdecl)
4393 {
4394 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4395 && integer_zerop (DECL_SIZE (fdecl)))
4396 return true;
4397 return false;
4398 }
4399
4400 /* Return true if TYPE is zero sized. */
4401
4402 static bool
4403 zero_sized_type (const_tree type)
4404 {
4405 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4406 && integer_zerop (TYPE_SIZE (type)))
4407 return true;
4408 return false;
4409 }
4410
4411 /* A subroutine of gimplify_init_constructor. Generate individual
4412 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4413 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4414 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4415 zeroed first. */
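/* A sketch of the output for

	s = (struct S){ .a = 1, .b = j };

   is the element-wise sequence

	s.a = 1;
	s.b = j;

   with zero-valued elements skipped when CLEARED is true, since the whole
   object has already been zeroed.  */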
4416
4417 static void
4418 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4419 gimple_seq *pre_p, bool cleared)
4420 {
4421 tree array_elt_type = NULL;
4422 unsigned HOST_WIDE_INT ix;
4423 tree purpose, value;
4424
4425 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4426 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4427
4428 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4429 {
4430 tree cref;
4431
4432 /* NULL values are created above for gimplification errors. */
4433 if (value == NULL)
4434 continue;
4435
4436 if (cleared && initializer_zerop (value))
4437 continue;
4438
4439 /* ??? Here's to hoping the front end fills in all of the indices,
4440 so we don't have to figure out what's missing ourselves. */
4441 gcc_assert (purpose);
4442
4443 /* Skip zero-sized fields, unless value has side-effects. This can
4444 happen with calls to functions returning a zero-sized type, which
4445 we shouldn't discard. As a number of downstream passes don't
4446 expect sets of zero-sized fields, we rely on the gimplification of
4447 the MODIFY_EXPR we make below to drop the assignment statement. */
4448 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4449 continue;
4450
4451 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4452 whole range. */
4453 if (TREE_CODE (purpose) == RANGE_EXPR)
4454 {
4455 tree lower = TREE_OPERAND (purpose, 0);
4456 tree upper = TREE_OPERAND (purpose, 1);
4457
4458 /* If the lower bound is equal to upper, just treat it as if
4459 upper was the index. */
4460 if (simple_cst_equal (lower, upper))
4461 purpose = upper;
4462 else
4463 {
4464 gimplify_init_ctor_eval_range (object, lower, upper, value,
4465 array_elt_type, pre_p, cleared);
4466 continue;
4467 }
4468 }
4469
4470 if (array_elt_type)
4471 {
4472 /* Do not use bitsizetype for ARRAY_REF indices. */
4473 if (TYPE_DOMAIN (TREE_TYPE (object)))
4474 purpose
4475 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4476 purpose);
4477 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4478 purpose, NULL_TREE, NULL_TREE);
4479 }
4480 else
4481 {
4482 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4483 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4484 unshare_expr (object), purpose, NULL_TREE);
4485 }
4486
4487 if (TREE_CODE (value) == CONSTRUCTOR
4488 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4489 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4490 pre_p, cleared);
4491 else
4492 {
4493 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4494 gimplify_and_add (init, pre_p);
4495 ggc_free (init);
4496 }
4497 }
4498 }
4499
4500 /* Return the appropriate RHS predicate for this LHS. */
4501
4502 gimple_predicate
4503 rhs_predicate_for (tree lhs)
4504 {
4505 if (is_gimple_reg (lhs))
4506 return is_gimple_reg_rhs_or_call;
4507 else
4508 return is_gimple_mem_rhs_or_call;
4509 }
4510
4511 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4512 before the LHS has been gimplified. */
4513
4514 static gimple_predicate
4515 initial_rhs_predicate_for (tree lhs)
4516 {
4517 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4518 return is_gimple_reg_rhs_or_call;
4519 else
4520 return is_gimple_mem_rhs_or_call;
4521 }
4522
4523 /* Gimplify a C99 compound literal expression. This just means adding
4524 the DECL_EXPR before the current statement and using its anonymous
4525 decl instead. */
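/* A sketch, for the C99 fragment

	int *p = (int []){ 1, 2, 3 };

   the compound literal's anonymous decl (call it D.1234; the name here is
   made up) is emitted as a separate initialized declaration and the
   expression is rewritten to use it:

	int D.1234[3] = { 1, 2, 3 };
	int *p = D.1234;  */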
4526
4527 static enum gimplify_status
4528 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4529 bool (*gimple_test_f) (tree),
4530 fallback_t fallback)
4531 {
4532 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4533 tree decl = DECL_EXPR_DECL (decl_s);
4534 tree init = DECL_INITIAL (decl);
4535 /* Mark the decl as addressable if the compound literal
4536 expression is addressable now, otherwise it is marked too late
4537 after we gimplify the initialization expression. */
4538 if (TREE_ADDRESSABLE (*expr_p))
4539 TREE_ADDRESSABLE (decl) = 1;
4540 /* Otherwise, if we don't need an lvalue and have a literal, directly
4541 substitute it. Check whether it matches the gimple predicate, as
4542 otherwise we'd generate a new temporary, and we might as well just
4543 use the decl we already have. */
4544 else if (!TREE_ADDRESSABLE (decl)
4545 && init
4546 && (fallback & fb_lvalue) == 0
4547 && gimple_test_f (init))
4548 {
4549 *expr_p = init;
4550 return GS_OK;
4551 }
4552
4553 /* Preliminarily mark non-addressed complex variables as eligible
4554 for promotion to gimple registers. We'll transform their uses
4555 as we find them. */
4556 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
4557 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
4558 && !TREE_THIS_VOLATILE (decl)
4559 && !needs_to_live_in_memory (decl))
4560 DECL_GIMPLE_REG_P (decl) = 1;
4561
4562 /* If the decl is not addressable, then it is being used in some
4563 expression or on the right hand side of a statement, and it can
4564 be put into a readonly data section. */
4565 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4566 TREE_READONLY (decl) = 1;
4567
4568 /* This decl isn't mentioned in the enclosing block, so add it to the
4569 list of temps. FIXME it seems a bit of a kludge to say that
4570 anonymous artificial vars aren't pushed, but everything else is. */
4571 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4572 gimple_add_tmp_var (decl);
4573
4574 gimplify_and_add (decl_s, pre_p);
4575 *expr_p = decl;
4576 return GS_OK;
4577 }
4578
4579 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4580 return a new CONSTRUCTOR if something changed. */
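/* E.g. a constructor element such as

	{ .u = (struct U){ 1, 2 } }

   can be rewritten (a sketch) as

	{ .u = { 1, 2 } }

   when neither the compound literal nor its decl has had its address
   taken.  */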
4581
4582 static tree
4583 optimize_compound_literals_in_ctor (tree orig_ctor)
4584 {
4585 tree ctor = orig_ctor;
4586 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4587 unsigned int idx, num = vec_safe_length (elts);
4588
4589 for (idx = 0; idx < num; idx++)
4590 {
4591 tree value = (*elts)[idx].value;
4592 tree newval = value;
4593 if (TREE_CODE (value) == CONSTRUCTOR)
4594 newval = optimize_compound_literals_in_ctor (value);
4595 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4596 {
4597 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4598 tree decl = DECL_EXPR_DECL (decl_s);
4599 tree init = DECL_INITIAL (decl);
4600
4601 if (!TREE_ADDRESSABLE (value)
4602 && !TREE_ADDRESSABLE (decl)
4603 && init
4604 && TREE_CODE (init) == CONSTRUCTOR)
4605 newval = optimize_compound_literals_in_ctor (init);
4606 }
4607 if (newval == value)
4608 continue;
4609
4610 if (ctor == orig_ctor)
4611 {
4612 ctor = copy_node (orig_ctor);
4613 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4614 elts = CONSTRUCTOR_ELTS (ctor);
4615 }
4616 (*elts)[idx].value = newval;
4617 }
4618 return ctor;
4619 }
4620
4621 /* A subroutine of gimplify_modify_expr. Break out elements of a
4622 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4623
4624 Note that we still need to clear any elements that don't have explicit
4625 initializers, so if not all elements are initialized we keep the
4626 original MODIFY_EXPR, we just remove all of the constructor elements.
4627
4628 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4629 GS_ERROR if we would have to create a temporary when gimplifying
4630 this constructor. Otherwise, return GS_OK.
4631
4632 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4633
4634 static enum gimplify_status
4635 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4636 bool want_value, bool notify_temp_creation)
4637 {
4638 tree object, ctor, type;
4639 enum gimplify_status ret;
4640 vec<constructor_elt, va_gc> *elts;
4641
4642 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4643
4644 if (!notify_temp_creation)
4645 {
4646 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4647 is_gimple_lvalue, fb_lvalue);
4648 if (ret == GS_ERROR)
4649 return ret;
4650 }
4651
4652 object = TREE_OPERAND (*expr_p, 0);
4653 ctor = TREE_OPERAND (*expr_p, 1)
4654 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4655 type = TREE_TYPE (ctor);
4656 elts = CONSTRUCTOR_ELTS (ctor);
4657 ret = GS_ALL_DONE;
4658
4659 switch (TREE_CODE (type))
4660 {
4661 case RECORD_TYPE:
4662 case UNION_TYPE:
4663 case QUAL_UNION_TYPE:
4664 case ARRAY_TYPE:
4665 {
4666 struct gimplify_init_ctor_preeval_data preeval_data;
4667 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4668 bool cleared, complete_p, valid_const_initializer;
4669
4670 /* Aggregate types must lower constructors to initialization of
4671 individual elements. The exception is that a CONSTRUCTOR node
4672 with no elements indicates zero-initialization of the whole. */
4673 if (vec_safe_is_empty (elts))
4674 {
4675 if (notify_temp_creation)
4676 return GS_OK;
4677 break;
4678 }
4679
4680 /* Fetch information about the constructor to direct later processing.
4681 We might want to make static versions of it in various cases, and
4682 can only do so if it is known to be a valid constant initializer. */
4683 valid_const_initializer
4684 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4685 &num_ctor_elements, &complete_p);
4686
4687 /* If a const aggregate variable is being initialized, then it
4688 should never be a loss to promote the variable to be static. */
4689 if (valid_const_initializer
4690 && num_nonzero_elements > 1
4691 && TREE_READONLY (object)
4692 && VAR_P (object)
4693 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4694 {
4695 if (notify_temp_creation)
4696 return GS_ERROR;
4697 DECL_INITIAL (object) = ctor;
4698 TREE_STATIC (object) = 1;
4699 if (!DECL_NAME (object))
4700 DECL_NAME (object) = create_tmp_var_name ("C");
4701 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4702
4703 /* ??? C++ doesn't automatically append a .<number> to the
4704 assembler name, and even when it does, it looks at FE private
4705 data structures to figure out what that number should be,
4706 which are not set for this variable. I suppose this is
4707 important for local statics for inline functions, which aren't
4708 "local" in the object file sense. So in order to get a unique
4709 TU-local symbol, we must invoke the lhd version now. */
4710 lhd_set_decl_assembler_name (object);
4711
4712 *expr_p = NULL_TREE;
4713 break;
4714 }
4715
4716 /* If there are "lots" of initialized elements, even discounting
4717 those that are not address constants (and thus *must* be
4718 computed at runtime), then partition the constructor into
4719 constant and non-constant parts. Block copy the constant
4720 parts in, then generate code for the non-constant parts. */
4721 /* TODO. There's code in cp/typeck.c to do this. */
4722
4723 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4724 /* store_constructor will ignore the clearing of variable-sized
4725 objects. Initializers for such objects must explicitly set
4726 every field that needs to be set. */
4727 cleared = false;
4728 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
4729 /* If the constructor isn't complete, clear the whole object
4730 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4731
4732 ??? This ought not to be needed. For any element not present
4733 in the initializer, we should simply set it to zero. Except
4734 we'd need to *find* the elements that are not present, and that
4735 requires trickery to avoid quadratic compile-time behavior in
4736 large cases or excessive memory use in small cases. */
4737 cleared = true;
4738 else if (num_ctor_elements - num_nonzero_elements
4739 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4740 && num_nonzero_elements < num_ctor_elements / 4)
4741 /* If there are "lots" of zeros, it's more efficient to clear
4742 the memory and then set the nonzero elements. */
4743 cleared = true;
4744 else
4745 cleared = false;
4746
4747 /* If there are "lots" of initialized elements, and all of them
4748 are valid address constants, then the entire initializer can
4749 be dropped to memory, and then memcpy'd out. Don't do this
4750 for sparse arrays, though, as it's more efficient to follow
4751 the standard CONSTRUCTOR behavior of memset followed by
4752 individual element initialization. Also don't do this for small
4753 all-zero initializers (which aren't big enough to merit
4754 clearing), and don't try to make bitwise copies of
4755 TREE_ADDRESSABLE types.
4756
4757 We cannot apply such transformation when compiling chkp static
4758 initializer because creation of initializer image in the memory
4759 will require static initialization of bounds for it. It should
4760 result in another gimplification of similar initializer and we
4761 may fall into infinite loop. */
4762 if (valid_const_initializer
4763 && !(cleared || num_nonzero_elements == 0)
4764 && !TREE_ADDRESSABLE (type)
4765 && (!current_function_decl
4766 || !lookup_attribute ("chkp ctor",
4767 DECL_ATTRIBUTES (current_function_decl))))
4768 {
4769 HOST_WIDE_INT size = int_size_in_bytes (type);
4770 unsigned int align;
4771
4772 /* ??? We can still get unbounded array types, at least
4773 from the C++ front end. This seems wrong, but attempt
4774 to work around it for now. */
4775 if (size < 0)
4776 {
4777 size = int_size_in_bytes (TREE_TYPE (object));
4778 if (size >= 0)
4779 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4780 }
4781
4782 /* Find the maximum alignment we can assume for the object. */
4783 /* ??? Make use of DECL_OFFSET_ALIGN. */
4784 if (DECL_P (object))
4785 align = DECL_ALIGN (object);
4786 else
4787 align = TYPE_ALIGN (type);
4788
4789 /* Do a block move either if the size is so small as to make
4790 each individual move a sub-unit move on average, or if it
4791 is so large as to make individual moves inefficient. */
4792 if (size > 0
4793 && num_nonzero_elements > 1
4794 && (size < num_nonzero_elements
4795 || !can_move_by_pieces (size, align)))
4796 {
4797 if (notify_temp_creation)
4798 return GS_ERROR;
4799
4800 walk_tree (&ctor, force_labels_r, NULL, NULL);
4801 ctor = tree_output_constant_def (ctor);
4802 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4803 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4804 TREE_OPERAND (*expr_p, 1) = ctor;
4805
4806 /* This is no longer an assignment of a CONSTRUCTOR, but
4807 we still may have processing to do on the LHS. So
4808 pretend we didn't do anything here to let that happen. */
4809 return GS_UNHANDLED;
4810 }
4811 }
4812
4813 /* If the target is volatile, we have non-zero elements and more than
4814 one field to assign, initialize the target from a temporary. */
4815 if (TREE_THIS_VOLATILE (object)
4816 && !TREE_ADDRESSABLE (type)
4817 && num_nonzero_elements > 0
4818 && vec_safe_length (elts) > 1)
4819 {
4820 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4821 TREE_OPERAND (*expr_p, 0) = temp;
4822 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4823 *expr_p,
4824 build2 (MODIFY_EXPR, void_type_node,
4825 object, temp));
4826 return GS_OK;
4827 }
4828
4829 if (notify_temp_creation)
4830 return GS_OK;
4831
4832 /* If there are nonzero elements and if needed, pre-evaluate to capture
4833 elements overlapping with the lhs into temporaries. We must do this
4834 before clearing to fetch the values before they are zeroed-out. */
4835 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4836 {
4837 preeval_data.lhs_base_decl = get_base_address (object);
4838 if (!DECL_P (preeval_data.lhs_base_decl))
4839 preeval_data.lhs_base_decl = NULL;
4840 preeval_data.lhs_alias_set = get_alias_set (object);
4841
4842 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4843 pre_p, post_p, &preeval_data);
4844 }
4845
4846 bool ctor_has_side_effects_p
4847 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4848
4849 if (cleared)
4850 {
4851 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4852 Note that we still have to gimplify, in order to handle the
4853 case of variable sized types. Avoid shared tree structures. */
4854 CONSTRUCTOR_ELTS (ctor) = NULL;
4855 TREE_SIDE_EFFECTS (ctor) = 0;
4856 object = unshare_expr (object);
4857 gimplify_stmt (expr_p, pre_p);
4858 }
4859
4860 /* If we have not block cleared the object, or if there are nonzero
4861 elements in the constructor, or if the constructor has side effects,
4862 add assignments to the individual scalar fields of the object. */
4863 if (!cleared
4864 || num_nonzero_elements > 0
4865 || ctor_has_side_effects_p)
4866 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4867
4868 *expr_p = NULL_TREE;
4869 }
4870 break;
4871
4872 case COMPLEX_TYPE:
4873 {
4874 tree r, i;
4875
4876 if (notify_temp_creation)
4877 return GS_OK;
4878
4879 /* Extract the real and imaginary parts out of the ctor. */
4880 gcc_assert (elts->length () == 2);
4881 r = (*elts)[0].value;
4882 i = (*elts)[1].value;
4883 if (r == NULL || i == NULL)
4884 {
4885 tree zero = build_zero_cst (TREE_TYPE (type));
4886 if (r == NULL)
4887 r = zero;
4888 if (i == NULL)
4889 i = zero;
4890 }
4891
4892 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4893 represent creation of a complex value. */
4894 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4895 {
4896 ctor = build_complex (type, r, i);
4897 TREE_OPERAND (*expr_p, 1) = ctor;
4898 }
4899 else
4900 {
4901 ctor = build2 (COMPLEX_EXPR, type, r, i);
4902 TREE_OPERAND (*expr_p, 1) = ctor;
4903 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4904 pre_p,
4905 post_p,
4906 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4907 fb_rvalue);
4908 }
4909 }
4910 break;
4911
4912 case VECTOR_TYPE:
4913 {
4914 unsigned HOST_WIDE_INT ix;
4915 constructor_elt *ce;
4916
4917 if (notify_temp_creation)
4918 return GS_OK;
4919
4920 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4921 if (TREE_CONSTANT (ctor))
4922 {
4923 bool constant_p = true;
4924 tree value;
4925
4926 /* Even when ctor is constant, it might contain non-*_CST
4927 elements, such as addresses or trapping values like
4928 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4929 in VECTOR_CST nodes. */
4930 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4931 if (!CONSTANT_CLASS_P (value))
4932 {
4933 constant_p = false;
4934 break;
4935 }
4936
4937 if (constant_p)
4938 {
4939 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4940 break;
4941 }
4942
4943 TREE_CONSTANT (ctor) = 0;
4944 }
4945
4946 /* Vector types use CONSTRUCTOR all the way through gimple
4947 compilation as a general initializer. */
4948 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4949 {
4950 enum gimplify_status tret;
4951 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4952 fb_rvalue);
4953 if (tret == GS_ERROR)
4954 ret = GS_ERROR;
4955 else if (TREE_STATIC (ctor)
4956 && !initializer_constant_valid_p (ce->value,
4957 TREE_TYPE (ce->value)))
4958 TREE_STATIC (ctor) = 0;
4959 }
4960 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4961 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4962 }
4963 break;
4964
4965 default:
4966 /* So how did we get a CONSTRUCTOR for a scalar type? */
4967 gcc_unreachable ();
4968 }
4969
4970 if (ret == GS_ERROR)
4971 return GS_ERROR;
4972 /* If we have gimplified both sides of the initializer but have
4973 not emitted an assignment, do so now. */
4974 if (*expr_p)
4975 {
4976 tree lhs = TREE_OPERAND (*expr_p, 0);
4977 tree rhs = TREE_OPERAND (*expr_p, 1);
4978 if (want_value && object == lhs)
4979 lhs = unshare_expr (lhs);
4980 gassign *init = gimple_build_assign (lhs, rhs);
4981 gimplify_seq_add_stmt (pre_p, init);
4982 }
4983 if (want_value)
4984 {
4985 *expr_p = object;
4986 return GS_OK;
4987 }
4988 else
4989 {
4990 *expr_p = NULL;
4991 return GS_ALL_DONE;
4992 }
4993 }
4994
4995 /* Given a pointer value OP0, return a simplified version of an
4996 indirection through OP0, or NULL_TREE if no simplification is
4997 possible. This may only be applied to a rhs of an expression.
4998 Note that the resulting type may be different from the type pointed
4999 to in the sense that it is still compatible from the langhooks
5000 point of view. */
5001
5002 static tree
5003 gimple_fold_indirect_ref_rhs (tree t)
5004 {
5005 return gimple_fold_indirect_ref (t);
5006 }
5007
5008 /* Subroutine of gimplify_modify_expr to do simplifications of
5009 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5010 something changes. */
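/* Two sketches of the simplifications performed below: initializing from
   a TARGET_EXPR strips the wrapper, so

	x = TARGET_EXPR <D.1, f ()>;	becomes    x = f ();

   and a dereference of a just-taken address folds back, so

	x = *(const A *) &y;		becomes    x = y;

   (decl names are illustrative).  */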
5011
5012 static enum gimplify_status
5013 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5014 gimple_seq *pre_p, gimple_seq *post_p,
5015 bool want_value)
5016 {
5017 enum gimplify_status ret = GS_UNHANDLED;
5018 bool changed;
5019
5020 do
5021 {
5022 changed = false;
5023 switch (TREE_CODE (*from_p))
5024 {
5025 case VAR_DECL:
5026 /* If we're assigning from a read-only variable initialized with
5027 a constructor, do the direct assignment from the constructor,
5028 but only if neither source nor target is volatile, since this
5029 latter assignment might end up being done on a per-field basis. */
5030 if (DECL_INITIAL (*from_p)
5031 && TREE_READONLY (*from_p)
5032 && !TREE_THIS_VOLATILE (*from_p)
5033 && !TREE_THIS_VOLATILE (*to_p)
5034 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5035 {
5036 tree old_from = *from_p;
5037 enum gimplify_status subret;
5038
5039 /* Move the constructor into the RHS. */
5040 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5041
5042 /* Let's see if gimplify_init_constructor will need to put
5043 it in memory. */
5044 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5045 false, true);
5046 if (subret == GS_ERROR)
5047 {
5048 /* If so, revert the change. */
5049 *from_p = old_from;
5050 }
5051 else
5052 {
5053 ret = GS_OK;
5054 changed = true;
5055 }
5056 }
5057 break;
5058 case INDIRECT_REF:
5059 {
5060 /* If we have code like
5061
5062 *(const A*)(A*)&x
5063
5064 where the type of "x" is a (possibly cv-qualified variant
5065 of "A"), treat the entire expression as identical to "x".
5066 This kind of code arises in C++ when an object is bound
5067 to a const reference, and if "x" is a TARGET_EXPR we want
5068 to take advantage of the optimization below. */
5069 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5070 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5071 if (t)
5072 {
5073 if (TREE_THIS_VOLATILE (t) != volatile_p)
5074 {
5075 if (DECL_P (t))
5076 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5077 build_fold_addr_expr (t));
5078 if (REFERENCE_CLASS_P (t))
5079 TREE_THIS_VOLATILE (t) = volatile_p;
5080 }
5081 *from_p = t;
5082 ret = GS_OK;
5083 changed = true;
5084 }
5085 break;
5086 }
5087
5088 case TARGET_EXPR:
5089 {
5090 /* If we are initializing something from a TARGET_EXPR, strip the
5091 TARGET_EXPR and initialize it directly, if possible. This can't
5092 be done if the initializer is void, since that implies that the
5093 temporary is set in some non-trivial way.
5094
5095 ??? What about code that pulls out the temp and uses it
5096 elsewhere? I think that such code never uses the TARGET_EXPR as
5097 an initializer. If I'm wrong, we'll die because the temp won't
5098 have any RTL. In that case, I guess we'll need to replace
5099 references somehow. */
5100 tree init = TARGET_EXPR_INITIAL (*from_p);
5101
5102 if (init
5103 && !VOID_TYPE_P (TREE_TYPE (init)))
5104 {
5105 *from_p = init;
5106 ret = GS_OK;
5107 changed = true;
5108 }
5109 }
5110 break;
5111
5112 case COMPOUND_EXPR:
5113 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5114 caught. */
5115 gimplify_compound_expr (from_p, pre_p, true);
5116 ret = GS_OK;
5117 changed = true;
5118 break;
5119
5120 case CONSTRUCTOR:
5121 /* If we already made some changes, let the front end have a
5122 crack at this before we break it down. */
5123 if (ret != GS_UNHANDLED)
5124 break;
5125 /* If we're initializing from a CONSTRUCTOR, break this into
5126 individual MODIFY_EXPRs. */
5127 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5128 false);
5129
5130 case COND_EXPR:
5131 /* If we're assigning to a non-register type, push the assignment
5132 down into the branches. This is mandatory for ADDRESSABLE types,
5133 since we cannot generate temporaries for such, but it saves a
5134 copy in other cases as well. */
5135 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5136 {
5137 /* This code should mirror the code in gimplify_cond_expr. */
5138 enum tree_code code = TREE_CODE (*expr_p);
5139 tree cond = *from_p;
5140 tree result = *to_p;
5141
5142 ret = gimplify_expr (&result, pre_p, post_p,
5143 is_gimple_lvalue, fb_lvalue);
5144 if (ret != GS_ERROR)
5145 ret = GS_OK;
5146
5147 /* If we are going to write RESULT more than once, clear
5148 TREE_READONLY flag, otherwise we might incorrectly promote
5149 the variable to static const and initialize it at compile
5150 time in one of the branches. */
5151 if (VAR_P (result)
5152 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5153 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5154 TREE_READONLY (result) = 0;
5155 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5156 TREE_OPERAND (cond, 1)
5157 = build2 (code, void_type_node, result,
5158 TREE_OPERAND (cond, 1));
5159 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5160 TREE_OPERAND (cond, 2)
5161 = build2 (code, void_type_node, unshare_expr (result),
5162 TREE_OPERAND (cond, 2));
5163
5164 TREE_TYPE (cond) = void_type_node;
5165 recalculate_side_effects (cond);
5166
5167 if (want_value)
5168 {
5169 gimplify_and_add (cond, pre_p);
5170 *expr_p = unshare_expr (result);
5171 }
5172 else
5173 *expr_p = cond;
5174 return ret;
5175 }
5176 break;
5177
5178 case CALL_EXPR:
5179 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5180 return slot so that we don't generate a temporary. */
5181 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5182 && aggregate_value_p (*from_p, *from_p))
5183 {
5184 bool use_target;
5185
5186 if (!(rhs_predicate_for (*to_p))(*from_p))
5187 /* If we need a temporary, *to_p isn't accurate. */
5188 use_target = false;
5189 /* It's OK to use the return slot directly unless it's an NRV. */
5190 else if (TREE_CODE (*to_p) == RESULT_DECL
5191 && DECL_NAME (*to_p) == NULL_TREE
5192 && needs_to_live_in_memory (*to_p))
5193 use_target = true;
5194 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5195 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5196 /* Don't force regs into memory. */
5197 use_target = false;
5198 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5199 /* It's OK to use the target directly if it's being
5200 initialized. */
5201 use_target = true;
5202 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5203 != INTEGER_CST)
5204 /* Always use the target and thus RSO for variable-sized types.
5205 GIMPLE cannot deal with a variable-sized assignment
5206 embedded in a call statement. */
5207 use_target = true;
5208 else if (TREE_CODE (*to_p) != SSA_NAME
5209 && (!is_gimple_variable (*to_p)
5210 || needs_to_live_in_memory (*to_p)))
5211 /* Don't use the original target if it's already addressable;
5212 if its address escapes, and the called function uses the
5213 NRV optimization, a conforming program could see *to_p
5214 change before the called function returns; see c++/19317.
5215 When optimizing, the return_slot pass marks more functions
5216 as safe after we have escape info. */
5217 use_target = false;
5218 else
5219 use_target = true;
5220
5221 if (use_target)
5222 {
5223 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5224 mark_addressable (*to_p);
5225 }
5226 }
5227 break;
5228
5229 case WITH_SIZE_EXPR:
5230 /* Likewise for calls that return an aggregate of non-constant size,
5231 since we would not be able to generate a temporary at all. */
5232 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5233 {
5234 *from_p = TREE_OPERAND (*from_p, 0);
5235 /* We don't change ret in this case because the
5236 WITH_SIZE_EXPR might have been added in
5237 gimplify_modify_expr, so returning GS_OK would lead to an
5238 infinite loop. */
5239 changed = true;
5240 }
5241 break;
5242
5243 /* If we're initializing from a container, push the initialization
5244 inside it. */
5245 case CLEANUP_POINT_EXPR:
5246 case BIND_EXPR:
5247 case STATEMENT_LIST:
5248 {
5249 tree wrap = *from_p;
5250 tree t;
5251
5252 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5253 fb_lvalue);
5254 if (ret != GS_ERROR)
5255 ret = GS_OK;
5256
5257 t = voidify_wrapper_expr (wrap, *expr_p);
5258 gcc_assert (t == *expr_p);
5259
5260 if (want_value)
5261 {
5262 gimplify_and_add (wrap, pre_p);
5263 *expr_p = unshare_expr (*to_p);
5264 }
5265 else
5266 *expr_p = wrap;
5267 return GS_OK;
5268 }
5269
5270 case COMPOUND_LITERAL_EXPR:
5271 {
5272 tree complit = TREE_OPERAND (*expr_p, 1);
5273 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5274 tree decl = DECL_EXPR_DECL (decl_s);
5275 tree init = DECL_INITIAL (decl);
5276
5277 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5278 into struct T x = { 0, 1, 2 } if the address of the
5279 compound literal has never been taken. */
5280 if (!TREE_ADDRESSABLE (complit)
5281 && !TREE_ADDRESSABLE (decl)
5282 && init)
5283 {
5284 *expr_p = copy_node (*expr_p);
5285 TREE_OPERAND (*expr_p, 1) = init;
5286 return GS_OK;
5287 }
5288 }
break;
5289
5290 default:
5291 break;
5292 }
5293 }
5294 while (changed);
5295
5296 return ret;
5297 }
5298
5299
5300 /* Return true if T looks like a valid GIMPLE statement. */
5301
5302 static bool
5303 is_gimple_stmt (tree t)
5304 {
5305 const enum tree_code code = TREE_CODE (t);
5306
5307 switch (code)
5308 {
5309 case NOP_EXPR:
5310 /* The only valid NOP_EXPR is the empty statement. */
5311 return IS_EMPTY_STMT (t);
5312
5313 case BIND_EXPR:
5314 case COND_EXPR:
5315 /* These are only valid if they're void. */
5316 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5317
5318 case SWITCH_EXPR:
5319 case GOTO_EXPR:
5320 case RETURN_EXPR:
5321 case LABEL_EXPR:
5322 case CASE_LABEL_EXPR:
5323 case TRY_CATCH_EXPR:
5324 case TRY_FINALLY_EXPR:
5325 case EH_FILTER_EXPR:
5326 case CATCH_EXPR:
5327 case ASM_EXPR:
5328 case STATEMENT_LIST:
5329 case OACC_PARALLEL:
5330 case OACC_KERNELS:
5331 case OACC_DATA:
5332 case OACC_HOST_DATA:
5333 case OACC_DECLARE:
5334 case OACC_UPDATE:
5335 case OACC_ENTER_DATA:
5336 case OACC_EXIT_DATA:
5337 case OACC_CACHE:
5338 case OMP_PARALLEL:
5339 case OMP_FOR:
5340 case OMP_SIMD:
5341 case CILK_SIMD:
5342 case OMP_DISTRIBUTE:
5343 case OACC_LOOP:
5344 case OMP_SECTIONS:
5345 case OMP_SECTION:
5346 case OMP_SINGLE:
5347 case OMP_MASTER:
5348 case OMP_TASKGROUP:
5349 case OMP_ORDERED:
5350 case OMP_CRITICAL:
5351 case OMP_TASK:
5352 case OMP_TARGET:
5353 case OMP_TARGET_DATA:
5354 case OMP_TARGET_UPDATE:
5355 case OMP_TARGET_ENTER_DATA:
5356 case OMP_TARGET_EXIT_DATA:
5357 case OMP_TASKLOOP:
5358 case OMP_TEAMS:
5359 /* These are always void. */
5360 return true;
5361
5362 case CALL_EXPR:
5363 case MODIFY_EXPR:
5364 case PREDICT_EXPR:
5365 /* These are valid regardless of their type. */
5366 return true;
5367
5368 default:
5369 return false;
5370 }
5371 }
5372
5373
5374 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5375 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5376 DECL_GIMPLE_REG_P set.
5377
5378 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5379 other, unmodified part of the complex object just before the total store.
5380 As a consequence, if the object is still uninitialized, an undefined value
5381 will be loaded into a register, which may result in a spurious exception
5382 if the register is floating-point and the value happens to be a signaling
5383 NaN for example. Then the fully-fledged complex operations lowering pass
5384 followed by a DCE pass are necessary in order to fix things up. */
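/* For illustration (a sketch; D.tmp is a made-up temporary name), the
   partial store

     __real__ z = r;

   to a non-addressable _Complex double Z is rewritten into the total
   store

     D.tmp = __imag__ z;
     z = COMPLEX_EXPR <r, D.tmp>;

   so that Z as a whole can be treated as a register.  */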
5385
5386 static enum gimplify_status
5387 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5388 bool want_value)
5389 {
5390 enum tree_code code, ocode;
5391 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5392
5393 lhs = TREE_OPERAND (*expr_p, 0);
5394 rhs = TREE_OPERAND (*expr_p, 1);
5395 code = TREE_CODE (lhs);
5396 lhs = TREE_OPERAND (lhs, 0);
5397
5398 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5399 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5400 TREE_NO_WARNING (other) = 1;
5401 other = get_formal_tmp_var (other, pre_p);
5402
5403 realpart = code == REALPART_EXPR ? rhs : other;
5404 imagpart = code == REALPART_EXPR ? other : rhs;
5405
5406 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5407 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5408 else
5409 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5410
5411 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5412 *expr_p = (want_value) ? rhs : NULL_TREE;
5413
5414 return GS_ALL_DONE;
5415 }
5416
5417 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5418
5419 modify_expr
5420 : varname '=' rhs
5421 | '*' ID '=' rhs
5422
5423 PRE_P points to the list where side effects that must happen before
5424 *EXPR_P should be stored.
5425
5426 POST_P points to the list where side effects that must happen after
5427 *EXPR_P should be stored.
5428
5429 WANT_VALUE is nonzero iff we want to use the value of this expression
5430 in another expression. */
5431
5432 static enum gimplify_status
5433 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5434 bool want_value)
5435 {
5436 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5437 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5438 enum gimplify_status ret = GS_UNHANDLED;
5439 gimple *assign;
5440 location_t loc = EXPR_LOCATION (*expr_p);
5441 gimple_stmt_iterator gsi;
5442
5443 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5444 || TREE_CODE (*expr_p) == INIT_EXPR);
5445
5446 /* Trying to simplify a clobber using normal logic doesn't work,
5447 so handle it here. */
5448 if (TREE_CLOBBER_P (*from_p))
5449 {
5450 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5451 if (ret == GS_ERROR)
5452 return ret;
5453 gcc_assert (!want_value
5454 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5455 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5456 *expr_p = NULL;
5457 return GS_ALL_DONE;
5458 }
5459
5460 /* Insert pointer conversions required by the middle-end that are not
5461 required by the frontend.  This fixes middle-end type checking
5462 for cases like gcc.dg/redecl-6.c.  */
5463 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5464 {
5465 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5466 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5467 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5468 }
5469
5470 /* See if any simplifications can be done based on what the RHS is. */
5471 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5472 want_value);
5473 if (ret != GS_UNHANDLED)
5474 return ret;
5475
5476 /* For zero-sized types, only gimplify the left-hand side and right-hand
5477 side as statements and throw away the assignment. Do this after
5478 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5479 types properly. */
5480 if (zero_sized_type (TREE_TYPE (*from_p))
5481 && !want_value
5482 /* Don't do this for calls that return addressable types, expand_call
5483 relies on those having a lhs. */
5484 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5485 && TREE_CODE (*from_p) == CALL_EXPR))
5486 {
5487 gimplify_stmt (from_p, pre_p);
5488 gimplify_stmt (to_p, pre_p);
5489 *expr_p = NULL_TREE;
5490 return GS_ALL_DONE;
5491 }
5492
5493 /* If the value being copied is of variable width, compute the length
5494 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5495 before gimplifying any of the operands so that we can resolve any
5496 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5497 the size of the expression to be copied, not of the destination, so
5498 that is what we must do here. */
5499 maybe_with_size_expr (from_p);
5500
5501 /* As a special case, we have to temporarily allow for assignments
5502 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5503 a toplevel statement, when gimplifying the GENERIC expression
5504 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5505 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5506
5507 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5508 prevent gimplify_expr from trying to create a new temporary for
5509 foo's LHS, we tell it that it should only gimplify until it
5510 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5511 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5512 and all we need to do here is set 'a' to be its LHS. */
5513
5514 /* Gimplify the RHS first for C++17 and bug 71104. */
5515 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5516 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5517 if (ret == GS_ERROR)
5518 return ret;
5519
5520 /* Then gimplify the LHS. */
5521 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5522 twice we have to make sure to gimplify into non-SSA as otherwise
5523 the abnormal edge added later will make those defs not dominate
5524 their uses.
5525 ??? Technically this applies only to the registers used in the
5526 resulting non-register *TO_P. */
5527 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5528 if (saved_into_ssa
5529 && TREE_CODE (*from_p) == CALL_EXPR
5530 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5531 gimplify_ctxp->into_ssa = false;
5532 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5533 gimplify_ctxp->into_ssa = saved_into_ssa;
5534 if (ret == GS_ERROR)
5535 return ret;
5536
5537 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5538 guess for the predicate was wrong. */
5539 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5540 if (final_pred != initial_pred)
5541 {
5542 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5543 if (ret == GS_ERROR)
5544 return ret;
5545 }
5546
5547 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
5548 size as an argument to the call.  */
5549 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5550 {
5551 tree call = TREE_OPERAND (*from_p, 0);
5552 tree vlasize = TREE_OPERAND (*from_p, 1);
5553
5554 if (TREE_CODE (call) == CALL_EXPR
5555 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5556 {
5557 int nargs = call_expr_nargs (call);
5558 tree type = TREE_TYPE (call);
5559 tree ap = CALL_EXPR_ARG (call, 0);
5560 tree tag = CALL_EXPR_ARG (call, 1);
5561 tree aptag = CALL_EXPR_ARG (call, 2);
5562 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5563 IFN_VA_ARG, type,
5564 nargs + 1, ap, tag,
5565 aptag, vlasize);
5566 TREE_OPERAND (*from_p, 0) = newcall;
5567 }
5568 }
5569
5570 /* Now see if the above changed *from_p to something we handle specially. */
5571 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5572 want_value);
5573 if (ret != GS_UNHANDLED)
5574 return ret;
5575
5576 /* If we've got a variable-sized assignment between two lvalues (i.e. one
5577 that does not involve a call), then we can make things a bit more
5578 straightforward by converting the assignment to memcpy or memset.  */
5579 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5580 {
5581 tree from = TREE_OPERAND (*from_p, 0);
5582 tree size = TREE_OPERAND (*from_p, 1);
5583
5584 if (TREE_CODE (from) == CONSTRUCTOR)
5585 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5586
5587 if (is_gimple_addressable (from))
5588 {
5589 *from_p = from;
5590 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5591 pre_p);
5592 }
5593 }
5594
5595 /* Transform partial stores to non-addressable complex variables into
5596 total stores. This allows us to use real instead of virtual operands
5597 for these variables, which improves optimization. */
5598 if ((TREE_CODE (*to_p) == REALPART_EXPR
5599 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5600 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5601 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5602
5603 /* Try to alleviate the effects of the gimplification creating artificial
5604 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5605 make sure not to create DECL_DEBUG_EXPR links across functions. */
5606 if (!gimplify_ctxp->into_ssa
5607 && VAR_P (*from_p)
5608 && DECL_IGNORED_P (*from_p)
5609 && DECL_P (*to_p)
5610 && !DECL_IGNORED_P (*to_p)
5611 && decl_function_context (*to_p) == current_function_decl
5612 && decl_function_context (*from_p) == current_function_decl)
5613 {
5614 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5615 DECL_NAME (*from_p)
5616 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5617 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5618 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5619 }
5620
5621 if (want_value && TREE_THIS_VOLATILE (*to_p))
5622 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5623
5624 if (TREE_CODE (*from_p) == CALL_EXPR)
5625 {
5626 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5627 instead of a GIMPLE_ASSIGN. */
5628 gcall *call_stmt;
5629 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5630 {
5631 /* Gimplify internal functions created in the FEs. */
5632 int nargs = call_expr_nargs (*from_p), i;
5633 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5634 auto_vec<tree> vargs (nargs);
5635
5636 for (i = 0; i < nargs; i++)
5637 {
5638 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5639 EXPR_LOCATION (*from_p));
5640 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5641 }
5642 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5643 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5644 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5645 }
5646 else
5647 {
5648 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5649 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5650 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5651 tree fndecl = get_callee_fndecl (*from_p);
5652 if (fndecl
5653 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5654 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5655 && call_expr_nargs (*from_p) == 3)
5656 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5657 CALL_EXPR_ARG (*from_p, 0),
5658 CALL_EXPR_ARG (*from_p, 1),
5659 CALL_EXPR_ARG (*from_p, 2));
5660 else
5661 {
5662 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5663 }
5664 }
5665 notice_special_calls (call_stmt);
5666 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5667 gimple_call_set_lhs (call_stmt, *to_p);
5668 else if (TREE_CODE (*to_p) == SSA_NAME)
5669 /* The above is somewhat premature, avoid ICEing later for an
5670 SSA name without a definition.  We may have uses in the GIMPLE IL.
5671 ??? This doesn't make it a default-def. */
5672 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5673
5674 if (EXPR_CILK_SPAWN (*from_p))
5675 gimplify_cilk_detach (pre_p);
5676 assign = call_stmt;
5677 }
5678 else
5679 {
5680 assign = gimple_build_assign (*to_p, *from_p);
5681 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5682 if (COMPARISON_CLASS_P (*from_p))
5683 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5684 }
5685
5686 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5687 {
5688 /* We should have got an SSA name from the start. */
5689 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5690 || ! gimple_in_ssa_p (cfun));
5691 }
5692
5693 gimplify_seq_add_stmt (pre_p, assign);
5694 gsi = gsi_last (*pre_p);
5695 maybe_fold_stmt (&gsi);
5696
5697 if (want_value)
5698 {
5699 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5700 return GS_OK;
5701 }
5702 else
5703 *expr_p = NULL;
5704
5705 return GS_ALL_DONE;
5706 }
5707
5708 /* Gimplify a comparison between two variable-sized objects. Do this
5709 with a call to BUILT_IN_MEMCMP. */
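/* For example (a sketch; X and Y are variable-sized objects of the same
   type), the comparison

     x == y

   is rewritten to roughly

     __builtin_memcmp (&x, &y, <size>) == 0

   where <size> is the TYPE_SIZE_UNIT of X's type with any
   PLACEHOLDER_EXPRs substituted from X.  */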
5710
5711 static enum gimplify_status
5712 gimplify_variable_sized_compare (tree *expr_p)
5713 {
5714 location_t loc = EXPR_LOCATION (*expr_p);
5715 tree op0 = TREE_OPERAND (*expr_p, 0);
5716 tree op1 = TREE_OPERAND (*expr_p, 1);
5717 tree t, arg, dest, src, expr;
5718
5719 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5720 arg = unshare_expr (arg);
5721 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5722 src = build_fold_addr_expr_loc (loc, op1);
5723 dest = build_fold_addr_expr_loc (loc, op0);
5724 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5725 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5726
5727 expr
5728 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5729 SET_EXPR_LOCATION (expr, loc);
5730 *expr_p = expr;
5731
5732 return GS_OK;
5733 }
5734
5735 /* Gimplify a comparison between two aggregate objects of integral scalar
5736 mode as a comparison between the bitwise equivalent scalar values. */
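/* E.g. (a sketch), for a 4-byte struct S whose TYPE_MODE is SImode,

     s1 == s2

   becomes a comparison of the bitwise images, roughly

     VIEW_CONVERT_EXPR<unsigned> (s1) == VIEW_CONVERT_EXPR<unsigned> (s2)

   assuming the language hook returns a 32-bit unsigned type for SImode
   here.  */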
5737
5738 static enum gimplify_status
5739 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5740 {
5741 location_t loc = EXPR_LOCATION (*expr_p);
5742 tree op0 = TREE_OPERAND (*expr_p, 0);
5743 tree op1 = TREE_OPERAND (*expr_p, 1);
5744
5745 tree type = TREE_TYPE (op0);
5746 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5747
5748 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5749 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5750
5751 *expr_p
5752 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5753
5754 return GS_OK;
5755 }
5756
5757 /* Gimplify an expression sequence. This function gimplifies each
5758 expression and rewrites the original expression with the last
5759 expression of the sequence in GIMPLE form.
5760
5761 PRE_P points to the list where the side effects for all the
5762 expressions in the sequence will be emitted.
5763
5764 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
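/* E.g. gimplifying the COMPOUND_EXPR (a (), b (), c) emits the calls to
   a and b as statements into PRE_P and rewrites *EXPR_P to just 'c',
   which is itself gimplified as a statement when WANT_VALUE is false.  */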
5765
5766 static enum gimplify_status
5767 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5768 {
5769 tree t = *expr_p;
5770
5771 do
5772 {
5773 tree *sub_p = &TREE_OPERAND (t, 0);
5774
5775 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5776 gimplify_compound_expr (sub_p, pre_p, false);
5777 else
5778 gimplify_stmt (sub_p, pre_p);
5779
5780 t = TREE_OPERAND (t, 1);
5781 }
5782 while (TREE_CODE (t) == COMPOUND_EXPR);
5783
5784 *expr_p = t;
5785 if (want_value)
5786 return GS_OK;
5787 else
5788 {
5789 gimplify_stmt (expr_p, pre_p);
5790 return GS_ALL_DONE;
5791 }
5792 }
5793
5794 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5795 gimplify. After gimplification, EXPR_P will point to a new temporary
5796 that holds the original value of the SAVE_EXPR node.
5797
5798 PRE_P points to the list where side effects that must happen before
5799 *EXPR_P should be stored. */
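/* For instance (a sketch; D.tmp is a made-up name), the first
   gimplification of SAVE_EXPR <n * 4> emits

     D.tmp = n * 4;

   into PRE_P and rewrites the operand to D.tmp; any later
   gimplification of the same shared SAVE_EXPR sees
   SAVE_EXPR_RESOLVED_P and reduces directly to D.tmp.  */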
5800
5801 static enum gimplify_status
5802 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5803 {
5804 enum gimplify_status ret = GS_ALL_DONE;
5805 tree val;
5806
5807 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5808 val = TREE_OPERAND (*expr_p, 0);
5809
5810 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5811 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5812 {
5813 /* The operand may be a void-valued expression. It is
5814 being executed only for its side-effects. */
5815 if (TREE_TYPE (val) == void_type_node)
5816 {
5817 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5818 is_gimple_stmt, fb_none);
5819 val = NULL;
5820 }
5821 else
5822 /* The temporary may not be an SSA name as later abnormal and EH
5823 control flow may invalidate use/def domination. */
5824 val = get_initialized_tmp_var (val, pre_p, post_p, false);
5825
5826 TREE_OPERAND (*expr_p, 0) = val;
5827 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5828 }
5829
5830 *expr_p = val;
5831
5832 return ret;
5833 }
5834
5835 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5836
5837 unary_expr
5838 : ...
5839 | '&' varname
5840 ...
5841
5842 PRE_P points to the list where side effects that must happen before
5843 *EXPR_P should be stored.
5844
5845 POST_P points to the list where side effects that must happen after
5846 *EXPR_P should be stored. */
5847
5848 static enum gimplify_status
5849 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5850 {
5851 tree expr = *expr_p;
5852 tree op0 = TREE_OPERAND (expr, 0);
5853 enum gimplify_status ret;
5854 location_t loc = EXPR_LOCATION (*expr_p);
5855
5856 switch (TREE_CODE (op0))
5857 {
5858 case INDIRECT_REF:
5859 do_indirect_ref:
5860 /* Check if we are dealing with an expression of the form '&*ptr'.
5861 While the front end folds away '&*ptr' into 'ptr', these
5862 expressions may be generated internally by the compiler (e.g.,
5863 builtins like __builtin_va_end). */
5864 /* Caution: the silent array decomposition semantics we allow for
5865 ADDR_EXPR means we can't always discard the pair. */
5866 /* Gimplification of the ADDR_EXPR operand may drop
5867 cv-qualification conversions, so make sure we add them if
5868 needed. */
5869 {
5870 tree op00 = TREE_OPERAND (op0, 0);
5871 tree t_expr = TREE_TYPE (expr);
5872 tree t_op00 = TREE_TYPE (op00);
5873
5874 if (!useless_type_conversion_p (t_expr, t_op00))
5875 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5876 *expr_p = op00;
5877 ret = GS_OK;
5878 }
5879 break;
5880
5881 case VIEW_CONVERT_EXPR:
5882 /* Take the address of our operand and then convert it to the type of
5883 this ADDR_EXPR.
5884
5885 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5886 all clear. The impact of this transformation is even less clear. */
5887
5888 /* If the operand is a useless conversion, look through it. Doing so
5889 guarantees that the ADDR_EXPR and its operand will remain of the
5890 same type. */
5891 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5892 op0 = TREE_OPERAND (op0, 0);
5893
5894 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5895 build_fold_addr_expr_loc (loc,
5896 TREE_OPERAND (op0, 0)));
5897 ret = GS_OK;
5898 break;
5899
5900 case MEM_REF:
5901 if (integer_zerop (TREE_OPERAND (op0, 1)))
5902 goto do_indirect_ref;
5903
5904 /* fall through */
5905
5906 default:
5907 /* If we see a call to a declared builtin or see its address
5908 being taken (we can unify those cases here) then we can mark
5909 the builtin for implicit generation by GCC. */
5910 if (TREE_CODE (op0) == FUNCTION_DECL
5911 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5912 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5913 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5914
5915 /* We use fb_either here because the C frontend sometimes takes
5916 the address of a call that returns a struct; see
5917 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5918 the implied temporary explicit. */
5919
5920 /* Make the operand addressable. */
5921 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5922 is_gimple_addressable, fb_either);
5923 if (ret == GS_ERROR)
5924 break;
5925
5926 /* Then mark it. Beware that it may not be possible to do so directly
5927 if a temporary has been created by the gimplification. */
5928 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5929
5930 op0 = TREE_OPERAND (expr, 0);
5931
5932 /* For various reasons, the gimplification of the expression
5933 may have made a new INDIRECT_REF. */
5934 if (TREE_CODE (op0) == INDIRECT_REF)
5935 goto do_indirect_ref;
5936
5937 mark_addressable (TREE_OPERAND (expr, 0));
5938
5939 /* The FEs may end up building ADDR_EXPRs early on a decl with
5940 an incomplete type. Re-build ADDR_EXPRs in canonical form
5941 here. */
5942 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5943 *expr_p = build_fold_addr_expr (op0);
5944
5945 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5946 recompute_tree_invariant_for_addr_expr (*expr_p);
5947
5948 /* If we re-built the ADDR_EXPR add a conversion to the original type
5949 if required. */
5950 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5951 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5952
5953 break;
5954 }
5955
5956 return ret;
5957 }
5958
5959 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5960 value; output operands should be a gimple lvalue. */
5961
5962 static enum gimplify_status
5963 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5964 {
5965 tree expr;
5966 int noutputs;
5967 const char **oconstraints;
5968 int i;
5969 tree link;
5970 const char *constraint;
5971 bool allows_mem, allows_reg, is_inout;
5972 enum gimplify_status ret, tret;
5973 gasm *stmt;
5974 vec<tree, va_gc> *inputs;
5975 vec<tree, va_gc> *outputs;
5976 vec<tree, va_gc> *clobbers;
5977 vec<tree, va_gc> *labels;
5978 tree link_next;
5979
5980 expr = *expr_p;
5981 noutputs = list_length (ASM_OUTPUTS (expr));
5982 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5983
5984 inputs = NULL;
5985 outputs = NULL;
5986 clobbers = NULL;
5987 labels = NULL;
5988
5989 ret = GS_ALL_DONE;
5990 link_next = NULL_TREE;
5991 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5992 {
5993 bool ok;
5994 size_t constraint_len;
5995
5996 link_next = TREE_CHAIN (link);
5997
5998 oconstraints[i]
5999 = constraint
6000 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6001 constraint_len = strlen (constraint);
6002 if (constraint_len == 0)
6003 continue;
6004
6005 ok = parse_output_constraint (&constraint, i, 0, 0,
6006 &allows_mem, &allows_reg, &is_inout);
6007 if (!ok)
6008 {
6009 ret = GS_ERROR;
6010 is_inout = false;
6011 }
6012
6013 if (!allows_reg && allows_mem)
6014 mark_addressable (TREE_VALUE (link));
6015
6016 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6017 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6018 fb_lvalue | fb_mayfail);
6019 if (tret == GS_ERROR)
6020 {
6021 error ("invalid lvalue in asm output %d", i);
6022 ret = tret;
6023 }
6024
6025 /* If the constraint does not allow memory, make sure we gimplify
6026 the operand to a register if it is not one already but its base is.
6027 This happens for complex and vector components.  */
6028 if (!allows_mem)
6029 {
6030 tree op = TREE_VALUE (link);
6031 if (! is_gimple_val (op)
6032 && is_gimple_reg_type (TREE_TYPE (op))
6033 && is_gimple_reg (get_base_address (op)))
6034 {
6035 tree tem = create_tmp_reg (TREE_TYPE (op));
6036 tree ass;
6037 if (is_inout)
6038 {
6039 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6040 tem, unshare_expr (op));
6041 gimplify_and_add (ass, pre_p);
6042 }
6043 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6044 gimplify_and_add (ass, post_p);
6045
6046 TREE_VALUE (link) = tem;
6047 tret = GS_OK;
6048 }
6049 }
6050
6051 vec_safe_push (outputs, link);
6052 TREE_CHAIN (link) = NULL_TREE;
6053
6054 if (is_inout)
6055 {
6056 /* An input/output operand. To give the optimizers more
6057 flexibility, split it into separate input and output
6058 operands. */
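/* E.g. (a sketch) the in/out operand "+r" (x) becomes the output
   "=r" (x) plus a matching input "0" (x), where "0" refers back to
   output operand number 0.  */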
6059 tree input;
6060 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6061 char buf[11];
6062
6063 /* Turn the in/out constraint into an output constraint. */
6064 char *p = xstrdup (constraint);
6065 p[0] = '=';
6066 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6067
6068 /* And add a matching input constraint. */
6069 if (allows_reg)
6070 {
6071 sprintf (buf, "%u", i);
6072
6073 /* If there are multiple alternatives in the constraint,
6074 handle each of them individually.  Those that allow a register
6075 will be replaced with the operand number; the others stay
6076 unchanged.  */
6077 if (strchr (p, ',') != NULL)
6078 {
6079 size_t len = 0, buflen = strlen (buf);
6080 char *beg, *end, *str, *dst;
6081
6082 for (beg = p + 1;;)
6083 {
6084 end = strchr (beg, ',');
6085 if (end == NULL)
6086 end = strchr (beg, '\0');
6087 if ((size_t) (end - beg) < buflen)
6088 len += buflen + 1;
6089 else
6090 len += end - beg + 1;
6091 if (*end)
6092 beg = end + 1;
6093 else
6094 break;
6095 }
6096
6097 str = (char *) alloca (len);
6098 for (beg = p + 1, dst = str;;)
6099 {
6100 const char *tem;
6101 bool mem_p, reg_p, inout_p;
6102
6103 end = strchr (beg, ',');
6104 if (end)
6105 *end = '\0';
6106 beg[-1] = '=';
6107 tem = beg - 1;
6108 parse_output_constraint (&tem, i, 0, 0,
6109 &mem_p, &reg_p, &inout_p);
6110 if (dst != str)
6111 *dst++ = ',';
6112 if (reg_p)
6113 {
6114 memcpy (dst, buf, buflen);
6115 dst += buflen;
6116 }
6117 else
6118 {
6119 if (end)
6120 len = end - beg;
6121 else
6122 len = strlen (beg);
6123 memcpy (dst, beg, len);
6124 dst += len;
6125 }
6126 if (end)
6127 beg = end + 1;
6128 else
6129 break;
6130 }
6131 *dst = '\0';
6132 input = build_string (dst - str, str);
6133 }
6134 else
6135 input = build_string (strlen (buf), buf);
6136 }
6137 else
6138 input = build_string (constraint_len - 1, constraint + 1);
6139
6140 free (p);
6141
6142 input = build_tree_list (build_tree_list (NULL_TREE, input),
6143 unshare_expr (TREE_VALUE (link)));
6144 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6145 }
6146 }
6147
6148 link_next = NULL_TREE;
6149 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6150 {
6151 link_next = TREE_CHAIN (link);
6152 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6153 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6154 oconstraints, &allows_mem, &allows_reg);
6155
6156 /* If we can't make copies, we can only accept memory. */
6157 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6158 {
6159 if (allows_mem)
6160 allows_reg = 0;
6161 else
6162 {
6163 error ("impossible constraint in %<asm%>");
6164 error ("non-memory input %d must stay in memory", i);
6165 return GS_ERROR;
6166 }
6167 }
6168
6169 /* If the operand is a memory input, it should be an lvalue. */
6170 if (!allows_reg && allows_mem)
6171 {
6172 tree inputv = TREE_VALUE (link);
6173 STRIP_NOPS (inputv);
6174 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6175 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6176 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6177 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6178 || TREE_CODE (inputv) == MODIFY_EXPR)
6179 TREE_VALUE (link) = error_mark_node;
6180 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6181 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6182 if (tret != GS_ERROR)
6183 {
6184 /* Unlike output operands, memory inputs are not guaranteed
6185 to be lvalues by the FE, and while the expressions are
6186 marked addressable there, if the input is e.g. a statement
6187 expression, temporaries in it might not end up being
6188 addressable.  They might already be used in the IL, and thus
6189 it is too late to make them addressable now.  */
6190 tree x = TREE_VALUE (link);
6191 while (handled_component_p (x))
6192 x = TREE_OPERAND (x, 0);
6193 if (TREE_CODE (x) == MEM_REF
6194 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6195 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6196 if ((VAR_P (x)
6197 || TREE_CODE (x) == PARM_DECL
6198 || TREE_CODE (x) == RESULT_DECL)
6199 && !TREE_ADDRESSABLE (x)
6200 && is_gimple_reg (x))
6201 {
6202 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6203 input_location), 0,
6204 "memory input %d is not directly addressable",
6205 i);
6206 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6207 }
6208 }
6209 mark_addressable (TREE_VALUE (link));
6210 if (tret == GS_ERROR)
6211 {
6212 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6213 "memory input %d is not directly addressable", i);
6214 ret = tret;
6215 }
6216 }
6217 else
6218 {
6219 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6220 is_gimple_asm_val, fb_rvalue);
6221 if (tret == GS_ERROR)
6222 ret = tret;
6223 }
6224
6225 TREE_CHAIN (link) = NULL_TREE;
6226 vec_safe_push (inputs, link);
6227 }
6228
6229 link_next = NULL_TREE;
6230 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6231 {
6232 link_next = TREE_CHAIN (link);
6233 TREE_CHAIN (link) = NULL_TREE;
6234 vec_safe_push (clobbers, link);
6235 }
6236
6237 link_next = NULL_TREE;
6238 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6239 {
6240 link_next = TREE_CHAIN (link);
6241 TREE_CHAIN (link) = NULL_TREE;
6242 vec_safe_push (labels, link);
6243 }
6244
6245 /* Do not add ASMs with errors to the gimple IL stream. */
6246 if (ret != GS_ERROR)
6247 {
6248 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6249 inputs, outputs, clobbers, labels);
6250
6251 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6252 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6253
6254 gimplify_seq_add_stmt (pre_p, stmt);
6255 }
6256
6257 return ret;
6258 }
6259
6260 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6261 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6262 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6263 return to this function.
6264
6265 FIXME should we complexify the prequeue handling instead? Or use flags
6266 for all the cleanups and let the optimizer tighten them up? The current
6267 code seems pretty fragile; it will break on a cleanup within any
6268 non-conditional nesting. But any such nesting would be broken, anyway;
6269 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6270 and continues out of it. We can do that at the RTL level, though, so
6271 having an optimizer to tighten up try/finally regions would be a Good
6272 Thing. */
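/* E.g. (a sketch), a gimplified body sequence of

     stmt1; WCE <cleanup>; stmt2; stmt3;

   is rewritten below to

     stmt1; try { stmt2; stmt3; } finally { cleanup; }

   using a GIMPLE_TRY_CATCH instead when the cleanup is EH-only.  */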
6273
6274 static enum gimplify_status
6275 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6276 {
6277 gimple_stmt_iterator iter;
6278 gimple_seq body_sequence = NULL;
6279
6280 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6281
6282 /* We only care about the number of conditions between the innermost
6283 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6284 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6285 int old_conds = gimplify_ctxp->conditions;
6286 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6287 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6288 gimplify_ctxp->conditions = 0;
6289 gimplify_ctxp->conditional_cleanups = NULL;
6290 gimplify_ctxp->in_cleanup_point_expr = true;
6291
6292 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6293
6294 gimplify_ctxp->conditions = old_conds;
6295 gimplify_ctxp->conditional_cleanups = old_cleanups;
6296 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6297
6298 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6299 {
6300 gimple *wce = gsi_stmt (iter);
6301
6302 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6303 {
6304 if (gsi_one_before_end_p (iter))
6305 {
6306 /* Note that gsi_insert_seq_before and gsi_remove do not
6307 scan operands, unlike some other sequence mutators. */
6308 if (!gimple_wce_cleanup_eh_only (wce))
6309 gsi_insert_seq_before_without_update (&iter,
6310 gimple_wce_cleanup (wce),
6311 GSI_SAME_STMT);
6312 gsi_remove (&iter, true);
6313 break;
6314 }
6315 else
6316 {
6317 gtry *gtry;
6318 gimple_seq seq;
6319 enum gimple_try_flags kind;
6320
6321 if (gimple_wce_cleanup_eh_only (wce))
6322 kind = GIMPLE_TRY_CATCH;
6323 else
6324 kind = GIMPLE_TRY_FINALLY;
6325 seq = gsi_split_seq_after (iter);
6326
6327 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6328 /* Do not use gsi_replace here, as it may scan operands.
6329 We want to do a simple structural modification only. */
6330 gsi_set_stmt (&iter, gtry);
6331 iter = gsi_start (gtry->eval);
6332 }
6333 }
6334 else
6335 gsi_next (&iter);
6336 }
6337
6338 gimplify_seq_add_seq (pre_p, body_sequence);
6339 if (temp)
6340 {
6341 *expr_p = temp;
6342 return GS_OK;
6343 }
6344 else
6345 {
6346 *expr_p = NULL;
6347 return GS_ALL_DONE;
6348 }
6349 }
6350
6351 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6352 is the cleanup action required. EH_ONLY is true if the cleanup should
6353 only be executed if an exception is thrown, not on normal exit.
6354 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6355 only valid for clobbers. */
6356
6357 static void
6358 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6359 bool force_uncond = false)
6360 {
6361 gimple *wce;
6362 gimple_seq cleanup_stmts = NULL;
6363
6364 /* Errors can result in improperly nested cleanups, which in turn cause
6365 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
6366 if (seen_error ())
6367 return;
6368
6369 if (gimple_conditional_context ())
6370 {
6371 /* If we're in a conditional context, this is more complex. We only
6372 want to run the cleanup if we actually ran the initialization that
6373 necessitates it, but we want to run it after the end of the
6374 conditional context. So we wrap the try/finally around the
6375 condition and use a flag to determine whether or not to actually
6376 run the destructor. Thus
6377
6378 test ? f(A()) : 0
6379
6380 becomes (approximately)
6381
6382 flag = 0;
6383 try {
6384 if (test) { A::A(temp); flag = 1; val = f(temp); }
6385 else { val = 0; }
6386 } finally {
6387 if (flag) A::~A(temp);
6388 }
6389 val
6390 */
6391 if (force_uncond)
6392 {
6393 gimplify_stmt (&cleanup, &cleanup_stmts);
6394 wce = gimple_build_wce (cleanup_stmts);
6395 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6396 }
6397 else
6398 {
6399 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6400 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6401 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6402
6403 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6404 gimplify_stmt (&cleanup, &cleanup_stmts);
6405 wce = gimple_build_wce (cleanup_stmts);
6406
6407 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6408 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6409 gimplify_seq_add_stmt (pre_p, ftrue);
6410
6411 /* Because of this manipulation, and the EH edges that jump
6412 threading cannot redirect, the temporary (VAR) will appear
6413 to be used uninitialized. Don't warn. */
6414 TREE_NO_WARNING (var) = 1;
6415 }
6416 }
6417 else
6418 {
6419 gimplify_stmt (&cleanup, &cleanup_stmts);
6420 wce = gimple_build_wce (cleanup_stmts);
6421 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6422 gimplify_seq_add_stmt (pre_p, wce);
6423 }
6424 }
6425
6426 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
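/* E.g. (a sketch; D.1 is a made-up slot name), TARGET_EXPR <D.1, f ()>
   adds roughly

     D.1 = f ();

   to PRE_P and rewrites *EXPR_P to the slot D.1, pushing any
   TARGET_EXPR_CLEANUP via gimple_push_cleanup along the way.  */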
6427
6428 static enum gimplify_status
6429 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6430 {
6431 tree targ = *expr_p;
6432 tree temp = TARGET_EXPR_SLOT (targ);
6433 tree init = TARGET_EXPR_INITIAL (targ);
6434 enum gimplify_status ret;
6435
6436 bool unpoison_empty_seq = false;
6437 gimple_stmt_iterator unpoison_it;
6438
6439 if (init)
6440 {
6441 tree cleanup = NULL_TREE;
6442
6443 /* A TARGET_EXPR temp isn't part of the enclosing block, so add it
6444 to the temps list.  Also handle variable-length TARGET_EXPRs.  */
6445 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
6446 {
6447 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6448 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6449 gimplify_vla_decl (temp, pre_p);
6450 }
6451 else
6452 {
6453 /* Save the location where we need to place unpoisoning.  It's possible
6454 that the variable will later turn out to need to live in memory.  */
6455 unpoison_it = gsi_last (*pre_p);
6456 unpoison_empty_seq = gsi_end_p (unpoison_it);
6457
6458 gimple_add_tmp_var (temp);
6459 }
6460
6461 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6462 expression is supposed to initialize the slot. */
6463 if (VOID_TYPE_P (TREE_TYPE (init)))
6464 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6465 else
6466 {
6467 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6468 init = init_expr;
6469 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6470 init = NULL;
6471 ggc_free (init_expr);
6472 }
6473 if (ret == GS_ERROR)
6474 {
6475 /* PR c++/28266 Make sure this is expanded only once. */
6476 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6477 return GS_ERROR;
6478 }
6479 if (init)
6480 gimplify_and_add (init, pre_p);
6481
6482 /* If needed, push the cleanup for the temp. */
6483 if (TARGET_EXPR_CLEANUP (targ))
6484 {
6485 if (CLEANUP_EH_ONLY (targ))
6486 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6487 CLEANUP_EH_ONLY (targ), pre_p);
6488 else
6489 cleanup = TARGET_EXPR_CLEANUP (targ);
6490 }
6491
6492 /* Add a clobber for the temporary going out of scope, like
6493 gimplify_bind_expr. */
6494 if (gimplify_ctxp->in_cleanup_point_expr
6495 && needs_to_live_in_memory (temp))
6496 {
6497 if (flag_stack_reuse == SR_ALL)
6498 {
6499 tree clobber = build_constructor (TREE_TYPE (temp),
6500 NULL);
6501 TREE_THIS_VOLATILE (clobber) = true;
6502 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6503 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6504 }
6505 if (asan_poisoned_variables
6506 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6507 && dbg_cnt (asan_use_after_scope))
6508 {
6509 tree asan_cleanup = build_asan_poison_call_expr (temp);
6510 if (asan_cleanup)
6511 {
6512 if (unpoison_empty_seq)
6513 unpoison_it = gsi_start (*pre_p);
6514
6515 asan_poison_variable (temp, false, &unpoison_it,
6516 unpoison_empty_seq);
6517 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6518 }
6519 }
6520 }
6521 if (cleanup)
6522 gimple_push_cleanup (temp, cleanup, false, pre_p);
6523
6524 /* Only expand this once. */
6525 TREE_OPERAND (targ, 3) = init;
6526 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6527 }
6528 else
6529 /* We should have expanded this before. */
6530 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6531
6532 *expr_p = temp;
6533 return GS_OK;
6534 }
6535
6536 /* Gimplification of expression trees. */
6537
6538 /* Gimplify an expression which appears at statement context. The
6539 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6540 NULL, a new sequence is allocated.
6541
6542 Return true if we actually added a statement to the queue. */
6543
6544 bool
6545 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6546 {
6547 gimple_seq_node last;
6548
6549 last = gimple_seq_last (*seq_p);
6550 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6551 return last != gimple_seq_last (*seq_p);
6552 }
6553
6554 /* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
6555 to CTX. If entries already exist, force them to be some flavor of private.
6556 If there is no enclosing parallel, do nothing. */
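/* E.g. (a sketch), for a VLA 'int a[n]' referenced inside a
   '#pragma omp parallel' region, the gimplified temporaries making up
   its DECL_SIZE are entered here as firstprivate on the enclosing
   parallel, so the size is available inside the region.  */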
6557
6558 void
6559 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6560 {
6561 splay_tree_node n;
6562
6563 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6564 return;
6565
6566 do
6567 {
6568 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6569 if (n != NULL)
6570 {
6571 if (n->value & GOVD_SHARED)
6572 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6573 else if (n->value & GOVD_MAP)
6574 n->value |= GOVD_MAP_TO_ONLY;
6575 else
6576 return;
6577 }
6578 else if ((ctx->region_type & ORT_TARGET) != 0)
6579 {
6580 if (ctx->target_map_scalars_firstprivate)
6581 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6582 else
6583 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6584 }
6585 else if (ctx->region_type != ORT_WORKSHARE
6586 && ctx->region_type != ORT_SIMD
6587 && ctx->region_type != ORT_ACC
6588 && !(ctx->region_type & ORT_TARGET_DATA))
6589 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6590
6591 ctx = ctx->outer_context;
6592 }
6593 while (ctx);
6594 }
6595
6596 /* Similarly for each of the type sizes of TYPE. */
6597
6598 static void
6599 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6600 {
6601 if (type == NULL || type == error_mark_node)
6602 return;
6603 type = TYPE_MAIN_VARIANT (type);
6604
6605 if (ctx->privatized_types->add (type))
6606 return;
6607
6608 switch (TREE_CODE (type))
6609 {
6610 case INTEGER_TYPE:
6611 case ENUMERAL_TYPE:
6612 case BOOLEAN_TYPE:
6613 case REAL_TYPE:
6614 case FIXED_POINT_TYPE:
6615 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6616 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6617 break;
6618
6619 case ARRAY_TYPE:
6620 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6621 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6622 break;
6623
6624 case RECORD_TYPE:
6625 case UNION_TYPE:
6626 case QUAL_UNION_TYPE:
6627 {
6628 tree field;
6629 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6630 if (TREE_CODE (field) == FIELD_DECL)
6631 {
6632 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6633 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6634 }
6635 }
6636 break;
6637
6638 case POINTER_TYPE:
6639 case REFERENCE_TYPE:
6640 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6641 break;
6642
6643 default:
6644 break;
6645 }
6646
6647 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6648 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6649 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6650 }
6651
6652 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6653
6654 static void
6655 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6656 {
6657 splay_tree_node n;
6658 unsigned int nflags;
6659 tree t;
6660
6661 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6662 return;
6663
6664 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6665 there are constructors involved somewhere.  The exception is a shared
6666 clause: there is nothing privatized in that case.  */
6667 if ((flags & GOVD_SHARED) == 0
6668 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6669 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6670 flags |= GOVD_SEEN;
6671
6672 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6673 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6674 {
6675 /* We shouldn't be re-adding the decl with the same data
6676 sharing class. */
6677 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6678 nflags = n->value | flags;
6679 /* The only combination of data sharing classes we should see is
6680 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
6681 reduction variables to be used in data sharing clauses. */
6682 gcc_assert ((ctx->region_type & ORT_ACC) != 0
6683 || ((nflags & GOVD_DATA_SHARE_CLASS)
6684 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6685 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6686 n->value = nflags;
6687 return;
6688 }
6689
6690 /* When adding a variable-sized variable, we have to handle all sorts
6691 of additional bits of data: the pointer replacement variable, and
6692 the parameters of the type. */
6693 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6694 {
6695 /* Add the pointer replacement variable as PRIVATE if the variable
6696 replacement is private, else FIRSTPRIVATE since we'll need the
6697 address of the original variable either for SHARED, or for the
6698 copy into or out of the context. */
6699 if (!(flags & GOVD_LOCAL))
6700 {
6701 if (flags & GOVD_MAP)
6702 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6703 else if (flags & GOVD_PRIVATE)
6704 nflags = GOVD_PRIVATE;
6705 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6706 && (flags & GOVD_FIRSTPRIVATE))
6707 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6708 else
6709 nflags = GOVD_FIRSTPRIVATE;
6710 nflags |= flags & GOVD_SEEN;
6711 t = DECL_VALUE_EXPR (decl);
6712 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6713 t = TREE_OPERAND (t, 0);
6714 gcc_assert (DECL_P (t));
6715 omp_add_variable (ctx, t, nflags);
6716 }
6717
6718 /* Add all of the variable and type parameters (which should have
6719 been gimplified to a formal temporary) as FIRSTPRIVATE. */
6720 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
6721 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
6722 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6723
6724 /* The variable-sized variable itself is never SHARED, only some form
6725 of PRIVATE. The sharing would take place via the pointer variable
6726 which we remapped above. */
6727 if (flags & GOVD_SHARED)
6728 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
6729 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
6730
6731 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
6732 alloca statement we generate for the variable, so make sure it
6733 is available. This isn't automatically needed for the SHARED
6734 case, since we won't be allocating local storage then.
6735 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
6736 in this case omp_notice_variable will be called later
6737 on when it is gimplified. */
6738 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
6739 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
6740 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
6741 }
6742 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
6743 && lang_hooks.decls.omp_privatize_by_reference (decl))
6744 {
6745 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
6746
6747 /* Similar to the direct variable-sized case above, we'll need the
6748 size of references being privatized. */
6749 if ((flags & GOVD_SHARED) == 0)
6750 {
6751 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6752 if (DECL_P (t))
6753 omp_notice_variable (ctx, t, true);
6754 }
6755 }
6756
6757 if (n != NULL)
6758 n->value |= flags;
6759 else
6760 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
6761
6762 /* For reduction clauses in OpenACC loop directives, by default create a
6763 copy clause on the enclosing parallel construct for carrying back the
6764 results. */
6765 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
6766 {
6767 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
6768 while (outer_ctx)
6769 {
6770 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
6771 if (n != NULL)
6772 {
6773 /* Ignore local variables and explicitly declared clauses. */
6774 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
6775 break;
6776 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
6777 {
6778 /* According to the OpenACC spec, such a reduction variable
6779 should already have a copy map on a kernels construct,
6780 verify that here. */
6781 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
6782 && (n->value & GOVD_MAP));
6783 }
6784 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6785 {
6786 /* Remove firstprivate and make it a copy map. */
6787 n->value &= ~GOVD_FIRSTPRIVATE;
6788 n->value |= GOVD_MAP;
6789 }
6790 }
6791 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
6792 {
6793 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
6794 GOVD_MAP | GOVD_SEEN);
6795 break;
6796 }
6797 outer_ctx = outer_ctx->outer_context;
6798 }
6799 }
6800 }
6801
6802 /* Notice a threadprivate variable DECL used in OMP context CTX.
6803 This just prints out diagnostics about threadprivate variable uses
6804 in untied tasks. If DECL2 is non-NULL, prevent this warning
6805 on that variable. */
6806
6807 static bool
6808 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6809 tree decl2)
6810 {
6811 splay_tree_node n;
6812 struct gimplify_omp_ctx *octx;
6813
6814 for (octx = ctx; octx; octx = octx->outer_context)
6815 if ((octx->region_type & ORT_TARGET) != 0)
6816 {
6817 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6818 if (n == NULL)
6819 {
6820 error ("threadprivate variable %qE used in target region",
6821 DECL_NAME (decl));
6822 error_at (octx->location, "enclosing target region");
6823 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6824 }
6825 if (decl2)
6826 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6827 }
6828
6829 if (ctx->region_type != ORT_UNTIED_TASK)
6830 return false;
6831 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6832 if (n == NULL)
6833 {
6834 error ("threadprivate variable %qE used in untied task",
6835 DECL_NAME (decl));
6836 error_at (ctx->location, "enclosing task");
6837 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6838 }
6839 if (decl2)
6840 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6841 return false;
6842 }
6843
6844 /* Return true if global var DECL is device resident. */
6845
6846 static bool
6847 device_resident_p (tree decl)
6848 {
6849 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
6850
6851 if (!attr)
6852 return false;
6853
6854 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
6855 {
6856 tree c = TREE_VALUE (t);
6857 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
6858 return true;
6859 }
6860
6861 return false;
6862 }
6863
6864 /* Return true if DECL has an ACC DECLARE attribute. */
6865
6866 static bool
6867 is_oacc_declared (tree decl)
6868 {
6869 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
6870 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
6871 return declared != NULL_TREE;
6872 }
6873
6874 /* Determine outer default flags for DECL mentioned in an OMP region
6875 but not declared in an enclosing clause.
6876
6877 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
6878 remapped firstprivate instead of shared. To some extent this is
6879 addressed in omp_firstprivatize_type_sizes, but not
6880 effectively. */
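/* E.g. (a sketch), given

     int x;
     #pragma omp parallel default(none)
     x = 1;

   this reports "'x' not specified in enclosing 'parallel'", whereas
   with default(shared) it would simply add GOVD_SHARED for 'x'.  */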
6881
6882 static unsigned
6883 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
6884 bool in_code, unsigned flags)
6885 {
6886 enum omp_clause_default_kind default_kind = ctx->default_kind;
6887 enum omp_clause_default_kind kind;
6888
6889 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
6890 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
6891 default_kind = kind;
6892
6893 switch (default_kind)
6894 {
6895 case OMP_CLAUSE_DEFAULT_NONE:
6896 {
6897 const char *rtype;
6898
6899 if (ctx->region_type & ORT_PARALLEL)
6900 rtype = "parallel";
6901 else if (ctx->region_type & ORT_TASK)
6902 rtype = "task";
6903 else if (ctx->region_type & ORT_TEAMS)
6904 rtype = "teams";
6905 else
6906 gcc_unreachable ();
6907
6908 error ("%qE not specified in enclosing %qs",
6909 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
6910 error_at (ctx->location, "enclosing %qs", rtype);
6911 }
6912 /* FALLTHRU */
6913 case OMP_CLAUSE_DEFAULT_SHARED:
6914 flags |= GOVD_SHARED;
6915 break;
6916 case OMP_CLAUSE_DEFAULT_PRIVATE:
6917 flags |= GOVD_PRIVATE;
6918 break;
6919 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
6920 flags |= GOVD_FIRSTPRIVATE;
6921 break;
6922 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
6923 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
6924 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
6925 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
6926 {
6927 omp_notice_variable (octx, decl, in_code);
6928 for (; octx; octx = octx->outer_context)
6929 {
6930 splay_tree_node n2;
6931
6932 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
6933 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
6934 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
6935 continue;
6936 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
6937 {
6938 flags |= GOVD_FIRSTPRIVATE;
6939 goto found_outer;
6940 }
6941 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
6942 {
6943 flags |= GOVD_SHARED;
6944 goto found_outer;
6945 }
6946 }
6947 }
6948
6949 if (TREE_CODE (decl) == PARM_DECL
6950 || (!is_global_var (decl)
6951 && DECL_CONTEXT (decl) == current_function_decl))
6952 flags |= GOVD_FIRSTPRIVATE;
6953 else
6954 flags |= GOVD_SHARED;
6955 found_outer:
6956 break;
6957
6958 default:
6959 gcc_unreachable ();
6960 }
6961
6962 return flags;
6963 }
6964
6965
6966 /* Determine outer default flags for DECL mentioned in an OACC region
6967 but not declared in an enclosing clause. */
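/* E.g. (a sketch), inside '#pragma acc parallel' a scalar such as
   'sum' defaults to firstprivate (GOVD_FIRSTPRIVATE), while an
   aggregate such as an array defaults to a present_or_copy style map
   (GOVD_MAP), per the switch below.  */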
6968
6969 static unsigned
6970 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6971 {
6972 const char *rkind;
6973 bool on_device = false;
6974 bool declared = is_oacc_declared (decl);
6975 tree type = TREE_TYPE (decl);
6976
6977 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6978 type = TREE_TYPE (type);
6979
6980 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6981 && is_global_var (decl)
6982 && device_resident_p (decl))
6983 {
6984 on_device = true;
6985 flags |= GOVD_MAP_TO_ONLY;
6986 }
6987
6988 switch (ctx->region_type)
6989 {
6990 case ORT_ACC_KERNELS:
6991 rkind = "kernels";
6992
6993 if (AGGREGATE_TYPE_P (type))
6994 {
6995 /* Aggregates default to 'present_or_copy', or 'present'. */
6996 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
6997 flags |= GOVD_MAP;
6998 else
6999 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7000 }
7001 else
7002 /* Scalars default to 'copy'. */
7003 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7004
7005 break;
7006
7007 case ORT_ACC_PARALLEL:
7008 rkind = "parallel";
7009
7010 if (on_device || declared)
7011 flags |= GOVD_MAP;
7012 else if (AGGREGATE_TYPE_P (type))
7013 {
7014 /* Aggregates default to 'present_or_copy', or 'present'. */
7015 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7016 flags |= GOVD_MAP;
7017 else
7018 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7019 }
7020 else
7021 /* Scalars default to 'firstprivate'. */
7022 flags |= GOVD_FIRSTPRIVATE;
7023
7024 break;
7025
7026 default:
7027 gcc_unreachable ();
7028 }
7029
7030 if (DECL_ARTIFICIAL (decl))
7031 ; /* We can get compiler-generated decls, and should not complain
7032 about them. */
7033 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7034 {
7035 error ("%qE not specified in enclosing OpenACC %qs construct",
7036 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7037 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7038 }
7039 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7040 ; /* Handled above. */
7041 else
7042 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7043
7044 return flags;
7045 }
7046
7047 /* Record the fact that DECL was used within the OMP context CTX.
7048 IN_CODE is true when real code uses DECL, and false when we should
7049 merely emit default(none) errors. Return true if DECL is going to
7050 be remapped and thus DECL shouldn't be gimplified into its
7051 DECL_VALUE_EXPR (if any). */
7052
7053 static bool
7054 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7055 {
7056 splay_tree_node n;
7057 unsigned flags = in_code ? GOVD_SEEN : 0;
7058 bool ret = false, shared;
7059
7060 if (error_operand_p (decl))
7061 return false;
7062
7063 if (ctx->region_type == ORT_NONE)
7064 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7065
7066 if (is_global_var (decl))
7067 {
7068 /* Threadprivate variables are predetermined. */
7069 if (DECL_THREAD_LOCAL_P (decl))
7070 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7071
7072 if (DECL_HAS_VALUE_EXPR_P (decl))
7073 {
7074 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7075
7076 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7077 return omp_notice_threadprivate_variable (ctx, decl, value);
7078 }
7079
7080 if (gimplify_omp_ctxp->outer_context == NULL
7081 && VAR_P (decl)
7082 && oacc_get_fn_attrib (current_function_decl))
7083 {
7084 location_t loc = DECL_SOURCE_LOCATION (decl);
7085
7086 if (lookup_attribute ("omp declare target link",
7087 DECL_ATTRIBUTES (decl)))
7088 {
7089 error_at (loc,
7090 "%qE with %<link%> clause used in %<routine%> function",
7091 DECL_NAME (decl));
7092 return false;
7093 }
7094 else if (!lookup_attribute ("omp declare target",
7095 DECL_ATTRIBUTES (decl)))
7096 {
7097 error_at (loc,
7098 "%qE requires a %<declare%> directive for use "
7099 "in a %<routine%> function", DECL_NAME (decl));
7100 return false;
7101 }
7102 }
7103 }
7104
7105 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7106 if ((ctx->region_type & ORT_TARGET) != 0)
7107 {
7108 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
7109 if (n == NULL)
7110 {
7111 unsigned nflags = flags;
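/* OpenMP 4.5 changed the implicit data-mapping rules on target
   constructs: a scalar referenced in the region is firstprivatized
   instead of mapped, and (for C/C++) a bare pointer is treated as a
   zero-length array section, as if map(p[:0]) had been written,
   unless the decl is declare target.  */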
7112 if (ctx->target_map_pointers_as_0len_arrays
7113 || ctx->target_map_scalars_firstprivate)
7114 {
7115 bool is_declare_target = false;
7116 bool is_scalar = false;
7117 if (is_global_var (decl)
7118 && varpool_node::get_create (decl)->offloadable)
7119 {
7120 struct gimplify_omp_ctx *octx;
7121 for (octx = ctx->outer_context;
7122 octx; octx = octx->outer_context)
7123 {
7124 n = splay_tree_lookup (octx->variables,
7125 (splay_tree_key)decl);
7126 if (n
7127 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7128 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7129 break;
7130 }
7131 is_declare_target = octx == NULL;
7132 }
7133 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
7134 is_scalar = lang_hooks.decls.omp_scalar_p (decl);
7135 if (is_declare_target)
7136 ;
7137 else if (ctx->target_map_pointers_as_0len_arrays
7138 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7139 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7140 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7141 == POINTER_TYPE)))
7142 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
7143 else if (is_scalar)
7144 nflags |= GOVD_FIRSTPRIVATE;
7145 }
7146
7147 struct gimplify_omp_ctx *octx = ctx->outer_context;
7148 if ((ctx->region_type & ORT_ACC) && octx)
7149 {
7150 /* Look in outer OpenACC contexts to see if there's a
7151 data attribute for this variable. */
7152 omp_notice_variable (octx, decl, in_code);
7153
7154 for (; octx; octx = octx->outer_context)
7155 {
7156 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7157 break;
7158 splay_tree_node n2
7159 = splay_tree_lookup (octx->variables,
7160 (splay_tree_key) decl);
7161 if (n2)
7162 {
7163 if (octx->region_type == ORT_ACC_HOST_DATA)
7164 error ("variable %qE declared in enclosing "
7165 "%<host_data%> region", DECL_NAME (decl));
7166 nflags |= GOVD_MAP;
7167 if (octx->region_type == ORT_ACC_DATA
7168 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7169 nflags |= GOVD_MAP_0LEN_ARRAY;
7170 goto found_outer;
7171 }
7172 }
7173 }
7174
7175 {
7176 tree type = TREE_TYPE (decl);
7177
7178 if (nflags == flags
7179 && gimplify_omp_ctxp->target_firstprivatize_array_bases
7180 && lang_hooks.decls.omp_privatize_by_reference (decl))
7181 type = TREE_TYPE (type);
7182 if (nflags == flags
7183 && !lang_hooks.types.omp_mappable_type (type))
7184 {
7185 error ("%qD referenced in target region does not have "
7186 "a mappable type", decl);
7187 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7188 }
7189 else if (nflags == flags)
7190 {
7191 if ((ctx->region_type & ORT_ACC) != 0)
7192 nflags = oacc_default_clause (ctx, decl, flags);
7193 else
7194 nflags |= GOVD_MAP;
7195 }
7196 }
7197 found_outer:
7198 omp_add_variable (ctx, decl, nflags);
7199 }
7200 else
7201 {
7202 /* If nothing changed, there's nothing left to do. */
7203 if ((n->value & flags) == flags)
7204 return ret;
7205 flags |= n->value;
7206 n->value = flags;
7207 }
7208 goto do_outer;
7209 }
7210
7211 if (n == NULL)
7212 {
7213 if (ctx->region_type == ORT_WORKSHARE
7214 || ctx->region_type == ORT_SIMD
7215 || ctx->region_type == ORT_ACC
7216 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7217 goto do_outer;
7218
7219 flags = omp_default_clause (ctx, decl, in_code, flags);
7220
7221 if ((flags & GOVD_PRIVATE)
7222 && lang_hooks.decls.omp_private_outer_ref (decl))
7223 flags |= GOVD_PRIVATE_OUTER_REF;
7224
7225 omp_add_variable (ctx, decl, flags);
7226
7227 shared = (flags & GOVD_SHARED) != 0;
7228 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7229 goto do_outer;
7230 }
7231
7232 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7233 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7234 && DECL_SIZE (decl))
7235 {
7236 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7237 {
7238 splay_tree_node n2;
7239 tree t = DECL_VALUE_EXPR (decl);
7240 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7241 t = TREE_OPERAND (t, 0);
7242 gcc_assert (DECL_P (t));
7243 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7244 n2->value |= GOVD_SEEN;
7245 }
7246 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7247 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7248 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7249 != INTEGER_CST))
7250 {
7251 splay_tree_node n2;
7252 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7253 gcc_assert (DECL_P (t));
7254 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7255 if (n2)
7256 omp_notice_variable (ctx, t, true);
7257 }
7258 }
7259
7260 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7261 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7262
7263 /* If nothing changed, there's nothing left to do. */
7264 if ((n->value & flags) == flags)
7265 return ret;
7266 flags |= n->value;
7267 n->value = flags;
7268
7269 do_outer:
7270 /* If the variable is private in the current context, then we don't
7271 need to propagate anything to an outer context. */
7272 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7273 return ret;
7274 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7275 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7276 return ret;
7277 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7278 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7279 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7280 return ret;
7281 if (ctx->outer_context
7282 && omp_notice_variable (ctx->outer_context, decl, in_code))
7283 return true;
7284 return ret;
7285 }
7286
7287 /* Verify that DECL is private within CTX. If there's specific information
7288 to the contrary in the innermost scope, generate an error. */
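/* For instance, an explicit shared clause on a construct whose loop
   iterator is predetermined private, as in
     #pragma omp parallel for shared(i)
     for (i = 0; i < N; i++) ...
   is diagnosed here with "iteration variable ... should be private".  */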
7289
7290 static bool
7291 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7292 {
7293 splay_tree_node n;
7294
7295 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7296 if (n != NULL)
7297 {
7298 if (n->value & GOVD_SHARED)
7299 {
7300 if (ctx == gimplify_omp_ctxp)
7301 {
7302 if (simd)
7303 error ("iteration variable %qE is predetermined linear",
7304 DECL_NAME (decl));
7305 else
7306 error ("iteration variable %qE should be private",
7307 DECL_NAME (decl));
7308 n->value = GOVD_PRIVATE;
7309 return true;
7310 }
7311 else
7312 return false;
7313 }
7314 else if ((n->value & GOVD_EXPLICIT) != 0
7315 && (ctx == gimplify_omp_ctxp
7316 || (ctx->region_type == ORT_COMBINED_PARALLEL
7317 && gimplify_omp_ctxp->outer_context == ctx)))
7318 {
7319 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7320 error ("iteration variable %qE should not be firstprivate",
7321 DECL_NAME (decl));
7322 else if ((n->value & GOVD_REDUCTION) != 0)
7323 error ("iteration variable %qE should not be reduction",
7324 DECL_NAME (decl));
7325 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7326 error ("iteration variable %qE should not be linear",
7327 DECL_NAME (decl));
7328 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7329 error ("iteration variable %qE should not be lastprivate",
7330 DECL_NAME (decl));
7331 else if (simd && (n->value & GOVD_PRIVATE) != 0)
7332 error ("iteration variable %qE should not be private",
7333 DECL_NAME (decl));
7334 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7335 error ("iteration variable %qE is predetermined linear",
7336 DECL_NAME (decl));
7337 }
7338 return (ctx == gimplify_omp_ctxp
7339 || (ctx->region_type == ORT_COMBINED_PARALLEL
7340 && gimplify_omp_ctxp->outer_context == ctx));
7341 }
7342
7343 if (ctx->region_type != ORT_WORKSHARE
7344 && ctx->region_type != ORT_SIMD
7345 && ctx->region_type != ORT_ACC)
7346 return false;
7347 else if (ctx->outer_context)
7348 return omp_is_private (ctx->outer_context, decl, simd);
7349 return false;
7350 }
7351
7352 /* Return true if DECL is private within an enclosing parallel
7353 region that the current construct binds to, or appears in that
7354 parallel region's REDUCTION clause. */
7355
7356 static bool
7357 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7358 {
7359 splay_tree_node n;
7360
7361 do
7362 {
7363 ctx = ctx->outer_context;
7364 if (ctx == NULL)
7365 {
7366 if (is_global_var (decl))
7367 return false;
7368
7369 /* References might be private, but they might be shared too;
7370 when checking for copyprivate, assume they might be private,
7371 otherwise assume they might be shared. */
7372 if (copyprivate)
7373 return true;
7374
7375 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7376 return false;
7377
7378 /* Treat C++ privatized non-static data members outside
7379 of the privatization region the same way. */
7380 if (omp_member_access_dummy_var (decl))
7381 return false;
7382
7383 return true;
7384 }
7385
7386 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7387
7388 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7389 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7390 continue;
7391
7392 if (n != NULL)
7393 {
7394 if ((n->value & GOVD_LOCAL) != 0
7395 && omp_member_access_dummy_var (decl))
7396 return false;
7397 return (n->value & GOVD_SHARED) == 0;
7398 }
7399 }
7400 while (ctx->region_type == ORT_WORKSHARE
7401 || ctx->region_type == ORT_SIMD
7402 || ctx->region_type == ORT_ACC);
7403 return false;
7404 }
7405
7406 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7407
7408 static tree
7409 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7410 {
7411 tree t = *tp;
7412
7413 /* Return T if it is the DECL_EXPR for the decl we are looking for. */
7414 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7415 return t;
7416
7417 if (IS_TYPE_OR_DECL_P (t))
7418 *walk_subtrees = 0;
7419 return NULL_TREE;
7420 }
7421
7422 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7423 omp context and noticing referenced variables in enclosing contexts. */
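/* For example, a construct such as
     #pragma omp parallel firstprivate(x) shared(y)
   records x with GOVD_FIRSTPRIVATE | GOVD_EXPLICIT and y with
   GOVD_SHARED | GOVD_EXPLICIT in the new context, gimplifying clause
   operands such as sizes and schedules into PRE_P along the way.  */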
7424
7425 static void
7426 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7427 enum omp_region_type region_type,
7428 enum tree_code code)
7429 {
7430 struct gimplify_omp_ctx *ctx, *outer_ctx;
7431 tree c;
7432 hash_map<tree, tree> *struct_map_to_clause = NULL;
7433 tree *prev_list_p = NULL;
7434
7435 ctx = new_omp_context (region_type);
7436 outer_ctx = ctx->outer_context;
7437 if (code == OMP_TARGET)
7438 {
7439 if (!lang_GNU_Fortran ())
7440 ctx->target_map_pointers_as_0len_arrays = true;
7441 ctx->target_map_scalars_firstprivate = true;
7442 }
7443 if (!lang_GNU_Fortran ())
7444 switch (code)
7445 {
7446 case OMP_TARGET:
7447 case OMP_TARGET_DATA:
7448 case OMP_TARGET_ENTER_DATA:
7449 case OMP_TARGET_EXIT_DATA:
7450 case OACC_DECLARE:
7451 case OACC_HOST_DATA:
7452 ctx->target_firstprivatize_array_bases = true;
7453 default:
7454 break;
7455 }
7456
7457 while ((c = *list_p) != NULL)
7458 {
7459 bool remove = false;
7460 bool notice_outer = true;
7461 const char *check_non_private = NULL;
7462 unsigned int flags;
7463 tree decl;
7464
7465 switch (OMP_CLAUSE_CODE (c))
7466 {
7467 case OMP_CLAUSE_PRIVATE:
7468 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7469 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7470 {
7471 flags |= GOVD_PRIVATE_OUTER_REF;
7472 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7473 }
7474 else
7475 notice_outer = false;
7476 goto do_add;
7477 case OMP_CLAUSE_SHARED:
7478 flags = GOVD_SHARED | GOVD_EXPLICIT;
7479 goto do_add;
7480 case OMP_CLAUSE_FIRSTPRIVATE:
7481 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7482 check_non_private = "firstprivate";
7483 goto do_add;
7484 case OMP_CLAUSE_LASTPRIVATE:
7485 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7486 check_non_private = "lastprivate";
7487 decl = OMP_CLAUSE_DECL (c);
7488 if (error_operand_p (decl))
7489 goto do_add;
7490 else if (outer_ctx
7491 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7492 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7493 && splay_tree_lookup (outer_ctx->variables,
7494 (splay_tree_key) decl) == NULL)
7495 {
7496 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7497 if (outer_ctx->outer_context)
7498 omp_notice_variable (outer_ctx->outer_context, decl, true);
7499 }
7500 else if (outer_ctx
7501 && (outer_ctx->region_type & ORT_TASK) != 0
7502 && outer_ctx->combined_loop
7503 && splay_tree_lookup (outer_ctx->variables,
7504 (splay_tree_key) decl) == NULL)
7505 {
7506 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7507 if (outer_ctx->outer_context)
7508 omp_notice_variable (outer_ctx->outer_context, decl, true);
7509 }
7510 else if (outer_ctx
7511 && (outer_ctx->region_type == ORT_WORKSHARE
7512 || outer_ctx->region_type == ORT_ACC)
7513 && outer_ctx->combined_loop
7514 && splay_tree_lookup (outer_ctx->variables,
7515 (splay_tree_key) decl) == NULL
7516 && !omp_check_private (outer_ctx, decl, false))
7517 {
7518 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7519 if (outer_ctx->outer_context
7520 && (outer_ctx->outer_context->region_type
7521 == ORT_COMBINED_PARALLEL)
7522 && splay_tree_lookup (outer_ctx->outer_context->variables,
7523 (splay_tree_key) decl) == NULL)
7524 {
7525 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7526 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7527 if (octx->outer_context)
7528 {
7529 octx = octx->outer_context;
7530 if (octx->region_type == ORT_WORKSHARE
7531 && octx->combined_loop
7532 && splay_tree_lookup (octx->variables,
7533 (splay_tree_key) decl) == NULL
7534 && !omp_check_private (octx, decl, false))
7535 {
7536 omp_add_variable (octx, decl,
7537 GOVD_LASTPRIVATE | GOVD_SEEN);
7538 octx = octx->outer_context;
7539 if (octx
7540 && octx->region_type == ORT_COMBINED_TEAMS
7541 && (splay_tree_lookup (octx->variables,
7542 (splay_tree_key) decl)
7543 == NULL))
7544 {
7545 omp_add_variable (octx, decl,
7546 GOVD_SHARED | GOVD_SEEN);
7547 octx = octx->outer_context;
7548 }
7549 }
7550 if (octx)
7551 omp_notice_variable (octx, decl, true);
7552 }
7553 }
7554 else if (outer_ctx->outer_context)
7555 omp_notice_variable (outer_ctx->outer_context, decl, true);
7556 }
7557 goto do_add;
7558 case OMP_CLAUSE_REDUCTION:
7559 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7560 /* OpenACC permits reductions on private variables. */
7561 if (!(region_type & ORT_ACC))
7562 check_non_private = "reduction";
7563 decl = OMP_CLAUSE_DECL (c);
7564 if (TREE_CODE (decl) == MEM_REF)
7565 {
7566 tree type = TREE_TYPE (decl);
7567 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7568 NULL, is_gimple_val, fb_rvalue, false)
7569 == GS_ERROR)
7570 {
7571 remove = true;
7572 break;
7573 }
7574 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7575 if (DECL_P (v))
7576 {
7577 omp_firstprivatize_variable (ctx, v);
7578 omp_notice_variable (ctx, v, true);
7579 }
7580 decl = TREE_OPERAND (decl, 0);
7581 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7582 {
7583 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7584 NULL, is_gimple_val, fb_rvalue, false)
7585 == GS_ERROR)
7586 {
7587 remove = true;
7588 break;
7589 }
7590 v = TREE_OPERAND (decl, 1);
7591 if (DECL_P (v))
7592 {
7593 omp_firstprivatize_variable (ctx, v);
7594 omp_notice_variable (ctx, v, true);
7595 }
7596 decl = TREE_OPERAND (decl, 0);
7597 }
7598 if (TREE_CODE (decl) == ADDR_EXPR
7599 || TREE_CODE (decl) == INDIRECT_REF)
7600 decl = TREE_OPERAND (decl, 0);
7601 }
7602 goto do_add_decl;
7603 case OMP_CLAUSE_LINEAR:
7604 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7605 is_gimple_val, fb_rvalue) == GS_ERROR)
7606 {
7607 remove = true;
7608 break;
7609 }
7610 else
7611 {
7612 if (code == OMP_SIMD
7613 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7614 {
7615 struct gimplify_omp_ctx *octx = outer_ctx;
7616 if (octx
7617 && octx->region_type == ORT_WORKSHARE
7618 && octx->combined_loop
7619 && !octx->distribute)
7620 {
7621 if (octx->outer_context
7622 && (octx->outer_context->region_type
7623 == ORT_COMBINED_PARALLEL))
7624 octx = octx->outer_context->outer_context;
7625 else
7626 octx = octx->outer_context;
7627 }
7628 if (octx
7629 && octx->region_type == ORT_WORKSHARE
7630 && octx->combined_loop
7631 && octx->distribute)
7632 {
7633 error_at (OMP_CLAUSE_LOCATION (c),
7634 "%<linear%> clause for variable other than "
7635 "loop iterator specified on construct "
7636 "combined with %<distribute%>");
7637 remove = true;
7638 break;
7639 }
7640 }
7641 /* For combined #pragma omp parallel for simd, we need to put
7642 lastprivate and perhaps firstprivate too on the
7643 parallel. Similarly for #pragma omp for simd. */
7644 struct gimplify_omp_ctx *octx = outer_ctx;
7645 decl = NULL_TREE;
7646 do
7647 {
7648 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7649 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7650 break;
7651 decl = OMP_CLAUSE_DECL (c);
7652 if (error_operand_p (decl))
7653 {
7654 decl = NULL_TREE;
7655 break;
7656 }
7657 flags = GOVD_SEEN;
7658 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7659 flags |= GOVD_FIRSTPRIVATE;
7660 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7661 flags |= GOVD_LASTPRIVATE;
7662 if (octx
7663 && octx->region_type == ORT_WORKSHARE
7664 && octx->combined_loop)
7665 {
7666 if (octx->outer_context
7667 && (octx->outer_context->region_type
7668 == ORT_COMBINED_PARALLEL))
7669 octx = octx->outer_context;
7670 else if (omp_check_private (octx, decl, false))
7671 break;
7672 }
7673 else if (octx
7674 && (octx->region_type & ORT_TASK) != 0
7675 && octx->combined_loop)
7676 ;
7677 else if (octx
7678 && octx->region_type == ORT_COMBINED_PARALLEL
7679 && ctx->region_type == ORT_WORKSHARE
7680 && octx == outer_ctx)
7681 flags = GOVD_SEEN | GOVD_SHARED;
7682 else if (octx
7683 && octx->region_type == ORT_COMBINED_TEAMS)
7684 flags = GOVD_SEEN | GOVD_SHARED;
7685 else if (octx
7686 && octx->region_type == ORT_COMBINED_TARGET)
7687 {
7688 flags &= ~GOVD_LASTPRIVATE;
7689 if (flags == GOVD_SEEN)
7690 break;
7691 }
7692 else
7693 break;
7694 splay_tree_node on
7695 = splay_tree_lookup (octx->variables,
7696 (splay_tree_key) decl);
7697 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7698 {
7699 octx = NULL;
7700 break;
7701 }
7702 omp_add_variable (octx, decl, flags);
7703 if (octx->outer_context == NULL)
7704 break;
7705 octx = octx->outer_context;
7706 }
7707 while (1);
7708 if (octx
7709 && decl
7710 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7711 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7712 omp_notice_variable (octx, decl, true);
7713 }
7714 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7715 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7716 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7717 {
7718 notice_outer = false;
7719 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7720 }
7721 goto do_add;
7722
7723 case OMP_CLAUSE_MAP:
7724 decl = OMP_CLAUSE_DECL (c);
7725 if (error_operand_p (decl))
7726 remove = true;
7727 switch (code)
7728 {
7729 case OMP_TARGET:
7730 break;
7731 case OACC_DATA:
7732 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7733 break;
7734 /* FALLTHRU */
7735 case OMP_TARGET_DATA:
7736 case OMP_TARGET_ENTER_DATA:
7737 case OMP_TARGET_EXIT_DATA:
7738 case OACC_ENTER_DATA:
7739 case OACC_EXIT_DATA:
7740 case OACC_HOST_DATA:
7741 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7742 || (OMP_CLAUSE_MAP_KIND (c)
7743 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7744 /* For target data, target enter data and target exit data, only
7745 the array section is mapped, but not the pointer to it. */
7746 remove = true;
7747 break;
7748 default:
7749 break;
7750 }
7751 if (remove)
7752 break;
7753 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7754 {
7755 struct gimplify_omp_ctx *octx;
7756 for (octx = outer_ctx; octx; octx = octx->outer_context)
7757 {
7758 if (octx->region_type != ORT_ACC_HOST_DATA)
7759 break;
7760 splay_tree_node n2
7761 = splay_tree_lookup (octx->variables,
7762 (splay_tree_key) decl);
7763 if (n2)
7764 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7765 "declared in enclosing %<host_data%> region",
7766 DECL_NAME (decl));
7767 }
7768 }
7769 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7770 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7771 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7772 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7773 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7774 {
7775 remove = true;
7776 break;
7777 }
7778 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7779 || (OMP_CLAUSE_MAP_KIND (c)
7780 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7781 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7782 {
7783 OMP_CLAUSE_SIZE (c)
7784 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7785 false);
7786 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7787 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7788 }
7789 if (!DECL_P (decl))
7790 {
7791 tree d = decl, *pd;
7792 if (TREE_CODE (d) == ARRAY_REF)
7793 {
7794 while (TREE_CODE (d) == ARRAY_REF)
7795 d = TREE_OPERAND (d, 0);
7796 if (TREE_CODE (d) == COMPONENT_REF
7797 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7798 decl = d;
7799 }
7800 pd = &OMP_CLAUSE_DECL (c);
7801 if (d == decl
7802 && TREE_CODE (decl) == INDIRECT_REF
7803 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7804 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7805 == REFERENCE_TYPE))
7806 {
7807 pd = &TREE_OPERAND (decl, 0);
7808 decl = TREE_OPERAND (decl, 0);
7809 }
7810 if (TREE_CODE (decl) == COMPONENT_REF)
7811 {
7812 while (TREE_CODE (decl) == COMPONENT_REF)
7813 decl = TREE_OPERAND (decl, 0);
7814 if (TREE_CODE (decl) == INDIRECT_REF
7815 && DECL_P (TREE_OPERAND (decl, 0))
7816 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7817 == REFERENCE_TYPE))
7818 decl = TREE_OPERAND (decl, 0);
7819 }
7820 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7821 == GS_ERROR)
7822 {
7823 remove = true;
7824 break;
7825 }
7826 if (DECL_P (decl))
7827 {
7828 if (error_operand_p (decl))
7829 {
7830 remove = true;
7831 break;
7832 }
7833
7834 tree stype = TREE_TYPE (decl);
7835 if (TREE_CODE (stype) == REFERENCE_TYPE)
7836 stype = TREE_TYPE (stype);
7837 if (TYPE_SIZE_UNIT (stype) == NULL
7838 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7839 {
7840 error_at (OMP_CLAUSE_LOCATION (c),
7841 "mapping field %qE of variable length "
7842 "structure", OMP_CLAUSE_DECL (c));
7843 remove = true;
7844 break;
7845 }
7846
7847 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7848 {
7849 /* Error recovery. */
7850 if (prev_list_p == NULL)
7851 {
7852 remove = true;
7853 break;
7854 }
7855 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7856 {
7857 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
7858 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
7859 {
7860 remove = true;
7861 break;
7862 }
7863 }
7864 }
7865
7866 tree offset;
7867 HOST_WIDE_INT bitsize, bitpos;
7868 machine_mode mode;
7869 int unsignedp, reversep, volatilep = 0;
7870 tree base = OMP_CLAUSE_DECL (c);
7871 while (TREE_CODE (base) == ARRAY_REF)
7872 base = TREE_OPERAND (base, 0);
7873 if (TREE_CODE (base) == INDIRECT_REF)
7874 base = TREE_OPERAND (base, 0);
7875 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7876 &mode, &unsignedp, &reversep,
7877 &volatilep);
7878 tree orig_base = base;
7879 if ((TREE_CODE (base) == INDIRECT_REF
7880 || (TREE_CODE (base) == MEM_REF
7881 && integer_zerop (TREE_OPERAND (base, 1))))
7882 && DECL_P (TREE_OPERAND (base, 0))
7883 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
7884 == REFERENCE_TYPE))
7885 base = TREE_OPERAND (base, 0);
7886 gcc_assert (base == decl
7887 && (offset == NULL_TREE
7888 || TREE_CODE (offset) == INTEGER_CST));
7889
7890 splay_tree_node n
7891 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7892 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
7893 == GOMP_MAP_ALWAYS_POINTER);
7894 if (n == NULL || (n->value & GOVD_MAP) == 0)
7895 {
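/* First mapping of a member of this structure: create a
   GOMP_MAP_STRUCT clause for the whole object ahead of this one,
   with OMP_CLAUSE_SIZE counting how many member mappings belong to
   the group, so that e.g. map(tofrom: s.a) becomes, conceptually,
   map(struct: s [len: 1]) followed by map(tofrom: s.a); later
   members are merged into the group, sorted by their offset within
   the structure.  */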
7896 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7897 OMP_CLAUSE_MAP);
7898 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
7899 if (orig_base != base)
7900 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
7901 else
7902 OMP_CLAUSE_DECL (l) = decl;
7903 OMP_CLAUSE_SIZE (l) = size_int (1);
7904 if (struct_map_to_clause == NULL)
7905 struct_map_to_clause = new hash_map<tree, tree>;
7906 struct_map_to_clause->put (decl, l);
7907 if (ptr)
7908 {
7909 enum gomp_map_kind mkind
7910 = code == OMP_TARGET_EXIT_DATA
7911 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7912 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7913 OMP_CLAUSE_MAP);
7914 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7915 OMP_CLAUSE_DECL (c2)
7916 = unshare_expr (OMP_CLAUSE_DECL (c));
7917 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
7918 OMP_CLAUSE_SIZE (c2)
7919 = TYPE_SIZE_UNIT (ptr_type_node);
7920 OMP_CLAUSE_CHAIN (l) = c2;
7921 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7922 {
7923 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7924 tree c3
7925 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7926 OMP_CLAUSE_MAP);
7927 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7928 OMP_CLAUSE_DECL (c3)
7929 = unshare_expr (OMP_CLAUSE_DECL (c4));
7930 OMP_CLAUSE_SIZE (c3)
7931 = TYPE_SIZE_UNIT (ptr_type_node);
7932 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7933 OMP_CLAUSE_CHAIN (c2) = c3;
7934 }
7935 *prev_list_p = l;
7936 prev_list_p = NULL;
7937 }
7938 else
7939 {
7940 OMP_CLAUSE_CHAIN (l) = c;
7941 *list_p = l;
7942 list_p = &OMP_CLAUSE_CHAIN (l);
7943 }
7944 if (orig_base != base && code == OMP_TARGET)
7945 {
7946 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7947 OMP_CLAUSE_MAP);
7948 enum gomp_map_kind mkind
7949 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
7950 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7951 OMP_CLAUSE_DECL (c2) = decl;
7952 OMP_CLAUSE_SIZE (c2) = size_zero_node;
7953 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
7954 OMP_CLAUSE_CHAIN (l) = c2;
7955 }
7956 flags = GOVD_MAP | GOVD_EXPLICIT;
7957 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7958 flags |= GOVD_SEEN;
7959 goto do_add_decl;
7960 }
7961 else
7962 {
7963 tree *osc = struct_map_to_clause->get (decl);
7964 tree *sc = NULL, *scp = NULL;
7965 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
7966 n->value |= GOVD_SEEN;
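/* The structure already has a GOMP_MAP_STRUCT group; compute this
   member's byte offset and find the position that keeps the group's
   member mappings sorted by increasing offset, diagnosing members
   mapped more than once along the way.  */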
7967 offset_int o1, o2;
7968 if (offset)
7969 o1 = wi::to_offset (offset);
7970 else
7971 o1 = 0;
7972 if (bitpos)
7973 o1 = o1 + bitpos / BITS_PER_UNIT;
7974 sc = &OMP_CLAUSE_CHAIN (*osc);
7975 if (*sc != c
7976 && (OMP_CLAUSE_MAP_KIND (*sc)
7977 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7978 sc = &OMP_CLAUSE_CHAIN (*sc);
7979 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
7980 if (ptr && sc == prev_list_p)
7981 break;
7982 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7983 != COMPONENT_REF
7984 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7985 != INDIRECT_REF)
7986 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7987 != ARRAY_REF))
7988 break;
7989 else
7990 {
7991 tree offset2;
7992 HOST_WIDE_INT bitsize2, bitpos2;
7993 base = OMP_CLAUSE_DECL (*sc);
7994 if (TREE_CODE (base) == ARRAY_REF)
7995 {
7996 while (TREE_CODE (base) == ARRAY_REF)
7997 base = TREE_OPERAND (base, 0);
7998 if (TREE_CODE (base) != COMPONENT_REF
7999 || (TREE_CODE (TREE_TYPE (base))
8000 != ARRAY_TYPE))
8001 break;
8002 }
8003 else if (TREE_CODE (base) == INDIRECT_REF
8004 && (TREE_CODE (TREE_OPERAND (base, 0))
8005 == COMPONENT_REF)
8006 && (TREE_CODE (TREE_TYPE
8007 (TREE_OPERAND (base, 0)))
8008 == REFERENCE_TYPE))
8009 base = TREE_OPERAND (base, 0);
8010 base = get_inner_reference (base, &bitsize2,
8011 &bitpos2, &offset2,
8012 &mode, &unsignedp,
8013 &reversep, &volatilep);
8014 if ((TREE_CODE (base) == INDIRECT_REF
8015 || (TREE_CODE (base) == MEM_REF
8016 && integer_zerop (TREE_OPERAND (base,
8017 1))))
8018 && DECL_P (TREE_OPERAND (base, 0))
8019 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8020 0)))
8021 == REFERENCE_TYPE))
8022 base = TREE_OPERAND (base, 0);
8023 if (base != decl)
8024 break;
8025 if (scp)
8026 continue;
8027 gcc_assert (offset == NULL_TREE
8028 || TREE_CODE (offset) == INTEGER_CST);
8029 tree d1 = OMP_CLAUSE_DECL (*sc);
8030 tree d2 = OMP_CLAUSE_DECL (c);
8031 while (TREE_CODE (d1) == ARRAY_REF)
8032 d1 = TREE_OPERAND (d1, 0);
8033 while (TREE_CODE (d2) == ARRAY_REF)
8034 d2 = TREE_OPERAND (d2, 0);
8035 if (TREE_CODE (d1) == INDIRECT_REF)
8036 d1 = TREE_OPERAND (d1, 0);
8037 if (TREE_CODE (d2) == INDIRECT_REF)
8038 d2 = TREE_OPERAND (d2, 0);
8039 while (TREE_CODE (d1) == COMPONENT_REF)
8040 if (TREE_CODE (d2) == COMPONENT_REF
8041 && TREE_OPERAND (d1, 1)
8042 == TREE_OPERAND (d2, 1))
8043 {
8044 d1 = TREE_OPERAND (d1, 0);
8045 d2 = TREE_OPERAND (d2, 0);
8046 }
8047 else
8048 break;
8049 if (d1 == d2)
8050 {
8051 error_at (OMP_CLAUSE_LOCATION (c),
8052 "%qE appears more than once in map "
8053 "clauses", OMP_CLAUSE_DECL (c));
8054 remove = true;
8055 break;
8056 }
8057 if (offset2)
8058 o2 = wi::to_offset (offset2);
8059 else
8060 o2 = 0;
8061 if (bitpos2)
8062 o2 = o2 + bitpos2 / BITS_PER_UNIT;
8063 if (wi::ltu_p (o1, o2)
8064 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
8065 {
8066 if (ptr)
8067 scp = sc;
8068 else
8069 break;
8070 }
8071 }
8072 if (remove)
8073 break;
8074 OMP_CLAUSE_SIZE (*osc)
8075 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8076 size_one_node);
8077 if (ptr)
8078 {
8079 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8080 OMP_CLAUSE_MAP);
8081 tree cl = NULL_TREE;
8082 enum gomp_map_kind mkind
8083 = code == OMP_TARGET_EXIT_DATA
8084 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8085 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8086 OMP_CLAUSE_DECL (c2)
8087 = unshare_expr (OMP_CLAUSE_DECL (c));
8088 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8089 OMP_CLAUSE_SIZE (c2)
8090 = TYPE_SIZE_UNIT (ptr_type_node);
8091 cl = scp ? *prev_list_p : c2;
8092 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8093 {
8094 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8095 tree c3
8096 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8097 OMP_CLAUSE_MAP);
8098 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8099 OMP_CLAUSE_DECL (c3)
8100 = unshare_expr (OMP_CLAUSE_DECL (c4));
8101 OMP_CLAUSE_SIZE (c3)
8102 = TYPE_SIZE_UNIT (ptr_type_node);
8103 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8104 if (!scp)
8105 OMP_CLAUSE_CHAIN (c2) = c3;
8106 else
8107 cl = c3;
8108 }
8109 if (scp)
8110 *scp = c2;
8111 if (sc == prev_list_p)
8112 {
8113 *sc = cl;
8114 prev_list_p = NULL;
8115 }
8116 else
8117 {
8118 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8119 list_p = prev_list_p;
8120 prev_list_p = NULL;
8121 OMP_CLAUSE_CHAIN (c) = *sc;
8122 *sc = cl;
8123 continue;
8124 }
8125 }
8126 else if (*sc != c)
8127 {
8128 *list_p = OMP_CLAUSE_CHAIN (c);
8129 OMP_CLAUSE_CHAIN (c) = *sc;
8130 *sc = c;
8131 continue;
8132 }
8133 }
8134 }
8135 if (!remove
8136 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8137 && OMP_CLAUSE_CHAIN (c)
8138 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8139 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8140 == GOMP_MAP_ALWAYS_POINTER))
8141 prev_list_p = list_p;
8142 break;
8143 }
8144 flags = GOVD_MAP | GOVD_EXPLICIT;
8145 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8146 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8147 flags |= GOVD_MAP_ALWAYS_TO;
8148 goto do_add;
8149
8150 case OMP_CLAUSE_DEPEND:
8151 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8152 {
8153 tree deps = OMP_CLAUSE_DECL (c);
8154 while (deps && TREE_CODE (deps) == TREE_LIST)
8155 {
8156 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8157 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8158 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8159 pre_p, NULL, is_gimple_val, fb_rvalue);
8160 deps = TREE_CHAIN (deps);
8161 }
8162 break;
8163 }
8164 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8165 break;
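/* For an ordinary depend(in/out/inout: x) clause it is the address
   of x that the runtime needs, so strip any wrapping COMPOUND_EXPR,
   take the address and gimplify it to a value.  */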
8166 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8167 {
8168 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8169 NULL, is_gimple_val, fb_rvalue);
8170 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8171 }
8172 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8173 {
8174 remove = true;
8175 break;
8176 }
8177 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8178 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8179 is_gimple_val, fb_rvalue) == GS_ERROR)
8180 {
8181 remove = true;
8182 break;
8183 }
8184 break;
8185
8186 case OMP_CLAUSE_TO:
8187 case OMP_CLAUSE_FROM:
8188 case OMP_CLAUSE__CACHE_:
8189 decl = OMP_CLAUSE_DECL (c);
8190 if (error_operand_p (decl))
8191 {
8192 remove = true;
8193 break;
8194 }
8195 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8196 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8197 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8198 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8199 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8200 {
8201 remove = true;
8202 break;
8203 }
8204 if (!DECL_P (decl))
8205 {
8206 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8207 NULL, is_gimple_lvalue, fb_lvalue)
8208 == GS_ERROR)
8209 {
8210 remove = true;
8211 break;
8212 }
8213 break;
8214 }
8215 goto do_notice;
8216
8217 case OMP_CLAUSE_USE_DEVICE_PTR:
8218 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8219 goto do_add;
8220 case OMP_CLAUSE_IS_DEVICE_PTR:
8221 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8222 goto do_add;
8223
8224 do_add:
8225 decl = OMP_CLAUSE_DECL (c);
8226 do_add_decl:
8227 if (error_operand_p (decl))
8228 {
8229 remove = true;
8230 break;
8231 }
8232 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8233 {
8234 tree t = omp_member_access_dummy_var (decl);
8235 if (t)
8236 {
8237 tree v = DECL_VALUE_EXPR (decl);
8238 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8239 if (outer_ctx)
8240 omp_notice_variable (outer_ctx, t, true);
8241 }
8242 }
8243 if (code == OACC_DATA
8244 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8245 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8246 flags |= GOVD_MAP_0LEN_ARRAY;
8247 omp_add_variable (ctx, decl, flags);
8248 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8249 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8250 {
8251 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8252 GOVD_LOCAL | GOVD_SEEN);
8253 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8254 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8255 find_decl_expr,
8256 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8257 NULL) == NULL_TREE)
8258 omp_add_variable (ctx,
8259 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8260 GOVD_LOCAL | GOVD_SEEN);
8261 gimplify_omp_ctxp = ctx;
8262 push_gimplify_context ();
8263
8264 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8265 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8266
8267 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8268 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8269 pop_gimplify_context
8270 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8271 push_gimplify_context ();
8272 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8273 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8274 pop_gimplify_context
8275 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8276 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8277 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8278
8279 gimplify_omp_ctxp = outer_ctx;
8280 }
8281 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8282 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8283 {
8284 gimplify_omp_ctxp = ctx;
8285 push_gimplify_context ();
8286 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8287 {
8288 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8289 NULL, NULL);
8290 TREE_SIDE_EFFECTS (bind) = 1;
8291 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8292 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8293 }
8294 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8295 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8296 pop_gimplify_context
8297 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8298 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8299
8300 gimplify_omp_ctxp = outer_ctx;
8301 }
8302 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8303 && OMP_CLAUSE_LINEAR_STMT (c))
8304 {
8305 gimplify_omp_ctxp = ctx;
8306 push_gimplify_context ();
8307 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8308 {
8309 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8310 NULL, NULL);
8311 TREE_SIDE_EFFECTS (bind) = 1;
8312 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8313 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8314 }
8315 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8316 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8317 pop_gimplify_context
8318 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8319 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8320
8321 gimplify_omp_ctxp = outer_ctx;
8322 }
8323 if (notice_outer)
8324 goto do_notice;
8325 break;
8326
8327 case OMP_CLAUSE_COPYIN:
8328 case OMP_CLAUSE_COPYPRIVATE:
8329 decl = OMP_CLAUSE_DECL (c);
8330 if (error_operand_p (decl))
8331 {
8332 remove = true;
8333 break;
8334 }
8335 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8336 && !remove
8337 && !omp_check_private (ctx, decl, true))
8338 {
8339 remove = true;
8340 if (is_global_var (decl))
8341 {
8342 if (DECL_THREAD_LOCAL_P (decl))
8343 remove = false;
8344 else if (DECL_HAS_VALUE_EXPR_P (decl))
8345 {
8346 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8347
8348 if (value
8349 && DECL_P (value)
8350 && DECL_THREAD_LOCAL_P (value))
8351 remove = false;
8352 }
8353 }
8354 if (remove)
8355 error_at (OMP_CLAUSE_LOCATION (c),
8356 "copyprivate variable %qE is not threadprivate"
8357 " or private in outer context", DECL_NAME (decl));
8358 }
8359 do_notice:
8360 if (outer_ctx)
8361 omp_notice_variable (outer_ctx, decl, true);
8362 if (check_non_private
8363 && region_type == ORT_WORKSHARE
8364 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8365 || decl == OMP_CLAUSE_DECL (c)
8366 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8367 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8368 == ADDR_EXPR
8369 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8370 == POINTER_PLUS_EXPR
8371 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8372 (OMP_CLAUSE_DECL (c), 0), 0))
8373 == ADDR_EXPR)))))
8374 && omp_check_private (ctx, decl, false))
8375 {
8376 error ("%s variable %qE is private in outer context",
8377 check_non_private, DECL_NAME (decl));
8378 remove = true;
8379 }
8380 break;
8381
8382 case OMP_CLAUSE_IF:
8383 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8384 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8385 {
8386 const char *p[2];
8387 for (int i = 0; i < 2; i++)
8388 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8389 {
8390 case OMP_PARALLEL: p[i] = "parallel"; break;
8391 case OMP_TASK: p[i] = "task"; break;
8392 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8393 case OMP_TARGET_DATA: p[i] = "target data"; break;
8394 case OMP_TARGET: p[i] = "target"; break;
8395 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8396 case OMP_TARGET_ENTER_DATA:
8397 p[i] = "target enter data"; break;
8398 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8399 default: gcc_unreachable ();
8400 }
8401 error_at (OMP_CLAUSE_LOCATION (c),
8402 "expected %qs %<if%> clause modifier rather than %qs",
8403 p[0], p[1]);
8404 remove = true;
8405 }
8406 /* Fall through. */
8407
8408 case OMP_CLAUSE_FINAL:
8409 OMP_CLAUSE_OPERAND (c, 0)
8410 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8411 /* Fall through. */
8412
8413 case OMP_CLAUSE_SCHEDULE:
8414 case OMP_CLAUSE_NUM_THREADS:
8415 case OMP_CLAUSE_NUM_TEAMS:
8416 case OMP_CLAUSE_THREAD_LIMIT:
8417 case OMP_CLAUSE_DIST_SCHEDULE:
8418 case OMP_CLAUSE_DEVICE:
8419 case OMP_CLAUSE_PRIORITY:
8420 case OMP_CLAUSE_GRAINSIZE:
8421 case OMP_CLAUSE_NUM_TASKS:
8422 case OMP_CLAUSE_HINT:
8423 case OMP_CLAUSE__CILK_FOR_COUNT_:
8424 case OMP_CLAUSE_ASYNC:
8425 case OMP_CLAUSE_WAIT:
8426 case OMP_CLAUSE_NUM_GANGS:
8427 case OMP_CLAUSE_NUM_WORKERS:
8428 case OMP_CLAUSE_VECTOR_LENGTH:
8429 case OMP_CLAUSE_WORKER:
8430 case OMP_CLAUSE_VECTOR:
8431 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8432 is_gimple_val, fb_rvalue) == GS_ERROR)
8433 remove = true;
8434 break;
8435
8436 case OMP_CLAUSE_GANG:
8437 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8438 is_gimple_val, fb_rvalue) == GS_ERROR)
8439 remove = true;
8440 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8441 is_gimple_val, fb_rvalue) == GS_ERROR)
8442 remove = true;
8443 break;
8444
8445 case OMP_CLAUSE_NOWAIT:
8446 case OMP_CLAUSE_ORDERED:
8447 case OMP_CLAUSE_UNTIED:
8448 case OMP_CLAUSE_COLLAPSE:
8449 case OMP_CLAUSE_TILE:
8450 case OMP_CLAUSE_AUTO:
8451 case OMP_CLAUSE_SEQ:
8452 case OMP_CLAUSE_INDEPENDENT:
8453 case OMP_CLAUSE_MERGEABLE:
8454 case OMP_CLAUSE_PROC_BIND:
8455 case OMP_CLAUSE_SAFELEN:
8456 case OMP_CLAUSE_SIMDLEN:
8457 case OMP_CLAUSE_NOGROUP:
8458 case OMP_CLAUSE_THREADS:
8459 case OMP_CLAUSE_SIMD:
8460 break;
8461
8462 case OMP_CLAUSE_DEFAULTMAP:
8463 ctx->target_map_scalars_firstprivate = false;
8464 break;
8465
8466 case OMP_CLAUSE_ALIGNED:
8467 decl = OMP_CLAUSE_DECL (c);
8468 if (error_operand_p (decl))
8469 {
8470 remove = true;
8471 break;
8472 }
8473 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8474 is_gimple_val, fb_rvalue) == GS_ERROR)
8475 {
8476 remove = true;
8477 break;
8478 }
8479 if (!is_global_var (decl)
8480 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8481 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8482 break;
8483
8484 case OMP_CLAUSE_DEFAULT:
8485 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8486 break;
8487
8488 default:
8489 gcc_unreachable ();
8490 }
8491
8492 if (code == OACC_DATA
8493 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8494 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8495 remove = true;
8496 if (remove)
8497 *list_p = OMP_CLAUSE_CHAIN (c);
8498 else
8499 list_p = &OMP_CLAUSE_CHAIN (c);
8500 }
8501
8502 gimplify_omp_ctxp = ctx;
8503 if (struct_map_to_clause)
8504 delete struct_map_to_clause;
8505 }
8506
8507 /* Return true if DECL is a candidate for the shared to firstprivate
8508 optimization. We only consider non-addressable scalars that are
8509 not too large and are not privatized by reference. */
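/* For example, a small scalar that is shared on a parallel but never
   written inside it can be marked OMP_CLAUSE_SHARED_READONLY, so that
   later phases may pass its value to the outlined body rather than
   its address.  */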
8510
8511 static bool
8512 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8513 {
8514 if (TREE_ADDRESSABLE (decl))
8515 return false;
8516 tree type = TREE_TYPE (decl);
8517 if (!is_gimple_reg_type (type)
8518 || TREE_CODE (type) == REFERENCE_TYPE
8519 || TREE_ADDRESSABLE (type))
8520 return false;
8521 /* Don't optimize too large decls, as each thread/task will have
8522 its own copy. */
8523 HOST_WIDE_INT len = int_size_in_bytes (type);
8524 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8525 return false;
8526 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8527 return false;
8528 return true;
8529 }
8530
8531 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8532 For a decl satisfying omp_shared_to_firstprivate_optimizable_decl_p,
8533 mark it as GOVD_WRITTEN in the innermost context where it is shared. */
8534
8535 static void
8536 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8537 {
8538 for (; ctx; ctx = ctx->outer_context)
8539 {
8540 splay_tree_node n = splay_tree_lookup (ctx->variables,
8541 (splay_tree_key) decl);
8542 if (n == NULL)
8543 continue;
8544 else if (n->value & GOVD_SHARED)
8545 {
8546 n->value |= GOVD_WRITTEN;
8547 return;
8548 }
8549 else if (n->value & GOVD_DATA_SHARE_CLASS)
8550 return;
8551 }
8552 }
8553
8554 /* Helper callback for walk_gimple_seq to discover possible stores
8555 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8556 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
8557 context. */
8558
8559 static tree
8560 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8561 {
8562 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8563
8564 *walk_subtrees = 0;
8565 if (!wi->is_lhs)
8566 return NULL_TREE;
8567
8568 tree op = *tp;
8569 do
8570 {
8571 if (handled_component_p (op))
8572 op = TREE_OPERAND (op, 0);
8573 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8574 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8575 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8576 else
8577 break;
8578 }
8579 while (1);
8580 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8581 return NULL_TREE;
8582
8583 omp_mark_stores (gimplify_omp_ctxp, op);
8584 return NULL_TREE;
8585 }
8586
8587 /* Statement callback counterpart of omp_find_stores_op for
8588 walk_gimple_seq: decides which statements to recurse into when
8589 discovering possible stores to
8590 omp_shared_to_firstprivate_optimizable_decl_p decls. */
8591
8592 static tree
8593 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8594 bool *handled_ops_p,
8595 struct walk_stmt_info *wi)
8596 {
8597 gimple *stmt = gsi_stmt (*gsi_p);
8598 switch (gimple_code (stmt))
8599 {
8600 /* Don't recurse on OpenMP constructs for which
8601 gimplify_adjust_omp_clauses already handled the bodies,
8602 except that gimple_omp_for_pre_body still needs a walk. */
8603 case GIMPLE_OMP_FOR:
8604 *handled_ops_p = true;
8605 if (gimple_omp_for_pre_body (stmt))
8606 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8607 omp_find_stores_stmt, omp_find_stores_op, wi);
8608 break;
8609 case GIMPLE_OMP_PARALLEL:
8610 case GIMPLE_OMP_TASK:
8611 case GIMPLE_OMP_SECTIONS:
8612 case GIMPLE_OMP_SINGLE:
8613 case GIMPLE_OMP_TARGET:
8614 case GIMPLE_OMP_TEAMS:
8615 case GIMPLE_OMP_CRITICAL:
8616 *handled_ops_p = true;
8617 break;
8618 default:
8619 break;
8620 }
8621 return NULL_TREE;
8622 }
8623
8624 struct gimplify_adjust_omp_clauses_data
8625 {
8626 tree *list_p;
8627 gimple_seq *pre_p;
8628 };
8629
8630 /* Splay tree callback: for each variable implicitly determined and
8631 actually seen within the context, build and add the matching clause. */
8632
8633 static int
8634 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
8635 {
8636 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
8637 gimple_seq *pre_p
8638 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
8639 tree decl = (tree) n->key;
8640 unsigned flags = n->value;
8641 enum omp_clause_code code;
8642 tree clause;
8643 bool private_debug;
8644
8645 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
8646 return 0;
8647 if ((flags & GOVD_SEEN) == 0)
8648 return 0;
8649 if (flags & GOVD_DEBUG_PRIVATE)
8650 {
8651 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
8652 private_debug = true;
8653 }
8654 else if (flags & GOVD_MAP)
8655 private_debug = false;
8656 else
8657 private_debug
8658 = lang_hooks.decls.omp_private_debug_clause (decl,
8659 !!(flags & GOVD_SHARED));
8660 if (private_debug)
8661 code = OMP_CLAUSE_PRIVATE;
8662 else if (flags & GOVD_MAP)
8663 {
8664 code = OMP_CLAUSE_MAP;
8665 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8666 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8667 {
8668 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
8669 return 0;
8670 }
8671 }
8672 else if (flags & GOVD_SHARED)
8673 {
8674 if (is_global_var (decl))
8675 {
8676 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8677 while (ctx != NULL)
8678 {
8679 splay_tree_node on
8680 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8681 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
8682 | GOVD_PRIVATE | GOVD_REDUCTION
8683 | GOVD_LINEAR | GOVD_MAP)) != 0)
8684 break;
8685 ctx = ctx->outer_context;
8686 }
8687 if (ctx == NULL)
8688 return 0;
8689 }
8690 code = OMP_CLAUSE_SHARED;
8691 }
8692 else if (flags & GOVD_PRIVATE)
8693 code = OMP_CLAUSE_PRIVATE;
8694 else if (flags & GOVD_FIRSTPRIVATE)
8695 {
8696 code = OMP_CLAUSE_FIRSTPRIVATE;
8697 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
8698 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
8699 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
8700 {
8701 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
8702 "%<target%> construct", decl);
8703 return 0;
8704 }
8705 }
8706 else if (flags & GOVD_LASTPRIVATE)
8707 code = OMP_CLAUSE_LASTPRIVATE;
8708 else if (flags & GOVD_ALIGNED)
8709 return 0;
8710 else
8711 gcc_unreachable ();
8712
8713 if (((flags & GOVD_LASTPRIVATE)
8714 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
8715 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8716 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8717
8718 tree chain = *list_p;
8719 clause = build_omp_clause (input_location, code);
8720 OMP_CLAUSE_DECL (clause) = decl;
8721 OMP_CLAUSE_CHAIN (clause) = chain;
8722 if (private_debug)
8723 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
8724 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
8725 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
8726 else if (code == OMP_CLAUSE_SHARED
8727 && (flags & GOVD_WRITTEN) == 0
8728 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8729 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
8730 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
8731 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
8732 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
8733 {
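/* A pointer implicitly mapped as a zero-length array section: emit a
   GOMP_MAP_ALLOC of *decl with
   OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION set, followed by a
   GOMP_MAP_FIRSTPRIVATE_POINTER for the pointer itself, so the
   runtime can rewrite the private pointer copy if the storage it
   points to is already present on the device.  */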
8734 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
8735 OMP_CLAUSE_DECL (nc) = decl;
8736 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8737 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8738 OMP_CLAUSE_DECL (clause)
8739 = build_simple_mem_ref_loc (input_location, decl);
8740 OMP_CLAUSE_DECL (clause)
8741 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
8742 build_int_cst (build_pointer_type (char_type_node), 0));
8743 OMP_CLAUSE_SIZE (clause) = size_zero_node;
8744 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8745 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
8746 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
8747 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8748 OMP_CLAUSE_CHAIN (nc) = chain;
8749 OMP_CLAUSE_CHAIN (clause) = nc;
8750 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8751 gimplify_omp_ctxp = ctx->outer_context;
8752 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
8753 pre_p, NULL, is_gimple_val, fb_rvalue);
8754 gimplify_omp_ctxp = ctx;
8755 }
8756 else if (code == OMP_CLAUSE_MAP)
8757 {
8758 int kind;
8759 /* Not all combinations of these GOVD_MAP flags are actually valid. */
8760 switch (flags & (GOVD_MAP_TO_ONLY
8761 | GOVD_MAP_FORCE
8762 | GOVD_MAP_FORCE_PRESENT))
8763 {
8764 case 0:
8765 kind = GOMP_MAP_TOFROM;
8766 break;
8767 case GOVD_MAP_FORCE:
8768 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
8769 break;
8770 case GOVD_MAP_TO_ONLY:
8771 kind = GOMP_MAP_TO;
8772 break;
8773 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
8774 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
8775 break;
8776 case GOVD_MAP_FORCE_PRESENT:
8777 kind = GOMP_MAP_FORCE_PRESENT;
8778 break;
8779 default:
8780 gcc_unreachable ();
8781 }
8782 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
8783 if (DECL_SIZE (decl)
8784 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8785 {
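/* Variable-sized decls live behind a pointer recorded in their
   DECL_VALUE_EXPR; map the pointed-to storage and add a companion
   GOMP_MAP_POINTER / GOMP_MAP_FIRSTPRIVATE_POINTER clause so the
   pointer itself is remapped for the device.  */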
8786 tree decl2 = DECL_VALUE_EXPR (decl);
8787 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8788 decl2 = TREE_OPERAND (decl2, 0);
8789 gcc_assert (DECL_P (decl2));
8790 tree mem = build_simple_mem_ref (decl2);
8791 OMP_CLAUSE_DECL (clause) = mem;
8792 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8793 if (gimplify_omp_ctxp->outer_context)
8794 {
8795 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
8796 omp_notice_variable (ctx, decl2, true);
8797 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
8798 }
8799 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8800 OMP_CLAUSE_MAP);
8801 OMP_CLAUSE_DECL (nc) = decl;
8802 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8803 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
8804 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
8805 else
8806 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8807 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8808 OMP_CLAUSE_CHAIN (clause) = nc;
8809 }
8810 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
8811 && lang_hooks.decls.omp_privatize_by_reference (decl))
8812 {
8813 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
8814 OMP_CLAUSE_SIZE (clause)
8815 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
8816 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8817 gimplify_omp_ctxp = ctx->outer_context;
8818 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
8819 pre_p, NULL, is_gimple_val, fb_rvalue);
8820 gimplify_omp_ctxp = ctx;
8821 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
8822 OMP_CLAUSE_MAP);
8823 OMP_CLAUSE_DECL (nc) = decl;
8824 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8825 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8826 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
8827 OMP_CLAUSE_CHAIN (clause) = nc;
8828 }
8829 else
8830 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
8831 }
8832 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
8833 {
8834 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
8835 OMP_CLAUSE_DECL (nc) = decl;
8836 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
8837 OMP_CLAUSE_CHAIN (nc) = chain;
8838 OMP_CLAUSE_CHAIN (clause) = nc;
8839 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8840 gimplify_omp_ctxp = ctx->outer_context;
8841 lang_hooks.decls.omp_finish_clause (nc, pre_p);
8842 gimplify_omp_ctxp = ctx;
8843 }
8844 *list_p = clause;
8845 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8846 gimplify_omp_ctxp = ctx->outer_context;
8847 lang_hooks.decls.omp_finish_clause (clause, pre_p);
8848 if (gimplify_omp_ctxp)
8849 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
8850 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
8851 && DECL_P (OMP_CLAUSE_SIZE (clause)))
8852 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
8853 true);
8854 gimplify_omp_ctxp = ctx;
8855 return 0;
8856 }
8857
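/* Adjust the clause list in *LIST_P once BODY has been gimplified:
   record which shared variables BODY may store to, prune clauses for
   variables that were never actually seen, and validate or rewrite
   the remaining ones.  PRE_P receives any statements the adjustment
   needs to emit and CODE identifies the construct being processed.  */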
8858 static void
8859 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
8860 enum tree_code code)
8861 {
8862 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8863 tree c, decl;
8864
8865 if (body)
8866 {
8867 struct gimplify_omp_ctx *octx;
8868 for (octx = ctx; octx; octx = octx->outer_context)
8869 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
8870 break;
8871 if (octx)
8872 {
8873 struct walk_stmt_info wi;
8874 memset (&wi, 0, sizeof (wi));
8875 walk_gimple_seq (body, omp_find_stores_stmt,
8876 omp_find_stores_op, &wi);
8877 }
8878 }
8879 while ((c = *list_p) != NULL)
8880 {
8881 splay_tree_node n;
8882 bool remove = false;
8883
8884 switch (OMP_CLAUSE_CODE (c))
8885 {
8886 case OMP_CLAUSE_FIRSTPRIVATE:
8887 if ((ctx->region_type & ORT_TARGET)
8888 && (ctx->region_type & ORT_ACC) == 0
8889 && TYPE_ATOMIC (strip_array_types
8890 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
8891 {
8892 error_at (OMP_CLAUSE_LOCATION (c),
8893 "%<_Atomic%> %qD in %<firstprivate%> clause on "
8894 "%<target%> construct", OMP_CLAUSE_DECL (c));
8895 remove = true;
8896 break;
8897 }
8898 /* FALLTHRU */
8899 case OMP_CLAUSE_PRIVATE:
8900 case OMP_CLAUSE_SHARED:
8901 case OMP_CLAUSE_LINEAR:
8902 decl = OMP_CLAUSE_DECL (c);
8903 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8904 remove = !(n->value & GOVD_SEEN);
8905 if (! remove)
8906 {
8907 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
8908 if ((n->value & GOVD_DEBUG_PRIVATE)
8909 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
8910 {
8911 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
8912 || ((n->value & GOVD_DATA_SHARE_CLASS)
8913 == GOVD_SHARED));
8914 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
8915 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
8916 }
8917 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8918 && (n->value & GOVD_WRITTEN) == 0
8919 && DECL_P (decl)
8920 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8921 OMP_CLAUSE_SHARED_READONLY (c) = 1;
8922 else if (DECL_P (decl)
8923 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
8924 && (n->value & GOVD_WRITTEN) != 0)
8925 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8926 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8927 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8928 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8929 }
8930 break;
8931
8932 case OMP_CLAUSE_LASTPRIVATE:
8933 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
8934 accurately reflect the presence of a FIRSTPRIVATE clause. */
8935 decl = OMP_CLAUSE_DECL (c);
8936 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8937 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
8938 = (n->value & GOVD_FIRSTPRIVATE) != 0;
8939 if (code == OMP_DISTRIBUTE
8940 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
8941 {
8942 remove = true;
8943 error_at (OMP_CLAUSE_LOCATION (c),
8944 "same variable used in %<firstprivate%> and "
8945 "%<lastprivate%> clauses on %<distribute%> "
8946 "construct");
8947 }
8948 if (!remove
8949 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8950 && DECL_P (decl)
8951 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8952 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8953 break;
8954
8955 case OMP_CLAUSE_ALIGNED:
8956 decl = OMP_CLAUSE_DECL (c);
8957 if (!is_global_var (decl))
8958 {
8959 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8960 remove = n == NULL || !(n->value & GOVD_SEEN);
8961 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8962 {
8963 struct gimplify_omp_ctx *octx;
8964 if (n != NULL
8965 && (n->value & (GOVD_DATA_SHARE_CLASS
8966 & ~GOVD_FIRSTPRIVATE)))
8967 remove = true;
8968 else
8969 for (octx = ctx->outer_context; octx;
8970 octx = octx->outer_context)
8971 {
8972 n = splay_tree_lookup (octx->variables,
8973 (splay_tree_key) decl);
8974 if (n == NULL)
8975 continue;
8976 if (n->value & GOVD_LOCAL)
8977 break;
8978 /* We have to avoid assigning a shared variable
8979 to itself when trying to add
8980 __builtin_assume_aligned. */
8981 if (n->value & GOVD_SHARED)
8982 {
8983 remove = true;
8984 break;
8985 }
8986 }
8987 }
8988 }
8989 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
8990 {
8991 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8992 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8993 remove = true;
8994 }
8995 break;
8996
8997 case OMP_CLAUSE_MAP:
8998 if (code == OMP_TARGET_EXIT_DATA
8999 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
9000 {
9001 remove = true;
9002 break;
9003 }
9004 decl = OMP_CLAUSE_DECL (c);
9005 /* Data clauses associated with acc parallel reductions must be
9006 compatible with present_or_copy. Warn and adjust the clause
9007 if that is not the case. */
9008 if (ctx->region_type == ORT_ACC_PARALLEL)
9009 {
9010 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
9011 n = NULL;
9012
9013 if (DECL_P (t))
9014 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
9015
9016 if (n && (n->value & GOVD_REDUCTION))
9017 {
9018 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
9019
9020 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
9021 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
9022 && kind != GOMP_MAP_FORCE_PRESENT
9023 && kind != GOMP_MAP_POINTER)
9024 {
9025 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9026 "incompatible data clause with reduction "
9027 "on %qE; promoting to present_or_copy",
9028 DECL_NAME (t));
9029 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9030 }
9031 }
9032 }
9033 if (!DECL_P (decl))
9034 {
9035 if ((ctx->region_type & ORT_TARGET) != 0
9036 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9037 {
9038 if (TREE_CODE (decl) == INDIRECT_REF
9039 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9040 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9041 == REFERENCE_TYPE))
9042 decl = TREE_OPERAND (decl, 0);
9043 if (TREE_CODE (decl) == COMPONENT_REF)
9044 {
9045 while (TREE_CODE (decl) == COMPONENT_REF)
9046 decl = TREE_OPERAND (decl, 0);
9047 if (DECL_P (decl))
9048 {
9049 n = splay_tree_lookup (ctx->variables,
9050 (splay_tree_key) decl);
9051 if (!(n->value & GOVD_SEEN))
9052 remove = true;
9053 }
9054 }
9055 }
9056 break;
9057 }
9058 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9059 if ((ctx->region_type & ORT_TARGET) != 0
9060 && !(n->value & GOVD_SEEN)
9061 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
9062 && (!is_global_var (decl)
9063 || !lookup_attribute ("omp declare target link",
9064 DECL_ATTRIBUTES (decl))))
9065 {
9066 remove = true;
/* For struct element mapping, if the struct is never referenced
in the target block and none of the mappings has an always modifier,
remove all the struct element mappings, which immediately
follow the GOMP_MAP_STRUCT map clause.  */
9071 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
9072 {
9073 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
9074 while (cnt--)
9075 OMP_CLAUSE_CHAIN (c)
9076 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
9077 }
9078 }
9079 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
9080 && code == OMP_TARGET_EXIT_DATA)
9081 remove = true;
9082 else if (DECL_SIZE (decl)
9083 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
9084 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
9085 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
9086 && (OMP_CLAUSE_MAP_KIND (c)
9087 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9088 {
9089 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
9090 for these, TREE_CODE (DECL_SIZE (decl)) will always be
9091 INTEGER_CST. */
9092 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
9093
9094 tree decl2 = DECL_VALUE_EXPR (decl);
9095 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9096 decl2 = TREE_OPERAND (decl2, 0);
9097 gcc_assert (DECL_P (decl2));
9098 tree mem = build_simple_mem_ref (decl2);
9099 OMP_CLAUSE_DECL (c) = mem;
9100 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9101 if (ctx->outer_context)
9102 {
9103 omp_notice_variable (ctx->outer_context, decl2, true);
9104 omp_notice_variable (ctx->outer_context,
9105 OMP_CLAUSE_SIZE (c), true);
9106 }
9107 if (((ctx->region_type & ORT_TARGET) != 0
9108 || !ctx->target_firstprivatize_array_bases)
9109 && ((n->value & GOVD_SEEN) == 0
9110 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
9111 {
9112 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9113 OMP_CLAUSE_MAP);
9114 OMP_CLAUSE_DECL (nc) = decl;
9115 OMP_CLAUSE_SIZE (nc) = size_zero_node;
9116 if (ctx->target_firstprivatize_array_bases)
9117 OMP_CLAUSE_SET_MAP_KIND (nc,
9118 GOMP_MAP_FIRSTPRIVATE_POINTER);
9119 else
9120 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
9121 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
9122 OMP_CLAUSE_CHAIN (c) = nc;
9123 c = nc;
9124 }
9125 }
9126 else
9127 {
9128 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9129 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9130 gcc_assert ((n->value & GOVD_SEEN) == 0
9131 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9132 == 0));
9133 }
9134 break;
9135
9136 case OMP_CLAUSE_TO:
9137 case OMP_CLAUSE_FROM:
9138 case OMP_CLAUSE__CACHE_:
9139 decl = OMP_CLAUSE_DECL (c);
9140 if (!DECL_P (decl))
9141 break;
9142 if (DECL_SIZE (decl)
9143 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
9144 {
9145 tree decl2 = DECL_VALUE_EXPR (decl);
9146 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
9147 decl2 = TREE_OPERAND (decl2, 0);
9148 gcc_assert (DECL_P (decl2));
9149 tree mem = build_simple_mem_ref (decl2);
9150 OMP_CLAUSE_DECL (c) = mem;
9151 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
9152 if (ctx->outer_context)
9153 {
9154 omp_notice_variable (ctx->outer_context, decl2, true);
9155 omp_notice_variable (ctx->outer_context,
9156 OMP_CLAUSE_SIZE (c), true);
9157 }
9158 }
9159 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9160 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
9161 break;
9162
9163 case OMP_CLAUSE_REDUCTION:
9164 decl = OMP_CLAUSE_DECL (c);
/* OpenACC reductions need a present_or_copy data clause.
Add one if necessary.  Error out if the reduction is private.  */
9167 if (ctx->region_type == ORT_ACC_PARALLEL)
9168 {
9169 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
9170 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
9171 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
9172 "reduction on %qE", DECL_NAME (decl));
9173 else if ((n->value & GOVD_MAP) == 0)
9174 {
9175 tree next = OMP_CLAUSE_CHAIN (c);
9176 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
9177 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
9178 OMP_CLAUSE_DECL (nc) = decl;
9179 OMP_CLAUSE_CHAIN (c) = nc;
9180 lang_hooks.decls.omp_finish_clause (nc, pre_p);
9181 while (1)
9182 {
9183 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
9184 if (OMP_CLAUSE_CHAIN (nc) == NULL)
9185 break;
9186 nc = OMP_CLAUSE_CHAIN (nc);
9187 }
9188 OMP_CLAUSE_CHAIN (nc) = next;
9189 n->value |= GOVD_MAP;
9190 }
9191 }
9192 if (DECL_P (decl)
9193 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
9194 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
9195 break;
9196 case OMP_CLAUSE_COPYIN:
9197 case OMP_CLAUSE_COPYPRIVATE:
9198 case OMP_CLAUSE_IF:
9199 case OMP_CLAUSE_NUM_THREADS:
9200 case OMP_CLAUSE_NUM_TEAMS:
9201 case OMP_CLAUSE_THREAD_LIMIT:
9202 case OMP_CLAUSE_DIST_SCHEDULE:
9203 case OMP_CLAUSE_DEVICE:
9204 case OMP_CLAUSE_SCHEDULE:
9205 case OMP_CLAUSE_NOWAIT:
9206 case OMP_CLAUSE_ORDERED:
9207 case OMP_CLAUSE_DEFAULT:
9208 case OMP_CLAUSE_UNTIED:
9209 case OMP_CLAUSE_COLLAPSE:
9210 case OMP_CLAUSE_FINAL:
9211 case OMP_CLAUSE_MERGEABLE:
9212 case OMP_CLAUSE_PROC_BIND:
9213 case OMP_CLAUSE_SAFELEN:
9214 case OMP_CLAUSE_SIMDLEN:
9215 case OMP_CLAUSE_DEPEND:
9216 case OMP_CLAUSE_PRIORITY:
9217 case OMP_CLAUSE_GRAINSIZE:
9218 case OMP_CLAUSE_NUM_TASKS:
9219 case OMP_CLAUSE_NOGROUP:
9220 case OMP_CLAUSE_THREADS:
9221 case OMP_CLAUSE_SIMD:
9222 case OMP_CLAUSE_HINT:
9223 case OMP_CLAUSE_DEFAULTMAP:
9224 case OMP_CLAUSE_USE_DEVICE_PTR:
9225 case OMP_CLAUSE_IS_DEVICE_PTR:
9226 case OMP_CLAUSE__CILK_FOR_COUNT_:
9227 case OMP_CLAUSE_ASYNC:
9228 case OMP_CLAUSE_WAIT:
9229 case OMP_CLAUSE_INDEPENDENT:
9230 case OMP_CLAUSE_NUM_GANGS:
9231 case OMP_CLAUSE_NUM_WORKERS:
9232 case OMP_CLAUSE_VECTOR_LENGTH:
9233 case OMP_CLAUSE_GANG:
9234 case OMP_CLAUSE_WORKER:
9235 case OMP_CLAUSE_VECTOR:
9236 case OMP_CLAUSE_AUTO:
9237 case OMP_CLAUSE_SEQ:
9238 case OMP_CLAUSE_TILE:
9239 break;
9240
9241 default:
9242 gcc_unreachable ();
9243 }
9244
9245 if (remove)
9246 *list_p = OMP_CLAUSE_CHAIN (c);
9247 else
9248 list_p = &OMP_CLAUSE_CHAIN (c);
9249 }
9250
9251 /* Add in any implicit data sharing. */
9252 struct gimplify_adjust_omp_clauses_data data;
9253 data.list_p = list_p;
9254 data.pre_p = pre_p;
9255 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
9256
9257 gimplify_omp_ctxp = ctx->outer_context;
9258 delete_omp_context (ctx);
9259 }
9260
9261 /* Gimplify OACC_CACHE. */
9262
9263 static void
9264 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9265 {
9266 tree expr = *expr_p;
9267
9268 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9269 OACC_CACHE);
9270 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9271 OACC_CACHE);
9272
9273 /* TODO: Do something sensible with this information. */
9274
9275 *expr_p = NULL_TREE;
9276 }
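
/* E.g. #pragma acc cache (a[0:n]) inside an OpenACC loop: the clauses
   are scanned and adjusted as above, but the construct itself is
   dropped for now (see the TODO).  */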
9277
/* Helper function of gimplify_oacc_declare.  Translate, if required,
the map 'kind' in CLAUSE into an 'entry' kind and an 'exit' kind.
The entry kind replaces the one in CLAUSE, while the exit kind is
used in a new omp_clause that is returned to the caller.  */
9282
9283 static tree
9284 gimplify_oacc_declare_1 (tree clause)
9285 {
9286 HOST_WIDE_INT kind, new_op;
9287 bool ret = false;
9288 tree c = NULL;
9289
9290 kind = OMP_CLAUSE_MAP_KIND (clause);
9291
9292 switch (kind)
9293 {
9294 case GOMP_MAP_ALLOC:
9295 case GOMP_MAP_FORCE_ALLOC:
9296 case GOMP_MAP_FORCE_TO:
9297 new_op = GOMP_MAP_DELETE;
9298 ret = true;
9299 break;
9300
9301 case GOMP_MAP_FORCE_FROM:
9302 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9303 new_op = GOMP_MAP_FORCE_FROM;
9304 ret = true;
9305 break;
9306
9307 case GOMP_MAP_FORCE_TOFROM:
9308 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9309 new_op = GOMP_MAP_FORCE_FROM;
9310 ret = true;
9311 break;
9312
9313 case GOMP_MAP_FROM:
9314 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9315 new_op = GOMP_MAP_FROM;
9316 ret = true;
9317 break;
9318
9319 case GOMP_MAP_TOFROM:
9320 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9321 new_op = GOMP_MAP_FROM;
9322 ret = true;
9323 break;
9324
9325 case GOMP_MAP_DEVICE_RESIDENT:
9326 case GOMP_MAP_FORCE_DEVICEPTR:
9327 case GOMP_MAP_FORCE_PRESENT:
9328 case GOMP_MAP_LINK:
9329 case GOMP_MAP_POINTER:
9330 case GOMP_MAP_TO:
9331 break;
9332
9333 default:
9334 gcc_unreachable ();
9335 break;
9336 }
9337
9338 if (ret)
9339 {
9340 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9341 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9342 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9343 }
9344
9345 return c;
9346 }
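
/* An illustrative sketch: a clause that reaches here as GOMP_MAP_TOFROM
   (a 'copy'-style mapping) is rewritten by the switch above to
   GOMP_MAP_TO for the entry point, and a new GOMP_MAP_FROM clause is
   returned for the exit point, so the data is copied in when the
   'declare' scope is entered and copied back out when it is left.  */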
9347
9348 /* Gimplify OACC_DECLARE. */
9349
9350 static void
9351 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
9352 {
9353 tree expr = *expr_p;
9354 gomp_target *stmt;
9355 tree clauses, t, decl;
9356
9357 clauses = OACC_DECLARE_CLAUSES (expr);
9358
9359 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
9360 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
9361
9362 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
9363 {
9364 decl = OMP_CLAUSE_DECL (t);
9365
9366 if (TREE_CODE (decl) == MEM_REF)
9367 decl = TREE_OPERAND (decl, 0);
9368
9369 if (VAR_P (decl) && !is_oacc_declared (decl))
9370 {
9371 tree attr = get_identifier ("oacc declare target");
9372 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
9373 DECL_ATTRIBUTES (decl));
9374 }
9375
9376 if (VAR_P (decl)
9377 && !is_global_var (decl)
9378 && DECL_CONTEXT (decl) == current_function_decl)
9379 {
9380 tree c = gimplify_oacc_declare_1 (t);
9381 if (c)
9382 {
9383 if (oacc_declare_returns == NULL)
9384 oacc_declare_returns = new hash_map<tree, tree>;
9385
9386 oacc_declare_returns->put (decl, c);
9387 }
9388 }
9389
9390 if (gimplify_omp_ctxp)
9391 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
9392 }
9393
9394 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
9395 clauses);
9396
9397 gimplify_seq_add_stmt (pre_p, stmt);
9398
9399 *expr_p = NULL_TREE;
9400 }
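
/* A sketch of the overall effect, assuming a function-local variable A:

     #pragma acc declare create(a)

   is scanned and adjusted like a target data clause list; A gains the
   "oacc declare target" attribute if it is not already marked, and for
   function-local variables the matching exit-time clause computed by
   gimplify_oacc_declare_1 is remembered in oacc_declare_returns so it
   can be replayed when the function exits.  */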
9401
9402 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9403 gimplification of the body, as well as scanning the body for used
9404 variables. We need to do this scan now, because variable-sized
9405 decls will be decomposed during gimplification. */
9406
9407 static void
9408 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9409 {
9410 tree expr = *expr_p;
9411 gimple *g;
9412 gimple_seq body = NULL;
9413
9414 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9415 OMP_PARALLEL_COMBINED (expr)
9416 ? ORT_COMBINED_PARALLEL
9417 : ORT_PARALLEL, OMP_PARALLEL);
9418
9419 push_gimplify_context ();
9420
9421 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9422 if (gimple_code (g) == GIMPLE_BIND)
9423 pop_gimplify_context (g);
9424 else
9425 pop_gimplify_context (NULL);
9426
9427 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9428 OMP_PARALLEL);
9429
9430 g = gimple_build_omp_parallel (body,
9431 OMP_PARALLEL_CLAUSES (expr),
9432 NULL_TREE, NULL_TREE);
9433 if (OMP_PARALLEL_COMBINED (expr))
9434 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9435 gimplify_seq_add_stmt (pre_p, g);
9436 *expr_p = NULL_TREE;
9437 }
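
/* A minimal sketch of the result: for

     #pragma omp parallel
     { body }

   the body is gimplified into BODY (usually a GIMPLE_BIND), the clauses
   are scanned before and adjusted after that, and a single
   GIMPLE_OMP_PARALLEL statement wrapping BODY is appended to *PRE_P.  */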
9438
9439 /* Gimplify the contents of an OMP_TASK statement. This involves
9440 gimplification of the body, as well as scanning the body for used
9441 variables. We need to do this scan now, because variable-sized
9442 decls will be decomposed during gimplification. */
9443
9444 static void
9445 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9446 {
9447 tree expr = *expr_p;
9448 gimple *g;
9449 gimple_seq body = NULL;
9450
9451 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9452 omp_find_clause (OMP_TASK_CLAUSES (expr),
9453 OMP_CLAUSE_UNTIED)
9454 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9455
9456 push_gimplify_context ();
9457
9458 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9459 if (gimple_code (g) == GIMPLE_BIND)
9460 pop_gimplify_context (g);
9461 else
9462 pop_gimplify_context (NULL);
9463
9464 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9465 OMP_TASK);
9466
9467 g = gimple_build_omp_task (body,
9468 OMP_TASK_CLAUSES (expr),
9469 NULL_TREE, NULL_TREE,
9470 NULL_TREE, NULL_TREE, NULL_TREE);
9471 gimplify_seq_add_stmt (pre_p, g);
9472 *expr_p = NULL_TREE;
9473 }
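
/* E.g. for

     #pragma omp task untied
     { body }

   omp_find_clause spots the untied clause above, so the region is
   scanned as ORT_UNTIED_TASK rather than ORT_TASK.  */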
9474
/* Helper function of gimplify_omp_for: find an OMP_FOR or OMP_SIMD
with non-NULL OMP_FOR_INIT.  */
9477
9478 static tree
9479 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9480 {
9481 *walk_subtrees = 0;
9482 switch (TREE_CODE (*tp))
9483 {
9484 case OMP_FOR:
9485 *walk_subtrees = 1;
9486 /* FALLTHRU */
9487 case OMP_SIMD:
9488 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9489 return *tp;
9490 break;
9491 case BIND_EXPR:
9492 case STATEMENT_LIST:
9493 case OMP_PARALLEL:
9494 *walk_subtrees = 1;
9495 break;
9496 default:
9497 break;
9498 }
9499 return NULL_TREE;
9500 }
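
/* For instance, for a combined construct such as

     #pragma omp for simd

   the outer OMP_FOR has a NULL OMP_FOR_INIT and the actual loop
   description lives on the inner OMP_SIMD, which this walker digs out
   through any intervening BIND_EXPRs and STATEMENT_LISTs.  */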
9501
9502 /* Gimplify the gross structure of an OMP_FOR statement. */
9503
9504 static enum gimplify_status
9505 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9506 {
9507 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9508 enum gimplify_status ret = GS_ALL_DONE;
9509 enum gimplify_status tret;
9510 gomp_for *gfor;
9511 gimple_seq for_body, for_pre_body;
9512 int i;
9513 bitmap has_decl_expr = NULL;
9514 enum omp_region_type ort = ORT_WORKSHARE;
9515
9516 orig_for_stmt = for_stmt = *expr_p;
9517
9518 switch (TREE_CODE (for_stmt))
9519 {
9520 case OMP_FOR:
9521 case CILK_FOR:
9522 case OMP_DISTRIBUTE:
9523 break;
9524 case OACC_LOOP:
9525 ort = ORT_ACC;
9526 break;
9527 case OMP_TASKLOOP:
9528 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9529 ort = ORT_UNTIED_TASK;
9530 else
9531 ort = ORT_TASK;
9532 break;
9533 case OMP_SIMD:
9534 case CILK_SIMD:
9535 ort = ORT_SIMD;
9536 break;
9537 default:
9538 gcc_unreachable ();
9539 }
9540
9541 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9542 clause for the IV. */
9543 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9544 {
9545 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9546 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9547 decl = TREE_OPERAND (t, 0);
9548 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9549 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9550 && OMP_CLAUSE_DECL (c) == decl)
9551 {
9552 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9553 break;
9554 }
9555 }
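
/* I.e. for

     #pragma omp simd linear(i)
     for (i = 0; i < n; i++) ...

   the user-supplied linear clause for the IV needs no copy-in: the
   loop itself assigns I before any use.  */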
9556
9557 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9558 {
9559 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9560 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9561 find_combined_omp_for, NULL, NULL);
9562 if (inner_for_stmt == NULL_TREE)
9563 {
9564 gcc_assert (seen_error ());
9565 *expr_p = NULL_TREE;
9566 return GS_ERROR;
9567 }
9568 }
9569
9570 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9571 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9572 TREE_CODE (for_stmt));
9573
9574 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9575 gimplify_omp_ctxp->distribute = true;
9576
9577 /* Handle OMP_FOR_INIT. */
9578 for_pre_body = NULL;
9579 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9580 {
9581 has_decl_expr = BITMAP_ALLOC (NULL);
9582 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9583 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9584 == VAR_DECL)
9585 {
9586 t = OMP_FOR_PRE_BODY (for_stmt);
9587 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9588 }
9589 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9590 {
9591 tree_stmt_iterator si;
9592 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9593 tsi_next (&si))
9594 {
9595 t = tsi_stmt (si);
9596 if (TREE_CODE (t) == DECL_EXPR
9597 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9598 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9599 }
9600 }
9601 }
9602 if (OMP_FOR_PRE_BODY (for_stmt))
9603 {
9604 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9605 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9606 else
9607 {
9608 struct gimplify_omp_ctx ctx;
9609 memset (&ctx, 0, sizeof (ctx));
9610 ctx.region_type = ORT_NONE;
9611 gimplify_omp_ctxp = &ctx;
9612 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9613 gimplify_omp_ctxp = NULL;
9614 }
9615 }
9616 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9617
9618 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9619 for_stmt = inner_for_stmt;
9620
/* For taskloop, we need to gimplify the start, end and step before the
taskloop, outside of the taskloop omp context.  */
9623 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9624 {
9625 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9626 {
9627 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9628 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9629 {
9630 TREE_OPERAND (t, 1)
9631 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9632 pre_p, NULL, false);
9633 tree c = build_omp_clause (input_location,
9634 OMP_CLAUSE_FIRSTPRIVATE);
9635 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9636 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9637 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9638 }
9639
9640 /* Handle OMP_FOR_COND. */
9641 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9642 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9643 {
9644 TREE_OPERAND (t, 1)
9645 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9646 gimple_seq_empty_p (for_pre_body)
9647 ? pre_p : &for_pre_body, NULL,
9648 false);
9649 tree c = build_omp_clause (input_location,
9650 OMP_CLAUSE_FIRSTPRIVATE);
9651 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9652 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9653 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9654 }
9655
9656 /* Handle OMP_FOR_INCR. */
9657 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9658 if (TREE_CODE (t) == MODIFY_EXPR)
9659 {
9660 decl = TREE_OPERAND (t, 0);
9661 t = TREE_OPERAND (t, 1);
9662 tree *tp = &TREE_OPERAND (t, 1);
9663 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9664 tp = &TREE_OPERAND (t, 0);
9665
9666 if (!is_gimple_constant (*tp))
9667 {
9668 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9669 ? pre_p : &for_pre_body;
9670 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9671 tree c = build_omp_clause (input_location,
9672 OMP_CLAUSE_FIRSTPRIVATE);
9673 OMP_CLAUSE_DECL (c) = *tp;
9674 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9675 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9676 }
9677 }
9678 }
9679
9680 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9681 OMP_TASKLOOP);
9682 }
9683
9684 if (orig_for_stmt != for_stmt)
9685 gimplify_omp_ctxp->combined_loop = true;
9686
9687 for_body = NULL;
9688 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9689 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9690 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9691 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9692
9693 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9694 bool is_doacross = false;
9695 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9696 {
9697 is_doacross = true;
9698 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9699 (OMP_FOR_INIT (for_stmt))
9700 * 2);
9701 }
9702 int collapse = 1, tile = 0;
9703 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9704 if (c)
9705 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9706 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9707 if (c)
9708 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9709 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9710 {
9711 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9712 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9713 decl = TREE_OPERAND (t, 0);
9714 gcc_assert (DECL_P (decl));
9715 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9716 || POINTER_TYPE_P (TREE_TYPE (decl)));
9717 if (is_doacross)
9718 {
9719 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9720 gimplify_omp_ctxp->loop_iter_var.quick_push
9721 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9722 else
9723 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9724 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9725 }
9726
9727 /* Make sure the iteration variable is private. */
9728 tree c = NULL_TREE;
9729 tree c2 = NULL_TREE;
9730 if (orig_for_stmt != for_stmt)
9731 /* Do this only on innermost construct for combined ones. */;
9732 else if (ort == ORT_SIMD)
9733 {
9734 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9735 (splay_tree_key) decl);
9736 omp_is_private (gimplify_omp_ctxp, decl,
9737 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9738 != 1));
9739 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9740 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9741 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9742 {
9743 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9744 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9745 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9746 if (has_decl_expr
9747 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9748 {
9749 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9750 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9751 }
9752 struct gimplify_omp_ctx *outer
9753 = gimplify_omp_ctxp->outer_context;
9754 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9755 {
9756 if (outer->region_type == ORT_WORKSHARE
9757 && outer->combined_loop)
9758 {
9759 n = splay_tree_lookup (outer->variables,
9760 (splay_tree_key)decl);
9761 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9762 {
9763 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9764 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9765 }
9766 else
9767 {
9768 struct gimplify_omp_ctx *octx = outer->outer_context;
9769 if (octx
9770 && octx->region_type == ORT_COMBINED_PARALLEL
9771 && octx->outer_context
9772 && (octx->outer_context->region_type
9773 == ORT_WORKSHARE)
9774 && octx->outer_context->combined_loop)
9775 {
9776 octx = octx->outer_context;
9777 n = splay_tree_lookup (octx->variables,
9778 (splay_tree_key)decl);
9779 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9780 {
9781 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9782 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9783 }
9784 }
9785 }
9786 }
9787 }
9788
9789 OMP_CLAUSE_DECL (c) = decl;
9790 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9791 OMP_FOR_CLAUSES (for_stmt) = c;
9792 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9793 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9794 {
9795 if (outer->region_type == ORT_WORKSHARE
9796 && outer->combined_loop)
9797 {
9798 if (outer->outer_context
9799 && (outer->outer_context->region_type
9800 == ORT_COMBINED_PARALLEL))
9801 outer = outer->outer_context;
9802 else if (omp_check_private (outer, decl, false))
9803 outer = NULL;
9804 }
9805 else if (((outer->region_type & ORT_TASK) != 0)
9806 && outer->combined_loop
9807 && !omp_check_private (gimplify_omp_ctxp,
9808 decl, false))
9809 ;
9810 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9811 {
9812 omp_notice_variable (outer, decl, true);
9813 outer = NULL;
9814 }
9815 if (outer)
9816 {
9817 n = splay_tree_lookup (outer->variables,
9818 (splay_tree_key)decl);
9819 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9820 {
9821 omp_add_variable (outer, decl,
9822 GOVD_LASTPRIVATE | GOVD_SEEN);
9823 if (outer->region_type == ORT_COMBINED_PARALLEL
9824 && outer->outer_context
9825 && (outer->outer_context->region_type
9826 == ORT_WORKSHARE)
9827 && outer->outer_context->combined_loop)
9828 {
9829 outer = outer->outer_context;
9830 n = splay_tree_lookup (outer->variables,
9831 (splay_tree_key)decl);
9832 if (omp_check_private (outer, decl, false))
9833 outer = NULL;
9834 else if (n == NULL
9835 || ((n->value & GOVD_DATA_SHARE_CLASS)
9836 == 0))
9837 omp_add_variable (outer, decl,
9838 GOVD_LASTPRIVATE
9839 | GOVD_SEEN);
9840 else
9841 outer = NULL;
9842 }
9843 if (outer && outer->outer_context
9844 && (outer->outer_context->region_type
9845 == ORT_COMBINED_TEAMS))
9846 {
9847 outer = outer->outer_context;
9848 n = splay_tree_lookup (outer->variables,
9849 (splay_tree_key)decl);
9850 if (n == NULL
9851 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9852 omp_add_variable (outer, decl,
9853 GOVD_SHARED | GOVD_SEEN);
9854 else
9855 outer = NULL;
9856 }
9857 if (outer && outer->outer_context)
9858 omp_notice_variable (outer->outer_context, decl,
9859 true);
9860 }
9861 }
9862 }
9863 }
9864 else
9865 {
9866 bool lastprivate
9867 = (!has_decl_expr
9868 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
9869 struct gimplify_omp_ctx *outer
9870 = gimplify_omp_ctxp->outer_context;
9871 if (outer && lastprivate)
9872 {
9873 if (outer->region_type == ORT_WORKSHARE
9874 && outer->combined_loop)
9875 {
9876 n = splay_tree_lookup (outer->variables,
9877 (splay_tree_key)decl);
9878 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9879 {
9880 lastprivate = false;
9881 outer = NULL;
9882 }
9883 else if (outer->outer_context
9884 && (outer->outer_context->region_type
9885 == ORT_COMBINED_PARALLEL))
9886 outer = outer->outer_context;
9887 else if (omp_check_private (outer, decl, false))
9888 outer = NULL;
9889 }
9890 else if (((outer->region_type & ORT_TASK) != 0)
9891 && outer->combined_loop
9892 && !omp_check_private (gimplify_omp_ctxp,
9893 decl, false))
9894 ;
9895 else if (outer->region_type != ORT_COMBINED_PARALLEL)
9896 {
9897 omp_notice_variable (outer, decl, true);
9898 outer = NULL;
9899 }
9900 if (outer)
9901 {
9902 n = splay_tree_lookup (outer->variables,
9903 (splay_tree_key)decl);
9904 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9905 {
9906 omp_add_variable (outer, decl,
9907 GOVD_LASTPRIVATE | GOVD_SEEN);
9908 if (outer->region_type == ORT_COMBINED_PARALLEL
9909 && outer->outer_context
9910 && (outer->outer_context->region_type
9911 == ORT_WORKSHARE)
9912 && outer->outer_context->combined_loop)
9913 {
9914 outer = outer->outer_context;
9915 n = splay_tree_lookup (outer->variables,
9916 (splay_tree_key)decl);
9917 if (omp_check_private (outer, decl, false))
9918 outer = NULL;
9919 else if (n == NULL
9920 || ((n->value & GOVD_DATA_SHARE_CLASS)
9921 == 0))
9922 omp_add_variable (outer, decl,
9923 GOVD_LASTPRIVATE
9924 | GOVD_SEEN);
9925 else
9926 outer = NULL;
9927 }
9928 if (outer && outer->outer_context
9929 && (outer->outer_context->region_type
9930 == ORT_COMBINED_TEAMS))
9931 {
9932 outer = outer->outer_context;
9933 n = splay_tree_lookup (outer->variables,
9934 (splay_tree_key)decl);
9935 if (n == NULL
9936 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
9937 omp_add_variable (outer, decl,
9938 GOVD_SHARED | GOVD_SEEN);
9939 else
9940 outer = NULL;
9941 }
9942 if (outer && outer->outer_context)
9943 omp_notice_variable (outer->outer_context, decl,
9944 true);
9945 }
9946 }
9947 }
9948
9949 c = build_omp_clause (input_location,
9950 lastprivate ? OMP_CLAUSE_LASTPRIVATE
9951 : OMP_CLAUSE_PRIVATE);
9952 OMP_CLAUSE_DECL (c) = decl;
9953 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9954 OMP_FOR_CLAUSES (for_stmt) = c;
9955 omp_add_variable (gimplify_omp_ctxp, decl,
9956 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
9957 | GOVD_EXPLICIT | GOVD_SEEN);
9958 c = NULL_TREE;
9959 }
9960 }
9961 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
9962 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9963 else
9964 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
9965
9966 /* If DECL is not a gimple register, create a temporary variable to act
9967 as an iteration counter. This is valid, since DECL cannot be
9968 modified in the body of the loop. Similarly for any iteration vars
9969 in simd with collapse > 1 where the iterator vars must be
9970 lastprivate. */
9971 if (orig_for_stmt != for_stmt)
9972 var = decl;
9973 else if (!is_gimple_reg (decl)
9974 || (ort == ORT_SIMD
9975 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
9976 {
9977 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9978 /* Make sure omp_add_variable is not called on it prematurely.
9979 We call it ourselves a few lines later. */
9980 gimplify_omp_ctxp = NULL;
9981 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9982 gimplify_omp_ctxp = ctx;
9983 TREE_OPERAND (t, 0) = var;
9984
9985 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
9986
9987 if (ort == ORT_SIMD
9988 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9989 {
9990 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9991 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
9992 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
9993 OMP_CLAUSE_DECL (c2) = var;
9994 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
9995 OMP_FOR_CLAUSES (for_stmt) = c2;
9996 omp_add_variable (gimplify_omp_ctxp, var,
9997 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
9998 if (c == NULL_TREE)
9999 {
10000 c = c2;
10001 c2 = NULL_TREE;
10002 }
10003 }
10004 else
10005 omp_add_variable (gimplify_omp_ctxp, var,
10006 GOVD_PRIVATE | GOVD_SEEN);
10007 }
10008 else
10009 var = decl;
10010
10011 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10012 is_gimple_val, fb_rvalue, false);
10013 ret = MIN (ret, tret);
10014 if (ret == GS_ERROR)
10015 return ret;
10016
10017 /* Handle OMP_FOR_COND. */
10018 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10019 gcc_assert (COMPARISON_CLASS_P (t));
10020 gcc_assert (TREE_OPERAND (t, 0) == decl);
10021
10022 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10023 is_gimple_val, fb_rvalue, false);
10024 ret = MIN (ret, tret);
10025
10026 /* Handle OMP_FOR_INCR. */
10027 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10028 switch (TREE_CODE (t))
10029 {
10030 case PREINCREMENT_EXPR:
10031 case POSTINCREMENT_EXPR:
10032 {
10033 tree decl = TREE_OPERAND (t, 0);
10034 /* c_omp_for_incr_canonicalize_ptr() should have been
10035 called to massage things appropriately. */
10036 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10037
10038 if (orig_for_stmt != for_stmt)
10039 break;
10040 t = build_int_cst (TREE_TYPE (decl), 1);
10041 if (c)
10042 OMP_CLAUSE_LINEAR_STEP (c) = t;
10043 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10044 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10045 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10046 break;
10047 }
10048
10049 case PREDECREMENT_EXPR:
10050 case POSTDECREMENT_EXPR:
10051 /* c_omp_for_incr_canonicalize_ptr() should have been
10052 called to massage things appropriately. */
10053 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10054 if (orig_for_stmt != for_stmt)
10055 break;
10056 t = build_int_cst (TREE_TYPE (decl), -1);
10057 if (c)
10058 OMP_CLAUSE_LINEAR_STEP (c) = t;
10059 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10060 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10061 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10062 break;
10063
10064 case MODIFY_EXPR:
10065 gcc_assert (TREE_OPERAND (t, 0) == decl);
10066 TREE_OPERAND (t, 0) = var;
10067
10068 t = TREE_OPERAND (t, 1);
10069 switch (TREE_CODE (t))
10070 {
10071 case PLUS_EXPR:
10072 if (TREE_OPERAND (t, 1) == decl)
10073 {
10074 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10075 TREE_OPERAND (t, 0) = var;
10076 break;
10077 }
10078
10079 /* Fallthru. */
10080 case MINUS_EXPR:
10081 case POINTER_PLUS_EXPR:
10082 gcc_assert (TREE_OPERAND (t, 0) == decl);
10083 TREE_OPERAND (t, 0) = var;
10084 break;
10085 default:
10086 gcc_unreachable ();
10087 }
10088
10089 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10090 is_gimple_val, fb_rvalue, false);
10091 ret = MIN (ret, tret);
10092 if (c)
10093 {
10094 tree step = TREE_OPERAND (t, 1);
10095 tree stept = TREE_TYPE (decl);
10096 if (POINTER_TYPE_P (stept))
10097 stept = sizetype;
10098 step = fold_convert (stept, step);
10099 if (TREE_CODE (t) == MINUS_EXPR)
10100 step = fold_build1 (NEGATE_EXPR, stept, step);
10101 OMP_CLAUSE_LINEAR_STEP (c) = step;
10102 if (step != TREE_OPERAND (t, 1))
10103 {
10104 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10105 &for_pre_body, NULL,
10106 is_gimple_val, fb_rvalue, false);
10107 ret = MIN (ret, tret);
10108 }
10109 }
10110 break;
10111
10112 default:
10113 gcc_unreachable ();
10114 }
10115
10116 if (c2)
10117 {
10118 gcc_assert (c);
10119 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10120 }
10121
10122 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10123 {
10124 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10125 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10126 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10127 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10128 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10129 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10130 && OMP_CLAUSE_DECL (c) == decl)
10131 {
10132 if (is_doacross && (collapse == 1 || i >= collapse))
10133 t = var;
10134 else
10135 {
10136 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10137 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10138 gcc_assert (TREE_OPERAND (t, 0) == var);
10139 t = TREE_OPERAND (t, 1);
10140 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10141 || TREE_CODE (t) == MINUS_EXPR
10142 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10143 gcc_assert (TREE_OPERAND (t, 0) == var);
10144 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10145 is_doacross ? var : decl,
10146 TREE_OPERAND (t, 1));
10147 }
10148 gimple_seq *seq;
10149 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10150 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10151 else
10152 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10153 gimplify_assign (decl, t, seq);
10154 }
10155 }
10156 }
10157
10158 BITMAP_FREE (has_decl_expr);
10159
10160 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10161 {
10162 push_gimplify_context ();
10163 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10164 {
10165 OMP_FOR_BODY (orig_for_stmt)
10166 = build3 (BIND_EXPR, void_type_node, NULL,
10167 OMP_FOR_BODY (orig_for_stmt), NULL);
10168 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10169 }
10170 }
10171
10172 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10173 &for_body);
10174
10175 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10176 {
10177 if (gimple_code (g) == GIMPLE_BIND)
10178 pop_gimplify_context (g);
10179 else
10180 pop_gimplify_context (NULL);
10181 }
10182
10183 if (orig_for_stmt != for_stmt)
10184 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10185 {
10186 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10187 decl = TREE_OPERAND (t, 0);
10188 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10189 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10190 gimplify_omp_ctxp = ctx->outer_context;
10191 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10192 gimplify_omp_ctxp = ctx;
10193 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10194 TREE_OPERAND (t, 0) = var;
10195 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10196 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10197 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10198 }
10199
10200 gimplify_adjust_omp_clauses (pre_p, for_body,
10201 &OMP_FOR_CLAUSES (orig_for_stmt),
10202 TREE_CODE (orig_for_stmt));
10203
10204 int kind;
10205 switch (TREE_CODE (orig_for_stmt))
10206 {
10207 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10208 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10209 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
10210 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
10211 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10212 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10213 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10214 default:
10215 gcc_unreachable ();
10216 }
10217 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10218 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10219 for_pre_body);
10220 if (orig_for_stmt != for_stmt)
10221 gimple_omp_for_set_combined_p (gfor, true);
10222 if (gimplify_omp_ctxp
10223 && (gimplify_omp_ctxp->combined_loop
10224 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10225 && gimplify_omp_ctxp->outer_context
10226 && gimplify_omp_ctxp->outer_context->combined_loop)))
10227 {
10228 gimple_omp_for_set_combined_into_p (gfor, true);
10229 if (gimplify_omp_ctxp->combined_loop)
10230 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10231 else
10232 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10233 }
10234
10235 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10236 {
10237 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10238 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10239 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10240 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10241 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10242 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10243 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10244 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10245 }
10246
/* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
constructs with a GIMPLE_OMP_TASK sandwiched in between them.
The outer taskloop computes the number of iterations and the counts
for collapsed loops, and holds the taskloop-specific clauses.  The
task construct stands for the effect of data sharing on the explicit
task it creates, and the inner taskloop stands for the expansion of
the static loop inside of the explicit task construct.  */
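/* Roughly, as a sketch of the generated shape (not literal GIMPLE):

     #pragma omp taskloop grainsize(g) firstprivate(x)
     for (i = a; i < b; i += s) body;

   becomes

     taskloop grainsize(g)              <- outer GIMPLE_OMP_FOR
       task firstprivate(x)             <- GIMPLE_OMP_TASK
         taskloop                       <- inner GIMPLE_OMP_FOR
           for (i = a; i < b; i += s) body;

   with the clauses distributed by the switch below.  */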
10254 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10255 {
10256 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10257 tree task_clauses = NULL_TREE;
10258 tree c = *gfor_clauses_ptr;
10259 tree *gtask_clauses_ptr = &task_clauses;
10260 tree outer_for_clauses = NULL_TREE;
10261 tree *gforo_clauses_ptr = &outer_for_clauses;
10262 for (; c; c = OMP_CLAUSE_CHAIN (c))
10263 switch (OMP_CLAUSE_CODE (c))
10264 {
10265 /* These clauses are allowed on task, move them there. */
10266 case OMP_CLAUSE_SHARED:
10267 case OMP_CLAUSE_FIRSTPRIVATE:
10268 case OMP_CLAUSE_DEFAULT:
10269 case OMP_CLAUSE_IF:
10270 case OMP_CLAUSE_UNTIED:
10271 case OMP_CLAUSE_FINAL:
10272 case OMP_CLAUSE_MERGEABLE:
10273 case OMP_CLAUSE_PRIORITY:
10274 *gtask_clauses_ptr = c;
10275 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10276 break;
10277 case OMP_CLAUSE_PRIVATE:
10278 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10279 {
10280 /* We want private on outer for and firstprivate
10281 on task. */
10282 *gtask_clauses_ptr
10283 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10284 OMP_CLAUSE_FIRSTPRIVATE);
10285 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10286 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10287 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10288 *gforo_clauses_ptr = c;
10289 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10290 }
10291 else
10292 {
10293 *gtask_clauses_ptr = c;
10294 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10295 }
10296 break;
10297 /* These clauses go into outer taskloop clauses. */
10298 case OMP_CLAUSE_GRAINSIZE:
10299 case OMP_CLAUSE_NUM_TASKS:
10300 case OMP_CLAUSE_NOGROUP:
10301 *gforo_clauses_ptr = c;
10302 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10303 break;
/* Taskloop clauses we duplicate on both taskloops.  */
10305 case OMP_CLAUSE_COLLAPSE:
10306 *gfor_clauses_ptr = c;
10307 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10308 *gforo_clauses_ptr = copy_node (c);
10309 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10310 break;
/* For lastprivate, keep the clause on the inner taskloop, and add
a shared clause on the task.  If the same decl is also firstprivate,
also add a firstprivate clause on the inner taskloop.  */
10314 case OMP_CLAUSE_LASTPRIVATE:
10315 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10316 {
10317 /* For taskloop C++ lastprivate IVs, we want:
10318 1) private on outer taskloop
10319 2) firstprivate and shared on task
10320 3) lastprivate on inner taskloop */
10321 *gtask_clauses_ptr
10322 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10323 OMP_CLAUSE_FIRSTPRIVATE);
10324 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10325 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10326 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10327 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10328 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10329 OMP_CLAUSE_PRIVATE);
10330 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10331 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10332 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10333 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10334 }
10335 *gfor_clauses_ptr = c;
10336 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10337 *gtask_clauses_ptr
10338 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10339 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10340 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10341 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10342 gtask_clauses_ptr
10343 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10344 break;
10345 default:
10346 gcc_unreachable ();
10347 }
10348 *gfor_clauses_ptr = NULL_TREE;
10349 *gtask_clauses_ptr = NULL_TREE;
10350 *gforo_clauses_ptr = NULL_TREE;
10351 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10352 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10353 NULL_TREE, NULL_TREE, NULL_TREE);
10354 gimple_omp_task_set_taskloop_p (g, true);
10355 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10356 gomp_for *gforo
10357 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10358 gimple_omp_for_collapse (gfor),
10359 gimple_omp_for_pre_body (gfor));
10360 gimple_omp_for_set_pre_body (gfor, NULL);
10361 gimple_omp_for_set_combined_p (gforo, true);
10362 gimple_omp_for_set_combined_into_p (gfor, true);
10363 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10364 {
10365 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10366 tree v = create_tmp_var (type);
10367 gimple_omp_for_set_index (gforo, i, v);
10368 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10369 gimple_omp_for_set_initial (gforo, i, t);
10370 gimple_omp_for_set_cond (gforo, i,
10371 gimple_omp_for_cond (gfor, i));
10372 t = unshare_expr (gimple_omp_for_final (gfor, i));
10373 gimple_omp_for_set_final (gforo, i, t);
10374 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10375 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10376 TREE_OPERAND (t, 0) = v;
10377 gimple_omp_for_set_incr (gforo, i, t);
10378 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10379 OMP_CLAUSE_DECL (t) = v;
10380 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10381 gimple_omp_for_set_clauses (gforo, t);
10382 }
10383 gimplify_seq_add_stmt (pre_p, gforo);
10384 }
10385 else
10386 gimplify_seq_add_stmt (pre_p, gfor);
10387 if (ret != GS_ALL_DONE)
10388 return GS_ERROR;
10389 *expr_p = NULL_TREE;
10390 return GS_ALL_DONE;
10391 }
10392
10393 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10394 of OMP_TARGET's body. */
10395
10396 static tree
10397 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10398 {
10399 *walk_subtrees = 0;
10400 switch (TREE_CODE (*tp))
10401 {
10402 case OMP_TEAMS:
10403 return *tp;
10404 case BIND_EXPR:
10405 case STATEMENT_LIST:
10406 *walk_subtrees = 1;
10407 break;
10408 default:
10409 break;
10410 }
10411 return NULL_TREE;
10412 }
10413
10414 /* Helper function of optimize_target_teams, determine if the expression
10415 can be computed safely before the target construct on the host. */
10416
10417 static tree
10418 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
10419 {
10420 splay_tree_node n;
10421
10422 if (TYPE_P (*tp))
10423 {
10424 *walk_subtrees = 0;
10425 return NULL_TREE;
10426 }
10427 switch (TREE_CODE (*tp))
10428 {
10429 case VAR_DECL:
10430 case PARM_DECL:
10431 case RESULT_DECL:
10432 *walk_subtrees = 0;
10433 if (error_operand_p (*tp)
10434 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
10435 || DECL_HAS_VALUE_EXPR_P (*tp)
10436 || DECL_THREAD_LOCAL_P (*tp)
10437 || TREE_SIDE_EFFECTS (*tp)
10438 || TREE_THIS_VOLATILE (*tp))
10439 return *tp;
10440 if (is_global_var (*tp)
10441 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
10442 || lookup_attribute ("omp declare target link",
10443 DECL_ATTRIBUTES (*tp))))
10444 return *tp;
10445 if (VAR_P (*tp)
10446 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
10447 && !is_global_var (*tp)
10448 && decl_function_context (*tp) == current_function_decl)
10449 return *tp;
10450 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
10451 (splay_tree_key) *tp);
10452 if (n == NULL)
10453 {
10454 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
10455 return NULL_TREE;
10456 return *tp;
10457 }
10458 else if (n->value & GOVD_LOCAL)
10459 return *tp;
10460 else if (n->value & GOVD_FIRSTPRIVATE)
10461 return NULL_TREE;
10462 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10463 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
10464 return NULL_TREE;
10465 return *tp;
10466 case INTEGER_CST:
10467 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10468 return *tp;
10469 return NULL_TREE;
10470 case TARGET_EXPR:
10471 if (TARGET_EXPR_INITIAL (*tp)
10472 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
10473 return *tp;
10474 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
10475 walk_subtrees, NULL);
/* Allow some reasonable subset of integral arithmetic.  */
10477 case PLUS_EXPR:
10478 case MINUS_EXPR:
10479 case MULT_EXPR:
10480 case TRUNC_DIV_EXPR:
10481 case CEIL_DIV_EXPR:
10482 case FLOOR_DIV_EXPR:
10483 case ROUND_DIV_EXPR:
10484 case TRUNC_MOD_EXPR:
10485 case CEIL_MOD_EXPR:
10486 case FLOOR_MOD_EXPR:
10487 case ROUND_MOD_EXPR:
10488 case RDIV_EXPR:
10489 case EXACT_DIV_EXPR:
10490 case MIN_EXPR:
10491 case MAX_EXPR:
10492 case LSHIFT_EXPR:
10493 case RSHIFT_EXPR:
10494 case BIT_IOR_EXPR:
10495 case BIT_XOR_EXPR:
10496 case BIT_AND_EXPR:
10497 case NEGATE_EXPR:
10498 case ABS_EXPR:
10499 case BIT_NOT_EXPR:
10500 case NON_LVALUE_EXPR:
10501 CASE_CONVERT:
10502 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
10503 return *tp;
10504 return NULL_TREE;
10505 /* And disallow anything else, except for comparisons. */
10506 default:
10507 if (COMPARISON_CLASS_P (*tp))
10508 return NULL_TREE;
10509 return *tp;
10510 }
10511 }
10512
/* Try to determine if the num_teams and/or thread_limit expressions
can have their values determined already before entering the
target construct.
INTEGER_CSTs trivially can; so can integral decls that are
firstprivate (explicitly or implicitly) or explicitly
map(always, to:) or map(always, tofrom:) on the target region,
and expressions involving simple arithmetic on those; function
calls are not OK, nor is dereferencing something, etc.
Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
TARGET based on what we find:
0 stands for clause not specified at all, use implementation default;
-1 stands for a value that can't be determined easily before entering
the target construct.
If the teams construct is not present at all, use 1 for num_teams
and 0 for thread_limit (only one team is involved, and the thread
limit is implementation defined).  */
10529
10530 static void
10531 optimize_target_teams (tree target, gimple_seq *pre_p)
10532 {
10533 tree body = OMP_BODY (target);
10534 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
10535 tree num_teams = integer_zero_node;
10536 tree thread_limit = integer_zero_node;
10537 location_t num_teams_loc = EXPR_LOCATION (target);
10538 location_t thread_limit_loc = EXPR_LOCATION (target);
10539 tree c, *p, expr;
10540 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
10541
10542 if (teams == NULL_TREE)
10543 num_teams = integer_one_node;
10544 else
10545 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
10546 {
10547 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10548 {
10549 p = &num_teams;
10550 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10551 }
10552 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10553 {
10554 p = &thread_limit;
10555 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10556 }
10557 else
10558 continue;
10559 expr = OMP_CLAUSE_OPERAND (c, 0);
10560 if (TREE_CODE (expr) == INTEGER_CST)
10561 {
10562 *p = expr;
10563 continue;
10564 }
10565 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10566 {
10567 *p = integer_minus_one_node;
10568 continue;
10569 }
10570 *p = expr;
10571 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
10572 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
10573 == GS_ERROR)
10574 {
10575 gimplify_omp_ctxp = target_ctx;
10576 *p = integer_minus_one_node;
10577 continue;
10578 }
10579 gimplify_omp_ctxp = target_ctx;
10580 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10581 OMP_CLAUSE_OPERAND (c, 0) = *p;
10582 }
10583 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10584 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10585 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10586 OMP_TARGET_CLAUSES (target) = c;
10587 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10588 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10589 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10590 OMP_TARGET_CLAUSES (target) = c;
10591 }
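
/* For example (a sketch):

     #pragma omp target
     #pragma omp teams num_teams(4) thread_limit(n)

   records 4 directly (an INTEGER_CST), while N is copied to the added
   NUM_TEAMS/THREAD_LIMIT clause only if it can be evaluated on the
   host before the construct; otherwise -1 ("compute it inside") is
   recorded.  */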
10592
10593 /* Gimplify the gross structure of several OMP constructs. */
10594
10595 static void
10596 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10597 {
10598 tree expr = *expr_p;
10599 gimple *stmt;
10600 gimple_seq body = NULL;
10601 enum omp_region_type ort;
10602
10603 switch (TREE_CODE (expr))
10604 {
10605 case OMP_SECTIONS:
10606 case OMP_SINGLE:
10607 ort = ORT_WORKSHARE;
10608 break;
10609 case OMP_TARGET:
10610 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10611 break;
10612 case OACC_KERNELS:
10613 ort = ORT_ACC_KERNELS;
10614 break;
10615 case OACC_PARALLEL:
10616 ort = ORT_ACC_PARALLEL;
10617 break;
10618 case OACC_DATA:
10619 ort = ORT_ACC_DATA;
10620 break;
10621 case OMP_TARGET_DATA:
10622 ort = ORT_TARGET_DATA;
10623 break;
10624 case OMP_TEAMS:
10625 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
10626 break;
10627 case OACC_HOST_DATA:
10628 ort = ORT_ACC_HOST_DATA;
10629 break;
10630 default:
10631 gcc_unreachable ();
10632 }
10633 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10634 TREE_CODE (expr));
10635 if (TREE_CODE (expr) == OMP_TARGET)
10636 optimize_target_teams (expr, pre_p);
10637 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
10638 {
10639 push_gimplify_context ();
10640 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
10641 if (gimple_code (g) == GIMPLE_BIND)
10642 pop_gimplify_context (g);
10643 else
10644 pop_gimplify_context (NULL);
10645 if ((ort & ORT_TARGET_DATA) != 0)
10646 {
10647 enum built_in_function end_ix;
10648 switch (TREE_CODE (expr))
10649 {
10650 case OACC_DATA:
10651 case OACC_HOST_DATA:
10652 end_ix = BUILT_IN_GOACC_DATA_END;
10653 break;
10654 case OMP_TARGET_DATA:
10655 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10656 break;
10657 default:
10658 gcc_unreachable ();
10659 }
10660 tree fn = builtin_decl_explicit (end_ix);
10661 g = gimple_build_call (fn, 0);
10662 gimple_seq cleanup = NULL;
10663 gimple_seq_add_stmt (&cleanup, g);
10664 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10665 body = NULL;
10666 gimple_seq_add_stmt (&body, g);
10667 }
10668 }
10669 else
10670 gimplify_and_add (OMP_BODY (expr), &body);
10671 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10672 TREE_CODE (expr));
10673
10674 switch (TREE_CODE (expr))
10675 {
10676 case OACC_DATA:
10677 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10678 OMP_CLAUSES (expr));
10679 break;
10680 case OACC_KERNELS:
10681 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10682 OMP_CLAUSES (expr));
10683 break;
10684 case OACC_HOST_DATA:
10685 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10686 OMP_CLAUSES (expr));
10687 break;
10688 case OACC_PARALLEL:
10689 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10690 OMP_CLAUSES (expr));
10691 break;
10692 case OMP_SECTIONS:
10693 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10694 break;
10695 case OMP_SINGLE:
10696 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10697 break;
10698 case OMP_TARGET:
10699 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10700 OMP_CLAUSES (expr));
10701 break;
10702 case OMP_TARGET_DATA:
10703 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10704 OMP_CLAUSES (expr));
10705 break;
10706 case OMP_TEAMS:
10707 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10708 break;
10709 default:
10710 gcc_unreachable ();
10711 }
10712
10713 gimplify_seq_add_stmt (pre_p, stmt);
10714 *expr_p = NULL_TREE;
10715 }
10716
10717 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10718 target update constructs. */
10719
10720 static void
10721 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10722 {
10723 tree expr = *expr_p;
10724 int kind;
10725 gomp_target *stmt;
10726 enum omp_region_type ort = ORT_WORKSHARE;
10727
10728 switch (TREE_CODE (expr))
10729 {
10730 case OACC_ENTER_DATA:
10731 case OACC_EXIT_DATA:
10732 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10733 ort = ORT_ACC;
10734 break;
10735 case OACC_UPDATE:
10736 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10737 ort = ORT_ACC;
10738 break;
10739 case OMP_TARGET_UPDATE:
10740 kind = GF_OMP_TARGET_KIND_UPDATE;
10741 break;
10742 case OMP_TARGET_ENTER_DATA:
10743 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10744 break;
10745 case OMP_TARGET_EXIT_DATA:
10746 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10747 break;
10748 default:
10749 gcc_unreachable ();
10750 }
10751 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10752 ort, TREE_CODE (expr));
10753 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10754 TREE_CODE (expr));
10755 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10756
10757 gimplify_seq_add_stmt (pre_p, stmt);
10758 *expr_p = NULL_TREE;
10759 }
10760
10761 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10762 stabilized the lhs of the atomic operation as *ADDR. Return true if
10763 EXPR is this stabilized form. */
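/* For instance (an illustrative example): for "#pragma omp atomic" on
   "v = v + 1" with "volatile int v", the front end presents the lhs as
   *&v; ADDR is then &v and the stabilized EXPR is *&v, possibly wrapped
   in type-variant conversions that the loop below looks through.  */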
10764
10765 static bool
10766 goa_lhs_expr_p (tree expr, tree addr)
10767 {
10768 /* Also include casts to other type variants. The C front end is fond
10769 of adding these for e.g. volatile variables. This is like
10770 STRIP_TYPE_NOPS but includes the main variant lookup. */
10771 STRIP_USELESS_TYPE_CONVERSION (expr);
10772
10773 if (TREE_CODE (expr) == INDIRECT_REF)
10774 {
10775 expr = TREE_OPERAND (expr, 0);
10776 while (expr != addr
10777 && (CONVERT_EXPR_P (expr)
10778 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10779 && TREE_CODE (expr) == TREE_CODE (addr)
10780 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10781 {
10782 expr = TREE_OPERAND (expr, 0);
10783 addr = TREE_OPERAND (addr, 0);
10784 }
10785 if (expr == addr)
10786 return true;
10787 return (TREE_CODE (addr) == ADDR_EXPR
10788 && TREE_CODE (expr) == ADDR_EXPR
10789 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10790 }
10791 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10792 return true;
10793 return false;
10794 }
10795
10796 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
10797 expression does not involve the lhs, evaluate it into a temporary.
10798 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
10799 or -1 if an error was encountered. */
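/* For example (a sketch; the exact temporaries depend on the operands):
   stabilizing the rhs "x + y * z" against LHS_ADDR == &x replaces the
   appearance of x with LHS_VAR and pre-evaluates the rest, roughly

     t.1 = y * z;
     <rhs> = lhs_var + t.1;  */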
10800
10801 static int
10802 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
10803 tree lhs_var)
10804 {
10805 tree expr = *expr_p;
10806 int saw_lhs;
10807
10808 if (goa_lhs_expr_p (expr, lhs_addr))
10809 {
10810 *expr_p = lhs_var;
10811 return 1;
10812 }
10813 if (is_gimple_val (expr))
10814 return 0;
10815
10816 saw_lhs = 0;
10817 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
10818 {
10819 case tcc_binary:
10820 case tcc_comparison:
10821 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
10822 lhs_var);
10823 /* FALLTHRU */
10824 case tcc_unary:
10825 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
10826 lhs_var);
10827 break;
10828 case tcc_expression:
10829 switch (TREE_CODE (expr))
10830 {
10831 case TRUTH_ANDIF_EXPR:
10832 case TRUTH_ORIF_EXPR:
10833 case TRUTH_AND_EXPR:
10834 case TRUTH_OR_EXPR:
10835 case TRUTH_XOR_EXPR:
10836 case BIT_INSERT_EXPR:
10837 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
10838 lhs_addr, lhs_var);
10839 /* FALLTHRU */
10840 case TRUTH_NOT_EXPR:
10841 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10842 lhs_addr, lhs_var);
10843 break;
10844 case COMPOUND_EXPR:
10845 /* Break out any preevaluations from cp_build_modify_expr. */
10846 for (; TREE_CODE (expr) == COMPOUND_EXPR;
10847 expr = TREE_OPERAND (expr, 1))
10848 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
10849 *expr_p = expr;
10850 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
10851 default:
10852 break;
10853 }
10854 break;
10855 case tcc_reference:
10856 if (TREE_CODE (expr) == BIT_FIELD_REF)
10857 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
10858 lhs_addr, lhs_var);
10859 break;
10860 default:
10861 break;
10862 }
10863
10864 if (saw_lhs == 0)
10865 {
10866 enum gimplify_status gs;
10867 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
10868 if (gs != GS_ALL_DONE)
10869 saw_lhs = -1;
10870 }
10871
10872 return saw_lhs;
10873 }
10874
10875 /* Gimplify an OMP_ATOMIC statement. */
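/* For illustration (a sketch, not copied from a dump): for
   "#pragma omp atomic" on "x = x + 1", with ADDR == &x, we emit roughly

     tmp_load = OMP_ATOMIC_LOAD <&x>
     OMP_ATOMIC_STORE <tmp_load + 1>

   i.e. an atomic load of the location into a temporary followed by an
   atomic store of the stabilized rhs.  */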
10876
10877 static enum gimplify_status
10878 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
10879 {
10880 tree addr = TREE_OPERAND (*expr_p, 0);
10881 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
10882 ? NULL : TREE_OPERAND (*expr_p, 1);
10883 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
10884 tree tmp_load;
10885 gomp_atomic_load *loadstmt;
10886 gomp_atomic_store *storestmt;
10887
10888 tmp_load = create_tmp_reg (type);
10889 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
10890 return GS_ERROR;
10891
10892 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
10893 != GS_ALL_DONE)
10894 return GS_ERROR;
10895
10896 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
10897 gimplify_seq_add_stmt (pre_p, loadstmt);
10898 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
10899 != GS_ALL_DONE)
10900 return GS_ERROR;
10901
10902 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
10903 rhs = tmp_load;
10904 storestmt = gimple_build_omp_atomic_store (rhs);
10905 gimplify_seq_add_stmt (pre_p, storestmt);
10906 if (OMP_ATOMIC_SEQ_CST (*expr_p))
10907 {
10908 gimple_omp_atomic_set_seq_cst (loadstmt);
10909 gimple_omp_atomic_set_seq_cst (storestmt);
10910 }
10911 switch (TREE_CODE (*expr_p))
10912 {
10913 case OMP_ATOMIC_READ:
10914 case OMP_ATOMIC_CAPTURE_OLD:
10915 *expr_p = tmp_load;
10916 gimple_omp_atomic_set_need_value (loadstmt);
10917 break;
10918 case OMP_ATOMIC_CAPTURE_NEW:
10919 *expr_p = rhs;
10920 gimple_omp_atomic_set_need_value (storestmt);
10921 break;
10922 default:
10923 *expr_p = NULL;
10924 break;
10925 }
10926
10927 return GS_ALL_DONE;
10928 }
10929
10930 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
10931 body, and adding some EH bits. */
10932
10933 static enum gimplify_status
10934 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
10935 {
10936 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
10937 gimple *body_stmt;
10938 gtransaction *trans_stmt;
10939 gimple_seq body = NULL;
10940 int subcode = 0;
10941
10942 /* Wrap the transaction body in a BIND_EXPR so we have a context
10943 in which to put decls for OMP. */
10944 if (TREE_CODE (tbody) != BIND_EXPR)
10945 {
10946 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
10947 TREE_SIDE_EFFECTS (bind) = 1;
10948 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
10949 TRANSACTION_EXPR_BODY (expr) = bind;
10950 }
10951
10952 push_gimplify_context ();
10953 temp = voidify_wrapper_expr (*expr_p, NULL);
10954
10955 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
10956 pop_gimplify_context (body_stmt);
10957
10958 trans_stmt = gimple_build_transaction (body);
10959 if (TRANSACTION_EXPR_OUTER (expr))
10960 subcode = GTMA_IS_OUTER;
10961 else if (TRANSACTION_EXPR_RELAXED (expr))
10962 subcode = GTMA_IS_RELAXED;
10963 gimple_transaction_set_subcode (trans_stmt, subcode);
10964
10965 gimplify_seq_add_stmt (pre_p, trans_stmt);
10966
10967 if (temp)
10968 {
10969 *expr_p = temp;
10970 return GS_OK;
10971 }
10972
10973 *expr_p = NULL_TREE;
10974 return GS_ALL_DONE;
10975 }
10976
10977 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
10978 is the OMP_BODY of the original EXPR (which has already been
10979 gimplified so it's not present in the EXPR).
10980
10981 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
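/* For instance (an illustrative example): inside a loop nest under
   "#pragma omp for ordered(2)", a conforming
   "#pragma omp ordered depend (sink: i - 1, j)" must name the two
   iteration variables in loop order; mismatched names or a wrong count
   are diagnosed below.  */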
10982
10983 static gimple *
10984 gimplify_omp_ordered (tree expr, gimple_seq body)
10985 {
10986 tree c, decls;
10987 int failures = 0;
10988 unsigned int i;
10989 tree source_c = NULL_TREE;
10990 tree sink_c = NULL_TREE;
10991
10992 if (gimplify_omp_ctxp)
10993 {
10994 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10995 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10996 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
10997 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
10998 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
10999 {
11000 error_at (OMP_CLAUSE_LOCATION (c),
11001 "%<ordered%> construct with %<depend%> clause must be "
11002 "closely nested inside a loop with %<ordered%> clause "
11003 "with a parameter");
11004 failures++;
11005 }
11006 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11007 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
11008 {
11009 bool fail = false;
11010 for (decls = OMP_CLAUSE_DECL (c), i = 0;
11011 decls && TREE_CODE (decls) == TREE_LIST;
11012 decls = TREE_CHAIN (decls), ++i)
11013 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
11014 continue;
11015 else if (TREE_VALUE (decls)
11016 != gimplify_omp_ctxp->loop_iter_var[2 * i])
11017 {
11018 error_at (OMP_CLAUSE_LOCATION (c),
11019 "variable %qE is not an iteration "
11020 "of outermost loop %d, expected %qE",
11021 TREE_VALUE (decls), i + 1,
11022 gimplify_omp_ctxp->loop_iter_var[2 * i]);
11023 fail = true;
11024 failures++;
11025 }
11026 else
11027 TREE_VALUE (decls)
11028 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
11029 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
11030 {
11031 error_at (OMP_CLAUSE_LOCATION (c),
11032 "number of variables in %<depend(sink)%> "
11033 "clause does not match number of "
11034 "iteration variables");
11035 failures++;
11036 }
11037 sink_c = c;
11038 }
11039 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11040 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
11041 {
11042 if (source_c)
11043 {
11044 error_at (OMP_CLAUSE_LOCATION (c),
11045 "more than one %<depend(source)%> clause on an "
11046 "%<ordered%> construct");
11047 failures++;
11048 }
11049 else
11050 source_c = c;
11051 }
11052 }
11053 if (source_c && sink_c)
11054 {
11055 error_at (OMP_CLAUSE_LOCATION (source_c),
11056 "%<depend(source)%> clause specified together with "
11057 "%<depend(sink:)%> clauses on the same construct");
11058 failures++;
11059 }
11060
11061 if (failures)
11062 return gimple_build_nop ();
11063 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
11064 }
11065
11066 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11067 expression produces a value to be used as an operand inside a GIMPLE
11068 statement, the value will be stored back in *EXPR_P. This value will
11069 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11070 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11071 emitted in PRE_P and POST_P.
11072
11073 Additionally, this process may overwrite parts of the input
11074 expression during gimplification. Ideally, it should be
11075 possible to do non-destructive gimplification.
11076
11077 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11078 the expression needs to evaluate to a value to be used as
11079 an operand in a GIMPLE statement, this value will be stored in
11080 *EXPR_P on exit. This happens when the caller specifies one
11081 of fb_lvalue or fb_rvalue fallback flags.
11082
11083 PRE_P will contain the sequence of GIMPLE statements corresponding
11084 to the evaluation of EXPR and all the side-effects that must
11085 be executed before the main expression. On exit, the last
11086 statement of PRE_P is the core statement being gimplified. For
11087 instance, when gimplifying 'if (++a)' the last statement in
11088 PRE_P will be 'if (t.1)' where t.1 is the result of
11089 pre-incrementing 'a'.
11090
11091 POST_P will contain the sequence of GIMPLE statements corresponding
11092 to the evaluation of all the side-effects that must be executed
11093 after the main expression. If this is NULL, the post
11094 side-effects are stored at the end of PRE_P.
11095
11096 The reason why the output is split in two is to handle post
11097 side-effects explicitly. In some cases, an expression may have
11098 inner and outer post side-effects which need to be emitted in
11099 an order different from the one given by the recursive
11100 traversal. For instance, for the expression (*p--)++ the post
11101 side-effects of '--' must actually occur *after* the post
11102 side-effects of '++'. However, gimplification will first visit
11103 the inner expression, so if a separate POST sequence was not
11104 used, the resulting sequence would be:
11105
11106 1 t.1 = *p
11107 2 p = p - 1
11108 3 t.2 = t.1 + 1
11109 4 *p = t.2
11110
11111 However, the post-decrement operation in line #2 must not be
11112 evaluated until after the store to *p at line #4, so the
11113 correct sequence should be:
11114
11115 1 t.1 = *p
11116 2 t.2 = t.1 + 1
11117 3 *p = t.2
11118 4 p = p - 1
11119
11120 So, by specifying a separate post queue, it is possible
11121 to emit the post side-effects in the correct order.
11122 If POST_P is NULL, an internal queue will be used. Before
11123 returning to the caller, the sequence POST_P is appended to
11124 the main output sequence PRE_P.
11125
11126 GIMPLE_TEST_F points to a function that takes a tree T and
11127 returns nonzero if T is in the GIMPLE form requested by the
11128 caller. The GIMPLE predicates are in gimple.c.
11129
11130 FALLBACK tells the function what sort of a temporary we want if
11131 gimplification cannot produce an expression that complies with
11132 GIMPLE_TEST_F.
11133
11134 fb_none means that no temporary should be generated
11135 fb_rvalue means that an rvalue is OK to generate
11136 fb_lvalue means that an lvalue is OK to generate
11137 fb_either means that either is OK, but an lvalue is preferable.
11138 fb_mayfail means that gimplification may fail (in which case
11139 GS_ERROR will be returned)
11140
11141 The return value is either GS_ERROR or GS_ALL_DONE, since this
11142 function iterates until EXPR is completely gimplified or an error
11143 occurs. */
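/* A representative call from inside this file (a sketch of typical
   usage, not part of the interface):

     tree op = TREE_OPERAND (*expr_p, 0);
     if (gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue)
         == GS_ERROR)
       return GS_ERROR;
     TREE_OPERAND (*expr_p, 0) = op;

   i.e. the caller states the predicate the operand must satisfy and the
   fallback saying which kind of temporary is acceptable.  */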
11144
11145 enum gimplify_status
11146 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11147 bool (*gimple_test_f) (tree), fallback_t fallback)
11148 {
11149 tree tmp;
11150 gimple_seq internal_pre = NULL;
11151 gimple_seq internal_post = NULL;
11152 tree save_expr;
11153 bool is_statement;
11154 location_t saved_location;
11155 enum gimplify_status ret;
11156 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11157 tree label;
11158
11159 save_expr = *expr_p;
11160 if (save_expr == NULL_TREE)
11161 return GS_ALL_DONE;
11162
11163 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11164 is_statement = gimple_test_f == is_gimple_stmt;
11165 if (is_statement)
11166 gcc_assert (pre_p);
11167
11168 /* Consistency checks. */
11169 if (gimple_test_f == is_gimple_reg)
11170 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11171 else if (gimple_test_f == is_gimple_val
11172 || gimple_test_f == is_gimple_call_addr
11173 || gimple_test_f == is_gimple_condexpr
11174 || gimple_test_f == is_gimple_mem_rhs
11175 || gimple_test_f == is_gimple_mem_rhs_or_call
11176 || gimple_test_f == is_gimple_reg_rhs
11177 || gimple_test_f == is_gimple_reg_rhs_or_call
11178 || gimple_test_f == is_gimple_asm_val
11179 || gimple_test_f == is_gimple_mem_ref_addr)
11180 gcc_assert (fallback & fb_rvalue);
11181 else if (gimple_test_f == is_gimple_min_lval
11182 || gimple_test_f == is_gimple_lvalue)
11183 gcc_assert (fallback & fb_lvalue);
11184 else if (gimple_test_f == is_gimple_addressable)
11185 gcc_assert (fallback & fb_either);
11186 else if (gimple_test_f == is_gimple_stmt)
11187 gcc_assert (fallback == fb_none);
11188 else
11189 {
11190 /* We should have recognized the GIMPLE_TEST_F predicate to
11191 know what kind of fallback to use in case a temporary is
11192 needed to hold the value or address of *EXPR_P. */
11193 gcc_unreachable ();
11194 }
11195
11196 /* We used to check the predicate here and return immediately if it
11197 succeeded. This is wrong; the design is for gimplification to be
11198 idempotent, and for the predicates to only test for valid forms, not
11199 whether they are fully simplified. */
11200 if (pre_p == NULL)
11201 pre_p = &internal_pre;
11202
11203 if (post_p == NULL)
11204 post_p = &internal_post;
11205
11206 /* Remember the last statements added to PRE_P and POST_P. Every
11207 new statement added by the gimplification helpers needs to be
11208 annotated with location information. To centralize the
11209 responsibility, we remember the last statement that had been
11210 added to both queues before gimplifying *EXPR_P. If
11211 gimplification produces new statements in PRE_P and POST_P, those
11212 statements will be annotated with the same location information
11213 as *EXPR_P. */
11214 pre_last_gsi = gsi_last (*pre_p);
11215 post_last_gsi = gsi_last (*post_p);
11216
11217 saved_location = input_location;
11218 if (save_expr != error_mark_node
11219 && EXPR_HAS_LOCATION (*expr_p))
11220 input_location = EXPR_LOCATION (*expr_p);
11221
11222 /* Loop over the specific gimplifiers until the toplevel node
11223 remains the same. */
11224 do
11225 {
11226 /* Strip away as many useless type conversions as possible
11227 at the toplevel. */
11228 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11229
11230 /* Remember the expr. */
11231 save_expr = *expr_p;
11232
11233 /* Die, die, die, my darling. */
11234 if (save_expr == error_mark_node
11235 || (TREE_TYPE (save_expr)
11236 && TREE_TYPE (save_expr) == error_mark_node))
11237 {
11238 ret = GS_ERROR;
11239 break;
11240 }
11241
11242 /* Do any language-specific gimplification. */
11243 ret = ((enum gimplify_status)
11244 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11245 if (ret == GS_OK)
11246 {
11247 if (*expr_p == NULL_TREE)
11248 break;
11249 if (*expr_p != save_expr)
11250 continue;
11251 }
11252 else if (ret != GS_UNHANDLED)
11253 break;
11254
11255 /* Make sure that all the cases set 'ret' appropriately. */
11256 ret = GS_UNHANDLED;
11257 switch (TREE_CODE (*expr_p))
11258 {
11259 /* First deal with the special cases. */
11260
11261 case POSTINCREMENT_EXPR:
11262 case POSTDECREMENT_EXPR:
11263 case PREINCREMENT_EXPR:
11264 case PREDECREMENT_EXPR:
11265 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11266 fallback != fb_none,
11267 TREE_TYPE (*expr_p));
11268 break;
11269
11270 case VIEW_CONVERT_EXPR:
11271 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11272 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11273 {
11274 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11275 post_p, is_gimple_val, fb_rvalue);
11276 recalculate_side_effects (*expr_p);
11277 break;
11278 }
11279 /* Fallthru. */
11280
11281 case ARRAY_REF:
11282 case ARRAY_RANGE_REF:
11283 case REALPART_EXPR:
11284 case IMAGPART_EXPR:
11285 case COMPONENT_REF:
11286 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11287 fallback ? fallback : fb_rvalue);
11288 break;
11289
11290 case COND_EXPR:
11291 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11292
11293 /* C99 code may assign to an array in a structure value of a
11294 conditional expression, and this has undefined behavior
11295 only on execution, so create a temporary if an lvalue is
11296 required. */
11297 if (fallback == fb_lvalue)
11298 {
11299 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11300 mark_addressable (*expr_p);
11301 ret = GS_OK;
11302 }
11303 break;
11304
11305 case CALL_EXPR:
11306 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11307
11308 /* C99 code may assign to an array in a structure returned
11309 from a function, and this has undefined behavior only on
11310 execution, so create a temporary if an lvalue is
11311 required. */
11312 if (fallback == fb_lvalue)
11313 {
11314 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11315 mark_addressable (*expr_p);
11316 ret = GS_OK;
11317 }
11318 break;
11319
11320 case TREE_LIST:
11321 gcc_unreachable ();
11322
11323 case COMPOUND_EXPR:
11324 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11325 break;
11326
11327 case COMPOUND_LITERAL_EXPR:
11328 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11329 gimple_test_f, fallback);
11330 break;
11331
11332 case MODIFY_EXPR:
11333 case INIT_EXPR:
11334 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11335 fallback != fb_none);
11336 break;
11337
11338 case TRUTH_ANDIF_EXPR:
11339 case TRUTH_ORIF_EXPR:
11340 {
11341 /* Preserve the original type of the expression and the
11342 source location of the outer expression. */
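/* E.g. "a && b" becomes "a && b ? 1 : 0" here; the COND_EXPR case then
   expands the short-circuit control flow on the next iteration of the
   gimplification loop (an illustrative note).  */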
11343 tree org_type = TREE_TYPE (*expr_p);
11344 *expr_p = gimple_boolify (*expr_p);
11345 *expr_p = build3_loc (input_location, COND_EXPR,
11346 org_type, *expr_p,
11347 fold_convert_loc
11348 (input_location,
11349 org_type, boolean_true_node),
11350 fold_convert_loc
11351 (input_location,
11352 org_type, boolean_false_node));
11353 ret = GS_OK;
11354 break;
11355 }
11356
11357 case TRUTH_NOT_EXPR:
11358 {
11359 tree type = TREE_TYPE (*expr_p);
11360 /* The parsers are careful to generate TRUTH_NOT_EXPR
11361 only with operands that are always zero or one.
11362 We do not fold here but handle the only interesting case
11363 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11364 *expr_p = gimple_boolify (*expr_p);
11365 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11366 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11367 TREE_TYPE (*expr_p),
11368 TREE_OPERAND (*expr_p, 0));
11369 else
11370 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11371 TREE_TYPE (*expr_p),
11372 TREE_OPERAND (*expr_p, 0),
11373 build_int_cst (TREE_TYPE (*expr_p), 1));
11374 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11375 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11376 ret = GS_OK;
11377 break;
11378 }
11379
11380 case ADDR_EXPR:
11381 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11382 break;
11383
11384 case ANNOTATE_EXPR:
11385 {
11386 tree cond = TREE_OPERAND (*expr_p, 0);
11387 tree kind = TREE_OPERAND (*expr_p, 1);
11388 tree type = TREE_TYPE (cond);
11389 if (!INTEGRAL_TYPE_P (type))
11390 {
11391 *expr_p = cond;
11392 ret = GS_OK;
11393 break;
11394 }
11395 tree tmp = create_tmp_var (type);
11396 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11397 gcall *call
11398 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11399 gimple_call_set_lhs (call, tmp);
11400 gimplify_seq_add_stmt (pre_p, call);
11401 *expr_p = tmp;
11402 ret = GS_ALL_DONE;
11403 break;
11404 }
11405
11406 case VA_ARG_EXPR:
11407 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11408 break;
11409
11410 CASE_CONVERT:
11411 if (IS_EMPTY_STMT (*expr_p))
11412 {
11413 ret = GS_ALL_DONE;
11414 break;
11415 }
11416
11417 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11418 || fallback == fb_none)
11419 {
11420 /* Just strip a conversion to void (or in void context) and
11421 try again. */
11422 *expr_p = TREE_OPERAND (*expr_p, 0);
11423 ret = GS_OK;
11424 break;
11425 }
11426
11427 ret = gimplify_conversion (expr_p);
11428 if (ret == GS_ERROR)
11429 break;
11430 if (*expr_p != save_expr)
11431 break;
11432 /* FALLTHRU */
11433
11434 case FIX_TRUNC_EXPR:
11435 /* unary_expr: ... | '(' cast ')' val | ... */
11436 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11437 is_gimple_val, fb_rvalue);
11438 recalculate_side_effects (*expr_p);
11439 break;
11440
11441 case INDIRECT_REF:
11442 {
11443 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11444 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11445 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11446
11447 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11448 if (*expr_p != save_expr)
11449 {
11450 ret = GS_OK;
11451 break;
11452 }
11453
11454 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11455 is_gimple_reg, fb_rvalue);
11456 if (ret == GS_ERROR)
11457 break;
11458
11459 recalculate_side_effects (*expr_p);
11460 *expr_p = fold_build2_loc (input_location, MEM_REF,
11461 TREE_TYPE (*expr_p),
11462 TREE_OPERAND (*expr_p, 0),
11463 build_int_cst (saved_ptr_type, 0));
11464 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11465 TREE_THIS_NOTRAP (*expr_p) = notrap;
11466 ret = GS_OK;
11467 break;
11468 }
11469
11470 /* We arrive here through the various re-gimplification paths. */
11471 case MEM_REF:
11472 /* First try re-folding the whole thing. */
11473 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11474 TREE_OPERAND (*expr_p, 0),
11475 TREE_OPERAND (*expr_p, 1));
11476 if (tmp)
11477 {
11478 REF_REVERSE_STORAGE_ORDER (tmp)
11479 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11480 *expr_p = tmp;
11481 recalculate_side_effects (*expr_p);
11482 ret = GS_OK;
11483 break;
11484 }
11485 /* Avoid re-gimplifying the address operand if it is already
11486 in suitable form. Re-gimplifying would mark the address
11487 operand addressable. Always gimplify when not in SSA form
11488 as we still may have to gimplify decls with value-exprs. */
11489 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11490 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11491 {
11492 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11493 is_gimple_mem_ref_addr, fb_rvalue);
11494 if (ret == GS_ERROR)
11495 break;
11496 }
11497 recalculate_side_effects (*expr_p);
11498 ret = GS_ALL_DONE;
11499 break;
11500
11501 /* Constants need not be gimplified. */
11502 case INTEGER_CST:
11503 case REAL_CST:
11504 case FIXED_CST:
11505 case STRING_CST:
11506 case COMPLEX_CST:
11507 case VECTOR_CST:
11508 /* Drop the overflow flag on constants, we do not want
11509 that in the GIMPLE IL. */
11510 if (TREE_OVERFLOW_P (*expr_p))
11511 *expr_p = drop_tree_overflow (*expr_p);
11512 ret = GS_ALL_DONE;
11513 break;
11514
11515 case CONST_DECL:
11516 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11517 CONST_DECL node. Otherwise the decl is replaceable by its
11518 value. */
11519 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11520 if (fallback & fb_lvalue)
11521 ret = GS_ALL_DONE;
11522 else
11523 {
11524 *expr_p = DECL_INITIAL (*expr_p);
11525 ret = GS_OK;
11526 }
11527 break;
11528
11529 case DECL_EXPR:
11530 ret = gimplify_decl_expr (expr_p, pre_p);
11531 break;
11532
11533 case BIND_EXPR:
11534 ret = gimplify_bind_expr (expr_p, pre_p);
11535 break;
11536
11537 case LOOP_EXPR:
11538 ret = gimplify_loop_expr (expr_p, pre_p);
11539 break;
11540
11541 case SWITCH_EXPR:
11542 ret = gimplify_switch_expr (expr_p, pre_p);
11543 break;
11544
11545 case EXIT_EXPR:
11546 ret = gimplify_exit_expr (expr_p);
11547 break;
11548
11549 case GOTO_EXPR:
11550 /* If the target is not a LABEL_DECL, then it is a computed jump
11551 and the target needs to be gimplified. */
11552 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11553 {
11554 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11555 NULL, is_gimple_val, fb_rvalue);
11556 if (ret == GS_ERROR)
11557 break;
11558 }
11559 gimplify_seq_add_stmt (pre_p,
11560 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11561 ret = GS_ALL_DONE;
11562 break;
11563
11564 case PREDICT_EXPR:
11565 gimplify_seq_add_stmt (pre_p,
11566 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11567 PREDICT_EXPR_OUTCOME (*expr_p)));
11568 ret = GS_ALL_DONE;
11569 break;
11570
11571 case LABEL_EXPR:
11572 ret = gimplify_label_expr (expr_p, pre_p);
11573 label = LABEL_EXPR_LABEL (*expr_p);
11574 gcc_assert (decl_function_context (label) == current_function_decl);
11575
11576 /* If the label is used in a goto statement, or the address of the label
11577 is taken, we need to unpoison all variables that were seen so far.
11578 Doing so prevents us from reporting false positives. */
11579 if (asan_poisoned_variables
11580 && asan_used_labels != NULL
11581 && asan_used_labels->contains (label))
11582 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11583 break;
11584
11585 case CASE_LABEL_EXPR:
11586 ret = gimplify_case_label_expr (expr_p, pre_p);
11587
11588 if (gimplify_ctxp->live_switch_vars)
11589 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11590 pre_p);
11591 break;
11592
11593 case RETURN_EXPR:
11594 ret = gimplify_return_expr (*expr_p, pre_p);
11595 break;
11596
11597 case CONSTRUCTOR:
11598 /* Don't reduce this in place; let gimplify_init_constructor work its
11599 magic. But if we're just elaborating this for side effects, just
11600 gimplify any element that has side-effects. */
11601 if (fallback == fb_none)
11602 {
11603 unsigned HOST_WIDE_INT ix;
11604 tree val;
11605 tree temp = NULL_TREE;
11606 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11607 if (TREE_SIDE_EFFECTS (val))
11608 append_to_statement_list (val, &temp);
11609
11610 *expr_p = temp;
11611 ret = temp ? GS_OK : GS_ALL_DONE;
11612 }
11613 /* C99 code may assign to an array in a constructed
11614 structure or union, and this has undefined behavior only
11615 on execution, so create a temporary if an lvalue is
11616 required. */
11617 else if (fallback == fb_lvalue)
11618 {
11619 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11620 mark_addressable (*expr_p);
11621 ret = GS_OK;
11622 }
11623 else
11624 ret = GS_ALL_DONE;
11625 break;
11626
11627 /* The following are special cases that are not handled by the
11628 original GIMPLE grammar. */
11629
11630 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11631 eliminated. */
11632 case SAVE_EXPR:
11633 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11634 break;
11635
11636 case BIT_FIELD_REF:
11637 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11638 post_p, is_gimple_lvalue, fb_either);
11639 recalculate_side_effects (*expr_p);
11640 break;
11641
11642 case TARGET_MEM_REF:
11643 {
11644 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11645
11646 if (TMR_BASE (*expr_p))
11647 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11648 post_p, is_gimple_mem_ref_addr, fb_either);
11649 if (TMR_INDEX (*expr_p))
11650 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11651 post_p, is_gimple_val, fb_rvalue);
11652 if (TMR_INDEX2 (*expr_p))
11653 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11654 post_p, is_gimple_val, fb_rvalue);
11655 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11656 ret = MIN (r0, r1);
11657 }
11658 break;
11659
11660 case NON_LVALUE_EXPR:
11661 /* This should have been stripped above. */
11662 gcc_unreachable ();
11663
11664 case ASM_EXPR:
11665 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11666 break;
11667
11668 case TRY_FINALLY_EXPR:
11669 case TRY_CATCH_EXPR:
11670 {
11671 gimple_seq eval, cleanup;
11672 gtry *try_;
11673
11674 /* Calls to destructors are generated automatically in the FINALLY/CATCH
11675 block. Their location should be UNKNOWN_LOCATION. However,
11676 gimplify_call_expr will reset these call stmts to input_location
11677 if it finds a stmt's location is unknown. To prevent that resetting
11678 for destructors, we set input_location to unknown here.
11679 Note that this only affects the destructor calls in the FINALLY/CATCH
11680 block; input_location will automatically be reset to its original
11681 value by the end of gimplify_expr. */
11682 input_location = UNKNOWN_LOCATION;
11683 eval = cleanup = NULL;
11684 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11685 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11686 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11687 if (gimple_seq_empty_p (cleanup))
11688 {
11689 gimple_seq_add_seq (pre_p, eval);
11690 ret = GS_ALL_DONE;
11691 break;
11692 }
11693 try_ = gimple_build_try (eval, cleanup,
11694 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11695 ? GIMPLE_TRY_FINALLY
11696 : GIMPLE_TRY_CATCH);
11697 if (EXPR_HAS_LOCATION (save_expr))
11698 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11699 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11700 gimple_set_location (try_, saved_location);
11701 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11702 gimple_try_set_catch_is_cleanup (try_,
11703 TRY_CATCH_IS_CLEANUP (*expr_p));
11704 gimplify_seq_add_stmt (pre_p, try_);
11705 ret = GS_ALL_DONE;
11706 break;
11707 }
11708
11709 case CLEANUP_POINT_EXPR:
11710 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11711 break;
11712
11713 case TARGET_EXPR:
11714 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11715 break;
11716
11717 case CATCH_EXPR:
11718 {
11719 gimple *c;
11720 gimple_seq handler = NULL;
11721 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11722 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11723 gimplify_seq_add_stmt (pre_p, c);
11724 ret = GS_ALL_DONE;
11725 break;
11726 }
11727
11728 case EH_FILTER_EXPR:
11729 {
11730 gimple *ehf;
11731 gimple_seq failure = NULL;
11732
11733 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11734 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11735 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11736 gimplify_seq_add_stmt (pre_p, ehf);
11737 ret = GS_ALL_DONE;
11738 break;
11739 }
11740
11741 case OBJ_TYPE_REF:
11742 {
11743 enum gimplify_status r0, r1;
11744 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11745 post_p, is_gimple_val, fb_rvalue);
11746 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11747 post_p, is_gimple_val, fb_rvalue);
11748 TREE_SIDE_EFFECTS (*expr_p) = 0;
11749 ret = MIN (r0, r1);
11750 }
11751 break;
11752
11753 case LABEL_DECL:
11754 /* We get here when taking the address of a label. We mark
11755 the label as "forced", meaning it can never be removed and
11756 it is a potential target for any computed goto. */
11757 FORCED_LABEL (*expr_p) = 1;
11758 ret = GS_ALL_DONE;
11759 break;
11760
11761 case STATEMENT_LIST:
11762 ret = gimplify_statement_list (expr_p, pre_p);
11763 break;
11764
11765 case WITH_SIZE_EXPR:
11766 {
11767 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11768 post_p == &internal_post ? NULL : post_p,
11769 gimple_test_f, fallback);
11770 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
11771 is_gimple_val, fb_rvalue);
11772 ret = GS_ALL_DONE;
11773 }
11774 break;
11775
11776 case VAR_DECL:
11777 case PARM_DECL:
11778 ret = gimplify_var_or_parm_decl (expr_p);
11779 break;
11780
11781 case RESULT_DECL:
11782 /* When within an OMP context, notice uses of variables. */
11783 if (gimplify_omp_ctxp)
11784 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
11785 ret = GS_ALL_DONE;
11786 break;
11787
11788 case SSA_NAME:
11789 /* Allow callbacks into the gimplifier during optimization. */
11790 ret = GS_ALL_DONE;
11791 break;
11792
11793 case OMP_PARALLEL:
11794 gimplify_omp_parallel (expr_p, pre_p);
11795 ret = GS_ALL_DONE;
11796 break;
11797
11798 case OMP_TASK:
11799 gimplify_omp_task (expr_p, pre_p);
11800 ret = GS_ALL_DONE;
11801 break;
11802
11803 case OMP_FOR:
11804 case OMP_SIMD:
11805 case CILK_SIMD:
11806 case CILK_FOR:
11807 case OMP_DISTRIBUTE:
11808 case OMP_TASKLOOP:
11809 case OACC_LOOP:
11810 ret = gimplify_omp_for (expr_p, pre_p);
11811 break;
11812
11813 case OACC_CACHE:
11814 gimplify_oacc_cache (expr_p, pre_p);
11815 ret = GS_ALL_DONE;
11816 break;
11817
11818 case OACC_DECLARE:
11819 gimplify_oacc_declare (expr_p, pre_p);
11820 ret = GS_ALL_DONE;
11821 break;
11822
11823 case OACC_HOST_DATA:
11824 case OACC_DATA:
11825 case OACC_KERNELS:
11826 case OACC_PARALLEL:
11827 case OMP_SECTIONS:
11828 case OMP_SINGLE:
11829 case OMP_TARGET:
11830 case OMP_TARGET_DATA:
11831 case OMP_TEAMS:
11832 gimplify_omp_workshare (expr_p, pre_p);
11833 ret = GS_ALL_DONE;
11834 break;
11835
11836 case OACC_ENTER_DATA:
11837 case OACC_EXIT_DATA:
11838 case OACC_UPDATE:
11839 case OMP_TARGET_UPDATE:
11840 case OMP_TARGET_ENTER_DATA:
11841 case OMP_TARGET_EXIT_DATA:
11842 gimplify_omp_target_update (expr_p, pre_p);
11843 ret = GS_ALL_DONE;
11844 break;
11845
11846 case OMP_SECTION:
11847 case OMP_MASTER:
11848 case OMP_TASKGROUP:
11849 case OMP_ORDERED:
11850 case OMP_CRITICAL:
11851 {
11852 gimple_seq body = NULL;
11853 gimple *g;
11854
11855 gimplify_and_add (OMP_BODY (*expr_p), &body);
11856 switch (TREE_CODE (*expr_p))
11857 {
11858 case OMP_SECTION:
11859 g = gimple_build_omp_section (body);
11860 break;
11861 case OMP_MASTER:
11862 g = gimple_build_omp_master (body);
11863 break;
11864 case OMP_TASKGROUP:
11865 {
11866 gimple_seq cleanup = NULL;
11867 tree fn
11868 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
11869 g = gimple_build_call (fn, 0);
11870 gimple_seq_add_stmt (&cleanup, g);
11871 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
11872 body = NULL;
11873 gimple_seq_add_stmt (&body, g);
11874 g = gimple_build_omp_taskgroup (body);
11875 }
11876 break;
11877 case OMP_ORDERED:
11878 g = gimplify_omp_ordered (*expr_p, body);
11879 break;
11880 case OMP_CRITICAL:
11881 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
11882 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
11883 gimplify_adjust_omp_clauses (pre_p, body,
11884 &OMP_CRITICAL_CLAUSES (*expr_p),
11885 OMP_CRITICAL);
11886 g = gimple_build_omp_critical (body,
11887 OMP_CRITICAL_NAME (*expr_p),
11888 OMP_CRITICAL_CLAUSES (*expr_p));
11889 break;
11890 default:
11891 gcc_unreachable ();
11892 }
11893 gimplify_seq_add_stmt (pre_p, g);
11894 ret = GS_ALL_DONE;
11895 break;
11896 }
11897
11898 case OMP_ATOMIC:
11899 case OMP_ATOMIC_READ:
11900 case OMP_ATOMIC_CAPTURE_OLD:
11901 case OMP_ATOMIC_CAPTURE_NEW:
11902 ret = gimplify_omp_atomic (expr_p, pre_p);
11903 break;
11904
11905 case TRANSACTION_EXPR:
11906 ret = gimplify_transaction (expr_p, pre_p);
11907 break;
11908
11909 case TRUTH_AND_EXPR:
11910 case TRUTH_OR_EXPR:
11911 case TRUTH_XOR_EXPR:
11912 {
11913 tree orig_type = TREE_TYPE (*expr_p);
11914 tree new_type, xop0, xop1;
11915 *expr_p = gimple_boolify (*expr_p);
11916 new_type = TREE_TYPE (*expr_p);
11917 if (!useless_type_conversion_p (orig_type, new_type))
11918 {
11919 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
11920 ret = GS_OK;
11921 break;
11922 }
11923
11924 /* Boolified binary truth expressions are semantically equivalent
11925 to bitwise binary expressions. Canonicalize them to the
11926 bitwise variant. */
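/* E.g. TRUTH_AND_EXPR <a, b> on boolean operands becomes
   BIT_AND_EXPR <a, b>; on single-bit values the two are equivalent
   (an illustrative note).  */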
11927 switch (TREE_CODE (*expr_p))
11928 {
11929 case TRUTH_AND_EXPR:
11930 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
11931 break;
11932 case TRUTH_OR_EXPR:
11933 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
11934 break;
11935 case TRUTH_XOR_EXPR:
11936 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
11937 break;
11938 default:
11939 break;
11940 }
11941 /* Now make sure that operands have compatible type to
11942 expression's new_type. */
11943 xop0 = TREE_OPERAND (*expr_p, 0);
11944 xop1 = TREE_OPERAND (*expr_p, 1);
11945 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
11946 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
11947 new_type,
11948 xop0);
11949 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
11950 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
11951 new_type,
11952 xop1);
11953 /* Continue classified as tcc_binary. */
11954 goto expr_2;
11955 }
11956
11957 case VEC_COND_EXPR:
11958 {
11959 enum gimplify_status r0, r1, r2;
11960
11961 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11962 post_p, is_gimple_condexpr, fb_rvalue);
11963 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11964 post_p, is_gimple_val, fb_rvalue);
11965 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
11966 post_p, is_gimple_val, fb_rvalue);
11967
11968 ret = MIN (MIN (r0, r1), r2);
11969 recalculate_side_effects (*expr_p);
11970 }
11971 break;
11972
11973 case FMA_EXPR:
11974 case VEC_PERM_EXPR:
11975 /* Classified as tcc_expression. */
11976 goto expr_3;
11977
11978 case BIT_INSERT_EXPR:
11979 /* Argument 3 is a constant. */
11980 goto expr_2;
11981
11982 case POINTER_PLUS_EXPR:
11983 {
11984 enum gimplify_status r0, r1;
11985 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11986 post_p, is_gimple_val, fb_rvalue);
11987 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
11988 post_p, is_gimple_val, fb_rvalue);
11989 recalculate_side_effects (*expr_p);
11990 ret = MIN (r0, r1);
11991 break;
11992 }
11993
11994 case CILK_SYNC_STMT:
11995 {
11996 if (!fn_contains_cilk_spawn_p (cfun))
11997 {
11998 error_at (EXPR_LOCATION (*expr_p),
11999 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
12000 ret = GS_ERROR;
12001 }
12002 else
12003 {
12004 gimplify_cilk_sync (expr_p, pre_p);
12005 ret = GS_ALL_DONE;
12006 }
12007 break;
12008 }
12009
12010 default:
12011 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
12012 {
12013 case tcc_comparison:
12014 /* Handle comparison of objects of non-scalar mode aggregates
12015 with a call to memcmp. It would be nice to only have to do
12016 this for variable-sized objects, but then we'd have to allow
12017 the same nest of reference nodes we allow for MODIFY_EXPR and
12018 that's too complex.
12019
12020 Compare scalar mode aggregates as scalar mode values. Using
12021 memcmp for them would be very inefficient at best, and is
12022 plain wrong if bitfields are involved. */
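/* E.g. comparing two BLKmode "struct { char buf[64]; }" values is
   lowered to a memcmp of the two objects, while a struct small enough
   to have an integer mode is compared as a value of that mode
   (an illustrative note).  */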
12023 {
12024 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
12025
12026 /* Vector comparisons need no boolification. */
12027 if (TREE_CODE (type) == VECTOR_TYPE)
12028 goto expr_2;
12029 else if (!AGGREGATE_TYPE_P (type))
12030 {
12031 tree org_type = TREE_TYPE (*expr_p);
12032 *expr_p = gimple_boolify (*expr_p);
12033 if (!useless_type_conversion_p (org_type,
12034 TREE_TYPE (*expr_p)))
12035 {
12036 *expr_p = fold_convert_loc (input_location,
12037 org_type, *expr_p);
12038 ret = GS_OK;
12039 }
12040 else
12041 goto expr_2;
12042 }
12043 else if (TYPE_MODE (type) != BLKmode)
12044 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12045 else
12046 ret = gimplify_variable_sized_compare (expr_p);
12047
12048 break;
12049 }
12050
12051 /* If *EXPR_P does not need to be special-cased, handle it
12052 according to its class. */
12053 case tcc_unary:
12054 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12055 post_p, is_gimple_val, fb_rvalue);
12056 break;
12057
12058 case tcc_binary:
12059 expr_2:
12060 {
12061 enum gimplify_status r0, r1;
12062
12063 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12064 post_p, is_gimple_val, fb_rvalue);
12065 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12066 post_p, is_gimple_val, fb_rvalue);
12067
12068 ret = MIN (r0, r1);
12069 break;
12070 }
12071
12072 expr_3:
12073 {
12074 enum gimplify_status r0, r1, r2;
12075
12076 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12077 post_p, is_gimple_val, fb_rvalue);
12078 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12079 post_p, is_gimple_val, fb_rvalue);
12080 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12081 post_p, is_gimple_val, fb_rvalue);
12082
12083 ret = MIN (MIN (r0, r1), r2);
12084 break;
12085 }
12086
12087 case tcc_declaration:
12088 case tcc_constant:
12089 ret = GS_ALL_DONE;
12090 goto dont_recalculate;
12091
12092 default:
12093 gcc_unreachable ();
12094 }
12095
12096 recalculate_side_effects (*expr_p);
12097
12098 dont_recalculate:
12099 break;
12100 }
12101
12102 gcc_assert (*expr_p || ret != GS_OK);
12103 }
12104 while (ret == GS_OK);
12105
12106 /* If we encountered an error_mark somewhere nested inside, either
12107 stub out the statement or propagate the error back out. */
12108 if (ret == GS_ERROR)
12109 {
12110 if (is_statement)
12111 *expr_p = NULL;
12112 goto out;
12113 }
12114
12115 /* This was only valid as a return value from the langhook, which
12116 we handled. Make sure it doesn't escape from any other context. */
12117 gcc_assert (ret != GS_UNHANDLED);
12118
12119 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12120 {
12121 /* We aren't looking for a value, and we don't have a valid
12122 statement. If it doesn't have side-effects, throw it away.
12123 We can also get here with code such as "*&&L;", where L is
12124 a LABEL_DECL that is marked as FORCED_LABEL. */
12125 if (TREE_CODE (*expr_p) == LABEL_DECL
12126 || !TREE_SIDE_EFFECTS (*expr_p))
12127 *expr_p = NULL;
12128 else if (!TREE_THIS_VOLATILE (*expr_p))
12129 {
12130 /* This is probably a _REF that contains something nested that
12131 has side effects. Recurse through the operands to find it. */
12132 enum tree_code code = TREE_CODE (*expr_p);
12133
12134 switch (code)
12135 {
12136 case COMPONENT_REF:
12137 case REALPART_EXPR:
12138 case IMAGPART_EXPR:
12139 case VIEW_CONVERT_EXPR:
12140 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12141 gimple_test_f, fallback);
12142 break;
12143
12144 case ARRAY_REF:
12145 case ARRAY_RANGE_REF:
12146 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12147 gimple_test_f, fallback);
12148 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12149 gimple_test_f, fallback);
12150 break;
12151
12152 default:
12153 /* Anything else with side-effects must be converted to
12154 a valid statement before we get here. */
12155 gcc_unreachable ();
12156 }
12157
12158 *expr_p = NULL;
12159 }
12160 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12161 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12162 {
12163 /* Historically, the compiler has treated a bare reference
12164 to a non-BLKmode volatile lvalue as forcing a load. */
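/* E.g. given "volatile int v;", a bare statement "v;" has no value
   use, yet we still emit "vol.N = v" below so that the load happens
   (an illustrative note; vol.N is the temporary created here).  */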
12165 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12166
12167 /* Normally, we do not want to create a temporary for a
12168 TREE_ADDRESSABLE type because such a type should not be
12169 copied by bitwise assignment. However, we make an
12170 exception here, as all we are doing is ensuring that
12171 we read the bytes that make up the type. We use
12172 create_tmp_var_raw because create_tmp_var will abort when
12173 given a TREE_ADDRESSABLE type. */
12174 tree tmp = create_tmp_var_raw (type, "vol");
12175 gimple_add_tmp_var (tmp);
12176 gimplify_assign (tmp, *expr_p, pre_p);
12177 *expr_p = NULL;
12178 }
12179 else
12180 /* We can't do anything useful with a volatile reference to
12181 an incomplete type, so just throw it away. Likewise for
12182 a BLKmode type, since any implicit inner load should
12183 already have been turned into an explicit one by the
12184 gimplification process. */
12185 *expr_p = NULL;
12186 }
12187
12188 /* If we are gimplifying at the statement level, we're done. Tack
12189 everything together and return. */
12190 if (fallback == fb_none || is_statement)
12191 {
12192 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12193 it out for GC to reclaim it. */
12194 *expr_p = NULL_TREE;
12195
12196 if (!gimple_seq_empty_p (internal_pre)
12197 || !gimple_seq_empty_p (internal_post))
12198 {
12199 gimplify_seq_add_seq (&internal_pre, internal_post);
12200 gimplify_seq_add_seq (pre_p, internal_pre);
12201 }
12202
12203 /* The result of gimplifying *EXPR_P is going to be the last few
12204 statements in *PRE_P and *POST_P. Add location information
12205 to all the statements that were added by the gimplification
12206 helpers. */
12207 if (!gimple_seq_empty_p (*pre_p))
12208 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12209
12210 if (!gimple_seq_empty_p (*post_p))
12211 annotate_all_with_location_after (*post_p, post_last_gsi,
12212 input_location);
12213
12214 goto out;
12215 }
12216
12217 #ifdef ENABLE_GIMPLE_CHECKING
12218 if (*expr_p)
12219 {
12220 enum tree_code code = TREE_CODE (*expr_p);
12221 /* These expressions should already be in gimple IR form. */
12222 gcc_assert (code != MODIFY_EXPR
12223 && code != ASM_EXPR
12224 && code != BIND_EXPR
12225 && code != CATCH_EXPR
12226 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12227 && code != EH_FILTER_EXPR
12228 && code != GOTO_EXPR
12229 && code != LABEL_EXPR
12230 && code != LOOP_EXPR
12231 && code != SWITCH_EXPR
12232 && code != TRY_FINALLY_EXPR
12233 && code != OACC_PARALLEL
12234 && code != OACC_KERNELS
12235 && code != OACC_DATA
12236 && code != OACC_HOST_DATA
12237 && code != OACC_DECLARE
12238 && code != OACC_UPDATE
12239 && code != OACC_ENTER_DATA
12240 && code != OACC_EXIT_DATA
12241 && code != OACC_CACHE
12242 && code != OMP_CRITICAL
12243 && code != OMP_FOR
12244 && code != OACC_LOOP
12245 && code != OMP_MASTER
12246 && code != OMP_TASKGROUP
12247 && code != OMP_ORDERED
12248 && code != OMP_PARALLEL
12249 && code != OMP_SECTIONS
12250 && code != OMP_SECTION
12251 && code != OMP_SINGLE);
12252 }
12253 #endif
12254
12255 /* Otherwise we're gimplifying a subexpression, so the resulting
12256 value is interesting. If it's a valid operand that matches
12257 GIMPLE_TEST_F, we're done. Unless we are handling some
12258 post-effects internally; if that's the case, we need to copy into
12259 a temporary before adding the post-effects to POST_P. */
12260 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12261 goto out;
12262
12263 /* Otherwise, we need to create a new temporary for the gimplified
12264 expression. */
12265
12266 /* We can't return an lvalue if we have an internal postqueue. The
12267 object the lvalue refers to would (probably) be modified by the
12268 postqueue; we need to copy the value out first, which means an
12269 rvalue. */
12270 if ((fallback & fb_lvalue)
12271 && gimple_seq_empty_p (internal_post)
12272 && is_gimple_addressable (*expr_p))
12273 {
12274 /* An lvalue will do. Take the address of the expression, store it
12275 in a temporary, and replace the expression with an INDIRECT_REF of
12276 that temporary. */
12277 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12278 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12279 *expr_p = build_simple_mem_ref (tmp);
12280 }
12281 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12282 {
12283 /* An rvalue will do. Assign the gimplified expression into a
12284 new temporary TMP and replace the original expression with
12285 TMP. First, make sure that the expression has a type so that
12286 it can be assigned into a temporary. */
12287 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12288 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12289 }
12290 else
12291 {
12292 #ifdef ENABLE_GIMPLE_CHECKING
12293 if (!(fallback & fb_mayfail))
12294 {
12295 fprintf (stderr, "gimplification failed:\n");
12296 print_generic_expr (stderr, *expr_p);
12297 debug_tree (*expr_p);
12298 internal_error ("gimplification failed");
12299 }
12300 #endif
12301 gcc_assert (fallback & fb_mayfail);
12302
12303 /* If this is an asm statement, and the user asked for the
12304 impossible, don't die. Fail and let gimplify_asm_expr
12305 issue an error. */
12306 ret = GS_ERROR;
12307 goto out;
12308 }
12309
12310 /* Make sure the temporary matches our predicate. */
12311 gcc_assert ((*gimple_test_f) (*expr_p));
12312
12313 if (!gimple_seq_empty_p (internal_post))
12314 {
12315 annotate_all_with_location (internal_post, input_location);
12316 gimplify_seq_add_seq (pre_p, internal_post);
12317 }
12318
12319 out:
12320 input_location = saved_location;
12321 return ret;
12322 }
12323
12324 /* Like gimplify_expr but make sure the gimplified result is not itself
12325 an SSA name (a decl is used instead). Temporaries required by
12326 evaluating *EXPR_P may still be SSA names. */
12327
12328 static enum gimplify_status
12329 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12330 bool (*gimple_test_f) (tree), fallback_t fallback,
12331 bool allow_ssa)
12332 {
12333 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12334 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12335 gimple_test_f, fallback);
12336 if (! allow_ssa
12337 && TREE_CODE (*expr_p) == SSA_NAME)
12338 {
12339 tree name = *expr_p;
12340 if (was_ssa_name_p)
12341 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12342 else
12343 {
12344 /* Avoid the extra copy if possible. */
12345 *expr_p = create_tmp_reg (TREE_TYPE (name));
12346 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12347 release_ssa_name (name);
12348 }
12349 }
12350 return ret;
12351 }
12352
12353 /* Look through TYPE for variable-sized objects and gimplify each such
12354 size that we find. Add to LIST_P any statements generated. */
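/* An illustrative note: for a VLA type such as "int[n + 1]" this
   evaluates the domain bounds and the TYPE_SIZE/TYPE_SIZE_UNIT
   expressions into gimple values on LIST_P, so later uses of the size
   are valid GIMPLE operands.  */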
12355
12356 void
12357 gimplify_type_sizes (tree type, gimple_seq *list_p)
12358 {
12359 tree field, t;
12360
12361 if (type == NULL || type == error_mark_node)
12362 return;
12363
12364 /* We first do the main variant, then copy into any other variants. */
12365 type = TYPE_MAIN_VARIANT (type);
12366
12367 /* Avoid infinite recursion. */
12368 if (TYPE_SIZES_GIMPLIFIED (type))
12369 return;
12370
12371 TYPE_SIZES_GIMPLIFIED (type) = 1;
12372
12373 switch (TREE_CODE (type))
12374 {
12375 case INTEGER_TYPE:
12376 case ENUMERAL_TYPE:
12377 case BOOLEAN_TYPE:
12378 case REAL_TYPE:
12379 case FIXED_POINT_TYPE:
12380 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12381 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12382
12383 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12384 {
12385 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12386 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12387 }
12388 break;
12389
12390 case ARRAY_TYPE:
12391 /* These types may not have declarations, so handle them here. */
12392 gimplify_type_sizes (TREE_TYPE (type), list_p);
12393 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12394 /* Ensure VLA bounds aren't removed: for -O0 they should be variables
12395 with assigned stack slots, and for -O1+ -g they should be tracked
12396 by VTA. */
12397 if (!(TYPE_NAME (type)
12398 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12399 && DECL_IGNORED_P (TYPE_NAME (type)))
12400 && TYPE_DOMAIN (type)
12401 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12402 {
12403 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12404 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12405 DECL_IGNORED_P (t) = 0;
12406 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12407 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12408 DECL_IGNORED_P (t) = 0;
12409 }
12410 break;
12411
12412 case RECORD_TYPE:
12413 case UNION_TYPE:
12414 case QUAL_UNION_TYPE:
12415 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12416 if (TREE_CODE (field) == FIELD_DECL)
12417 {
12418 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12419 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12420 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12421 gimplify_type_sizes (TREE_TYPE (field), list_p);
12422 }
12423 break;
12424
12425 case POINTER_TYPE:
12426 case REFERENCE_TYPE:
12427 /* We used to recurse on the pointed-to type here, which turned out to
12428 be incorrect because its definition might refer to variables not
12429 yet initialized at this point if a forward declaration is involved.
12430
12431 It was actually useful for anonymous pointed-to types to ensure
12432 that the sizes evaluation dominates every possible later use of the
12433 values. Restricting to such types here would be safe since there
12434 is no possible forward declaration around, but would introduce an
12435 undesirable middle-end semantic to anonymity. We then defer to
12436 front-ends the responsibility of ensuring that the sizes are
12437 evaluated both early and late enough, e.g. by attaching artificial
12438 type declarations to the tree. */
12439 break;
12440
12441 default:
12442 break;
12443 }
12444
12445 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12446 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12447
12448 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12449 {
12450 TYPE_SIZE (t) = TYPE_SIZE (type);
12451 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12452 TYPE_SIZES_GIMPLIFIED (t) = 1;
12453 }
12454 }
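
/* Illustrative example (not from the sources): for a C99 VLA

     void f (int n) { int a[n]; }

   the domain of a's type has a variable TYPE_MAX_VALUE, roughly n - 1.
   gimplify_type_sizes evaluates such expressions once into temporaries
   appended to LIST_P, e.g. something like

     D.1234 = n + -1;

   and stores the temporaries back into the type, so every later use of
   the bound sees a single, already-evaluated value.  */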

/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (is_gimple_sizepos (expr))
    return;

  *expr_p = unshare_expr (expr);

  /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
     if the def vanishes.  */
  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
}
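
/* For illustration: if a field's DECL_SIZE is the expression n * 8, the
   call above replaces it with a fresh temporary and appends something
   like

     D.1235 = n * 8;

   to *STMT_P; passing allow_ssa == false guarantees the stored value is
   a VAR_DECL rather than an SSA_NAME, per the comment above.  */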

/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple *outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
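
/* Illustrative sketch (not from the sources): for

     int f (int x) { return x + 1; }

   gimplify_body returns a single outer GIMPLE_BIND whose body is roughly

     D.1500 = x + 1;
     return D.1500;

   exact temporaries and wrapping vary by front end and options.  */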

typedef char *char_p; /* For DEF_VEC_P.  */

/* Return whether we should exclude FNDECL from instrumentation.  */

static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
  vec<char_p> *v;

  v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = lang_hooks.decl_printable_name (fndecl, 0);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  v = (vec<char_p> *) flag_instrument_functions_exclude_files;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = DECL_SOURCE_FILE (fndecl);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  return false;
}
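
/* For example, with

     -finstrument-functions-exclude-function-list=foo,bar

   any function whose printable name contains "foo" or "bar" as a
   substring is excluded; the file-list variant matches against
   DECL_SOURCE_FILE the same way.  Matching is substring-based, not
   exact.  */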

/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   The resulting sequence of GIMPLE statements corresponding to the body
   of FNDECL is attached to FNDECL via gimple_set_body.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }
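  /* When the above instrumentation applies, the body now has roughly
     the shape

       return_addr = __builtin_return_address (0);
       __cyg_profile_func_enter (&f, return_addr);
       try
	 {
	   <original outer bind>
	 }
       finally
	 {
	   return_addr = __builtin_return_address (0);
	   __cyg_profile_func_exit (&f, return_addr);
	 }

     (BUILT_IN_PROFILE_FUNC_ENTER/EXIT expand to the __cyg_profile_*
     hooks documented for -finstrument-functions).  */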

  if (sanitize_flags_p (SANITIZE_THREAD))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }
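  /* Note for readers: the IFN_TSAN_FUNC_EXIT internal call is only a
     placeholder here; the later tsan pass turns it into the actual
     runtime exit hook when -fsanitize=thread is in effect.  */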

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}

/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}
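
/* For instance, dummy_object (integer_type_node) builds the equivalent
   of *(int *) 0 -- a MEM_REF of a null pointer -- which is never meant
   to be executed but gives callers a value of the right type and mode.  */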

/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      source_location xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
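
/* Illustrative note: 'va_arg (ap, int)' is rewritten above into an
   IFN_VA_ARG internal call of type int.  Its second and third operands
   are zero constants that exist only to carry type information (the
   requested type and the va_list type); the target-specific expansion
   happens in a later pass, which is why PROP_gimple_lva is cleared.  */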

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
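
/* Typical usage (illustrative): to emit 'lhs = a + b' into SEQ,

     tree sum = build2 (PLUS_EXPR, TREE_TYPE (a), a, b);
     gimple *g = gimplify_assign (lhs, sum, &seq);

   the operands are gimplified as needed and the resulting
   GIMPLE_ASSIGN is appended to SEQ.  */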
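/* Hash an element of the gimplification temporary table: the cached
   expression is hashed structurally via iterative_hash_expr.  */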
inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

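/* Return whether the expressions cached in P1 and P2 are equal, so a
   previously created temporary can be reused for an equivalent tree.  */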
inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}