/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2016 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "cilk.h"
#include "gomp-constants.h"
#include "tree-dump.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;

/* Shorter alias name for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}
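
/* Comparing DECL_UIDs instead of raw pointer values keeps the splay tree
   order independent of where the garbage collector happened to allocate
   the DECLs, so dumps and generated code stay reproducible from run to
   run.  */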

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t) || is_gimple_lvalue (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
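
/* For instance, with optimization enabled, gimplifying two structurally
   identical occurrences of a side-effect-free expression such as "a + b"
   as formal temporaries can yield a single shared temporary, roughly

     D.1234 = a + b;
     ... D.1234 ... D.1234 ...;

   rather than two distinct temporaries (names are illustrative).  */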

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}
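
/* A sketch of typical use: a caller that must reduce a sub-expression to
   a GIMPLE value can write

     expr = get_formal_tmp_var (expr, pre_p);

   which emits "tmp = <expr>;" onto PRE_P and replaces EXPR with the
   temporary, assuming the two conditions above hold for EXPR.  */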

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


\f
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
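
/* As an illustrative sketch of the mark/copy/unmark scheme: if a front-end
   reuses one PLUS_EXPR node P in both "x = P" and "y = P", the first walk
   over "x = P" merely sets TREE_VISITED on P; the walk over "y = P" then
   finds the mark already set and replaces its reference with a fresh copy
   of P, and the final unmark pass clears TREE_VISITED again.  */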

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
\f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */
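
/* For example, for a GENERIC statement expression used as a value,
   "x = ({ ...; last_value; })", the MODIFY_EXPR can be passed as TEMP and
   is pushed down onto the last value-producing statement, roughly yielding
   "({ ...; x = last_value; })" with every wrapper given void type; when
   TEMP is NULL_TREE a fresh "retval" temporary is created instead (a
   sketch, the exact trees depend on the front-end).  */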

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
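
/* The pair of calls built above shows up in GIMPLE dumps roughly as

     saved_stack.1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.1);

   (temporary name illustrative); gimplify_bind_expr below places the
   restore in a GIMPLE_TRY_FINALLY cleanup so that it runs on every exit
   from the block.  */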

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  gimple *clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (t, clobber);
	  gimple_set_location (clobber_stmt, end_locus);
	  gimplify_seq_add_stmt (&cleanup, clobber_stmt);

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
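
/* When the block contained a VLA and stack reuse is enabled, the lowered
   bind body has roughly the shape

     saved_stack.1 = __builtin_stack_save ();
     try
       {
	 ... body ...
       }
     finally
       {
	 __builtin_stack_restore (saved_stack.1);
	 v = {CLOBBER};
       }

   (a sketch: one clobber is emitted per out-of-scope memory variable, and
   the save/restore pair is omitted when the stack must be kept).  */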

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */
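
/* For instance, "return a + b;" does not return a GIMPLE value, so it is
   rewritten roughly as

     retval.1 = a + b;
     return retval.1;

   with the temporary shared by all return statements of the function via
   gimplify_ctxp->return_temp (temporary name illustrative).  */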

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Implicit _Cilk_sync must be inserted right before any return statement
     if there is a _Cilk_spawn in the function.  If the user has provided a
     _Cilk_sync, the optimizer should remove this duplicate one.  */
  if (fn_contains_cilk_spawn_p (cfun))
    {
      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
      gimplify_and_add (impl_sync, pre_p);
    }

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}
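
/* Illustratively, for "int a[n];" this emits something like

     a.2 = __builtin_alloca_with_align (D.1234, <alignment>);

   and sets DECL_VALUE_EXPR (a) to "*a.2", so subsequent uses of A
   gimplify into the indirection (temporary names are illustrative).  */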

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    FORCED_LABEL (*tp) = 1;

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
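
/* So "LOOP_EXPR <body>" flattens to roughly

     start:
       ... body ...
       goto start;
     exit:

   where the exit label is only emitted if some EXIT_EXPR in the body
   asked for it (see gimplify_exit_expr below).  */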

/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}

\f
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
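
/* A sketch of the effect: CASE_LABEL_EXPRs in the body register themselves
   in gimplify_ctxp->case_labels (see gimplify_case_label_expr below), so

     switch (x) { case 1: ...; }

   becomes a GIMPLE_SWITCH naming every case label, plus a synthesized
   empty default label when the source provided none.  */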

/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  glabel *label_stmt;

  /* Invalid programs can play Duff's Device type games with, for example,
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification, in the
     diagnose_omp_blocks pass.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, label_stmt);

  return GS_ALL_DONE;
}

/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}

/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;

  return GS_OK;
}

/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */
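
/* For instance, with "struct S { int f : 8; } s;", the read of s.f may be
   performable directly in an 8-bit mode, in which case the canonical type
   of the COMPONENT_REF would be the signed 8-bit integer type rather than
   int (an illustrative, target-dependent case).  */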
1680
1681 static void
1682 canonicalize_component_ref (tree *expr_p)
1683 {
1684 tree expr = *expr_p;
1685 tree type;
1686
1687 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1688
1689 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1690 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1691 else
1692 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1693
1694 /* One could argue that all the stuff below is not necessary for
1695 the non-bitfield case and declare it a FE error if type
1696 adjustment would be needed. */
1697 if (TREE_TYPE (expr) != type)
1698 {
1699 #ifdef ENABLE_TYPES_CHECKING
1700 tree old_type = TREE_TYPE (expr);
1701 #endif
1702 int type_quals;
1703
1704 /* We need to preserve qualifiers and propagate them from
1705 operand 0. */
1706 type_quals = TYPE_QUALS (type)
1707 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1708 if (TYPE_QUALS (type) != type_quals)
1709 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1710
1711 /* Set the type of the COMPONENT_REF to the underlying type. */
1712 TREE_TYPE (expr) = type;
1713
1714 #ifdef ENABLE_TYPES_CHECKING
1715 /* It is now a FE error, if the conversion from the canonical
1716 type to the original expression type is not useless. */
1717 gcc_assert (useless_type_conversion_p (old_type, type));
1718 #endif
1719 }
1720 }
1721
1722 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1723 to foo, embed that change in the ADDR_EXPR by converting
1724 T array[U];
1725 (T *)&array
1726 ==>
1727 &array[L]
1728 where L is the lower bound. For simplicity, only do this for constant
1729 lower bound.
1730 The constraint is that the type of &array[L] is trivially convertible
1731 to T *. */
1732
1733 static void
1734 canonicalize_addr_expr (tree *expr_p)
1735 {
1736 tree expr = *expr_p;
1737 tree addr_expr = TREE_OPERAND (expr, 0);
1738 tree datype, ddatype, pddatype;
1739
1740 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1741 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1742 || TREE_CODE (addr_expr) != ADDR_EXPR)
1743 return;
1744
1745 /* The addr_expr type should be a pointer to an array. */
1746 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1747 if (TREE_CODE (datype) != ARRAY_TYPE)
1748 return;
1749
1750 /* The pointer to element type shall be trivially convertible to
1751 the expression pointer type. */
1752 ddatype = TREE_TYPE (datype);
1753 pddatype = build_pointer_type (ddatype);
1754 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1755 pddatype))
1756 return;
1757
1758 /* The lower bound and element sizes must be constant. */
1759 if (!TYPE_SIZE_UNIT (ddatype)
1760 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1761 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1762 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1763 return;
1764
1765 /* All checks succeeded. Build a new node to merge the cast. */
1766 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1767 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1768 NULL_TREE, NULL_TREE);
1769 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1770
1771 /* We can have stripped a required restrict qualifier above. */
1772 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1773 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1774 }
1775
1776 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1777 underneath as appropriate. */
1778
1779 static enum gimplify_status
1780 gimplify_conversion (tree *expr_p)
1781 {
1782 location_t loc = EXPR_LOCATION (*expr_p);
1783 gcc_assert (CONVERT_EXPR_P (*expr_p));
1784
1785 /* Then strip away all but the outermost conversion. */
1786 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1787
1788 /* And remove the outermost conversion if it's useless. */
1789 if (tree_ssa_useless_type_conversion (*expr_p))
1790 *expr_p = TREE_OPERAND (*expr_p, 0);
1791
1792 /* If we still have a conversion at the toplevel,
1793 then canonicalize some constructs. */
1794 if (CONVERT_EXPR_P (*expr_p))
1795 {
1796 tree sub = TREE_OPERAND (*expr_p, 0);
1797
1798 /* If a NOP conversion is changing the type of a COMPONENT_REF
1799 expression, then canonicalize its type now in order to expose more
1800 redundant conversions. */
1801 if (TREE_CODE (sub) == COMPONENT_REF)
1802 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1803
1804 /* If a NOP conversion is changing a pointer to array of foo
1805 to a pointer to foo, embed that change in the ADDR_EXPR. */
1806 else if (TREE_CODE (sub) == ADDR_EXPR)
1807 canonicalize_addr_expr (expr_p);
1808 }
1809
1810 /* If we have a conversion to a non-register type force the
1811 use of a VIEW_CONVERT_EXPR instead. */
1812 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1813 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1814 TREE_OPERAND (*expr_p, 0));
1815
1816 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
1817 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
1818 TREE_SET_CODE (*expr_p, NOP_EXPR);
1819
1820 return GS_OK;
1821 }
1822
1823 /* Nonlocal VLAs seen in the current function. */
1824 static hash_set<tree> *nonlocal_vlas;
1825
1826 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */
1827 static tree nonlocal_vla_vars;
1828
1829 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
1830 DECL_VALUE_EXPR, and it's worth re-examining things. */
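/* For illustration (one common case): a C99 VLA such as
     int a[n];
   is lowered with a synthesized pointer (call it a.ptr), so that
   roughly DECL_VALUE_EXPR (a) == *a.ptr, and every use of A handled
   here is rewritten into that dereference.  */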
1831
1832 static enum gimplify_status
1833 gimplify_var_or_parm_decl (tree *expr_p)
1834 {
1835 tree decl = *expr_p;
1836
1837 /* ??? If this is a local variable, and it has not been seen in any
1838 outer BIND_EXPR, then it's probably the result of a duplicate
1839 declaration, for which we've already issued an error. It would
1840 be really nice if the front end wouldn't leak these at all.
1841 Currently the only known culprit is C++ destructors, as seen
1842 in g++.old-deja/g++.jason/binding.C. */
1843 if (TREE_CODE (decl) == VAR_DECL
1844 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1845 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1846 && decl_function_context (decl) == current_function_decl)
1847 {
1848 gcc_assert (seen_error ());
1849 return GS_ERROR;
1850 }
1851
1852 /* When within an OMP context, notice uses of variables. */
1853 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1854 return GS_ALL_DONE;
1855
1856 /* If the decl is an alias for another expression, substitute it now. */
1857 if (DECL_HAS_VALUE_EXPR_P (decl))
1858 {
1859 tree value_expr = DECL_VALUE_EXPR (decl);
1860
1861 /* For referenced nonlocal VLAs add a decl for debugging purposes
1862 to the current function. */
1863 if (TREE_CODE (decl) == VAR_DECL
1864 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1865 && nonlocal_vlas != NULL
1866 && TREE_CODE (value_expr) == INDIRECT_REF
1867 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1868 && decl_function_context (decl) != current_function_decl)
1869 {
1870 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1871 while (ctx
1872 && (ctx->region_type == ORT_WORKSHARE
1873 || ctx->region_type == ORT_SIMD
1874 || ctx->region_type == ORT_ACC))
1875 ctx = ctx->outer_context;
1876 if (!ctx && !nonlocal_vlas->add (decl))
1877 {
1878 tree copy = copy_node (decl);
1879
1880 lang_hooks.dup_lang_specific_decl (copy);
1881 SET_DECL_RTL (copy, 0);
1882 TREE_USED (copy) = 1;
1883 DECL_CHAIN (copy) = nonlocal_vla_vars;
1884 nonlocal_vla_vars = copy;
1885 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1886 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1887 }
1888 }
1889
1890 *expr_p = unshare_expr (value_expr);
1891 return GS_OK;
1892 }
1893
1894 return GS_ALL_DONE;
1895 }
1896
1897 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
1898
1899 static void
1900 recalculate_side_effects (tree t)
1901 {
1902 enum tree_code code = TREE_CODE (t);
1903 int len = TREE_OPERAND_LENGTH (t);
1904 int i;
1905
1906 switch (TREE_CODE_CLASS (code))
1907 {
1908 case tcc_expression:
1909 switch (code)
1910 {
1911 case INIT_EXPR:
1912 case MODIFY_EXPR:
1913 case VA_ARG_EXPR:
1914 case PREDECREMENT_EXPR:
1915 case PREINCREMENT_EXPR:
1916 case POSTDECREMENT_EXPR:
1917 case POSTINCREMENT_EXPR:
1918 /* All of these have side-effects, no matter what their
1919 operands are. */
1920 return;
1921
1922 default:
1923 break;
1924 }
1925 /* Fall through. */
1926
1927 case tcc_comparison: /* a comparison expression */
1928 case tcc_unary: /* a unary arithmetic expression */
1929 case tcc_binary: /* a binary arithmetic expression */
1930 case tcc_reference: /* a reference */
1931 case tcc_vl_exp: /* a function call */
1932 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1933 for (i = 0; i < len; ++i)
1934 {
1935 tree op = TREE_OPERAND (t, i);
1936 if (op && TREE_SIDE_EFFECTS (op))
1937 TREE_SIDE_EFFECTS (t) = 1;
1938 }
1939 break;
1940
1941 case tcc_constant:
1942 /* No side-effects. */
1943 return;
1944
1945 default:
1946 gcc_unreachable ();
1947 }
1948 }
1949
1950 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1951 node *EXPR_P.
1952
1953 compound_lval
1954 : min_lval '[' val ']'
1955 | min_lval '.' ID
1956 | compound_lval '[' val ']'
1957 | compound_lval '.' ID
1958
1959 This is not part of the original SIMPLE definition, which separates
1960 array and member references, but it seems reasonable to handle them
1961 together. Also, this way we don't run into problems with union
1962 aliasing; gcc requires that for accesses through a union to alias, the
1963 union reference must be explicit, which was not always the case when we
1964 were splitting up array and member refs.
1965
1966 PRE_P points to the sequence where side effects that must happen before
1967 *EXPR_P should be stored.
1968
1969 POST_P points to the sequence where side effects that must happen after
1970 *EXPR_P should be stored. */
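/* For illustration (hypothetical input): for p->a[i].f the loop below
   stacks the refs from outermost (.f) down to innermost (.a); the base
   *p is then gimplified first, followed by the index I, so that side
   effects are emitted in source order.  */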
1971
1972 static enum gimplify_status
1973 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1974 fallback_t fallback)
1975 {
1976 tree *p;
1977 enum gimplify_status ret = GS_ALL_DONE, tret;
1978 int i;
1979 location_t loc = EXPR_LOCATION (*expr_p);
1980 tree expr = *expr_p;
1981
1982 /* Create a stack of the subexpressions so later we can walk them in
1983 order from inner to outer. */
1984 auto_vec<tree, 10> expr_stack;
1985
1986 /* We can handle anything that get_inner_reference can deal with. */
1987 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1988 {
1989 restart:
1990 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1991 if (TREE_CODE (*p) == INDIRECT_REF)
1992 *p = fold_indirect_ref_loc (loc, *p);
1993
1994 if (handled_component_p (*p))
1995 ;
1996 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1997 additional COMPONENT_REFs. */
1998 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1999 && gimplify_var_or_parm_decl (p) == GS_OK)
2000 goto restart;
2001 else
2002 break;
2003
2004 expr_stack.safe_push (*p);
2005 }
2006
2007 gcc_assert (expr_stack.length ());
2008
2009 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2010 walked through and P points to the innermost expression.
2011
2012 Java requires that we elaborate nodes in source order. That
2013 means we must gimplify the inner expression followed by each of
2014 the indices, in order. But we can't gimplify the inner
2015 expression until we handle any variable bounds, sizes, or
2016 positions, so that any PLACEHOLDER_EXPRs in them are resolved first.
2017
2018 So we do this in three steps. First we deal with the annotations
2019 for any variables in the components, then we gimplify the base,
2020 then we gimplify any indices, from left to right. */
2021 for (i = expr_stack.length () - 1; i >= 0; i--)
2022 {
2023 tree t = expr_stack[i];
2024
2025 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2026 {
2027 /* Gimplify the low bound and element type size and put them into
2028 the ARRAY_REF. If these values are set, they have already been
2029 gimplified. */
2030 if (TREE_OPERAND (t, 2) == NULL_TREE)
2031 {
2032 tree low = unshare_expr (array_ref_low_bound (t));
2033 if (!is_gimple_min_invariant (low))
2034 {
2035 TREE_OPERAND (t, 2) = low;
2036 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2037 post_p, is_gimple_reg,
2038 fb_rvalue);
2039 ret = MIN (ret, tret);
2040 }
2041 }
2042 else
2043 {
2044 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2045 is_gimple_reg, fb_rvalue);
2046 ret = MIN (ret, tret);
2047 }
2048
2049 if (TREE_OPERAND (t, 3) == NULL_TREE)
2050 {
2051 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2052 tree elmt_size = unshare_expr (array_ref_element_size (t));
2053 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2054
2055 /* Divide the element size by the alignment of the element
2056 type (above). */
2057 elmt_size
2058 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2059
2060 if (!is_gimple_min_invariant (elmt_size))
2061 {
2062 TREE_OPERAND (t, 3) = elmt_size;
2063 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2064 post_p, is_gimple_reg,
2065 fb_rvalue);
2066 ret = MIN (ret, tret);
2067 }
2068 }
2069 else
2070 {
2071 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2072 is_gimple_reg, fb_rvalue);
2073 ret = MIN (ret, tret);
2074 }
2075 }
2076 else if (TREE_CODE (t) == COMPONENT_REF)
2077 {
2078 /* Set the field offset into T and gimplify it. */
2079 if (TREE_OPERAND (t, 2) == NULL_TREE)
2080 {
2081 tree offset = unshare_expr (component_ref_field_offset (t));
2082 tree field = TREE_OPERAND (t, 1);
2083 tree factor
2084 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2085
2086 /* Divide the offset by its alignment. */
2087 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2088
2089 if (!is_gimple_min_invariant (offset))
2090 {
2091 TREE_OPERAND (t, 2) = offset;
2092 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2093 post_p, is_gimple_reg,
2094 fb_rvalue);
2095 ret = MIN (ret, tret);
2096 }
2097 }
2098 else
2099 {
2100 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2101 is_gimple_reg, fb_rvalue);
2102 ret = MIN (ret, tret);
2103 }
2104 }
2105 }
2106
2107 /* Step 2 is to gimplify the base expression. Make sure fb_lvalue is set
2108 so as to match the min_lval predicate. Failure to do so may result
2109 in the creation of large aggregate temporaries. */
2110 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2111 fallback | fb_lvalue);
2112 ret = MIN (ret, tret);
2113
2114 /* And finally, the indices and operands of ARRAY_REF. During this
2115 loop we also remove any useless conversions. */
2116 for (; expr_stack.length () > 0; )
2117 {
2118 tree t = expr_stack.pop ();
2119
2120 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2121 {
2122 /* Gimplify the dimension. */
2123 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2124 {
2125 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2126 is_gimple_val, fb_rvalue);
2127 ret = MIN (ret, tret);
2128 }
2129 }
2130
2131 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2132
2133 /* The innermost expression P may have originally had
2134 TREE_SIDE_EFFECTS set which would have caused all the outer
2135 expressions in *EXPR_P leading to P to also have had
2136 TREE_SIDE_EFFECTS set. */
2137 recalculate_side_effects (t);
2138 }
2139
2140 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2141 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2142 {
2143 canonicalize_component_ref (expr_p);
2144 }
2145
2146 expr_stack.release ();
2147
2148 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2149
2150 return ret;
2151 }
2152
2153 /* Gimplify the self-modifying expression pointed to by EXPR_P
2154 (prefix or postfix ++ or --).
2155
2156 PRE_P points to the list where side effects that must happen before
2157 *EXPR_P should be stored.
2158
2159 POST_P points to the list where side effects that must happen after
2160 *EXPR_P should be stored.
2161
2162 WANT_VALUE is nonzero iff we want to use the value of this expression
2163 in another expression.
2164
2165 ARITH_TYPE is the type the computation should be performed in. */
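/* For illustration (hypothetical input): with WANT_VALUE true,
     y = x++;
   gimplifies to roughly
     x.0 = x;
     x = x.0 + 1;
     y = x.0;
   where x.0 stands for the temporary holding the saved rvalue.  */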
2166
2167 enum gimplify_status
2168 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2169 bool want_value, tree arith_type)
2170 {
2171 enum tree_code code;
2172 tree lhs, lvalue, rhs, t1;
2173 gimple_seq post = NULL, *orig_post_p = post_p;
2174 bool postfix;
2175 enum tree_code arith_code;
2176 enum gimplify_status ret;
2177 location_t loc = EXPR_LOCATION (*expr_p);
2178
2179 code = TREE_CODE (*expr_p);
2180
2181 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2182 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2183
2184 /* Prefix or postfix? */
2185 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2186 /* Faster to treat as prefix if result is not used. */
2187 postfix = want_value;
2188 else
2189 postfix = false;
2190
2191 /* For postfix, make sure the inner expression's post side effects
2192 are executed after side effects from this expression. */
2193 if (postfix)
2194 post_p = &post;
2195
2196 /* Add or subtract? */
2197 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2198 arith_code = PLUS_EXPR;
2199 else
2200 arith_code = MINUS_EXPR;
2201
2202 /* Gimplify the LHS into a GIMPLE lvalue. */
2203 lvalue = TREE_OPERAND (*expr_p, 0);
2204 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2205 if (ret == GS_ERROR)
2206 return ret;
2207
2208 /* Extract the operands to the arithmetic operation. */
2209 lhs = lvalue;
2210 rhs = TREE_OPERAND (*expr_p, 1);
2211
2212 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2213 that both as the result value and in the postqueue operation. */
2214 if (postfix)
2215 {
2216 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2217 if (ret == GS_ERROR)
2218 return ret;
2219
2220 lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2221 }
2222
2223 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
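  /* For illustration (assuming 4-byte int): for int *p, p++ becomes
     roughly p = p + 4 as a POINTER_PLUS_EXPR with a sizetype offset.
     There is no POINTER_MINUS_EXPR, so p-- adds the negated offset
     instead, via the NEGATE_EXPR below.  */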
2224 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2225 {
2226 rhs = convert_to_ptrofftype_loc (loc, rhs);
2227 if (arith_code == MINUS_EXPR)
2228 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2229 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2230 }
2231 else
2232 t1 = fold_convert (TREE_TYPE (*expr_p),
2233 fold_build2 (arith_code, arith_type,
2234 fold_convert (arith_type, lhs),
2235 fold_convert (arith_type, rhs)));
2236
2237 if (postfix)
2238 {
2239 gimplify_assign (lvalue, t1, pre_p);
2240 gimplify_seq_add_seq (orig_post_p, post);
2241 *expr_p = lhs;
2242 return GS_ALL_DONE;
2243 }
2244 else
2245 {
2246 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2247 return GS_OK;
2248 }
2249 }
2250
2251 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
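/* For illustration: an assignment between two objects of variable-sized
   type has its RHS wrapped as
     WITH_SIZE_EXPR <rhs, byte-size>
   so that gimplify_modify_expr can later lower the copy into a
   __builtin_memcpy of the recorded size.  */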
2252
2253 static void
2254 maybe_with_size_expr (tree *expr_p)
2255 {
2256 tree expr = *expr_p;
2257 tree type = TREE_TYPE (expr);
2258 tree size;
2259
2260 /* If we've already wrapped this or the type is error_mark_node, we can't do
2261 anything. */
2262 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2263 || type == error_mark_node)
2264 return;
2265
2266 /* If the size isn't known or is a constant, we have nothing to do. */
2267 size = TYPE_SIZE_UNIT (type);
2268 if (!size || TREE_CODE (size) == INTEGER_CST)
2269 return;
2270
2271 /* Otherwise, make a WITH_SIZE_EXPR. */
2272 size = unshare_expr (size);
2273 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2274 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2275 }
2276
2277 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
2278 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2279 the CALL_EXPR. */
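/* For illustration (hypothetical call): in f (g (x), big_aggregate),
   the register-typed g (x) is pulled out into a temporary to satisfy
   is_gimple_val, while BIG_AGGREGATE may be passed as a plain lvalue
   with no intermediate copy.  */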
2280
2281 enum gimplify_status
2282 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2283 {
2284 bool (*test) (tree);
2285 fallback_t fb;
2286
2287 /* In general, we allow lvalues for function arguments to avoid
2288 extra overhead of copying large aggregates out of even larger
2289 aggregates into temporaries only to copy the temporaries to
2290 the argument list. Make optimizers happy by pulling out to
2291 temporaries those types that fit in registers. */
2292 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2293 test = is_gimple_val, fb = fb_rvalue;
2294 else
2295 {
2296 test = is_gimple_lvalue, fb = fb_either;
2297 /* Also strip a TARGET_EXPR that would force an extra copy. */
2298 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2299 {
2300 tree init = TARGET_EXPR_INITIAL (*arg_p);
2301 if (init
2302 && !VOID_TYPE_P (TREE_TYPE (init)))
2303 *arg_p = init;
2304 }
2305 }
2306
2307 /* If this is a variable sized type, we must remember the size. */
2308 maybe_with_size_expr (arg_p);
2309
2310 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2311 /* Make sure arguments have the same location as the function call
2312 itself. */
2313 protected_set_expr_location (*arg_p, call_location);
2314
2315 /* There is a sequence point before a function call. Side effects in
2316 the argument list must occur before the actual call. So, when
2317 gimplifying arguments, force gimplify_expr to use an internal
2318 post queue which is then appended to the end of PRE_P. */
2319 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2320 }
2321
2322 /* Don't fold inside offloading or taskreg regions: it can break code by
2323 adding decl references that weren't in the source. We'll do it during
2324 omplower pass instead. */
2325
2326 static bool
2327 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2328 {
2329 struct gimplify_omp_ctx *ctx;
2330 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2331 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
2332 return false;
2333 return fold_stmt (gsi);
2334 }
2335
2336 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2337 WANT_VALUE is true if the result of the call is desired. */
2338
2339 static enum gimplify_status
2340 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2341 {
2342 tree fndecl, parms, p, fnptrtype;
2343 enum gimplify_status ret;
2344 int i, nargs;
2345 gcall *call;
2346 bool builtin_va_start_p = false;
2347 location_t loc = EXPR_LOCATION (*expr_p);
2348
2349 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2350
2351 /* For reliable diagnostics during inlining, it is necessary that
2352 every call_expr be annotated with file and line. */
2353 if (! EXPR_HAS_LOCATION (*expr_p))
2354 SET_EXPR_LOCATION (*expr_p, input_location);
2355
2356 /* Gimplify internal functions created in the FEs. */
2357 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
2358 {
2359 if (want_value)
2360 return GS_ALL_DONE;
2361
2362 nargs = call_expr_nargs (*expr_p);
2363 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
2364 auto_vec<tree> vargs (nargs);
2365
2366 for (i = 0; i < nargs; i++)
2367 {
2368 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2369 EXPR_LOCATION (*expr_p));
2370 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
2371 }
2372 gimple *call = gimple_build_call_internal_vec (ifn, vargs);
2373 gimplify_seq_add_stmt (pre_p, call);
2374 return GS_ALL_DONE;
2375 }
2376
2377 /* This may be a call to a builtin function.
2378
2379 Builtin function calls may be transformed into different
2380 (and more efficient) builtin function calls under certain
2381 circumstances. Unfortunately, gimplification can muck things
2382 up enough that the builtin expanders are not aware that certain
2383 transformations are still valid.
2384
2385 So we attempt transformation/gimplification of the call before
2386 we gimplify the CALL_EXPR. At this time we do not manage to
2387 transform all calls in the same manner as the expanders do, but
2388 we do transform most of them. */
2389 fndecl = get_callee_fndecl (*expr_p);
2390 if (fndecl
2391 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2392 switch (DECL_FUNCTION_CODE (fndecl))
2393 {
2394 case BUILT_IN_ALLOCA:
2395 case BUILT_IN_ALLOCA_WITH_ALIGN:
2396 /* If the call has been built for a variable-sized object, then we
2397 want to restore the stack level when the enclosing BIND_EXPR is
2398 exited to reclaim the allocated space; otherwise, we precisely
2399 need to do the opposite and preserve the latest stack level. */
2400 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
2401 gimplify_ctxp->save_stack = true;
2402 else
2403 gimplify_ctxp->keep_stack = true;
2404 break;
2405
2406 case BUILT_IN_VA_START:
2407 {
2408 builtin_va_start_p = true;
2409 if (call_expr_nargs (*expr_p) < 2)
2410 {
2411 error ("too few arguments to function %<va_start%>");
2412 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2413 return GS_OK;
2414 }
2415
2416 if (fold_builtin_next_arg (*expr_p, true))
2417 {
2418 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2419 return GS_OK;
2420 }
2421 break;
2422 }
2423 case BUILT_IN_LINE:
2424 {
2425 *expr_p = build_int_cst (TREE_TYPE (*expr_p),
2426 LOCATION_LINE (EXPR_LOCATION (*expr_p)));
2427 return GS_OK;
2428 }
2429 case BUILT_IN_FILE:
2430 {
2431 const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
2432 *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
2433 return GS_OK;
2434 }
2435 case BUILT_IN_FUNCTION:
2436 {
2437 const char *function;
2438 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2439 *expr_p = build_string_literal (strlen (function) + 1, function);
2440 return GS_OK;
2441 }
2442 default:
2443 ;
2444 }
2445 if (fndecl && DECL_BUILT_IN (fndecl))
2446 {
2447 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2448 if (new_tree && new_tree != *expr_p)
2449 {
2450 /* There was a transformation of this call which computes the
2451 same value, but in a more efficient way. Return and try
2452 again. */
2453 *expr_p = new_tree;
2454 return GS_OK;
2455 }
2456 }
2457
2458 /* Remember the original function pointer type. */
2459 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2460
2461 /* There is a sequence point before the call, so any side effects in
2462 the calling expression must occur before the actual call. Force
2463 gimplify_expr to use an internal post queue. */
2464 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2465 is_gimple_call_addr, fb_rvalue);
2466
2467 nargs = call_expr_nargs (*expr_p);
2468
2469 /* Get argument types for verification. */
2470 fndecl = get_callee_fndecl (*expr_p);
2471 parms = NULL_TREE;
2472 if (fndecl)
2473 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2474 else
2475 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
2476
2477 if (fndecl && DECL_ARGUMENTS (fndecl))
2478 p = DECL_ARGUMENTS (fndecl);
2479 else if (parms)
2480 p = parms;
2481 else
2482 p = NULL_TREE;
2483 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2484 ;
2485
2486 /* If the last argument is __builtin_va_arg_pack () and it is not
2487 passed as a named argument, decrease the number of CALL_EXPR
2488 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
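/* For illustration (condensed from the documented idiom; names are
   made up):
     static inline __attribute__ ((always_inline)) int
     my_printf (const char *fmt, ...)
     {
       return printf (fmt, __builtin_va_arg_pack ());
     }
   The trailing __builtin_va_arg_pack () call is removed from printf's
   argument list here and CALL_EXPR_VA_ARG_PACK is set instead.  */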
2489 if (!p
2490 && i < nargs
2491 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2492 {
2493 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2494 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2495
2496 if (last_arg_fndecl
2497 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2498 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2499 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2500 {
2501 tree call = *expr_p;
2502
2503 --nargs;
2504 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2505 CALL_EXPR_FN (call),
2506 nargs, CALL_EXPR_ARGP (call));
2507
2508 /* Copy all CALL_EXPR flags, location and block, except
2509 CALL_EXPR_VA_ARG_PACK flag. */
2510 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2511 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2512 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2513 = CALL_EXPR_RETURN_SLOT_OPT (call);
2514 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2515 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2516
2517 /* Set CALL_EXPR_VA_ARG_PACK. */
2518 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2519 }
2520 }
2521
2522 /* Gimplify the function arguments. */
2523 if (nargs > 0)
2524 {
2525 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2526 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2527 PUSH_ARGS_REVERSED ? i-- : i++)
2528 {
2529 enum gimplify_status t;
2530
2531 /* Avoid gimplifying the second argument to va_start, which needs to
2532 be the plain PARM_DECL. */
2533 if ((i != 1) || !builtin_va_start_p)
2534 {
2535 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2536 EXPR_LOCATION (*expr_p));
2537
2538 if (t == GS_ERROR)
2539 ret = GS_ERROR;
2540 }
2541 }
2542 }
2543
2544 /* Gimplify the static chain. */
2545 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
2546 {
2547 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
2548 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
2549 else
2550 {
2551 enum gimplify_status t;
2552 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
2553 EXPR_LOCATION (*expr_p));
2554 if (t == GS_ERROR)
2555 ret = GS_ERROR;
2556 }
2557 }
2558
2559 /* Verify the function result. */
2560 if (want_value && fndecl
2561 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2562 {
2563 error_at (loc, "using result of function returning %<void%>");
2564 ret = GS_ERROR;
2565 }
2566
2567 /* Try this again in case gimplification exposed something. */
2568 if (ret != GS_ERROR)
2569 {
2570 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2571
2572 if (new_tree && new_tree != *expr_p)
2573 {
2574 /* There was a transformation of this call which computes the
2575 same value, but in a more efficient way. Return and try
2576 again. */
2577 *expr_p = new_tree;
2578 return GS_OK;
2579 }
2580 }
2581 else
2582 {
2583 *expr_p = error_mark_node;
2584 return GS_ERROR;
2585 }
2586
2587 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2588 call. This allows us to eliminate redundant or useless
2589 calls to "const" functions. */
2590 if (TREE_CODE (*expr_p) == CALL_EXPR)
2591 {
2592 int flags = call_expr_flags (*expr_p);
2593 if (flags & (ECF_CONST | ECF_PURE)
2594 /* An infinite loop is considered a side effect. */
2595 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2596 TREE_SIDE_EFFECTS (*expr_p) = 0;
2597 }
2598
2599 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2600 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2601 form and delegate the creation of a GIMPLE_CALL to
2602 gimplify_modify_expr. This is always possible because when
2603 WANT_VALUE is true, the caller wants the result of this call into
2604 a temporary, which means that we will emit an INIT_EXPR in
2605 internal_get_tmp_var which will then be handled by
2606 gimplify_modify_expr. */
2607 if (!want_value)
2608 {
2609 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2610 have to do is replicate it as a GIMPLE_CALL tuple. */
2611 gimple_stmt_iterator gsi;
2612 call = gimple_build_call_from_tree (*expr_p);
2613 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2614 notice_special_calls (call);
2615 gimplify_seq_add_stmt (pre_p, call);
2616 gsi = gsi_last (*pre_p);
2617 maybe_fold_stmt (&gsi);
2618 *expr_p = NULL_TREE;
2619 }
2620 else
2621 /* Remember the original function type. */
2622 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2623 CALL_EXPR_FN (*expr_p));
2624
2625 return ret;
2626 }
2627
2628 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2629 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2630
2631 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2632 condition is true or false, respectively. If null, we should generate
2633 our own to skip over the evaluation of this specific expression.
2634
2635 LOCUS is the source location of the COND_EXPR.
2636
2637 This function is the tree equivalent of do_jump.
2638
2639 shortcut_cond_r should only be called by shortcut_cond_expr. */
2640
2641 static tree
2642 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2643 location_t locus)
2644 {
2645 tree local_label = NULL_TREE;
2646 tree t, expr = NULL;
2647
2648 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2649 retain the shortcut semantics. Just insert the gotos here;
2650 shortcut_cond_expr will append the real blocks later. */
2651 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2652 {
2653 location_t new_locus;
2654
2655 /* Turn if (a && b) into
2656
2657 if (a); else goto no;
2658 if (b) goto yes; else goto no;
2659 (no:) */
2660
2661 if (false_label_p == NULL)
2662 false_label_p = &local_label;
2663
2664 /* Keep the original source location on the first 'if'. */
2665 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2666 append_to_statement_list (t, &expr);
2667
2668 /* Set the source location of the && on the second 'if'. */
2669 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2670 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2671 new_locus);
2672 append_to_statement_list (t, &expr);
2673 }
2674 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2675 {
2676 location_t new_locus;
2677
2678 /* Turn if (a || b) into
2679
2680 if (a) goto yes;
2681 if (b) goto yes; else goto no;
2682 (yes:) */
2683
2684 if (true_label_p == NULL)
2685 true_label_p = &local_label;
2686
2687 /* Keep the original source location on the first 'if'. */
2688 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2689 append_to_statement_list (t, &expr);
2690
2691 /* Set the source location of the || on the second 'if'. */
2692 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2693 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2694 new_locus);
2695 append_to_statement_list (t, &expr);
2696 }
2697 else if (TREE_CODE (pred) == COND_EXPR
2698 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2699 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2700 {
2701 location_t new_locus;
2702
2703 /* As long as we're messing with gotos, turn if (a ? b : c) into
2704 if (a)
2705 if (b) goto yes; else goto no;
2706 else
2707 if (c) goto yes; else goto no;
2708
2709 Don't do this if one of the arms has void type, which can happen
2710 in C++ when the arm is a throw. */
2711
2712 /* Keep the original source location on the first 'if'. Set the source
2713 location of the ? on the second 'if'. */
2714 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2715 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2716 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2717 false_label_p, locus),
2718 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2719 false_label_p, new_locus));
2720 }
2721 else
2722 {
2723 expr = build3 (COND_EXPR, void_type_node, pred,
2724 build_and_jump (true_label_p),
2725 build_and_jump (false_label_p));
2726 SET_EXPR_LOCATION (expr, locus);
2727 }
2728
2729 if (local_label)
2730 {
2731 t = build1 (LABEL_EXPR, void_type_node, local_label);
2732 append_to_statement_list (t, &expr);
2733 }
2734
2735 return expr;
2736 }
2737
2738 /* Given a conditional expression EXPR with short-circuit boolean
2739 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2740 predicate apart into the equivalent sequence of conditionals. */
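/* For illustration (hypothetical input):
     if (a && b) c (); else d ();
   is rewritten into roughly
     if (a); else goto no;
     if (b); else goto no;
     c (); goto end;
     no: d ();
     end: ;  */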
2741
2742 static tree
2743 shortcut_cond_expr (tree expr)
2744 {
2745 tree pred = TREE_OPERAND (expr, 0);
2746 tree then_ = TREE_OPERAND (expr, 1);
2747 tree else_ = TREE_OPERAND (expr, 2);
2748 tree true_label, false_label, end_label, t;
2749 tree *true_label_p;
2750 tree *false_label_p;
2751 bool emit_end, emit_false, jump_over_else;
2752 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2753 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2754
2755 /* First do simple transformations. */
2756 if (!else_se)
2757 {
2758 /* If there is no 'else', turn
2759 if (a && b) then c
2760 into
2761 if (a) if (b) then c. */
2762 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2763 {
2764 /* Keep the original source location on the first 'if'. */
2765 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2766 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2767 /* Set the source location of the && on the second 'if'. */
2768 if (EXPR_HAS_LOCATION (pred))
2769 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2770 then_ = shortcut_cond_expr (expr);
2771 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2772 pred = TREE_OPERAND (pred, 0);
2773 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2774 SET_EXPR_LOCATION (expr, locus);
2775 }
2776 }
2777
2778 if (!then_se)
2779 {
2780 /* If there is no 'then', turn
2781 if (a || b); else d
2782 into
2783 if (a); else if (b); else d. */
2784 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2785 {
2786 /* Keep the original source location on the first 'if'. */
2787 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2788 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2789 /* Set the source location of the || on the second 'if'. */
2790 if (EXPR_HAS_LOCATION (pred))
2791 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2792 else_ = shortcut_cond_expr (expr);
2793 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2794 pred = TREE_OPERAND (pred, 0);
2795 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2796 SET_EXPR_LOCATION (expr, locus);
2797 }
2798 }
2799
2800 /* If we're done, great. */
2801 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2802 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2803 return expr;
2804
2805 /* Otherwise we need to mess with gotos. Change
2806 if (a) c; else d;
2807 to
2808 if (a); else goto no;
2809 c; goto end;
2810 no: d; end:
2811 and recursively gimplify the condition. */
2812
2813 true_label = false_label = end_label = NULL_TREE;
2814
2815 /* If our arms just jump somewhere, hijack those labels so we don't
2816 generate jumps to jumps. */
2817
2818 if (then_
2819 && TREE_CODE (then_) == GOTO_EXPR
2820 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2821 {
2822 true_label = GOTO_DESTINATION (then_);
2823 then_ = NULL;
2824 then_se = false;
2825 }
2826
2827 if (else_
2828 && TREE_CODE (else_) == GOTO_EXPR
2829 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2830 {
2831 false_label = GOTO_DESTINATION (else_);
2832 else_ = NULL;
2833 else_se = false;
2834 }
2835
2836 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2837 if (true_label)
2838 true_label_p = &true_label;
2839 else
2840 true_label_p = NULL;
2841
2842 /* The 'else' branch also needs a label if it contains interesting code. */
2843 if (false_label || else_se)
2844 false_label_p = &false_label;
2845 else
2846 false_label_p = NULL;
2847
2848 /* If there was nothing else in our arms, just forward the label(s). */
2849 if (!then_se && !else_se)
2850 return shortcut_cond_r (pred, true_label_p, false_label_p,
2851 EXPR_LOC_OR_LOC (expr, input_location));
2852
2853 /* If our last subexpression already has a terminal label, reuse it. */
2854 if (else_se)
2855 t = expr_last (else_);
2856 else if (then_se)
2857 t = expr_last (then_);
2858 else
2859 t = NULL;
2860 if (t && TREE_CODE (t) == LABEL_EXPR)
2861 end_label = LABEL_EXPR_LABEL (t);
2862
2863 /* If we don't care about jumping to the 'else' branch, jump to the end
2864 if the condition is false. */
2865 if (!false_label_p)
2866 false_label_p = &end_label;
2867
2868 /* We only want to emit these labels if we aren't hijacking them. */
2869 emit_end = (end_label == NULL_TREE);
2870 emit_false = (false_label == NULL_TREE);
2871
2872 /* We only emit the jump over the else clause if we have to--if the
2873 then clause may fall through. Otherwise we can wind up with a
2874 useless jump and a useless label at the end of gimplified code,
2875 which will cause us to think that this conditional as a whole
2876 falls through even if it doesn't. If we then inline a function
2877 which ends with such a condition, that can cause us to issue an
2878 inappropriate warning about control reaching the end of a
2879 non-void function. */
2880 jump_over_else = block_may_fallthru (then_);
2881
2882 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2883 EXPR_LOC_OR_LOC (expr, input_location));
2884
2885 expr = NULL;
2886 append_to_statement_list (pred, &expr);
2887
2888 append_to_statement_list (then_, &expr);
2889 if (else_se)
2890 {
2891 if (jump_over_else)
2892 {
2893 tree last = expr_last (expr);
2894 t = build_and_jump (&end_label);
2895 if (EXPR_HAS_LOCATION (last))
2896 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2897 append_to_statement_list (t, &expr);
2898 }
2899 if (emit_false)
2900 {
2901 t = build1 (LABEL_EXPR, void_type_node, false_label);
2902 append_to_statement_list (t, &expr);
2903 }
2904 append_to_statement_list (else_, &expr);
2905 }
2906 if (emit_end && end_label)
2907 {
2908 t = build1 (LABEL_EXPR, void_type_node, end_label);
2909 append_to_statement_list (t, &expr);
2910 }
2911
2912 return expr;
2913 }
2914
2915 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
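/* For illustration: an integral condition such as (a & 4) gets wrapped
   in a conversion to boolean_type_node (roughly (a & 4) != 0 once
   folded), whereas a comparison like a < b merely has its TREE_TYPE
   retargeted to BOOLEAN_TYPE.  */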
2916
2917 tree
2918 gimple_boolify (tree expr)
2919 {
2920 tree type = TREE_TYPE (expr);
2921 location_t loc = EXPR_LOCATION (expr);
2922
2923 if (TREE_CODE (expr) == NE_EXPR
2924 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2925 && integer_zerop (TREE_OPERAND (expr, 1)))
2926 {
2927 tree call = TREE_OPERAND (expr, 0);
2928 tree fn = get_callee_fndecl (call);
2929
2930 /* For __builtin_expect ((long) (x), y) recurse into x as well
2931 if x is truth_value_p. */
2932 if (fn
2933 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2934 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2935 && call_expr_nargs (call) == 2)
2936 {
2937 tree arg = CALL_EXPR_ARG (call, 0);
2938 if (arg)
2939 {
2940 if (TREE_CODE (arg) == NOP_EXPR
2941 && TREE_TYPE (arg) == TREE_TYPE (call))
2942 arg = TREE_OPERAND (arg, 0);
2943 if (truth_value_p (TREE_CODE (arg)))
2944 {
2945 arg = gimple_boolify (arg);
2946 CALL_EXPR_ARG (call, 0)
2947 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2948 }
2949 }
2950 }
2951 }
2952
2953 switch (TREE_CODE (expr))
2954 {
2955 case TRUTH_AND_EXPR:
2956 case TRUTH_OR_EXPR:
2957 case TRUTH_XOR_EXPR:
2958 case TRUTH_ANDIF_EXPR:
2959 case TRUTH_ORIF_EXPR:
2960 /* Also boolify the arguments of truth exprs. */
2961 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2962 /* FALLTHRU */
2963
2964 case TRUTH_NOT_EXPR:
2965 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2966
2967 /* These expressions always produce boolean results. */
2968 if (TREE_CODE (type) != BOOLEAN_TYPE)
2969 TREE_TYPE (expr) = boolean_type_node;
2970 return expr;
2971
2972 case ANNOTATE_EXPR:
2973 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
2974 {
2975 case annot_expr_ivdep_kind:
2976 case annot_expr_no_vector_kind:
2977 case annot_expr_vector_kind:
2978 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2979 if (TREE_CODE (type) != BOOLEAN_TYPE)
2980 TREE_TYPE (expr) = boolean_type_node;
2981 return expr;
2982 default:
2983 gcc_unreachable ();
2984 }
2985
2986 default:
2987 if (COMPARISON_CLASS_P (expr))
2988 {
2989 /* These expressions always produce boolean results. */
2990 if (TREE_CODE (type) != BOOLEAN_TYPE)
2991 TREE_TYPE (expr) = boolean_type_node;
2992 return expr;
2993 }
2994 /* Other expressions that get here must have boolean values, but
2995 might need to be converted to the appropriate mode. */
2996 if (TREE_CODE (type) == BOOLEAN_TYPE)
2997 return expr;
2998 return fold_convert_loc (loc, boolean_type_node, expr);
2999 }
3000 }
3001
3002 /* Given a conditional expression *EXPR_P without side effects, gimplify
3003 its operands. New statements are inserted to PRE_P. */
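/* For illustration (hypothetical input): for
     x = p ? a + 1 : b - 1;
   with side-effect-free, non-trapping arms, both arms are gimplified
   up front and the COND_EXPR itself survives as a value, roughly
     x = p ? t1 : t2;  */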
3004
3005 static enum gimplify_status
3006 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3007 {
3008 tree expr = *expr_p, cond;
3009 enum gimplify_status ret, tret;
3010 enum tree_code code;
3011
3012 cond = gimple_boolify (COND_EXPR_COND (expr));
3013
3014 /* We need to handle && and || specially, as their gimplification
3015 creates pure COND_EXPRs, which would otherwise lead to an infinite cycle. */
3016 code = TREE_CODE (cond);
3017 if (code == TRUTH_ANDIF_EXPR)
3018 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3019 else if (code == TRUTH_ORIF_EXPR)
3020 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3021 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3022 COND_EXPR_COND (*expr_p) = cond;
3023
3024 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3025 is_gimple_val, fb_rvalue);
3026 ret = MIN (ret, tret);
3027 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3028 is_gimple_val, fb_rvalue);
3029
3030 return MIN (ret, tret);
3031 }
3032
3033 /* Return true if evaluating EXPR could trap.
3034 EXPR is GENERIC, while tree_could_trap_p can be called
3035 only on GIMPLE. */
3036
3037 static bool
3038 generic_expr_could_trap_p (tree expr)
3039 {
3040 unsigned i, n;
3041
3042 if (!expr || is_gimple_val (expr))
3043 return false;
3044
3045 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3046 return true;
3047
3048 n = TREE_OPERAND_LENGTH (expr);
3049 for (i = 0; i < n; i++)
3050 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3051 return true;
3052
3053 return false;
3054 }
3055
3056 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
3057 into
3058
3059 if (p)                      if (p)
3060 t1 = a;                       a;
3061 else               or       else
3062 t1 = b;                       b;
3063 t1;
3064
3065 The second form is used when *EXPR_P is of type void.
3066
3067 PRE_P points to the list where side effects that must happen before
3068 *EXPR_P should be stored. */
3069
3070 static enum gimplify_status
3071 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3072 {
3073 tree expr = *expr_p;
3074 tree type = TREE_TYPE (expr);
3075 location_t loc = EXPR_LOCATION (expr);
3076 tree tmp, arm1, arm2;
3077 enum gimplify_status ret;
3078 tree label_true, label_false, label_cont;
3079 bool have_then_clause_p, have_else_clause_p;
3080 gcond *cond_stmt;
3081 enum tree_code pred_code;
3082 gimple_seq seq = NULL;
3083
3084 /* If this COND_EXPR has a value, copy the values into a temporary within
3085 the arms. */
3086 if (!VOID_TYPE_P (type))
3087 {
3088 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3089 tree result;
3090
3091 /* If either an rvalue is ok or we do not require an lvalue, create the
3092 temporary. But we cannot do that if the type is addressable. */
3093 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3094 && !TREE_ADDRESSABLE (type))
3095 {
3096 if (gimplify_ctxp->allow_rhs_cond_expr
3097 /* If either branch has side effects or could trap, it can't be
3098 evaluated unconditionally. */
3099 && !TREE_SIDE_EFFECTS (then_)
3100 && !generic_expr_could_trap_p (then_)
3101 && !TREE_SIDE_EFFECTS (else_)
3102 && !generic_expr_could_trap_p (else_))
3103 return gimplify_pure_cond_expr (expr_p, pre_p);
3104
3105 tmp = create_tmp_var (type, "iftmp");
3106 result = tmp;
3107 }
3108
3109 /* Otherwise, only create and copy references to the values. */
3110 else
3111 {
3112 type = build_pointer_type (type);
3113
3114 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3115 then_ = build_fold_addr_expr_loc (loc, then_);
3116
3117 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3118 else_ = build_fold_addr_expr_loc (loc, else_);
3119
3120 expr
3121 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3122
3123 tmp = create_tmp_var (type, "iftmp");
3124 result = build_simple_mem_ref_loc (loc, tmp);
3125 }
3126
3127 /* Build the new then clause, `tmp = then_;'. But don't build the
3128 assignment if the value is void; in C++ that can happen when the arm is a throw. */
3129 if (!VOID_TYPE_P (TREE_TYPE (then_)))
3130 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3131
3132 /* Similarly, build the new else clause, `tmp = else_;'. */
3133 if (!VOID_TYPE_P (TREE_TYPE (else_)))
3134 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3135
3136 TREE_TYPE (expr) = void_type_node;
3137 recalculate_side_effects (expr);
3138
3139 /* Move the COND_EXPR to the prequeue. */
3140 gimplify_stmt (&expr, pre_p);
3141
3142 *expr_p = result;
3143 return GS_ALL_DONE;
3144 }
3145
3146 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
3147 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3148 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3149 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3150
3151 /* Make sure the condition has BOOLEAN_TYPE. */
3152 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3153
3154 /* Break apart && and || conditions. */
3155 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3156 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3157 {
3158 expr = shortcut_cond_expr (expr);
3159
3160 if (expr != *expr_p)
3161 {
3162 *expr_p = expr;
3163
3164 /* We can't rely on gimplify_expr to re-gimplify the expanded
3165 form properly, as cleanups might cause the target labels to be
3166 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3167 set up a conditional context. */
3168 gimple_push_condition ();
3169 gimplify_stmt (expr_p, &seq);
3170 gimple_pop_condition (pre_p);
3171 gimple_seq_add_seq (pre_p, seq);
3172
3173 return GS_ALL_DONE;
3174 }
3175 }
3176
3177 /* Now do the normal gimplification. */
3178
3179 /* Gimplify condition. */
3180 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3181 fb_rvalue);
3182 if (ret == GS_ERROR)
3183 return GS_ERROR;
3184 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3185
3186 gimple_push_condition ();
3187
3188 have_then_clause_p = have_else_clause_p = false;
3189 if (TREE_OPERAND (expr, 1) != NULL
3190 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3191 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3192 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3193 == current_function_decl)
3194 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3195 have different locations, otherwise we end up with incorrect
3196 location information on the branches. */
3197 && (optimize
3198 || !EXPR_HAS_LOCATION (expr)
3199 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3200 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3201 {
3202 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3203 have_then_clause_p = true;
3204 }
3205 else
3206 label_true = create_artificial_label (UNKNOWN_LOCATION);
3207 if (TREE_OPERAND (expr, 2) != NULL
3208 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3209 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3210 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3211 == current_function_decl)
3212 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3213 have different locations, otherwise we end up with incorrect
3214 location information on the branches. */
3215 && (optimize
3216 || !EXPR_HAS_LOCATION (expr)
3217 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3218 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3219 {
3220 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3221 have_else_clause_p = true;
3222 }
3223 else
3224 label_false = create_artificial_label (UNKNOWN_LOCATION);
3225
3226 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3227 &arm2);
3228 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
3229 label_false);
3230 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
3231 gimplify_seq_add_stmt (&seq, cond_stmt);
3232 gimple_stmt_iterator gsi = gsi_last (seq);
3233 maybe_fold_stmt (&gsi);
3234
3235 label_cont = NULL_TREE;
3236 if (!have_then_clause_p)
3237 {
3238 /* For if (...) {} else { code; } put label_true after
3239 the else block. */
3240 if (TREE_OPERAND (expr, 1) == NULL_TREE
3241 && !have_else_clause_p
3242 && TREE_OPERAND (expr, 2) != NULL_TREE)
3243 label_cont = label_true;
3244 else
3245 {
3246 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3247 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3248 /* For if (...) { code; } else {} or
3249 if (...) { code; } else goto label; or
3250 if (...) { code; return; } else { ... }
3251 label_cont isn't needed. */
3252 if (!have_else_clause_p
3253 && TREE_OPERAND (expr, 2) != NULL_TREE
3254 && gimple_seq_may_fallthru (seq))
3255 {
3256 gimple *g;
3257 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3258
3259 g = gimple_build_goto (label_cont);
3260
3261 /* GIMPLE_COND's are very low level; they have embedded
3262 gotos. This particular embedded goto should not be marked
3263 with the location of the original COND_EXPR, as it would
3264 correspond to the COND_EXPR's condition, not the ELSE or the
3265 THEN arms. To avoid marking it with the wrong location, flag
3266 it as "no location". */
3267 gimple_set_do_not_emit_location (g);
3268
3269 gimplify_seq_add_stmt (&seq, g);
3270 }
3271 }
3272 }
3273 if (!have_else_clause_p)
3274 {
3275 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3276 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3277 }
3278 if (label_cont)
3279 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3280
3281 gimple_pop_condition (pre_p);
3282 gimple_seq_add_seq (pre_p, seq);
3283
3284 if (ret == GS_ERROR)
3285 ; /* Do nothing. */
3286 else if (have_then_clause_p || have_else_clause_p)
3287 ret = GS_ALL_DONE;
3288 else
3289 {
3290 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3291 expr = TREE_OPERAND (expr, 0);
3292 gimplify_stmt (&expr, pre_p);
3293 }
3294
3295 *expr_p = NULL;
3296 return ret;
3297 }
3298
3299 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3300 to be marked addressable.
3301
3302 We cannot rely on such an expression being directly markable if a temporary
3303 has been created by the gimplification. In this case, we create another
3304 temporary and initialize it with a copy, which will become a store after we
3305 mark it addressable. This can happen if the front-end passed us something
3306 that it could not mark addressable yet, like a Fortran pass-by-reference
3307 parameter (int) floatvar. */
3308
3309 static void
3310 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3311 {
3312 while (handled_component_p (*expr_p))
3313 expr_p = &TREE_OPERAND (*expr_p, 0);
3314 if (is_gimple_reg (*expr_p))
3315 {
3316 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3317 DECL_GIMPLE_REG_P (var) = 0;
3318 *expr_p = var;
3319 }
3320 }
3321
3322 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3323 a call to __builtin_memcpy. */
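/* For illustration: a variable-sized assignment a = b, whose RHS was
   wrapped in a WITH_SIZE_EXPR carrying SIZE, is lowered to roughly
     __builtin_memcpy (&a, &b, size);
   after both operands have been marked addressable.  */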
3324
3325 static enum gimplify_status
3326 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3327 gimple_seq *seq_p)
3328 {
3329 tree t, to, to_ptr, from, from_ptr;
3330 gcall *gs;
3331 location_t loc = EXPR_LOCATION (*expr_p);
3332
3333 to = TREE_OPERAND (*expr_p, 0);
3334 from = TREE_OPERAND (*expr_p, 1);
3335
3336 /* Mark the RHS addressable. Beware that it may not be possible to do so
3337 directly if a temporary has been created by the gimplification. */
3338 prepare_gimple_addressable (&from, seq_p);
3339
3340 mark_addressable (from);
3341 from_ptr = build_fold_addr_expr_loc (loc, from);
3342 gimplify_arg (&from_ptr, seq_p, loc);
3343
3344 mark_addressable (to);
3345 to_ptr = build_fold_addr_expr_loc (loc, to);
3346 gimplify_arg (&to_ptr, seq_p, loc);
3347
3348 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3349
3350 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3351
3352 if (want_value)
3353 {
3354 /* tmp = memcpy() */
3355 t = create_tmp_var (TREE_TYPE (to_ptr));
3356 gimple_call_set_lhs (gs, t);
3357 gimplify_seq_add_stmt (seq_p, gs);
3358
3359 *expr_p = build_simple_mem_ref (t);
3360 return GS_ALL_DONE;
3361 }
3362
3363 gimplify_seq_add_stmt (seq_p, gs);
3364 *expr_p = NULL;
3365 return GS_ALL_DONE;
3366 }
3367
3368 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3369 a call to __builtin_memset. In this case we know that the RHS is
3370 a CONSTRUCTOR with an empty element list. */
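/* For illustration: clearing a variable-sized object X by assigning it
   an empty CONSTRUCTOR is lowered to roughly
     __builtin_memset (&x, 0, size);  */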
3371
3372 static enum gimplify_status
3373 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3374 gimple_seq *seq_p)
3375 {
3376 tree t, from, to, to_ptr;
3377 gcall *gs;
3378 location_t loc = EXPR_LOCATION (*expr_p);
3379
3380 /* Assert our assumptions, to abort instead of producing wrong code
3381 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3382 not be immediately exposed. */
3383 from = TREE_OPERAND (*expr_p, 1);
3384 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3385 from = TREE_OPERAND (from, 0);
3386
3387 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3388 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3389
3390 /* Now proceed. */
3391 to = TREE_OPERAND (*expr_p, 0);
3392
3393 to_ptr = build_fold_addr_expr_loc (loc, to);
3394 gimplify_arg (&to_ptr, seq_p, loc);
3395 t = builtin_decl_implicit (BUILT_IN_MEMSET);
3396
3397 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3398
3399 if (want_value)
3400 {
3401 /* tmp = memset() */
3402 t = create_tmp_var (TREE_TYPE (to_ptr));
3403 gimple_call_set_lhs (gs, t);
3404 gimplify_seq_add_stmt (seq_p, gs);
3405
3406 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3407 return GS_ALL_DONE;
3408 }
3409
3410 gimplify_seq_add_stmt (seq_p, gs);
3411 *expr_p = NULL;
3412 return GS_ALL_DONE;
3413 }
3414
3415 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3416 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3417 assignment. Return non-null if we detect a potential overlap. */
3418
3419 struct gimplify_init_ctor_preeval_data
3420 {
3421 /* The base decl of the lhs object. May be NULL, in which case we
3422 have to assume the lhs is indirect. */
3423 tree lhs_base_decl;
3424
3425 /* The alias set of the lhs object. */
3426 alias_set_type lhs_alias_set;
3427 };
3428
3429 static tree
3430 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3431 {
3432 struct gimplify_init_ctor_preeval_data *data
3433 = (struct gimplify_init_ctor_preeval_data *) xdata;
3434 tree t = *tp;
3435
3436 /* If we find the base object, obviously we have overlap. */
3437 if (data->lhs_base_decl == t)
3438 return t;
3439
3440 /* If the constructor component is indirect, determine if we have a
3441 potential overlap with the lhs. The only bits of information we
3442 have to go on at this point are addressability and alias sets. */
3443 if ((INDIRECT_REF_P (t)
3444 || TREE_CODE (t) == MEM_REF)
3445 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3446 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3447 return t;
3448
3449 /* If the constructor component is a call, determine if it can hide a
3450 potential overlap with the lhs through an INDIRECT_REF like above.
3451 ??? Ugh - this is completely broken. In fact this whole analysis
3452 doesn't look conservative. */
3453 if (TREE_CODE (t) == CALL_EXPR)
3454 {
3455 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3456
3457 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3458 if (POINTER_TYPE_P (TREE_VALUE (type))
3459 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3460 && alias_sets_conflict_p (data->lhs_alias_set,
3461 get_alias_set
3462 (TREE_TYPE (TREE_VALUE (type)))))
3463 return t;
3464 }
3465
3466 if (IS_TYPE_OR_DECL_P (t))
3467 *walk_subtrees = 0;
3468 return NULL;
3469 }
3470
3471 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3472 force values that overlap with the lhs (as described by *DATA)
3473 into temporaries. */
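/* For illustration (hypothetical input): in
     a = (struct S) { .x = a.y, .y = 0 };
   the read of a.y overlaps the object being stored to, so its value is
   forced into a temporary before the member stores are emitted.  */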
3474
3475 static void
3476 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3477 struct gimplify_init_ctor_preeval_data *data)
3478 {
3479 enum gimplify_status one;
3480
3481 /* If the value is constant, then there's nothing to pre-evaluate. */
3482 if (TREE_CONSTANT (*expr_p))
3483 {
3484 /* Ensure it does not have side effects, it might contain a reference to
3485 the object we're initializing. */
3486 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3487 return;
3488 }
3489
3490 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3491 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3492 return;
3493
3494 /* Recurse for nested constructors. */
3495 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3496 {
3497 unsigned HOST_WIDE_INT ix;
3498 constructor_elt *ce;
3499 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3500
3501 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3502 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3503
3504 return;
3505 }
3506
3507 /* If this is a variable sized type, we must remember the size. */
3508 maybe_with_size_expr (expr_p);
3509
3510 /* Gimplify the constructor element to something appropriate for the rhs
3511 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3512 the gimplifier will consider this a store to memory. Doing this
3513 gimplification now means that we won't have to deal with complicated
3514 language-specific trees, nor trees like SAVE_EXPR that can induce
3515 exponential search behavior. */
3516 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3517 if (one == GS_ERROR)
3518 {
3519 *expr_p = NULL;
3520 return;
3521 }
3522
3523 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3524 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3525 always be true for all scalars, since is_gimple_mem_rhs insists on a
3526 temporary variable for them. */
3527 if (DECL_P (*expr_p))
3528 return;
3529
3530 /* If this is of variable size, we have no choice but to assume it doesn't
3531 overlap since we can't make a temporary for it. */
3532 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3533 return;
3534
3535 /* Otherwise, we must search for overlap ... */
3536 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3537 return;
3538
3539 /* ... and if found, force the value into a temporary. */
3540 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3541 }
3542
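/* For illustration, a hypothetical sketch (the names are made up):
   given

     struct S { int x, y; } s;
     s = (struct S) { s.y, s.x };

   the constructor elements s.y and s.x overlap the lhs, so the walk
   above detects them and preevaluation forces the values into
   temporaries before any store into s is emitted, roughly

     t1 = s.y;
     t2 = s.x;
     s.x = t1;
     s.y = t2;  */
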
3543 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3544 a RANGE_EXPR in a CONSTRUCTOR for an array.
3545
3546 var = lower;
3547 loop_entry:
3548 object[var] = value;
3549 if (var == upper)
3550 goto loop_exit;
3551 var = var + 1;
3552 goto loop_entry;
3553 loop_exit:
3554
3555 We increment var _after_ the loop exit check because we might otherwise
3556 fail if upper == TYPE_MAX_VALUE (type for upper).
3557
3558 Note that we never have to deal with SAVE_EXPRs here, because this has
3559 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3560
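/* As a hedged example: the GNU C range designator in

     int a[16] = { [4 ... 11] = 7 };

   reaches gimplify_init_ctor_eval_range with lower == 4 and
   upper == 11 and is lowered into a loop of the shape sketched
   above.  */
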
3561 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3562 gimple_seq *, bool);
3563
3564 static void
3565 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3566 tree value, tree array_elt_type,
3567 gimple_seq *pre_p, bool cleared)
3568 {
3569 tree loop_entry_label, loop_exit_label, fall_thru_label;
3570 tree var, var_type, cref, tmp;
3571
3572 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3573 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3574 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3575
3576 /* Create and initialize the index variable. */
3577 var_type = TREE_TYPE (upper);
3578 var = create_tmp_var (var_type);
3579 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3580
3581 /* Add the loop entry label. */
3582 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3583
3584 /* Build the reference. */
3585 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3586 var, NULL_TREE, NULL_TREE);
3587
3588 /* If the value is a constructor, just call gimplify_init_ctor_eval to do
3589 the store. Otherwise just assign the value to the reference. */
3590
3591 if (TREE_CODE (value) == CONSTRUCTOR)
3592 /* NB we might have to call ourselves recursively through
3593 gimplify_init_ctor_eval if the value is a constructor. */
3594 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3595 pre_p, cleared);
3596 else
3597 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3598
3599 /* We exit the loop when the index var is equal to the upper bound. */
3600 gimplify_seq_add_stmt (pre_p,
3601 gimple_build_cond (EQ_EXPR, var, upper,
3602 loop_exit_label, fall_thru_label));
3603
3604 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3605
3606 /* Otherwise, increment the index var... */
3607 tmp = build2 (PLUS_EXPR, var_type, var,
3608 fold_convert (var_type, integer_one_node));
3609 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3610
3611 /* ...and jump back to the loop entry. */
3612 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3613
3614 /* Add the loop exit label. */
3615 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3616 }
3617
3618 /* Return true if FDECL is a FIELD_DECL of zero size. */
3619
3620 static bool
3621 zero_sized_field_decl (const_tree fdecl)
3622 {
3623 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3624 && integer_zerop (DECL_SIZE (fdecl)))
3625 return true;
3626 return false;
3627 }
3628
3629 /* Return true if TYPE is zero sized. */
3630
3631 static bool
3632 zero_sized_type (const_tree type)
3633 {
3634 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3635 && integer_zerop (TYPE_SIZE (type)))
3636 return true;
3637 return false;
3638 }
3639
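/* For instance (a sketch relying on the GNU C zero-length array
   extension):

     struct z { int a[0]; };

   both the type "struct z" and a FIELD_DECL of that type count as
   zero sized for the two predicates above.  */
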
3640 /* A subroutine of gimplify_init_constructor. Generate individual
3641 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3642 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3643 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3644 zeroed first. */
3645
3646 static void
3647 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3648 gimple_seq *pre_p, bool cleared)
3649 {
3650 tree array_elt_type = NULL;
3651 unsigned HOST_WIDE_INT ix;
3652 tree purpose, value;
3653
3654 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3655 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3656
3657 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3658 {
3659 tree cref;
3660
3661 /* NULL values are created above for gimplification errors. */
3662 if (value == NULL)
3663 continue;
3664
3665 if (cleared && initializer_zerop (value))
3666 continue;
3667
3668 /* ??? Here's to hoping the front end fills in all of the indices,
3669 so we don't have to figure out what's missing ourselves. */
3670 gcc_assert (purpose);
3671
3672 /* Skip zero-sized fields, unless value has side-effects. This can
3673 happen with calls to functions returning a zero-sized type, which
3674 we shouldn't discard. As a number of downstream passes don't
3675 expect sets of zero-sized fields, we rely on the gimplification of
3676 the MODIFY_EXPR we make below to drop the assignment statement. */
3677 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3678 continue;
3679
3680 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3681 whole range. */
3682 if (TREE_CODE (purpose) == RANGE_EXPR)
3683 {
3684 tree lower = TREE_OPERAND (purpose, 0);
3685 tree upper = TREE_OPERAND (purpose, 1);
3686
3687 /* If the lower bound is equal to upper, just treat it as if
3688 upper was the index. */
3689 if (simple_cst_equal (lower, upper))
3690 purpose = upper;
3691 else
3692 {
3693 gimplify_init_ctor_eval_range (object, lower, upper, value,
3694 array_elt_type, pre_p, cleared);
3695 continue;
3696 }
3697 }
3698
3699 if (array_elt_type)
3700 {
3701 /* Do not use bitsizetype for ARRAY_REF indices. */
3702 if (TYPE_DOMAIN (TREE_TYPE (object)))
3703 purpose
3704 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3705 purpose);
3706 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3707 purpose, NULL_TREE, NULL_TREE);
3708 }
3709 else
3710 {
3711 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3712 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3713 unshare_expr (object), purpose, NULL_TREE);
3714 }
3715
3716 if (TREE_CODE (value) == CONSTRUCTOR
3717 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3718 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3719 pre_p, cleared);
3720 else
3721 {
3722 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3723 gimplify_and_add (init, pre_p);
3724 ggc_free (init);
3725 }
3726 }
3727 }
3728
3729 /* Return the appropriate RHS predicate for this LHS. */
3730
3731 gimple_predicate
3732 rhs_predicate_for (tree lhs)
3733 {
3734 if (is_gimple_reg (lhs))
3735 return is_gimple_reg_rhs_or_call;
3736 else
3737 return is_gimple_mem_rhs_or_call;
3738 }
3739
3740 /* Gimplify a C99 compound literal expression. This just means adding
3741 the DECL_EXPR before the current statement and using its anonymous
3742 decl instead. */
3743
3744 static enum gimplify_status
3745 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3746 bool (*gimple_test_f) (tree),
3747 fallback_t fallback)
3748 {
3749 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3750 tree decl = DECL_EXPR_DECL (decl_s);
3751 tree init = DECL_INITIAL (decl);
3752 /* Mark the decl as addressable if the compound literal
3753 expression is addressable now; otherwise it would be marked too
3754 late, after we gimplify the initialization expression. */
3755 if (TREE_ADDRESSABLE (*expr_p))
3756 TREE_ADDRESSABLE (decl) = 1;
3757 /* Otherwise, if we don't need an lvalue and have a literal, directly
3758 substitute it. Check whether it matches the gimple predicate, as
3759 otherwise we'd generate a new temporary, and we might as well just
3760 use the decl we already have. */
3761 else if (!TREE_ADDRESSABLE (decl)
3762 && init
3763 && (fallback & fb_lvalue) == 0
3764 && gimple_test_f (init))
3765 {
3766 *expr_p = init;
3767 return GS_OK;
3768 }
3769
3770 /* Preliminarily mark non-addressed complex variables as eligible
3771 for promotion to gimple registers. We'll transform their uses
3772 as we find them. */
3773 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3774 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3775 && !TREE_THIS_VOLATILE (decl)
3776 && !needs_to_live_in_memory (decl))
3777 DECL_GIMPLE_REG_P (decl) = 1;
3778
3779 /* If the decl is not addressable, then it is being used in some
3780 expression or on the right hand side of a statement, and it can
3781 be put into a readonly data section. */
3782 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3783 TREE_READONLY (decl) = 1;
3784
3785 /* This decl isn't mentioned in the enclosing block, so add it to the
3786 list of temps. FIXME it seems a bit of a kludge to say that
3787 anonymous artificial vars aren't pushed, but everything else is. */
3788 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3789 gimple_add_tmp_var (decl);
3790
3791 gimplify_and_add (decl_s, pre_p);
3792 *expr_p = decl;
3793 return GS_OK;
3794 }
3795
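/* For example (a sketch; D.N is an illustrative name): gimplifying

     int *p = (int []) { 1, 2 };

   materializes the literal as an anonymous decl D.N whose
   DECL_INITIAL is { 1, 2 }, emits its DECL_EXPR before the current
   statement, and rewrites the COMPOUND_LITERAL_EXPR to D.N.  */
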
3796 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3797 return a new CONSTRUCTOR if something changed. */
3798
3799 static tree
3800 optimize_compound_literals_in_ctor (tree orig_ctor)
3801 {
3802 tree ctor = orig_ctor;
3803 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3804 unsigned int idx, num = vec_safe_length (elts);
3805
3806 for (idx = 0; idx < num; idx++)
3807 {
3808 tree value = (*elts)[idx].value;
3809 tree newval = value;
3810 if (TREE_CODE (value) == CONSTRUCTOR)
3811 newval = optimize_compound_literals_in_ctor (value);
3812 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3813 {
3814 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3815 tree decl = DECL_EXPR_DECL (decl_s);
3816 tree init = DECL_INITIAL (decl);
3817
3818 if (!TREE_ADDRESSABLE (value)
3819 && !TREE_ADDRESSABLE (decl)
3820 && init
3821 && TREE_CODE (init) == CONSTRUCTOR)
3822 newval = optimize_compound_literals_in_ctor (init);
3823 }
3824 if (newval == value)
3825 continue;
3826
3827 if (ctor == orig_ctor)
3828 {
3829 ctor = copy_node (orig_ctor);
3830 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3831 elts = CONSTRUCTOR_ELTS (ctor);
3832 }
3833 (*elts)[idx].value = newval;
3834 }
3835 return ctor;
3836 }
3837
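/* E.g. (a sketch): the nested literal in

     { .u = (struct U) { 1, 2 } }

   is flattened to { .u = { 1, 2 } } when neither the literal nor its
   decl is addressable; the rewritten CONSTRUCTOR is a copy and the
   original node is left untouched.  */
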
3838 /* A subroutine of gimplify_modify_expr. Break out elements of a
3839 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3840
3841 Note that we still need to clear any elements that don't have explicit
3842 initializers, so if not all elements are initialized we keep the
3843 original MODIFY_EXPR, we just remove all of the constructor elements.
3844
3845 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3846 GS_ERROR if we would have to create a temporary when gimplifying
3847 this constructor. Otherwise, return GS_OK.
3848
3849 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3850
3851 static enum gimplify_status
3852 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3853 bool want_value, bool notify_temp_creation)
3854 {
3855 tree object, ctor, type;
3856 enum gimplify_status ret;
3857 vec<constructor_elt, va_gc> *elts;
3858
3859 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3860
3861 if (!notify_temp_creation)
3862 {
3863 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3864 is_gimple_lvalue, fb_lvalue);
3865 if (ret == GS_ERROR)
3866 return ret;
3867 }
3868
3869 object = TREE_OPERAND (*expr_p, 0);
3870 ctor = TREE_OPERAND (*expr_p, 1) =
3871 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3872 type = TREE_TYPE (ctor);
3873 elts = CONSTRUCTOR_ELTS (ctor);
3874 ret = GS_ALL_DONE;
3875
3876 switch (TREE_CODE (type))
3877 {
3878 case RECORD_TYPE:
3879 case UNION_TYPE:
3880 case QUAL_UNION_TYPE:
3881 case ARRAY_TYPE:
3882 {
3883 struct gimplify_init_ctor_preeval_data preeval_data;
3884 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3885 bool cleared, complete_p, valid_const_initializer;
3886
3887 /* Aggregate types must lower constructors to initialization of
3888 individual elements. The exception is that a CONSTRUCTOR node
3889 with no elements indicates zero-initialization of the whole. */
3890 if (vec_safe_is_empty (elts))
3891 {
3892 if (notify_temp_creation)
3893 return GS_OK;
3894 break;
3895 }
3896
3897 /* Fetch information about the constructor to direct later processing.
3898 We might want to make static versions of it in various cases, and
3899 can only do so if it is known to be a valid constant initializer. */
3900 valid_const_initializer
3901 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3902 &num_ctor_elements, &complete_p);
3903
3904 /* If a const aggregate variable is being initialized, then it
3905 should never be a loss to promote the variable to be static. */
3906 if (valid_const_initializer
3907 && num_nonzero_elements > 1
3908 && TREE_READONLY (object)
3909 && TREE_CODE (object) == VAR_DECL
3910 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3911 {
3912 if (notify_temp_creation)
3913 return GS_ERROR;
3914 DECL_INITIAL (object) = ctor;
3915 TREE_STATIC (object) = 1;
3916 if (!DECL_NAME (object))
3917 DECL_NAME (object) = create_tmp_var_name ("C");
3918 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3919
3920 /* ??? C++ doesn't automatically append a .<number> to the
3921 assembler name, and even when it does, it looks at FE private
3922 data structures to figure out what that number should be,
3923 which are not set for this variable. I suppose this is
3924 important for local statics for inline functions, which aren't
3925 "local" in the object file sense. So in order to get a unique
3926 TU-local symbol, we must invoke the lhd version now. */
3927 lhd_set_decl_assembler_name (object);
3928
3929 *expr_p = NULL_TREE;
3930 break;
3931 }
3932
3933 /* If there are "lots" of initialized elements, even discounting
3934 those that are not address constants (and thus *must* be
3935 computed at runtime), then partition the constructor into
3936 constant and non-constant parts. Block copy the constant
3937 parts in, then generate code for the non-constant parts. */
3938 /* TODO. There's code in cp/typeck.c to do this. */
3939
3940 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3941 /* store_constructor will ignore the clearing of variable-sized
3942 objects. Initializers for such objects must explicitly set
3943 every field that needs to be set. */
3944 cleared = false;
3945 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3946 /* If the constructor isn't complete, clear the whole object
3947 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3948
3949 ??? This ought not to be needed. For any element not present
3950 in the initializer, we should simply set it to zero. Except
3951 we'd need to *find* the elements that are not present, and that
3952 requires trickery to avoid quadratic compile-time behavior in
3953 large cases or excessive memory use in small cases. */
3954 cleared = true;
3955 else if (num_ctor_elements - num_nonzero_elements
3956 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3957 && num_nonzero_elements < num_ctor_elements / 4)
3958 /* If there are "lots" of zeros, it's more efficient to clear
3959 the memory and then set the nonzero elements. */
3960 cleared = true;
3961 else
3962 cleared = false;
3963
3964 /* If there are "lots" of initialized elements, and all of them
3965 are valid address constants, then the entire initializer can
3966 be dropped to memory, and then memcpy'd out. Don't do this
3967 for sparse arrays, though, as it's more efficient to follow
3968 the standard CONSTRUCTOR behavior of memset followed by
3969 individual element initialization. Also don't do this for small
3970 all-zero initializers (which aren't big enough to merit
3971 clearing), and don't try to make bitwise copies of
3972 TREE_ADDRESSABLE types.
3973
3974 We cannot apply this transformation when compiling a chkp static
3975 initializer, because creating the initializer image in memory
3976 would require static initialization of its bounds. That would
3977 trigger another gimplification of a similar initializer, and we
3978 could fall into an infinite loop. */
3979 if (valid_const_initializer
3980 && !(cleared || num_nonzero_elements == 0)
3981 && !TREE_ADDRESSABLE (type)
3982 && (!current_function_decl
3983 || !lookup_attribute ("chkp ctor",
3984 DECL_ATTRIBUTES (current_function_decl))))
3985 {
3986 HOST_WIDE_INT size = int_size_in_bytes (type);
3987 unsigned int align;
3988
3989 /* ??? We can still get unbounded array types, at least
3990 from the C++ front end. This seems wrong, but attempt
3991 to work around it for now. */
3992 if (size < 0)
3993 {
3994 size = int_size_in_bytes (TREE_TYPE (object));
3995 if (size >= 0)
3996 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3997 }
3998
3999 /* Find the maximum alignment we can assume for the object. */
4000 /* ??? Make use of DECL_OFFSET_ALIGN. */
4001 if (DECL_P (object))
4002 align = DECL_ALIGN (object);
4003 else
4004 align = TYPE_ALIGN (type);
4005
4006 /* Do a block move either if the size is so small as to make
4007 each individual move a sub-unit move on average, or if it
4008 is so large as to make individual moves inefficient. */
4009 if (size > 0
4010 && num_nonzero_elements > 1
4011 && (size < num_nonzero_elements
4012 || !can_move_by_pieces (size, align)))
4013 {
4014 if (notify_temp_creation)
4015 return GS_ERROR;
4016
4017 walk_tree (&ctor, force_labels_r, NULL, NULL);
4018 ctor = tree_output_constant_def (ctor);
4019 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4020 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4021 TREE_OPERAND (*expr_p, 1) = ctor;
4022
4023 /* This is no longer an assignment of a CONSTRUCTOR, but
4024 we still may have processing to do on the LHS. So
4025 pretend we didn't do anything here to let that happen. */
4026 return GS_UNHANDLED;
4027 }
4028 }
4029
4030 /* If the target is volatile and we have non-zero elements and more
4031 than one field to assign, initialize the target from a temporary. */
4032 if (TREE_THIS_VOLATILE (object)
4033 && !TREE_ADDRESSABLE (type)
4034 && num_nonzero_elements > 0
4035 && vec_safe_length (elts) > 1)
4036 {
4037 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4038 TREE_OPERAND (*expr_p, 0) = temp;
4039 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4040 *expr_p,
4041 build2 (MODIFY_EXPR, void_type_node,
4042 object, temp));
4043 return GS_OK;
4044 }
4045
4046 if (notify_temp_creation)
4047 return GS_OK;
4048
4049 /* If there are nonzero elements and if needed, pre-evaluate to capture
4050 elements overlapping with the lhs into temporaries. We must do this
4051 before clearing to fetch the values before they are zeroed-out. */
4052 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4053 {
4054 preeval_data.lhs_base_decl = get_base_address (object);
4055 if (!DECL_P (preeval_data.lhs_base_decl))
4056 preeval_data.lhs_base_decl = NULL;
4057 preeval_data.lhs_alias_set = get_alias_set (object);
4058
4059 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4060 pre_p, post_p, &preeval_data);
4061 }
4062
4063 bool ctor_has_side_effects_p
4064 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4065
4066 if (cleared)
4067 {
4068 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4069 Note that we still have to gimplify, in order to handle the
4070 case of variable sized types. Avoid shared tree structures. */
4071 CONSTRUCTOR_ELTS (ctor) = NULL;
4072 TREE_SIDE_EFFECTS (ctor) = 0;
4073 object = unshare_expr (object);
4074 gimplify_stmt (expr_p, pre_p);
4075 }
4076
4077 /* If we have not block cleared the object, or if there are nonzero
4078 elements in the constructor, or if the constructor has side effects,
4079 add assignments to the individual scalar fields of the object. */
4080 if (!cleared
4081 || num_nonzero_elements > 0
4082 || ctor_has_side_effects_p)
4083 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
4084
4085 *expr_p = NULL_TREE;
4086 }
4087 break;
4088
4089 case COMPLEX_TYPE:
4090 {
4091 tree r, i;
4092
4093 if (notify_temp_creation)
4094 return GS_OK;
4095
4096 /* Extract the real and imaginary parts out of the ctor. */
4097 gcc_assert (elts->length () == 2);
4098 r = (*elts)[0].value;
4099 i = (*elts)[1].value;
4100 if (r == NULL || i == NULL)
4101 {
4102 tree zero = build_zero_cst (TREE_TYPE (type));
4103 if (r == NULL)
4104 r = zero;
4105 if (i == NULL)
4106 i = zero;
4107 }
4108
4109 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4110 represent creation of a complex value. */
4111 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4112 {
4113 ctor = build_complex (type, r, i);
4114 TREE_OPERAND (*expr_p, 1) = ctor;
4115 }
4116 else
4117 {
4118 ctor = build2 (COMPLEX_EXPR, type, r, i);
4119 TREE_OPERAND (*expr_p, 1) = ctor;
4120 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4121 pre_p,
4122 post_p,
4123 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4124 fb_rvalue);
4125 }
4126 }
4127 break;
4128
4129 case VECTOR_TYPE:
4130 {
4131 unsigned HOST_WIDE_INT ix;
4132 constructor_elt *ce;
4133
4134 if (notify_temp_creation)
4135 return GS_OK;
4136
4137 /* Go ahead and simplify constant constructors to VECTOR_CST. */
4138 if (TREE_CONSTANT (ctor))
4139 {
4140 bool constant_p = true;
4141 tree value;
4142
4143 /* Even when ctor is constant, it might contain non-*_CST
4144 elements, such as addresses or trapping values like
4145 1.0/0.0 - 1.0/0.0. Such expressions don't belong
4146 in VECTOR_CST nodes. */
4147 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4148 if (!CONSTANT_CLASS_P (value))
4149 {
4150 constant_p = false;
4151 break;
4152 }
4153
4154 if (constant_p)
4155 {
4156 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4157 break;
4158 }
4159
4160 TREE_CONSTANT (ctor) = 0;
4161 }
4162
4163 /* Vector types use CONSTRUCTOR all the way through gimple
4164 compilation as a general initializer. */
4165 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
4166 {
4167 enum gimplify_status tret;
4168 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4169 fb_rvalue);
4170 if (tret == GS_ERROR)
4171 ret = GS_ERROR;
4172 }
4173 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4174 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4175 }
4176 break;
4177
4178 default:
4179 /* So how did we get a CONSTRUCTOR for a scalar type? */
4180 gcc_unreachable ();
4181 }
4182
4183 if (ret == GS_ERROR)
4184 return GS_ERROR;
4185 else if (want_value)
4186 {
4187 *expr_p = object;
4188 return GS_OK;
4189 }
4190 else
4191 {
4192 /* If we have gimplified both sides of the initializer but have
4193 not emitted an assignment, do so now. */
4194 if (*expr_p)
4195 {
4196 tree lhs = TREE_OPERAND (*expr_p, 0);
4197 tree rhs = TREE_OPERAND (*expr_p, 1);
4198 gassign *init = gimple_build_assign (lhs, rhs);
4199 gimplify_seq_add_stmt (pre_p, init);
4200 *expr_p = NULL;
4201 }
4202
4203 return GS_ALL_DONE;
4204 }
4205 }
4206
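/* A rough sketch of the strategy selection above, on hypothetical
   inputs: a mostly-zero "int a[100] = { [3] = 1 };" is block cleared
   and then patched with the one nonzero store; a large all-constant
   table may be emitted as a static object and block copied; anything
   else falls back to the per-element assignments of
   gimplify_init_ctor_eval.  */
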
4207 /* Given a pointer value OP0, return a simplified version of an
4208 indirection through OP0, or NULL_TREE if no simplification is
4209 possible. This may only be applied to a rhs of an expression.
4210 Note that the resulting type may differ from the pointed-to type,
4211 but it will still be compatible with it from the langhooks point
4212 of view. */
4213
4214 static tree
4215 gimple_fold_indirect_ref_rhs (tree t)
4216 {
4217 return gimple_fold_indirect_ref (t);
4218 }
4219
4220 /* Subroutine of gimplify_modify_expr to do simplifications of
4221 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4222 something changes. */
4223
4224 static enum gimplify_status
4225 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4226 gimple_seq *pre_p, gimple_seq *post_p,
4227 bool want_value)
4228 {
4229 enum gimplify_status ret = GS_UNHANDLED;
4230 bool changed;
4231
4232 do
4233 {
4234 changed = false;
4235 switch (TREE_CODE (*from_p))
4236 {
4237 case VAR_DECL:
4238 /* If we're assigning from a read-only variable initialized with
4239 a constructor, do the direct assignment from the constructor,
4240 but only if neither source nor target is volatile, since this
4241 latter assignment might end up being done on a per-field basis. */
4242 if (DECL_INITIAL (*from_p)
4243 && TREE_READONLY (*from_p)
4244 && !TREE_THIS_VOLATILE (*from_p)
4245 && !TREE_THIS_VOLATILE (*to_p)
4246 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4247 {
4248 tree old_from = *from_p;
4249 enum gimplify_status subret;
4250
4251 /* Move the constructor into the RHS. */
4252 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4253
4254 /* Let's see if gimplify_init_constructor will need to put
4255 it in memory. */
4256 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4257 false, true);
4258 if (subret == GS_ERROR)
4259 {
4260 /* If so, revert the change. */
4261 *from_p = old_from;
4262 }
4263 else
4264 {
4265 ret = GS_OK;
4266 changed = true;
4267 }
4268 }
4269 break;
4270 case INDIRECT_REF:
4271 {
4272 /* If we have code like
4273
4274 *(const A*)(A*)&x
4275
4276 where the type of "x" is a (possibly cv-qualified variant
4277 of "A"), treat the entire expression as identical to "x".
4278 This kind of code arises in C++ when an object is bound
4279 to a const reference, and if "x" is a TARGET_EXPR we want
4280 to take advantage of the optimization below. */
4281 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4282 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4283 if (t)
4284 {
4285 if (TREE_THIS_VOLATILE (t) != volatile_p)
4286 {
4287 if (DECL_P (t))
4288 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4289 build_fold_addr_expr (t));
4290 if (REFERENCE_CLASS_P (t))
4291 TREE_THIS_VOLATILE (t) = volatile_p;
4292 }
4293 *from_p = t;
4294 ret = GS_OK;
4295 changed = true;
4296 }
4297 break;
4298 }
4299
4300 case TARGET_EXPR:
4301 {
4302 /* If we are initializing something from a TARGET_EXPR, strip the
4303 TARGET_EXPR and initialize it directly, if possible. This can't
4304 be done if the initializer is void, since that implies that the
4305 temporary is set in some non-trivial way.
4306
4307 ??? What about code that pulls out the temp and uses it
4308 elsewhere? I think that such code never uses the TARGET_EXPR as
4309 an initializer. If I'm wrong, we'll die because the temp won't
4310 have any RTL. In that case, I guess we'll need to replace
4311 references somehow. */
4312 tree init = TARGET_EXPR_INITIAL (*from_p);
4313
4314 if (init
4315 && !VOID_TYPE_P (TREE_TYPE (init)))
4316 {
4317 *from_p = init;
4318 ret = GS_OK;
4319 changed = true;
4320 }
4321 }
4322 break;
4323
4324 case COMPOUND_EXPR:
4325 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4326 caught. */
4327 gimplify_compound_expr (from_p, pre_p, true);
4328 ret = GS_OK;
4329 changed = true;
4330 break;
4331
4332 case CONSTRUCTOR:
4333 /* If we already made some changes, let the front end have a
4334 crack at this before we break it down. */
4335 if (ret != GS_UNHANDLED)
4336 break;
4337 /* If we're initializing from a CONSTRUCTOR, break this into
4338 individual MODIFY_EXPRs. */
4339 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4340 false);
4341
4342 case COND_EXPR:
4343 /* If we're assigning to a non-register type, push the assignment
4344 down into the branches. This is mandatory for ADDRESSABLE types,
4345 since we cannot generate temporaries for such, but it saves a
4346 copy in other cases as well. */
4347 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4348 {
4349 /* This code should mirror the code in gimplify_cond_expr. */
4350 enum tree_code code = TREE_CODE (*expr_p);
4351 tree cond = *from_p;
4352 tree result = *to_p;
4353
4354 ret = gimplify_expr (&result, pre_p, post_p,
4355 is_gimple_lvalue, fb_lvalue);
4356 if (ret != GS_ERROR)
4357 ret = GS_OK;
4358
4359 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4360 TREE_OPERAND (cond, 1)
4361 = build2 (code, void_type_node, result,
4362 TREE_OPERAND (cond, 1));
4363 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4364 TREE_OPERAND (cond, 2)
4365 = build2 (code, void_type_node, unshare_expr (result),
4366 TREE_OPERAND (cond, 2));
4367
4368 TREE_TYPE (cond) = void_type_node;
4369 recalculate_side_effects (cond);
4370
4371 if (want_value)
4372 {
4373 gimplify_and_add (cond, pre_p);
4374 *expr_p = unshare_expr (result);
4375 }
4376 else
4377 *expr_p = cond;
4378 return ret;
4379 }
4380 break;
4381
4382 case CALL_EXPR:
4383 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4384 return slot so that we don't generate a temporary. */
4385 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4386 && aggregate_value_p (*from_p, *from_p))
4387 {
4388 bool use_target;
4389
4390 if (!(rhs_predicate_for (*to_p))(*from_p))
4391 /* If we need a temporary, *to_p isn't accurate. */
4392 use_target = false;
4393 /* It's OK to use the return slot directly unless it's an NRV. */
4394 else if (TREE_CODE (*to_p) == RESULT_DECL
4395 && DECL_NAME (*to_p) == NULL_TREE
4396 && needs_to_live_in_memory (*to_p))
4397 use_target = true;
4398 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4399 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4400 /* Don't force regs into memory. */
4401 use_target = false;
4402 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4403 /* It's OK to use the target directly if it's being
4404 initialized. */
4405 use_target = true;
4406 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
4407 != INTEGER_CST)
4408 /* Always use the target and thus RSO for variable-sized types.
4409 GIMPLE cannot deal with a variable-sized assignment
4410 embedded in a call statement. */
4411 use_target = true;
4412 else if (TREE_CODE (*to_p) != SSA_NAME
4413 && (!is_gimple_variable (*to_p)
4414 || needs_to_live_in_memory (*to_p)))
4415 /* Don't use the original target if it's already addressable;
4416 if its address escapes, and the called function uses the
4417 NRV optimization, a conforming program could see *to_p
4418 change before the called function returns; see c++/19317.
4419 When optimizing, the return_slot pass marks more functions
4420 as safe after we have escape info. */
4421 use_target = false;
4422 else
4423 use_target = true;
4424
4425 if (use_target)
4426 {
4427 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4428 mark_addressable (*to_p);
4429 }
4430 }
4431 break;
4432
4433 case WITH_SIZE_EXPR:
4434 /* Likewise for calls that return an aggregate of non-constant size,
4435 since we would not be able to generate a temporary at all. */
4436 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4437 {
4438 *from_p = TREE_OPERAND (*from_p, 0);
4439 /* We don't change ret in this case because the
4440 WITH_SIZE_EXPR might have been added in
4441 gimplify_modify_expr, so returning GS_OK would lead to an
4442 infinite loop. */
4443 changed = true;
4444 }
4445 break;
4446
4447 /* If we're initializing from a container, push the initialization
4448 inside it. */
4449 case CLEANUP_POINT_EXPR:
4450 case BIND_EXPR:
4451 case STATEMENT_LIST:
4452 {
4453 tree wrap = *from_p;
4454 tree t;
4455
4456 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4457 fb_lvalue);
4458 if (ret != GS_ERROR)
4459 ret = GS_OK;
4460
4461 t = voidify_wrapper_expr (wrap, *expr_p);
4462 gcc_assert (t == *expr_p);
4463
4464 if (want_value)
4465 {
4466 gimplify_and_add (wrap, pre_p);
4467 *expr_p = unshare_expr (*to_p);
4468 }
4469 else
4470 *expr_p = wrap;
4471 return GS_OK;
4472 }
4473
4474 case COMPOUND_LITERAL_EXPR:
4475 {
4476 tree complit = TREE_OPERAND (*expr_p, 1);
4477 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4478 tree decl = DECL_EXPR_DECL (decl_s);
4479 tree init = DECL_INITIAL (decl);
4480
4481 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4482 into struct T x = { 0, 1, 2 } if the address of the
4483 compound literal has never been taken. */
4484 if (!TREE_ADDRESSABLE (complit)
4485 && !TREE_ADDRESSABLE (decl)
4486 && init)
4487 {
4488 *expr_p = copy_node (*expr_p);
4489 TREE_OPERAND (*expr_p, 1) = init;
4490 return GS_OK;
4491 }
4492 }
4493
4494 default:
4495 break;
4496 }
4497 }
4498 while (changed);
4499
4500 return ret;
4501 }
4502
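/* One of the cases above, illustrated (a sketch): for a non-register
   type, an assignment such as

     s = cond ? a : b;

   is pushed into the branches of the COND_EXPR, yielding roughly
   "if (cond) s = a; else s = b;".  */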
4503
4504 /* Return true if T looks like a valid GIMPLE statement. */
4505
4506 static bool
4507 is_gimple_stmt (tree t)
4508 {
4509 const enum tree_code code = TREE_CODE (t);
4510
4511 switch (code)
4512 {
4513 case NOP_EXPR:
4514 /* The only valid NOP_EXPR is the empty statement. */
4515 return IS_EMPTY_STMT (t);
4516
4517 case BIND_EXPR:
4518 case COND_EXPR:
4519 /* These are only valid if they're void. */
4520 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4521
4522 case SWITCH_EXPR:
4523 case GOTO_EXPR:
4524 case RETURN_EXPR:
4525 case LABEL_EXPR:
4526 case CASE_LABEL_EXPR:
4527 case TRY_CATCH_EXPR:
4528 case TRY_FINALLY_EXPR:
4529 case EH_FILTER_EXPR:
4530 case CATCH_EXPR:
4531 case ASM_EXPR:
4532 case STATEMENT_LIST:
4533 case OACC_PARALLEL:
4534 case OACC_KERNELS:
4535 case OACC_DATA:
4536 case OACC_HOST_DATA:
4537 case OACC_DECLARE:
4538 case OACC_UPDATE:
4539 case OACC_ENTER_DATA:
4540 case OACC_EXIT_DATA:
4541 case OACC_CACHE:
4542 case OMP_PARALLEL:
4543 case OMP_FOR:
4544 case OMP_SIMD:
4545 case CILK_SIMD:
4546 case OMP_DISTRIBUTE:
4547 case OACC_LOOP:
4548 case OMP_SECTIONS:
4549 case OMP_SECTION:
4550 case OMP_SINGLE:
4551 case OMP_MASTER:
4552 case OMP_TASKGROUP:
4553 case OMP_ORDERED:
4554 case OMP_CRITICAL:
4555 case OMP_TASK:
4556 case OMP_TARGET:
4557 case OMP_TARGET_DATA:
4558 case OMP_TARGET_UPDATE:
4559 case OMP_TARGET_ENTER_DATA:
4560 case OMP_TARGET_EXIT_DATA:
4561 case OMP_TASKLOOP:
4562 case OMP_TEAMS:
4563 /* These are always void. */
4564 return true;
4565
4566 case CALL_EXPR:
4567 case MODIFY_EXPR:
4568 case PREDICT_EXPR:
4569 /* These are valid regardless of their type. */
4570 return true;
4571
4572 default:
4573 return false;
4574 }
4575 }
4576
4577
4578 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4579 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4580 DECL_GIMPLE_REG_P set.
4581
4582 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4583 other, unmodified part of the complex object just before the total store.
4584 As a consequence, if the object is still uninitialized, an undefined value
4585 will be loaded into a register, which may result in a spurious exception
4586 if the register is floating-point and the value happens to be a signaling
4587 NaN for example. Then the fully-fledged complex operations lowering pass
4588 followed by a DCE pass are necessary in order to fix things up. */
4589
4590 static enum gimplify_status
4591 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4592 bool want_value)
4593 {
4594 enum tree_code code, ocode;
4595 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4596
4597 lhs = TREE_OPERAND (*expr_p, 0);
4598 rhs = TREE_OPERAND (*expr_p, 1);
4599 code = TREE_CODE (lhs);
4600 lhs = TREE_OPERAND (lhs, 0);
4601
4602 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4603 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4604 TREE_NO_WARNING (other) = 1;
4605 other = get_formal_tmp_var (other, pre_p);
4606
4607 realpart = code == REALPART_EXPR ? rhs : other;
4608 imagpart = code == REALPART_EXPR ? other : rhs;
4609
4610 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4611 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4612 else
4613 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4614
4615 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4616 *expr_p = (want_value) ? rhs : NULL_TREE;
4617
4618 return GS_ALL_DONE;
4619 }
4620
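/* E.g. (a sketch): for "_Complex float c; __real c = x;" this emits

     tmp = __imag c;
     c = COMPLEX_EXPR <x, tmp>;

   turning the partial store into a total one.  */
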
4621 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4622
4623 modify_expr
4624 : varname '=' rhs
4625 | '*' ID '=' rhs
4626
4627 PRE_P points to the list where side effects that must happen before
4628 *EXPR_P should be stored.
4629
4630 POST_P points to the list where side effects that must happen after
4631 *EXPR_P should be stored.
4632
4633 WANT_VALUE is nonzero iff we want to use the value of this expression
4634 in another expression. */
4635
4636 static enum gimplify_status
4637 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4638 bool want_value)
4639 {
4640 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4641 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4642 enum gimplify_status ret = GS_UNHANDLED;
4643 gimple *assign;
4644 location_t loc = EXPR_LOCATION (*expr_p);
4645 gimple_stmt_iterator gsi;
4646
4647 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4648 || TREE_CODE (*expr_p) == INIT_EXPR);
4649
4650 /* Trying to simplify a clobber using normal logic doesn't work,
4651 so handle it here. */
4652 if (TREE_CLOBBER_P (*from_p))
4653 {
4654 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4655 if (ret == GS_ERROR)
4656 return ret;
4657 gcc_assert (!want_value
4658 && (TREE_CODE (*to_p) == VAR_DECL
4659 || TREE_CODE (*to_p) == MEM_REF));
4660 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4661 *expr_p = NULL;
4662 return GS_ALL_DONE;
4663 }
4664
4665 /* Insert pointer conversions required by the middle-end that are not
4666 required by the frontend. This fixes middle-end type checking
4667 for cases such as gcc.dg/redecl-6.c. */
4668 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4669 {
4670 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4671 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4672 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4673 }
4674
4675 /* See if any simplifications can be done based on what the RHS is. */
4676 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4677 want_value);
4678 if (ret != GS_UNHANDLED)
4679 return ret;
4680
4681 /* For zero-sized types, only gimplify the left hand side and right hand
4682 side as statements and throw away the assignment. Do this after
4683 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4684 types properly. */
4685 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4686 {
4687 gimplify_stmt (from_p, pre_p);
4688 gimplify_stmt (to_p, pre_p);
4689 *expr_p = NULL_TREE;
4690 return GS_ALL_DONE;
4691 }
4692
4693 /* If the value being copied is of variable width, compute the length
4694 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4695 before gimplifying any of the operands so that we can resolve any
4696 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4697 the size of the expression to be copied, not of the destination, so
4698 that is what we must do here. */
4699 maybe_with_size_expr (from_p);
4700
4701 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4702 if (ret == GS_ERROR)
4703 return ret;
4704
4705 /* As a special case, we have to temporarily allow for assignments
4706 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4707 a toplevel statement, when gimplifying the GENERIC expression
4708 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4709 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4710
4711 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4712 prevent gimplify_expr from trying to create a new temporary for
4713 foo's LHS, we tell it that it should only gimplify until it
4714 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4715 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4716 and all we need to do here is set 'a' to be its LHS. */
4717 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4718 fb_rvalue);
4719 if (ret == GS_ERROR)
4720 return ret;
4721
4722 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
4723 size as argument to the call. */
4724 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4725 {
4726 tree call = TREE_OPERAND (*from_p, 0);
4727 tree vlasize = TREE_OPERAND (*from_p, 1);
4728
4729 if (TREE_CODE (call) == CALL_EXPR
4730 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
4731 {
4732 int nargs = call_expr_nargs (call);
4733 tree type = TREE_TYPE (call);
4734 tree ap = CALL_EXPR_ARG (call, 0);
4735 tree tag = CALL_EXPR_ARG (call, 1);
4736 tree aptag = CALL_EXPR_ARG (call, 2);
4737 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
4738 IFN_VA_ARG, type,
4739 nargs + 1, ap, tag,
4740 aptag, vlasize);
4741 TREE_OPERAND (*from_p, 0) = newcall;
4742 }
4743 }
4744
4745 /* Now see if the above changed *from_p to something we handle specially. */
4746 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4747 want_value);
4748 if (ret != GS_UNHANDLED)
4749 return ret;
4750
4751 /* If we've got a variable-sized assignment between two lvalues (i.e. one
4752 that does not involve a call), then we can make things a bit more
4753 straightforward by converting the assignment to memcpy or memset. */
4754 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4755 {
4756 tree from = TREE_OPERAND (*from_p, 0);
4757 tree size = TREE_OPERAND (*from_p, 1);
4758
4759 if (TREE_CODE (from) == CONSTRUCTOR)
4760 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4761
4762 if (is_gimple_addressable (from))
4763 {
4764 *from_p = from;
4765 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4766 pre_p);
4767 }
4768 }
4769
4770 /* Transform partial stores to non-addressable complex variables into
4771 total stores. This allows us to use real instead of virtual operands
4772 for these variables, which improves optimization. */
4773 if ((TREE_CODE (*to_p) == REALPART_EXPR
4774 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4775 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4776 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4777
4778 /* Try to alleviate the effects of the gimplification creating artificial
4779 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
4780 make sure not to create DECL_DEBUG_EXPR links across functions. */
4781 if (!gimplify_ctxp->into_ssa
4782 && TREE_CODE (*from_p) == VAR_DECL
4783 && DECL_IGNORED_P (*from_p)
4784 && DECL_P (*to_p)
4785 && !DECL_IGNORED_P (*to_p)
4786 && decl_function_context (*to_p) == current_function_decl)
4787 {
4788 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4789 DECL_NAME (*from_p)
4790 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4791 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4792 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4793 }
4794
4795 if (want_value && TREE_THIS_VOLATILE (*to_p))
4796 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4797
4798 if (TREE_CODE (*from_p) == CALL_EXPR)
4799 {
4800 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4801 instead of a GIMPLE_ASSIGN. */
4802 gcall *call_stmt;
4803 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
4804 {
4805 /* Gimplify internal functions created in the FEs. */
4806 int nargs = call_expr_nargs (*from_p), i;
4807 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
4808 auto_vec<tree> vargs (nargs);
4809
4810 for (i = 0; i < nargs; i++)
4811 {
4812 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
4813 EXPR_LOCATION (*from_p));
4814 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
4815 }
4816 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
4817 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
4818 }
4819 else
4820 {
4821 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4822 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4823 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4824 tree fndecl = get_callee_fndecl (*from_p);
4825 if (fndecl
4826 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4827 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4828 && call_expr_nargs (*from_p) == 3)
4829 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4830 CALL_EXPR_ARG (*from_p, 0),
4831 CALL_EXPR_ARG (*from_p, 1),
4832 CALL_EXPR_ARG (*from_p, 2));
4833 else
4834 {
4835 call_stmt = gimple_build_call_from_tree (*from_p);
4836 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
4837 }
4838 }
4839 notice_special_calls (call_stmt);
4840 if (!gimple_call_noreturn_p (call_stmt)
4841 || TREE_ADDRESSABLE (TREE_TYPE (*to_p))
4842 || TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p))) != INTEGER_CST)
4843 gimple_call_set_lhs (call_stmt, *to_p);
4844 assign = call_stmt;
4845 }
4846 else
4847 {
4848 assign = gimple_build_assign (*to_p, *from_p);
4849 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4850 }
4851
4852 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4853 {
4854 /* We should have got an SSA name from the start. */
4855 gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4856 }
4857
4858 gimplify_seq_add_stmt (pre_p, assign);
4859 gsi = gsi_last (*pre_p);
4860 maybe_fold_stmt (&gsi);
4861
4862 if (want_value)
4863 {
4864 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4865 return GS_OK;
4866 }
4867 else
4868 *expr_p = NULL;
4869
4870 return GS_ALL_DONE;
4871 }
4872
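/* E.g. (a sketch): for a register lhs, "a = b + c * d" gimplifies to

     t1 = c * d;
     a = b + t1;

   the nested multiply is forced into a temporary by the rhs predicate
   chosen via rhs_predicate_for.  */
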
4873 /* Gimplify a comparison between two variable-sized objects. Do this
4874 with a call to BUILT_IN_MEMCMP. */
4875
4876 static enum gimplify_status
4877 gimplify_variable_sized_compare (tree *expr_p)
4878 {
4879 location_t loc = EXPR_LOCATION (*expr_p);
4880 tree op0 = TREE_OPERAND (*expr_p, 0);
4881 tree op1 = TREE_OPERAND (*expr_p, 1);
4882 tree t, arg, dest, src, expr;
4883
4884 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4885 arg = unshare_expr (arg);
4886 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4887 src = build_fold_addr_expr_loc (loc, op1);
4888 dest = build_fold_addr_expr_loc (loc, op0);
4889 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4890 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4891
4892 expr
4893 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4894 SET_EXPR_LOCATION (expr, loc);
4895 *expr_p = expr;
4896
4897 return GS_OK;
4898 }
4899
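/* E.g. (a sketch): for two objects of variable-length array type,
   "x == y" becomes "__builtin_memcmp (&x, &y, size) == 0", where
   size comes from the TYPE_SIZE_UNIT of x's type.  */
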
4900 /* Gimplify a comparison between two aggregate objects of integral scalar
4901 mode as a comparison between the bitwise equivalent scalar values. */
4902
4903 static enum gimplify_status
4904 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4905 {
4906 location_t loc = EXPR_LOCATION (*expr_p);
4907 tree op0 = TREE_OPERAND (*expr_p, 0);
4908 tree op1 = TREE_OPERAND (*expr_p, 1);
4909
4910 tree type = TREE_TYPE (op0);
4911 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4912
4913 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4914 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4915
4916 *expr_p
4917 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4918
4919 return GS_OK;
4920 }
4921
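/* E.g. (a sketch): comparing two 8-byte structures whose TYPE_MODE
   is DImode becomes a comparison of the two 64-bit integers obtained
   via VIEW_CONVERT_EXPR, avoiding a library call.  */
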
4922 /* Gimplify an expression sequence. This function gimplifies each
4923 expression and rewrites the original expression with the last
4924 expression of the sequence in GIMPLE form.
4925
4926 PRE_P points to the list where the side effects for all the
4927 expressions in the sequence will be emitted.
4928
4929 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4930
4931 static enum gimplify_status
4932 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4933 {
4934 tree t = *expr_p;
4935
4936 do
4937 {
4938 tree *sub_p = &TREE_OPERAND (t, 0);
4939
4940 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4941 gimplify_compound_expr (sub_p, pre_p, false);
4942 else
4943 gimplify_stmt (sub_p, pre_p);
4944
4945 t = TREE_OPERAND (t, 1);
4946 }
4947 while (TREE_CODE (t) == COMPOUND_EXPR);
4948
4949 *expr_p = t;
4950 if (want_value)
4951 return GS_OK;
4952 else
4953 {
4954 gimplify_stmt (expr_p, pre_p);
4955 return GS_ALL_DONE;
4956 }
4957 }
4958
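/* E.g. (a sketch): gimplifying "x = (f (), g (), y)" emits f () and
   g () as statements into PRE_P and rewrites the COMPOUND_EXPR to y,
   which then becomes the rhs of the assignment.  */
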
4959 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4960 gimplify. After gimplification, EXPR_P will point to a new temporary
4961 that holds the original value of the SAVE_EXPR node.
4962
4963 PRE_P points to the list where side effects that must happen before
4964 *EXPR_P should be stored. */
4965
4966 static enum gimplify_status
4967 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4968 {
4969 enum gimplify_status ret = GS_ALL_DONE;
4970 tree val;
4971
4972 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4973 val = TREE_OPERAND (*expr_p, 0);
4974
4975 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4976 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4977 {
4978 /* The operand may be a void-valued expression such as SAVE_EXPRs
4979 generated by the Java frontend for class initialization. It is
4980 being executed only for its side-effects. */
4981 if (TREE_TYPE (val) == void_type_node)
4982 {
4983 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4984 is_gimple_stmt, fb_none);
4985 val = NULL;
4986 }
4987 else
4988 val = get_initialized_tmp_var (val, pre_p, post_p);
4989
4990 TREE_OPERAND (*expr_p, 0) = val;
4991 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4992 }
4993
4994 *expr_p = val;
4995
4996 return ret;
4997 }
4998
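/* E.g. (a sketch; t is an illustrative name): the first
   gimplification of SAVE_EXPR <f ()> emits "t = f ();" and rewrites
   the operand to t; any later occurrence of the same node sees
   SAVE_EXPR_RESOLVED_P set and reuses t.  */
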
4999 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5000
5001 unary_expr
5002 : ...
5003 | '&' varname
5004 ...
5005
5006 PRE_P points to the list where side effects that must happen before
5007 *EXPR_P should be stored.
5008
5009 POST_P points to the list where side effects that must happen after
5010 *EXPR_P should be stored. */
5011
5012 static enum gimplify_status
5013 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5014 {
5015 tree expr = *expr_p;
5016 tree op0 = TREE_OPERAND (expr, 0);
5017 enum gimplify_status ret;
5018 location_t loc = EXPR_LOCATION (*expr_p);
5019
5020 switch (TREE_CODE (op0))
5021 {
5022 case INDIRECT_REF:
5023 do_indirect_ref:
5024 /* Check if we are dealing with an expression of the form '&*ptr'.
5025 While the front end folds away '&*ptr' into 'ptr', these
5026 expressions may be generated internally by the compiler (e.g.,
5027 builtins like __builtin_va_end). */
5028 /* Caution: the silent array decomposition semantics we allow for
5029 ADDR_EXPR means we can't always discard the pair. */
5030 /* Gimplification of the ADDR_EXPR operand may drop
5031 cv-qualification conversions, so make sure we add them if
5032 needed. */
5033 {
5034 tree op00 = TREE_OPERAND (op0, 0);
5035 tree t_expr = TREE_TYPE (expr);
5036 tree t_op00 = TREE_TYPE (op00);
5037
5038 if (!useless_type_conversion_p (t_expr, t_op00))
5039 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
5040 *expr_p = op00;
5041 ret = GS_OK;
5042 }
5043 break;
5044
5045 case VIEW_CONVERT_EXPR:
5046 /* Take the address of our operand and then convert it to the type of
5047 this ADDR_EXPR.
5048
5049 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
5050 all clear. The impact of this transformation is even less clear. */
5051
5052 /* If the operand is a useless conversion, look through it. Doing so
5053 guarantees that the ADDR_EXPR and its operand will remain of the
5054 same type. */
5055 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
5056 op0 = TREE_OPERAND (op0, 0);
5057
5058 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
5059 build_fold_addr_expr_loc (loc,
5060 TREE_OPERAND (op0, 0)));
5061 ret = GS_OK;
5062 break;
5063
5064 case MEM_REF:
5065 if (integer_zerop (TREE_OPERAND (op0, 1)))
5066 goto do_indirect_ref;
5067
5068 /* ... fall through ... */
5069
5070 default:
5071 /* If we see a call to a declared builtin or see its address
5072 being taken (we can unify those cases here) then we can mark
5073 the builtin for implicit generation by GCC. */
5074 if (TREE_CODE (op0) == FUNCTION_DECL
5075 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
5076 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
5077 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
5078
5079 /* We use fb_either here because the C frontend sometimes takes
5080 the address of a call that returns a struct; see
5081 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
5082 the implied temporary explicit. */
5083
5084 /* Make the operand addressable. */
5085 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
5086 is_gimple_addressable, fb_either);
5087 if (ret == GS_ERROR)
5088 break;
5089
5090 /* Then mark it. Beware that it may not be possible to do so directly
5091 if a temporary has been created by the gimplification. */
5092 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
5093
5094 op0 = TREE_OPERAND (expr, 0);
5095
5096 /* For various reasons, the gimplification of the expression
5097 may have made a new INDIRECT_REF. */
5098 if (TREE_CODE (op0) == INDIRECT_REF)
5099 goto do_indirect_ref;
5100
5101 mark_addressable (TREE_OPERAND (expr, 0));
5102
5103 /* The FEs may end up building ADDR_EXPRs early on a decl with
5104 an incomplete type. Re-build ADDR_EXPRs in canonical form
5105 here. */
5106 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
5107 *expr_p = build_fold_addr_expr (op0);
5108
5109 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
5110 recompute_tree_invariant_for_addr_expr (*expr_p);
5111
5112 /* If we re-built the ADDR_EXPR add a conversion to the original type
5113 if required. */
5114 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
5115 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
5116
5117 break;
5118 }
5119
5120 return ret;
5121 }
5122
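/* E.g. (a sketch): "&*p" collapses to plain "p" through the
   INDIRECT_REF case above, with a conversion added only if the
   pointer types differ in qualification.  */
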
5123 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
5124 value; output operands should be a gimple lvalue. */
5125
5126 static enum gimplify_status
5127 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5128 {
5129 tree expr;
5130 int noutputs;
5131 const char **oconstraints;
5132 int i;
5133 tree link;
5134 const char *constraint;
5135 bool allows_mem, allows_reg, is_inout;
5136 enum gimplify_status ret, tret;
5137 gasm *stmt;
5138 vec<tree, va_gc> *inputs;
5139 vec<tree, va_gc> *outputs;
5140 vec<tree, va_gc> *clobbers;
5141 vec<tree, va_gc> *labels;
5142 tree link_next;
5143
5144 expr = *expr_p;
5145 noutputs = list_length (ASM_OUTPUTS (expr));
5146 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5147
5148 inputs = NULL;
5149 outputs = NULL;
5150 clobbers = NULL;
5151 labels = NULL;
5152
5153 ret = GS_ALL_DONE;
5154 link_next = NULL_TREE;
5155 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5156 {
5157 bool ok;
5158 size_t constraint_len;
5159
5160 link_next = TREE_CHAIN (link);
5161
5162 oconstraints[i]
5163 = constraint
5164 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5165 constraint_len = strlen (constraint);
5166 if (constraint_len == 0)
5167 continue;
5168
5169 ok = parse_output_constraint (&constraint, i, 0, 0,
5170 &allows_mem, &allows_reg, &is_inout);
5171 if (!ok)
5172 {
5173 ret = GS_ERROR;
5174 is_inout = false;
5175 }
5176
5177 if (!allows_reg && allows_mem)
5178 mark_addressable (TREE_VALUE (link));
5179
5180 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5181 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5182 fb_lvalue | fb_mayfail);
5183 if (tret == GS_ERROR)
5184 {
5185 error ("invalid lvalue in asm output %d", i);
5186 ret = tret;
5187 }
5188
5189 vec_safe_push (outputs, link);
5190 TREE_CHAIN (link) = NULL_TREE;
5191
5192 if (is_inout)
5193 {
5194 /* An input/output operand. To give the optimizers more
5195 flexibility, split it into separate input and output
5196 operands. */
5197 tree input;
5198 char buf[10];
5199
5200 /* Turn the in/out constraint into an output constraint. */
5201 char *p = xstrdup (constraint);
5202 p[0] = '=';
5203 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5204
5205 /* And add a matching input constraint. */
5206 if (allows_reg)
5207 {
5208 sprintf (buf, "%d", i);
5209
5210 /* If there are multiple alternatives in the constraint,
5211 handle each of them individually. Those that allow a register
5212 will be replaced with the operand number; the others will stay
5213 unchanged. */
5214 if (strchr (p, ',') != NULL)
5215 {
5216 size_t len = 0, buflen = strlen (buf);
5217 char *beg, *end, *str, *dst;
5218
5219 for (beg = p + 1;;)
5220 {
5221 end = strchr (beg, ',');
5222 if (end == NULL)
5223 end = strchr (beg, '\0');
5224 if ((size_t) (end - beg) < buflen)
5225 len += buflen + 1;
5226 else
5227 len += end - beg + 1;
5228 if (*end)
5229 beg = end + 1;
5230 else
5231 break;
5232 }
5233
5234 str = (char *) alloca (len);
5235 for (beg = p + 1, dst = str;;)
5236 {
5237 const char *tem;
5238 bool mem_p, reg_p, inout_p;
5239
5240 end = strchr (beg, ',');
5241 if (end)
5242 *end = '\0';
5243 beg[-1] = '=';
5244 tem = beg - 1;
5245 parse_output_constraint (&tem, i, 0, 0,
5246 &mem_p, &reg_p, &inout_p);
5247 if (dst != str)
5248 *dst++ = ',';
5249 if (reg_p)
5250 {
5251 memcpy (dst, buf, buflen);
5252 dst += buflen;
5253 }
5254 else
5255 {
5256 if (end)
5257 len = end - beg;
5258 else
5259 len = strlen (beg);
5260 memcpy (dst, beg, len);
5261 dst += len;
5262 }
5263 if (end)
5264 beg = end + 1;
5265 else
5266 break;
5267 }
5268 *dst = '\0';
5269 input = build_string (dst - str, str);
5270 }
5271 else
5272 input = build_string (strlen (buf), buf);
5273 }
5274 else
5275 input = build_string (constraint_len - 1, constraint + 1);
5276
5277 free (p);
5278
5279 input = build_tree_list (build_tree_list (NULL_TREE, input),
5280 unshare_expr (TREE_VALUE (link)));
5281 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5282 }
5283 }
5284
5285 link_next = NULL_TREE;
5286 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5287 {
5288 link_next = TREE_CHAIN (link);
5289 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5290 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5291 oconstraints, &allows_mem, &allows_reg);
5292
5293 /* If we can't make copies, we can only accept memory. */
5294 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5295 {
5296 if (allows_mem)
5297 allows_reg = 0;
5298 else
5299 {
5300 error ("impossible constraint in %<asm%>");
5301 error ("non-memory input %d must stay in memory", i);
5302 return GS_ERROR;
5303 }
5304 }
5305
5306 /* If the operand is a memory input, it should be an lvalue. */
5307 if (!allows_reg && allows_mem)
5308 {
5309 tree inputv = TREE_VALUE (link);
5310 STRIP_NOPS (inputv);
5311 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5312 || TREE_CODE (inputv) == PREINCREMENT_EXPR
5313 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5314 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
5315 || TREE_CODE (inputv) == MODIFY_EXPR)
5316 TREE_VALUE (link) = error_mark_node;
5317 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5318 is_gimple_lvalue, fb_lvalue | fb_mayfail);
5319 if (tret != GS_ERROR)
5320 {
5321 /* Unlike output operands, memory inputs are not guaranteed
5322 to be lvalues by the FE, and while the expressions are
5323 marked addressable there, if the input is e.g. a statement
5324 expression, temporaries in it might not end up being
5325 addressable. By now they might already be used in the IL,
5326 though, in which case it is too late to make them addressable. */
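/* A hypothetical example of the problematic case (a sketch):
asm ("" : : "m" (({ int t = bar (); t; })));
here 't' may already have been gimplified into a non-addressable
register temporary, triggering the warning and the forced copy via
prepare_gimple_addressable below. */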
5327 tree x = TREE_VALUE (link);
5328 while (handled_component_p (x))
5329 x = TREE_OPERAND (x, 0);
5330 if (TREE_CODE (x) == MEM_REF
5331 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
5332 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
5333 if ((TREE_CODE (x) == VAR_DECL
5334 || TREE_CODE (x) == PARM_DECL
5335 || TREE_CODE (x) == RESULT_DECL)
5336 && !TREE_ADDRESSABLE (x)
5337 && is_gimple_reg (x))
5338 {
5339 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
5340 input_location), 0,
5341 "memory input %d is not directly addressable",
5342 i);
5343 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
5344 }
5345 }
5346 mark_addressable (TREE_VALUE (link));
5347 if (tret == GS_ERROR)
5348 {
5349 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
5350 "memory input %d is not directly addressable", i);
5351 ret = tret;
5352 }
5353 }
5354 else
5355 {
5356 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5357 is_gimple_asm_val, fb_rvalue);
5358 if (tret == GS_ERROR)
5359 ret = tret;
5360 }
5361
5362 TREE_CHAIN (link) = NULL_TREE;
5363 vec_safe_push (inputs, link);
5364 }
5365
5366 link_next = NULL_TREE;
5367 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5368 {
5369 link_next = TREE_CHAIN (link);
5370 TREE_CHAIN (link) = NULL_TREE;
5371 vec_safe_push (clobbers, link);
5372 }
5373
5374 link_next = NULL_TREE;
5375 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5376 {
5377 link_next = TREE_CHAIN (link);
5378 TREE_CHAIN (link) = NULL_TREE;
5379 vec_safe_push (labels, link);
5380 }
5381
5382 /* Do not add ASMs with errors to the gimple IL stream. */
5383 if (ret != GS_ERROR)
5384 {
5385 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5386 inputs, outputs, clobbers, labels);
5387
5388 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
5389 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5390
5391 gimplify_seq_add_stmt (pre_p, stmt);
5392 }
5393
5394 return ret;
5395 }
5396
5397 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
5398 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5399 gimplifying the body, and converting them to GIMPLE_TRY statements when
5400 we return to this function.
5401
5402 FIXME should we complexify the prequeue handling instead? Or use flags
5403 for all the cleanups and let the optimizer tighten them up? The current
5404 code seems pretty fragile; it will break on a cleanup within any
5405 non-conditional nesting. But any such nesting would be broken, anyway;
5406 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5407 and continues out of it. We can do that at the RTL level, though, so
5408 having an optimizer to tighten up try/finally regions would be a Good
5409 Thing. */
5410
5411 static enum gimplify_status
5412 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5413 {
5414 gimple_stmt_iterator iter;
5415 gimple_seq body_sequence = NULL;
5416
5417 tree temp = voidify_wrapper_expr (*expr_p, NULL);
5418
5419 /* We only care about the number of conditions between the innermost
5420 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5421 any cleanups collected outside the CLEANUP_POINT_EXPR. */
5422 int old_conds = gimplify_ctxp->conditions;
5423 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5424 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5425 gimplify_ctxp->conditions = 0;
5426 gimplify_ctxp->conditional_cleanups = NULL;
5427 gimplify_ctxp->in_cleanup_point_expr = true;
5428
5429 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5430
5431 gimplify_ctxp->conditions = old_conds;
5432 gimplify_ctxp->conditional_cleanups = old_cleanups;
5433 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5434
5435 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5436 {
5437 gimple *wce = gsi_stmt (iter);
5438
5439 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5440 {
5441 if (gsi_one_before_end_p (iter))
5442 {
5443 /* Note that gsi_insert_seq_before and gsi_remove do not
5444 scan operands, unlike some other sequence mutators. */
5445 if (!gimple_wce_cleanup_eh_only (wce))
5446 gsi_insert_seq_before_without_update (&iter,
5447 gimple_wce_cleanup (wce),
5448 GSI_SAME_STMT);
5449 gsi_remove (&iter, true);
5450 break;
5451 }
5452 else
5453 {
5454 gtry *gtry;
5455 gimple_seq seq;
5456 enum gimple_try_flags kind;
5457
5458 if (gimple_wce_cleanup_eh_only (wce))
5459 kind = GIMPLE_TRY_CATCH;
5460 else
5461 kind = GIMPLE_TRY_FINALLY;
5462 seq = gsi_split_seq_after (iter);
5463
5464 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5465 /* Do not use gsi_replace here, as it may scan operands.
5466 We want to do a simple structural modification only. */
5467 gsi_set_stmt (&iter, gtry);
5468 iter = gsi_start (gtry->eval);
5469 }
5470 }
5471 else
5472 gsi_next (&iter);
5473 }
5474
5475 gimplify_seq_add_seq (pre_p, body_sequence);
5476 if (temp)
5477 {
5478 *expr_p = temp;
5479 return GS_OK;
5480 }
5481 else
5482 {
5483 *expr_p = NULL;
5484 return GS_ALL_DONE;
5485 }
5486 }
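
/* A worked sketch of the conversion above (pseudo-GIMPLE, assuming a
body of two statements each followed by a cleanup marker):
s1; WCE <c1>; s2; WCE <c2>;
becomes
s1;
try {
s2;
c2; (the final cleanup is spliced inline; nothing follows it)
} finally {
c1;
}
Each non-final WCE wraps everything after it in a GIMPLE_TRY. */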
5487
5488 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5489 is the cleanup action required. EH_ONLY is true if the cleanup should
5490 only be executed if an exception is thrown, not on normal exit. */
5491
5492 static void
5493 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5494 {
5495 gimple *wce;
5496 gimple_seq cleanup_stmts = NULL;
5497
5498 /* Errors can result in improperly nested cleanups, which in turn
5499 causes confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5500 if (seen_error ())
5501 return;
5502
5503 if (gimple_conditional_context ())
5504 {
5505 /* If we're in a conditional context, this is more complex. We only
5506 want to run the cleanup if we actually ran the initialization that
5507 necessitates it, but we want to run it after the end of the
5508 conditional context. So we wrap the try/finally around the
5509 condition and use a flag to determine whether or not to actually
5510 run the destructor. Thus
5511
5512 test ? f(A()) : 0
5513
5514 becomes (approximately)
5515
5516 flag = 0;
5517 try {
5518 if (test) { A::A(temp); flag = 1; val = f(temp); }
5519 else { val = 0; }
5520 } finally {
5521 if (flag) A::~A(temp);
5522 }
5523 val
5524 */
5525 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5526 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
5527 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
5528
5529 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5530 gimplify_stmt (&cleanup, &cleanup_stmts);
5531 wce = gimple_build_wce (cleanup_stmts);
5532
5533 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5534 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5535 gimplify_seq_add_stmt (pre_p, ftrue);
5536
5537 /* Because of this manipulation, and the EH edges that jump
5538 threading cannot redirect, the temporary (VAR) will appear
5539 to be used uninitialized. Don't warn. */
5540 TREE_NO_WARNING (var) = 1;
5541 }
5542 else
5543 {
5544 gimplify_stmt (&cleanup, &cleanup_stmts);
5545 wce = gimple_build_wce (cleanup_stmts);
5546 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5547 gimplify_seq_add_stmt (pre_p, wce);
5548 }
5549 }
5550
5551 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5552
5553 static enum gimplify_status
5554 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5555 {
5556 tree targ = *expr_p;
5557 tree temp = TARGET_EXPR_SLOT (targ);
5558 tree init = TARGET_EXPR_INITIAL (targ);
5559 enum gimplify_status ret;
5560
5561 if (init)
5562 {
5563 tree cleanup = NULL_TREE;
5564
5565 /* TARGET_EXPR temps aren't part of the enclosing block, so add the
5566 temp to the temps list. Also handle variable length TARGET_EXPRs. */
5567 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5568 {
5569 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5570 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5571 gimplify_vla_decl (temp, pre_p);
5572 }
5573 else
5574 gimple_add_tmp_var (temp);
5575
5576 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5577 expression is supposed to initialize the slot. */
5578 if (VOID_TYPE_P (TREE_TYPE (init)))
5579 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5580 else
5581 {
5582 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5583 init = init_expr;
5584 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5585 init = NULL;
5586 ggc_free (init_expr);
5587 }
5588 if (ret == GS_ERROR)
5589 {
5590 /* PR c++/28266 Make sure this is expanded only once. */
5591 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5592 return GS_ERROR;
5593 }
5594 if (init)
5595 gimplify_and_add (init, pre_p);
5596
5597 /* If needed, push the cleanup for the temp. */
5598 if (TARGET_EXPR_CLEANUP (targ))
5599 {
5600 if (CLEANUP_EH_ONLY (targ))
5601 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5602 CLEANUP_EH_ONLY (targ), pre_p);
5603 else
5604 cleanup = TARGET_EXPR_CLEANUP (targ);
5605 }
5606
5607 /* Add a clobber for the temporary going out of scope, like
5608 gimplify_bind_expr. */
5609 if (gimplify_ctxp->in_cleanup_point_expr
5610 && needs_to_live_in_memory (temp)
5611 && flag_stack_reuse == SR_ALL)
5612 {
5613 tree clobber = build_constructor (TREE_TYPE (temp),
5614 NULL);
5615 TREE_THIS_VOLATILE (clobber) = true;
5616 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5617 if (cleanup)
5618 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5619 clobber);
5620 else
5621 cleanup = clobber;
5622 }
5623
5624 if (cleanup)
5625 gimple_push_cleanup (temp, cleanup, false, pre_p);
5626
5627 /* Only expand this once. */
5628 TREE_OPERAND (targ, 3) = init;
5629 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5630 }
5631 else
5632 /* We should have expanded this before. */
5633 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5634
5635 *expr_p = temp;
5636 return GS_OK;
5637 }
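
/* A sketch of the overall effect for C++ (approximate, with a
hypothetical temporary name):
f (S ()); // TARGET_EXPR <D.2345, S::S (&D.2345)>, cleanup S::~S
gimplifies to roughly
S::S (&D.2345);
try { f (D.2345); } finally { S::~S (&D.2345); D.2345 = {CLOBBER}; }
with the clobber present only when flag_stack_reuse allows it. */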
5638
5639 /* Gimplification of expression trees. */
5640
5641 /* Gimplify an expression which appears at statement context. The
5642 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5643 NULL, a new sequence is allocated.
5644
5645 Return true if we actually added a statement to the queue. */
5646
5647 bool
5648 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5649 {
5650 gimple_seq_node last;
5651
5652 last = gimple_seq_last (*seq_p);
5653 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5654 return last != gimple_seq_last (*seq_p);
5655 }
5656
5657 /* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels,
5658 starting at CTX. If entries already exist, force them to be some flavor
5659 of private. If there is no enclosing parallel, do nothing. */
5660
5661 void
5662 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5663 {
5664 splay_tree_node n;
5665
5666 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
5667 return;
5668
5669 do
5670 {
5671 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5672 if (n != NULL)
5673 {
5674 if (n->value & GOVD_SHARED)
5675 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5676 else if (n->value & GOVD_MAP)
5677 n->value |= GOVD_MAP_TO_ONLY;
5678 else
5679 return;
5680 }
5681 else if ((ctx->region_type & ORT_TARGET) != 0)
5682 {
5683 if (ctx->target_map_scalars_firstprivate)
5684 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5685 else
5686 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5687 }
5688 else if (ctx->region_type != ORT_WORKSHARE
5689 && ctx->region_type != ORT_SIMD
5690 && ctx->region_type != ORT_ACC
5691 && !(ctx->region_type & ORT_TARGET_DATA))
5692 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5693
5694 ctx = ctx->outer_context;
5695 }
5696 while (ctx);
5697 }
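
/* E.g. (a sketch): for
void foo (int n) {
int vla[n];
#pragma omp parallel shared (vla)
...
}
the gimplified temporaries behind vla's size end up FIRSTPRIVATE on
the parallel, so the region can still compute sizeof (vla). */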
5698
5699 /* Similarly for each of the type sizes of TYPE. */
5700
5701 static void
5702 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5703 {
5704 if (type == NULL || type == error_mark_node)
5705 return;
5706 type = TYPE_MAIN_VARIANT (type);
5707
5708 if (ctx->privatized_types->add (type))
5709 return;
5710
5711 switch (TREE_CODE (type))
5712 {
5713 case INTEGER_TYPE:
5714 case ENUMERAL_TYPE:
5715 case BOOLEAN_TYPE:
5716 case REAL_TYPE:
5717 case FIXED_POINT_TYPE:
5718 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5719 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5720 break;
5721
5722 case ARRAY_TYPE:
5723 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5724 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5725 break;
5726
5727 case RECORD_TYPE:
5728 case UNION_TYPE:
5729 case QUAL_UNION_TYPE:
5730 {
5731 tree field;
5732 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5733 if (TREE_CODE (field) == FIELD_DECL)
5734 {
5735 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5736 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5737 }
5738 }
5739 break;
5740
5741 case POINTER_TYPE:
5742 case REFERENCE_TYPE:
5743 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5744 break;
5745
5746 default:
5747 break;
5748 }
5749
5750 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5751 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5752 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5753 }
5754
5755 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
5756
5757 static void
5758 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5759 {
5760 splay_tree_node n;
5761 unsigned int nflags;
5762 tree t;
5763
5764 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
5765 return;
5766
5767 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5768 there are constructors involved somewhere. */
5769 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5770 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5771 flags |= GOVD_SEEN;
5772
5773 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5774 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
5775 {
5776 /* We shouldn't be re-adding the decl with the same data
5777 sharing class. */
5778 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5779 nflags = n->value | flags;
5780 /* The only combination of data sharing classes we should see is
5781 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
5782 reduction variables to be used in data sharing clauses. */
5783 gcc_assert ((ctx->region_type & ORT_ACC) != 0
5784 || ((nflags & GOVD_DATA_SHARE_CLASS)
5785 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
5786 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5787 n->value = nflags;
5788 return;
5789 }
5790
5791 /* When adding a variable-sized variable, we have to handle all sorts
5792 of additional bits of data: the pointer replacement variable, and
5793 the parameters of the type. */
5794 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5795 {
5796 /* Add the pointer replacement variable as PRIVATE if the variable
5797 replacement is private, else FIRSTPRIVATE since we'll need the
5798 address of the original variable either for SHARED, or for the
5799 copy into or out of the context. */
5800 if (!(flags & GOVD_LOCAL))
5801 {
5802 if (flags & GOVD_MAP)
5803 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
5804 else if (flags & GOVD_PRIVATE)
5805 nflags = GOVD_PRIVATE;
5806 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
5807 && (flags & GOVD_FIRSTPRIVATE))
5808 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
5809 else
5810 nflags = GOVD_FIRSTPRIVATE;
5811 nflags |= flags & GOVD_SEEN;
5812 t = DECL_VALUE_EXPR (decl);
5813 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5814 t = TREE_OPERAND (t, 0);
5815 gcc_assert (DECL_P (t));
5816 omp_add_variable (ctx, t, nflags);
5817 }
5818
5819 /* Add all of the variable and type parameters (which should have
5820 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5821 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5822 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5823 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5824
5825 /* The variable-sized variable itself is never SHARED, only some form
5826 of PRIVATE. The sharing would take place via the pointer variable
5827 which we remapped above. */
5828 if (flags & GOVD_SHARED)
5829 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5830 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5831
5832 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5833 alloca statement we generate for the variable, so make sure it
5834 is available. This isn't automatically needed for the SHARED
5835 case, since we won't be allocating local storage then.
5836 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5837 in this case omp_notice_variable will be called later
5838 on when it is gimplified. */
5839 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5840 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5841 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5842 }
5843 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5844 && lang_hooks.decls.omp_privatize_by_reference (decl))
5845 {
5846 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5847
5848 /* Similar to the direct variable sized case above, we'll need the
5849 size of references being privatized. */
5850 if ((flags & GOVD_SHARED) == 0)
5851 {
5852 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5853 if (DECL_P (t))
5854 omp_notice_variable (ctx, t, true);
5855 }
5856 }
5857
5858 if (n != NULL)
5859 n->value |= flags;
5860 else
5861 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5862 }
5863
5864 /* Notice a threadprivate variable DECL used in OMP context CTX.
5865 This just prints out diagnostics about threadprivate variable uses
5866 in target regions and in untied tasks. If DECL2 is non-NULL, prevent
5867 this warning on that variable. */
5868
5869 static bool
5870 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5871 tree decl2)
5872 {
5873 splay_tree_node n;
5874 struct gimplify_omp_ctx *octx;
5875
5876 for (octx = ctx; octx; octx = octx->outer_context)
5877 if ((octx->region_type & ORT_TARGET) != 0)
5878 {
5879 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5880 if (n == NULL)
5881 {
5882 error ("threadprivate variable %qE used in target region",
5883 DECL_NAME (decl));
5884 error_at (octx->location, "enclosing target region");
5885 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5886 }
5887 if (decl2)
5888 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5889 }
5890
5891 if (ctx->region_type != ORT_UNTIED_TASK)
5892 return false;
5893 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5894 if (n == NULL)
5895 {
5896 error ("threadprivate variable %qE used in untied task",
5897 DECL_NAME (decl));
5898 error_at (ctx->location, "enclosing task");
5899 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5900 }
5901 if (decl2)
5902 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5903 return false;
5904 }
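
/* E.g. (user-level sketch):
int x;
#pragma omp threadprivate (x)
...
#pragma omp task untied
x++; // error: threadprivate variable 'x' used in untied task
An untied task may resume on a different thread, so it is unclear
which thread's copy of 'x' the access refers to. */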
5905
5906 /* Return true if global var DECL is device resident. */
5907
5908 static bool
5909 device_resident_p (tree decl)
5910 {
5911 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
5912
5913 if (!attr)
5914 return false;
5915
5916 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
5917 {
5918 tree c = TREE_VALUE (t);
5919 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
5920 return true;
5921 }
5922
5923 return false;
5924 }
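
/* A sketch: a global declared via
#pragma acc declare device_resident (v)
carries the "oacc declare target" attribute with a
GOMP_MAP_DEVICE_RESIDENT map clause, so this predicate is true for it. */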
5925
5926 /* Determine outer default flags for DECL mentioned in an OMP region
5927 but not declared in an enclosing clause.
5928
5929 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5930 remapped firstprivate instead of shared. To some extent this is
5931 addressed in omp_firstprivatize_type_sizes, but not
5932 effectively. */
5933
5934 static unsigned
5935 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
5936 bool in_code, unsigned flags)
5937 {
5938 enum omp_clause_default_kind default_kind = ctx->default_kind;
5939 enum omp_clause_default_kind kind;
5940
5941 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5942 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5943 default_kind = kind;
5944
5945 switch (default_kind)
5946 {
5947 case OMP_CLAUSE_DEFAULT_NONE:
5948 {
5949 const char *rtype;
5950
5951 if (ctx->region_type & ORT_PARALLEL)
5952 rtype = "parallel";
5953 else if (ctx->region_type & ORT_TASK)
5954 rtype = "task";
5955 else if (ctx->region_type & ORT_TEAMS)
5956 rtype = "teams";
5957 else
5958 gcc_unreachable ();
5959
5960 error ("%qE not specified in enclosing %s",
5961 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
5962 error_at (ctx->location, "enclosing %s", rtype);
5963 }
5964 /* FALLTHRU */
5965 case OMP_CLAUSE_DEFAULT_SHARED:
5966 flags |= GOVD_SHARED;
5967 break;
5968 case OMP_CLAUSE_DEFAULT_PRIVATE:
5969 flags |= GOVD_PRIVATE;
5970 break;
5971 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5972 flags |= GOVD_FIRSTPRIVATE;
5973 break;
5974 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5975 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5976 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5977 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
5978 {
5979 omp_notice_variable (octx, decl, in_code);
5980 for (; octx; octx = octx->outer_context)
5981 {
5982 splay_tree_node n2;
5983
5984 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5985 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
5986 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
5987 continue;
5988 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5989 {
5990 flags |= GOVD_FIRSTPRIVATE;
5991 goto found_outer;
5992 }
5993 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5994 {
5995 flags |= GOVD_SHARED;
5996 goto found_outer;
5997 }
5998 }
5999 }
6000
6001 if (TREE_CODE (decl) == PARM_DECL
6002 || (!is_global_var (decl)
6003 && DECL_CONTEXT (decl) == current_function_decl))
6004 flags |= GOVD_FIRSTPRIVATE;
6005 else
6006 flags |= GOVD_SHARED;
6007 found_outer:
6008 break;
6009
6010 default:
6011 gcc_unreachable ();
6012 }
6013
6014 return flags;
6015 }
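
/* E.g. (user-level sketch):
int x = 0;
#pragma omp parallel default(none)
x++; // error: 'x' not specified in enclosing parallel
Without the default(none) clause, 'x' would simply get GOVD_SHARED. */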
6016
6017
6018 /* Determine outer default flags for DECL mentioned in an OACC region
6019 but not declared in an enclosing clause. */
6020
6021 static unsigned
6022 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
6023 {
6024 const char *rkind;
6025 bool on_device = false;
6026 tree type = TREE_TYPE (decl);
6027
6028 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6029 type = TREE_TYPE (type);
6030
6031 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
6032 && is_global_var (decl)
6033 && device_resident_p (decl))
6034 {
6035 on_device = true;
6036 flags |= GOVD_MAP_TO_ONLY;
6037 }
6038
6039 switch (ctx->region_type)
6040 {
6041 default:
6042 gcc_unreachable ();
6043
6044 case ORT_ACC_KERNELS:
6045 /* Scalars default to 'copy' under kernels; non-scalars default to
6046 'present_or_copy'. */
6047 flags |= GOVD_MAP;
6048 if (!AGGREGATE_TYPE_P (type))
6049 flags |= GOVD_MAP_FORCE;
6050
6051 rkind = "kernels";
6052 break;
6053
6054 case ORT_ACC_PARALLEL:
6055 {
6056 if (on_device || AGGREGATE_TYPE_P (type))
6057 /* Aggregates default to 'present_or_copy'. */
6058 flags |= GOVD_MAP;
6059 else
6060 /* Scalars default to 'firstprivate'. */
6061 flags |= GOVD_FIRSTPRIVATE;
6062 rkind = "parallel";
6063 }
6064 break;
6065 }
6066
6067 if (DECL_ARTIFICIAL (decl))
6068 ; /* We can get compiler-generated decls, and should not complain
6069 about them. */
6070 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
6071 {
6072 error ("%qE not specified in enclosing OpenACC %qs construct",
6073 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
6074 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
6075 }
6076 else
6077 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
6078
6079 return flags;
6080 }
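
/* A sketch of the resulting defaults (user-level):
double a[100]; double s;
#pragma acc kernels // s: 'copy' (GOVD_MAP | GOVD_MAP_FORCE),
// a: 'present_or_copy' (GOVD_MAP)
#pragma acc parallel // s: firstprivate, a: 'present_or_copy' */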
6081
6082 /* Record the fact that DECL was used within the OMP context CTX.
6083 IN_CODE is true when real code uses DECL, and false when we should
6084 merely emit default(none) errors. Return true if DECL is going to
6085 be remapped and thus DECL shouldn't be gimplified into its
6086 DECL_VALUE_EXPR (if any). */
6087
6088 static bool
6089 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
6090 {
6091 splay_tree_node n;
6092 unsigned flags = in_code ? GOVD_SEEN : 0;
6093 bool ret = false, shared;
6094
6095 if (error_operand_p (decl))
6096 return false;
6097
6098 if (ctx->region_type == ORT_NONE)
6099 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
6100
6101 if (is_global_var (decl))
6102 {
6103 /* Threadprivate variables are predetermined. */
6104 if (DECL_THREAD_LOCAL_P (decl))
6105 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
6106
6107 if (DECL_HAS_VALUE_EXPR_P (decl))
6108 {
6109 tree value = get_base_address (DECL_VALUE_EXPR (decl));
6110
6111 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
6112 return omp_notice_threadprivate_variable (ctx, decl, value);
6113 }
6114
6115 if (gimplify_omp_ctxp->outer_context == NULL
6116 && VAR_P (decl)
6117 && get_oacc_fn_attrib (current_function_decl))
6118 {
6119 location_t loc = DECL_SOURCE_LOCATION (decl);
6120
6121 if (lookup_attribute ("omp declare target link",
6122 DECL_ATTRIBUTES (decl)))
6123 {
6124 error_at (loc,
6125 "%qE with %<link%> clause used in %<routine%> function",
6126 DECL_NAME (decl));
6127 return false;
6128 }
6129 else if (!lookup_attribute ("omp declare target",
6130 DECL_ATTRIBUTES (decl)))
6131 {
6132 error_at (loc,
6133 "%qE requires a %<declare%> directive for use "
6134 "in a %<routine%> function", DECL_NAME (decl));
6135 return false;
6136 }
6137 }
6138 }
6139
6140 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6141 if ((ctx->region_type & ORT_TARGET) != 0)
6142 {
6143 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
6144 if (n == NULL)
6145 {
6146 unsigned nflags = flags;
6147 if (ctx->target_map_pointers_as_0len_arrays
6148 || ctx->target_map_scalars_firstprivate)
6149 {
6150 bool is_declare_target = false;
6151 bool is_scalar = false;
6152 if (is_global_var (decl)
6153 && varpool_node::get_create (decl)->offloadable)
6154 {
6155 struct gimplify_omp_ctx *octx;
6156 for (octx = ctx->outer_context;
6157 octx; octx = octx->outer_context)
6158 {
6159 n = splay_tree_lookup (octx->variables,
6160 (splay_tree_key)decl);
6161 if (n
6162 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
6163 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6164 break;
6165 }
6166 is_declare_target = octx == NULL;
6167 }
6168 if (!is_declare_target && ctx->target_map_scalars_firstprivate)
6169 {
6170 tree type = TREE_TYPE (decl);
6171 if (TREE_CODE (type) == REFERENCE_TYPE)
6172 type = TREE_TYPE (type);
6173 if (TREE_CODE (type) == COMPLEX_TYPE)
6174 type = TREE_TYPE (type);
6175 if (INTEGRAL_TYPE_P (type)
6176 || SCALAR_FLOAT_TYPE_P (type)
6177 || TREE_CODE (type) == POINTER_TYPE)
6178 is_scalar = true;
6179 }
6180 if (is_declare_target)
6181 ;
6182 else if (ctx->target_map_pointers_as_0len_arrays
6183 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
6184 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
6185 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
6186 == POINTER_TYPE)))
6187 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
6188 else if (is_scalar)
6189 nflags |= GOVD_FIRSTPRIVATE;
6190 }
6191
6192 struct gimplify_omp_ctx *octx = ctx->outer_context;
6193 if ((ctx->region_type & ORT_ACC) && octx)
6194 {
6195 /* Look in outer OpenACC contexts, to see if there's a
6196 data attribute for this variable. */
6197 omp_notice_variable (octx, decl, in_code);
6198
6199 for (; octx; octx = octx->outer_context)
6200 {
6201 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
6202 break;
6203 splay_tree_node n2
6204 = splay_tree_lookup (octx->variables,
6205 (splay_tree_key) decl);
6206 if (n2)
6207 {
6208 if (octx->region_type == ORT_ACC_HOST_DATA)
6209 error ("variable %qE declared in enclosing "
6210 "%<host_data%> region", DECL_NAME (decl));
6211 nflags |= GOVD_MAP;
6212 goto found_outer;
6213 }
6214 }
6215 }
6216
6217 {
6218 tree type = TREE_TYPE (decl);
6219
6220 if (nflags == flags
6221 && gimplify_omp_ctxp->target_firstprivatize_array_bases
6222 && lang_hooks.decls.omp_privatize_by_reference (decl))
6223 type = TREE_TYPE (type);
6224 if (nflags == flags
6225 && !lang_hooks.types.omp_mappable_type (type))
6226 {
6227 error ("%qD referenced in target region does not have "
6228 "a mappable type", decl);
6229 nflags |= GOVD_MAP | GOVD_EXPLICIT;
6230 }
6231 else if (nflags == flags)
6232 {
6233 if ((ctx->region_type & ORT_ACC) != 0)
6234 nflags = oacc_default_clause (ctx, decl, flags);
6235 else
6236 nflags |= GOVD_MAP;
6237 }
6238 }
6239 found_outer:
6240 omp_add_variable (ctx, decl, nflags);
6241 }
6242 else
6243 {
6244 /* If nothing changed, there's nothing left to do. */
6245 if ((n->value & flags) == flags)
6246 return ret;
6247 flags |= n->value;
6248 n->value = flags;
6249 }
6250 goto do_outer;
6251 }
6252
6253 if (n == NULL)
6254 {
6255 if (ctx->region_type == ORT_WORKSHARE
6256 || ctx->region_type == ORT_SIMD
6257 || ctx->region_type == ORT_ACC
6258 || (ctx->region_type & ORT_TARGET_DATA) != 0)
6259 goto do_outer;
6260
6261 flags = omp_default_clause (ctx, decl, in_code, flags);
6262
6263 if ((flags & GOVD_PRIVATE)
6264 && lang_hooks.decls.omp_private_outer_ref (decl))
6265 flags |= GOVD_PRIVATE_OUTER_REF;
6266
6267 omp_add_variable (ctx, decl, flags);
6268
6269 shared = (flags & GOVD_SHARED) != 0;
6270 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6271 goto do_outer;
6272 }
6273
6274 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
6275 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
6276 && DECL_SIZE (decl))
6277 {
6278 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6279 {
6280 splay_tree_node n2;
6281 tree t = DECL_VALUE_EXPR (decl);
6282 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6283 t = TREE_OPERAND (t, 0);
6284 gcc_assert (DECL_P (t));
6285 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6286 n2->value |= GOVD_SEEN;
6287 }
6288 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
6289 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
6290 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
6291 != INTEGER_CST))
6292 {
6293 splay_tree_node n2;
6294 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
6295 gcc_assert (DECL_P (t));
6296 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
6297 if (n2)
6298 n2->value |= GOVD_SEEN;
6299 }
6300 }
6301
6302 shared = ((flags | n->value) & GOVD_SHARED) != 0;
6303 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
6304
6305 /* If nothing changed, there's nothing left to do. */
6306 if ((n->value & flags) == flags)
6307 return ret;
6308 flags |= n->value;
6309 n->value = flags;
6310
6311 do_outer:
6312 /* If the variable is private in the current context, then we don't
6313 need to propagate anything to an outer context. */
6314 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
6315 return ret;
6316 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6317 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6318 return ret;
6319 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6320 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6321 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
6322 return ret;
6323 if (ctx->outer_context
6324 && omp_notice_variable (ctx->outer_context, decl, in_code))
6325 return true;
6326 return ret;
6327 }
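
/* E.g. (a sketch of the OpenMP 4.5 C/C++ defaults handled above):
int s; int *p;
#pragma omp target
{ s++; p[0]++; }
's' becomes implicitly firstprivate (target_map_scalars_firstprivate)
and 'p' is mapped as a zero-length array section
(GOVD_MAP | GOVD_MAP_0LEN_ARRAY). */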
6328
6329 /* Verify that DECL is private within CTX. If there's specific information
6330 to the contrary in the innermost scope, generate an error. */
6331
6332 static bool
6333 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
6334 {
6335 splay_tree_node n;
6336
6337 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6338 if (n != NULL)
6339 {
6340 if (n->value & GOVD_SHARED)
6341 {
6342 if (ctx == gimplify_omp_ctxp)
6343 {
6344 if (simd)
6345 error ("iteration variable %qE is predetermined linear",
6346 DECL_NAME (decl));
6347 else
6348 error ("iteration variable %qE should be private",
6349 DECL_NAME (decl));
6350 n->value = GOVD_PRIVATE;
6351 return true;
6352 }
6353 else
6354 return false;
6355 }
6356 else if ((n->value & GOVD_EXPLICIT) != 0
6357 && (ctx == gimplify_omp_ctxp
6358 || (ctx->region_type == ORT_COMBINED_PARALLEL
6359 && gimplify_omp_ctxp->outer_context == ctx)))
6360 {
6361 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6362 error ("iteration variable %qE should not be firstprivate",
6363 DECL_NAME (decl));
6364 else if ((n->value & GOVD_REDUCTION) != 0)
6365 error ("iteration variable %qE should not be reduction",
6366 DECL_NAME (decl));
6367 else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
6368 error ("iteration variable %qE should not be linear",
6369 DECL_NAME (decl));
6370 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
6371 error ("iteration variable %qE should not be lastprivate",
6372 DECL_NAME (decl));
6373 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6374 error ("iteration variable %qE should not be private",
6375 DECL_NAME (decl));
6376 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
6377 error ("iteration variable %qE is predetermined linear",
6378 DECL_NAME (decl));
6379 }
6380 return (ctx == gimplify_omp_ctxp
6381 || (ctx->region_type == ORT_COMBINED_PARALLEL
6382 && gimplify_omp_ctxp->outer_context == ctx));
6383 }
6384
6385 if (ctx->region_type != ORT_WORKSHARE
6386 && ctx->region_type != ORT_SIMD
6387 && ctx->region_type != ORT_ACC)
6388 return false;
6389 else if (ctx->outer_context)
6390 return omp_is_private (ctx->outer_context, decl, simd);
6391 return false;
6392 }
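
/* E.g. (user-level sketch):
#pragma omp for firstprivate (i)
for (i = 0; i < n; i++) ...
// error: iteration variable 'i' should not be firstprivate */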
6393
6394 /* Return true if DECL is private within a parallel region
6395 that binds to the current construct's context, or appears in that
6396 parallel region's REDUCTION clause.
6397
6398 static bool
6399 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
6400 {
6401 splay_tree_node n;
6402
6403 do
6404 {
6405 ctx = ctx->outer_context;
6406 if (ctx == NULL)
6407 {
6408 if (is_global_var (decl))
6409 return false;
6410
6411 /* References might be private, but they might be shared too.
6412 When checking for copyprivate, assume they might be private;
6413 otherwise assume they might be shared. */
6414 if (copyprivate)
6415 return true;
6416
6417 if (lang_hooks.decls.omp_privatize_by_reference (decl))
6418 return false;
6419
6420 /* Treat C++ privatized non-static data members outside
6421 of the privatization the same. */
6422 if (omp_member_access_dummy_var (decl))
6423 return false;
6424
6425 return true;
6426 }
6427
6428 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6429
6430 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6431 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
6432 continue;
6433
6434 if (n != NULL)
6435 {
6436 if ((n->value & GOVD_LOCAL) != 0
6437 && omp_member_access_dummy_var (decl))
6438 return false;
6439 return (n->value & GOVD_SHARED) == 0;
6440 }
6441 }
6442 while (ctx->region_type == ORT_WORKSHARE
6443 || ctx->region_type == ORT_SIMD
6444 || ctx->region_type == ORT_ACC);
6445 return false;
6446 }
6447
6448 /* Return true if the CTX is combined with distribute and thus
6449 lastprivate can't be supported. */
6450
6451 static bool
6452 omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6453 {
6454 do
6455 {
6456 if (ctx->outer_context == NULL)
6457 return false;
6458 ctx = ctx->outer_context;
6459 switch (ctx->region_type)
6460 {
6461 case ORT_WORKSHARE:
6462 if (!ctx->combined_loop)
6463 return false;
6464 if (ctx->distribute)
6465 return lang_GNU_Fortran ();
6466 break;
6467 case ORT_COMBINED_PARALLEL:
6468 break;
6469 case ORT_COMBINED_TEAMS:
6470 return lang_GNU_Fortran ();
6471 default:
6472 return false;
6473 }
6474 }
6475 while (1);
6476 }
6477
6478 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
6479
6480 static tree
6481 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
6482 {
6483 tree t = *tp;
6484
6485 /* Return the DECL_EXPR when it declares the DECL we are looking for. */
6486 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
6487 return t;
6488
6489 if (IS_TYPE_OR_DECL_P (t))
6490 *walk_subtrees = 0;
6491 return NULL_TREE;
6492 }
6493
6494 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
6495 omp context and into previous omp contexts. */
6496
6497 static void
6498 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6499 enum omp_region_type region_type,
6500 enum tree_code code)
6501 {
6502 struct gimplify_omp_ctx *ctx, *outer_ctx;
6503 tree c;
6504 hash_map<tree, tree> *struct_map_to_clause = NULL;
6505 tree *prev_list_p = NULL;
6506
6507 ctx = new_omp_context (region_type);
6508 outer_ctx = ctx->outer_context;
6509 if (code == OMP_TARGET && !lang_GNU_Fortran ())
6510 {
6511 ctx->target_map_pointers_as_0len_arrays = true;
6512 /* FIXME: For Fortran we want to set this too, when
6513 the Fortran FE is updated to OpenMP 4.5. */
6514 ctx->target_map_scalars_firstprivate = true;
6515 }
6516 if (!lang_GNU_Fortran ())
6517 switch (code)
6518 {
6519 case OMP_TARGET:
6520 case OMP_TARGET_DATA:
6521 case OMP_TARGET_ENTER_DATA:
6522 case OMP_TARGET_EXIT_DATA:
6523 case OACC_HOST_DATA:
6524 ctx->target_firstprivatize_array_bases = true;
6525 default:
6526 break;
6527 }
6528
6529 while ((c = *list_p) != NULL)
6530 {
6531 bool remove = false;
6532 bool notice_outer = true;
6533 const char *check_non_private = NULL;
6534 unsigned int flags;
6535 tree decl;
6536
6537 switch (OMP_CLAUSE_CODE (c))
6538 {
6539 case OMP_CLAUSE_PRIVATE:
6540 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6541 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6542 {
6543 flags |= GOVD_PRIVATE_OUTER_REF;
6544 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6545 }
6546 else
6547 notice_outer = false;
6548 goto do_add;
6549 case OMP_CLAUSE_SHARED:
6550 flags = GOVD_SHARED | GOVD_EXPLICIT;
6551 goto do_add;
6552 case OMP_CLAUSE_FIRSTPRIVATE:
6553 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6554 check_non_private = "firstprivate";
6555 goto do_add;
6556 case OMP_CLAUSE_LASTPRIVATE:
6557 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6558 check_non_private = "lastprivate";
6559 decl = OMP_CLAUSE_DECL (c);
6560 if (omp_no_lastprivate (ctx))
6561 {
6562 notice_outer = false;
6563 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6564 }
6565 else if (error_operand_p (decl))
6566 goto do_add;
6567 else if (outer_ctx
6568 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
6569 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
6570 && splay_tree_lookup (outer_ctx->variables,
6571 (splay_tree_key) decl) == NULL)
6572 {
6573 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6574 if (outer_ctx->outer_context)
6575 omp_notice_variable (outer_ctx->outer_context, decl, true);
6576 }
6577 else if (outer_ctx
6578 && (outer_ctx->region_type & ORT_TASK) != 0
6579 && outer_ctx->combined_loop
6580 && splay_tree_lookup (outer_ctx->variables,
6581 (splay_tree_key) decl) == NULL)
6582 {
6583 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6584 if (outer_ctx->outer_context)
6585 omp_notice_variable (outer_ctx->outer_context, decl, true);
6586 }
6587 else if (outer_ctx
6588 && (outer_ctx->region_type == ORT_WORKSHARE
6589 || outer_ctx->region_type == ORT_ACC)
6590 && outer_ctx->combined_loop
6591 && splay_tree_lookup (outer_ctx->variables,
6592 (splay_tree_key) decl) == NULL
6593 && !omp_check_private (outer_ctx, decl, false))
6594 {
6595 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6596 if (outer_ctx->outer_context
6597 && (outer_ctx->outer_context->region_type
6598 == ORT_COMBINED_PARALLEL)
6599 && splay_tree_lookup (outer_ctx->outer_context->variables,
6600 (splay_tree_key) decl) == NULL)
6601 {
6602 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
6603 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
6604 if (octx->outer_context)
6605 omp_notice_variable (octx->outer_context, decl, true);
6606 }
6607 else if (outer_ctx->outer_context)
6608 omp_notice_variable (outer_ctx->outer_context, decl, true);
6609 }
6610 goto do_add;
6611 case OMP_CLAUSE_REDUCTION:
6612 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6613 /* OpenACC permits reductions on private variables. */
6614 if (!(region_type & ORT_ACC))
6615 check_non_private = "reduction";
6616 decl = OMP_CLAUSE_DECL (c);
6617 if (TREE_CODE (decl) == MEM_REF)
6618 {
6619 tree type = TREE_TYPE (decl);
6620 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
6621 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6622 {
6623 remove = true;
6624 break;
6625 }
6626 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
6627 if (DECL_P (v))
6628 {
6629 omp_firstprivatize_variable (ctx, v);
6630 omp_notice_variable (ctx, v, true);
6631 }
6632 decl = TREE_OPERAND (decl, 0);
6633 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
6634 {
6635 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
6636 NULL, is_gimple_val, fb_rvalue)
6637 == GS_ERROR)
6638 {
6639 remove = true;
6640 break;
6641 }
6642 v = TREE_OPERAND (decl, 1);
6643 if (DECL_P (v))
6644 {
6645 omp_firstprivatize_variable (ctx, v);
6646 omp_notice_variable (ctx, v, true);
6647 }
6648 decl = TREE_OPERAND (decl, 0);
6649 }
6650 if (TREE_CODE (decl) == ADDR_EXPR
6651 || TREE_CODE (decl) == INDIRECT_REF)
6652 decl = TREE_OPERAND (decl, 0);
6653 }
6654 goto do_add_decl;
6655 case OMP_CLAUSE_LINEAR:
6656 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6657 is_gimple_val, fb_rvalue) == GS_ERROR)
6658 {
6659 remove = true;
6660 break;
6661 }
6662 else
6663 {
6664 if (code == OMP_SIMD
6665 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6666 {
6667 struct gimplify_omp_ctx *octx = outer_ctx;
6668 if (octx
6669 && octx->region_type == ORT_WORKSHARE
6670 && octx->combined_loop
6671 && !octx->distribute)
6672 {
6673 if (octx->outer_context
6674 && (octx->outer_context->region_type
6675 == ORT_COMBINED_PARALLEL))
6676 octx = octx->outer_context->outer_context;
6677 else
6678 octx = octx->outer_context;
6679 }
6680 if (octx
6681 && octx->region_type == ORT_WORKSHARE
6682 && octx->combined_loop
6683 && octx->distribute
6684 && !lang_GNU_Fortran ())
6685 {
6686 error_at (OMP_CLAUSE_LOCATION (c),
6687 "%<linear%> clause for variable other than "
6688 "loop iterator specified on construct "
6689 "combined with %<distribute%>");
6690 remove = true;
6691 break;
6692 }
6693 }
6694 /* For combined #pragma omp parallel for simd, we need to put
6695 lastprivate, and perhaps firstprivate too, on the
6696 parallel. Similarly for #pragma omp for simd. */
6697 struct gimplify_omp_ctx *octx = outer_ctx;
6698 decl = NULL_TREE;
6699 if (omp_no_lastprivate (ctx))
6700 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6701 do
6702 {
6703 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6704 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6705 break;
6706 decl = OMP_CLAUSE_DECL (c);
6707 if (error_operand_p (decl))
6708 {
6709 decl = NULL_TREE;
6710 break;
6711 }
6712 flags = GOVD_SEEN;
6713 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6714 flags |= GOVD_FIRSTPRIVATE;
6715 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6716 flags |= GOVD_LASTPRIVATE;
6717 if (octx
6718 && octx->region_type == ORT_WORKSHARE
6719 && octx->combined_loop)
6720 {
6721 if (octx->outer_context
6722 && (octx->outer_context->region_type
6723 == ORT_COMBINED_PARALLEL))
6724 octx = octx->outer_context;
6725 else if (omp_check_private (octx, decl, false))
6726 break;
6727 }
6728 else if (octx
6729 && (octx->region_type & ORT_TASK) != 0
6730 && octx->combined_loop)
6731 ;
6732 else if (octx
6733 && octx->region_type == ORT_COMBINED_PARALLEL
6734 && ctx->region_type == ORT_WORKSHARE
6735 && octx == outer_ctx)
6736 flags = GOVD_SEEN | GOVD_SHARED;
6737 else if (octx
6738 && octx->region_type == ORT_COMBINED_TEAMS)
6739 flags = GOVD_SEEN | GOVD_SHARED;
6740 else if (octx
6741 && octx->region_type == ORT_COMBINED_TARGET)
6742 {
6743 flags &= ~GOVD_LASTPRIVATE;
6744 if (flags == GOVD_SEEN)
6745 break;
6746 }
6747 else
6748 break;
6749 splay_tree_node on
6750 = splay_tree_lookup (octx->variables,
6751 (splay_tree_key) decl);
6752 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
6753 {
6754 octx = NULL;
6755 break;
6756 }
6757 omp_add_variable (octx, decl, flags);
6758 if (octx->outer_context == NULL)
6759 break;
6760 octx = octx->outer_context;
6761 }
6762 while (1);
6763 if (octx
6764 && decl
6765 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6766 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6767 omp_notice_variable (octx, decl, true);
6768 }
6769 flags = GOVD_LINEAR | GOVD_EXPLICIT;
6770 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6771 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6772 {
6773 notice_outer = false;
6774 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6775 }
6776 goto do_add;
6777
6778 case OMP_CLAUSE_MAP:
6779 decl = OMP_CLAUSE_DECL (c);
6780 if (error_operand_p (decl))
6781 remove = true;
6782 switch (code)
6783 {
6784 case OMP_TARGET:
6785 break;
6786 case OMP_TARGET_DATA:
6787 case OMP_TARGET_ENTER_DATA:
6788 case OMP_TARGET_EXIT_DATA:
6789 case OACC_HOST_DATA:
6790 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6791 || (OMP_CLAUSE_MAP_KIND (c)
6792 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6793 /* For target {,enter ,exit }data only the array slice is
6794 mapped, but not the pointer to it. */
6795 remove = true;
6796 break;
6797 default:
6798 break;
6799 }
6800 if (remove)
6801 break;
6802 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
6803 {
6804 struct gimplify_omp_ctx *octx;
6805 for (octx = outer_ctx; octx; octx = octx->outer_context)
6806 {
6807 if (octx->region_type != ORT_ACC_HOST_DATA)
6808 break;
6809 splay_tree_node n2
6810 = splay_tree_lookup (octx->variables,
6811 (splay_tree_key) decl);
6812 if (n2)
6813 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
6814 "declared in enclosing %<host_data%> region",
6815 DECL_NAME (decl));
6816 }
6817 }
6818 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6819 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6820 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6821 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6822 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6823 {
6824 remove = true;
6825 break;
6826 }
6827 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
6828 || (OMP_CLAUSE_MAP_KIND (c)
6829 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
6830 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
6831 {
6832 OMP_CLAUSE_SIZE (c)
6833 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL);
6834 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
6835 GOVD_FIRSTPRIVATE | GOVD_SEEN);
6836 }
6837 if (!DECL_P (decl))
6838 {
6839 tree d = decl, *pd;
6840 if (TREE_CODE (d) == ARRAY_REF)
6841 {
6842 while (TREE_CODE (d) == ARRAY_REF)
6843 d = TREE_OPERAND (d, 0);
6844 if (TREE_CODE (d) == COMPONENT_REF
6845 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
6846 decl = d;
6847 }
6848 pd = &OMP_CLAUSE_DECL (c);
6849 if (d == decl
6850 && TREE_CODE (decl) == INDIRECT_REF
6851 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
6852 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
6853 == REFERENCE_TYPE))
6854 {
6855 pd = &TREE_OPERAND (decl, 0);
6856 decl = TREE_OPERAND (decl, 0);
6857 }
6858 if (TREE_CODE (decl) == COMPONENT_REF)
6859 {
6860 while (TREE_CODE (decl) == COMPONENT_REF)
6861 decl = TREE_OPERAND (decl, 0);
6862 }
6863 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
6864 == GS_ERROR)
6865 {
6866 remove = true;
6867 break;
6868 }
6869 if (DECL_P (decl))
6870 {
6871 if (error_operand_p (decl))
6872 {
6873 remove = true;
6874 break;
6875 }
6876
6877 if (TYPE_SIZE_UNIT (TREE_TYPE (decl)) == NULL
6878 || (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
6879 != INTEGER_CST))
6880 {
6881 error_at (OMP_CLAUSE_LOCATION (c),
6882 "mapping field %qE of variable length "
6883 "structure", OMP_CLAUSE_DECL (c));
6884 remove = true;
6885 break;
6886 }
6887
6888 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
6889 {
6890 /* Error recovery. */
6891 if (prev_list_p == NULL)
6892 {
6893 remove = true;
6894 break;
6895 }
6896 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6897 {
6898 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
6899 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
6900 {
6901 remove = true;
6902 break;
6903 }
6904 }
6905 }
6906
6907 tree offset;
6908 HOST_WIDE_INT bitsize, bitpos;
6909 machine_mode mode;
6910 int unsignedp, reversep, volatilep = 0;
6911 tree base = OMP_CLAUSE_DECL (c);
6912 while (TREE_CODE (base) == ARRAY_REF)
6913 base = TREE_OPERAND (base, 0);
6914 if (TREE_CODE (base) == INDIRECT_REF)
6915 base = TREE_OPERAND (base, 0);
6916 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6917 &mode, &unsignedp, &reversep,
6918 &volatilep, false);
6919 gcc_assert (base == decl
6920 && (offset == NULL_TREE
6921 || TREE_CODE (offset) == INTEGER_CST));
6922
6923 splay_tree_node n
6924 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6925 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
6926 == GOMP_MAP_ALWAYS_POINTER);
6927 if (n == NULL || (n->value & GOVD_MAP) == 0)
6928 {
6929 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6930 OMP_CLAUSE_MAP);
6931 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
6932 OMP_CLAUSE_DECL (l) = decl;
6933 OMP_CLAUSE_SIZE (l) = size_int (1);
6934 if (struct_map_to_clause == NULL)
6935 struct_map_to_clause = new hash_map<tree, tree>;
6936 struct_map_to_clause->put (decl, l);
6937 if (ptr)
6938 {
6939 enum gomp_map_kind mkind
6940 = code == OMP_TARGET_EXIT_DATA
6941 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
6942 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6943 OMP_CLAUSE_MAP);
6944 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
6945 OMP_CLAUSE_DECL (c2)
6946 = unshare_expr (OMP_CLAUSE_DECL (c));
6947 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
6948 OMP_CLAUSE_SIZE (c2)
6949 = TYPE_SIZE_UNIT (ptr_type_node);
6950 OMP_CLAUSE_CHAIN (l) = c2;
6951 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
6952 {
6953 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
6954 tree c3
6955 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6956 OMP_CLAUSE_MAP);
6957 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
6958 OMP_CLAUSE_DECL (c3)
6959 = unshare_expr (OMP_CLAUSE_DECL (c4));
6960 OMP_CLAUSE_SIZE (c3)
6961 = TYPE_SIZE_UNIT (ptr_type_node);
6962 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
6963 OMP_CLAUSE_CHAIN (c2) = c3;
6964 }
6965 *prev_list_p = l;
6966 prev_list_p = NULL;
6967 }
6968 else
6969 {
6970 OMP_CLAUSE_CHAIN (l) = c;
6971 *list_p = l;
6972 list_p = &OMP_CLAUSE_CHAIN (l);
6973 }
6974 flags = GOVD_MAP | GOVD_EXPLICIT;
6975 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
6976 flags |= GOVD_SEEN;
6977 goto do_add_decl;
6978 }
6979 else
6980 {
6981 tree *osc = struct_map_to_clause->get (decl);
6982 tree *sc = NULL, *scp = NULL;
6983 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
6984 n->value |= GOVD_SEEN;
6985 offset_int o1, o2;
6986 if (offset)
6987 o1 = wi::to_offset (offset);
6988 else
6989 o1 = 0;
6990 if (bitpos)
6991 o1 = o1 + bitpos / BITS_PER_UNIT;
6992 for (sc = &OMP_CLAUSE_CHAIN (*osc);
6993 *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
6994 if (ptr && sc == prev_list_p)
6995 break;
6996 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6997 != COMPONENT_REF
6998 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
6999 != INDIRECT_REF)
7000 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
7001 != ARRAY_REF))
7002 break;
7003 else
7004 {
7005 tree offset2;
7006 HOST_WIDE_INT bitsize2, bitpos2;
7007 base = OMP_CLAUSE_DECL (*sc);
7008 if (TREE_CODE (base) == ARRAY_REF)
7009 {
7010 while (TREE_CODE (base) == ARRAY_REF)
7011 base = TREE_OPERAND (base, 0);
7012 if (TREE_CODE (base) != COMPONENT_REF
7013 || (TREE_CODE (TREE_TYPE (base))
7014 != ARRAY_TYPE))
7015 break;
7016 }
7017 else if (TREE_CODE (base) == INDIRECT_REF
7018 && (TREE_CODE (TREE_OPERAND (base, 0))
7019 == COMPONENT_REF)
7020 && (TREE_CODE (TREE_TYPE
7021 (TREE_OPERAND (base, 0)))
7022 == REFERENCE_TYPE))
7023 base = TREE_OPERAND (base, 0);
7024 base = get_inner_reference (base, &bitsize2,
7025 &bitpos2, &offset2,
7026 &mode, &unsignedp,
7027 &reversep, &volatilep,
7028 false);
7029 if (base != decl)
7030 break;
7031 if (scp)
7032 continue;
7033 gcc_assert (offset == NULL_TREE
7034 || TREE_CODE (offset) == INTEGER_CST);
7035 tree d1 = OMP_CLAUSE_DECL (*sc);
7036 tree d2 = OMP_CLAUSE_DECL (c);
7037 while (TREE_CODE (d1) == ARRAY_REF)
7038 d1 = TREE_OPERAND (d1, 0);
7039 while (TREE_CODE (d2) == ARRAY_REF)
7040 d2 = TREE_OPERAND (d2, 0);
7041 if (TREE_CODE (d1) == INDIRECT_REF)
7042 d1 = TREE_OPERAND (d1, 0);
7043 if (TREE_CODE (d2) == INDIRECT_REF)
7044 d2 = TREE_OPERAND (d2, 0);
7045 while (TREE_CODE (d1) == COMPONENT_REF)
7046 if (TREE_CODE (d2) == COMPONENT_REF
7047 && TREE_OPERAND (d1, 1)
7048 == TREE_OPERAND (d2, 1))
7049 {
7050 d1 = TREE_OPERAND (d1, 0);
7051 d2 = TREE_OPERAND (d2, 0);
7052 }
7053 else
7054 break;
7055 if (d1 == d2)
7056 {
7057 error_at (OMP_CLAUSE_LOCATION (c),
7058 "%qE appears more than once in map "
7059 "clauses", OMP_CLAUSE_DECL (c));
7060 remove = true;
7061 break;
7062 }
7063 if (offset2)
7064 o2 = wi::to_offset (offset2);
7065 else
7066 o2 = 0;
7067 if (bitpos2)
7068 o2 = o2 + bitpos2 / BITS_PER_UNIT;
7069 if (wi::ltu_p (o1, o2)
7070 || (wi::eq_p (o1, o2) && bitpos < bitpos2))
7071 {
7072 if (ptr)
7073 scp = sc;
7074 else
7075 break;
7076 }
7077 }
7078 if (remove)
7079 break;
7080 OMP_CLAUSE_SIZE (*osc)
7081 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
7082 size_one_node);
7083 if (ptr)
7084 {
7085 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7086 OMP_CLAUSE_MAP);
7087 tree cl = NULL_TREE;
7088 enum gomp_map_kind mkind
7089 = code == OMP_TARGET_EXIT_DATA
7090 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
7091 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
7092 OMP_CLAUSE_DECL (c2)
7093 = unshare_expr (OMP_CLAUSE_DECL (c));
7094 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
7095 OMP_CLAUSE_SIZE (c2)
7096 = TYPE_SIZE_UNIT (ptr_type_node);
7097 cl = scp ? *prev_list_p : c2;
7098 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
7099 {
7100 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
7101 tree c3
7102 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
7103 OMP_CLAUSE_MAP);
7104 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
7105 OMP_CLAUSE_DECL (c3)
7106 = unshare_expr (OMP_CLAUSE_DECL (c4));
7107 OMP_CLAUSE_SIZE (c3)
7108 = TYPE_SIZE_UNIT (ptr_type_node);
7109 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
7110 if (!scp)
7111 OMP_CLAUSE_CHAIN (c2) = c3;
7112 else
7113 cl = c3;
7114 }
7115 if (scp)
7116 *scp = c2;
7117 if (sc == prev_list_p)
7118 {
7119 *sc = cl;
7120 prev_list_p = NULL;
7121 }
7122 else
7123 {
7124 *prev_list_p = OMP_CLAUSE_CHAIN (c);
7125 list_p = prev_list_p;
7126 prev_list_p = NULL;
7127 OMP_CLAUSE_CHAIN (c) = *sc;
7128 *sc = cl;
7129 continue;
7130 }
7131 }
7132 else if (*sc != c)
7133 {
7134 *list_p = OMP_CLAUSE_CHAIN (c);
7135 OMP_CLAUSE_CHAIN (c) = *sc;
7136 *sc = c;
7137 continue;
7138 }
7139 }
7140 }
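/* A descriptive note on the check below (an editorial gloss, not from the
original sources): when the next clause is this map's GOMP_MAP_ALWAYS_POINTER
companion, i.e. the pointer update that accompanies a mapped component,
remember where the pair starts so the two clauses can later be moved
together by the struct-mapping code above.  */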
7141 if (!remove
7142 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
7143 && OMP_CLAUSE_CHAIN (c)
7144 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
7145 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
7146 == GOMP_MAP_ALWAYS_POINTER))
7147 prev_list_p = list_p;
7148 break;
7149 }
7150 flags = GOVD_MAP | GOVD_EXPLICIT;
7151 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
7152 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
7153 flags |= GOVD_MAP_ALWAYS_TO;
7154 goto do_add;
7155
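/* An illustrative note: for a simple "depend(inout: x)" clause, what the
runtime needs is the address of the dependence object, so the code below
rewrites OMP_CLAUSE_DECL from "x" to "&x" and then gimplifies it to an
rvalue.  */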
7156 case OMP_CLAUSE_DEPEND:
7157 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
7158 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
7159 {
7160 /* Nothing to do. OMP_CLAUSE_DECL will be lowered in
7161 omp-low.c. */
7162 break;
7163 }
7164 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
7165 {
7166 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
7167 NULL, is_gimple_val, fb_rvalue);
7168 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
7169 }
7170 if (error_operand_p (OMP_CLAUSE_DECL (c)))
7171 {
7172 remove = true;
7173 break;
7174 }
7175 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
7176 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
7177 is_gimple_val, fb_rvalue) == GS_ERROR)
7178 {
7179 remove = true;
7180 break;
7181 }
7182 break;
7183
7184 case OMP_CLAUSE_TO:
7185 case OMP_CLAUSE_FROM:
7186 case OMP_CLAUSE__CACHE_:
7187 decl = OMP_CLAUSE_DECL (c);
7188 if (error_operand_p (decl))
7189 {
7190 remove = true;
7191 break;
7192 }
7193 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7194 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7195 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7196 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7197 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7198 {
7199 remove = true;
7200 break;
7201 }
7202 if (!DECL_P (decl))
7203 {
7204 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
7205 NULL, is_gimple_lvalue, fb_lvalue)
7206 == GS_ERROR)
7207 {
7208 remove = true;
7209 break;
7210 }
7211 break;
7212 }
7213 goto do_notice;
7214
7215 case OMP_CLAUSE_USE_DEVICE_PTR:
7216 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7217 goto do_add;
7218 case OMP_CLAUSE_IS_DEVICE_PTR:
7219 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7220 goto do_add;
7221
7222 do_add:
7223 decl = OMP_CLAUSE_DECL (c);
7224 do_add_decl:
7225 if (error_operand_p (decl))
7226 {
7227 remove = true;
7228 break;
7229 }
7230 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
7231 {
7232 tree t = omp_member_access_dummy_var (decl);
7233 if (t)
7234 {
7235 tree v = DECL_VALUE_EXPR (decl);
7236 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
7237 if (outer_ctx)
7238 omp_notice_variable (outer_ctx, t, true);
7239 }
7240 }
7241 omp_add_variable (ctx, decl, flags);
7242 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7243 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7244 {
7245 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
7246 GOVD_LOCAL | GOVD_SEEN);
7247 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
7248 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
7249 find_decl_expr,
7250 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7251 NULL) == NULL_TREE)
7252 omp_add_variable (ctx,
7253 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
7254 GOVD_LOCAL | GOVD_SEEN);
7255 gimplify_omp_ctxp = ctx;
7256 push_gimplify_context ();
7257
7258 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
7259 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7260
7261 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
7262 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
7263 pop_gimplify_context
7264 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
7265 push_gimplify_context ();
7266 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
7267 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7268 pop_gimplify_context
7269 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
7270 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
7271 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
7272
7273 gimplify_omp_ctxp = outer_ctx;
7274 }
7275 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7276 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
7277 {
7278 gimplify_omp_ctxp = ctx;
7279 push_gimplify_context ();
7280 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
7281 {
7282 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7283 NULL, NULL);
7284 TREE_SIDE_EFFECTS (bind) = 1;
7285 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
7286 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
7287 }
7288 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
7289 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7290 pop_gimplify_context
7291 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
7292 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
7293
7294 gimplify_omp_ctxp = outer_ctx;
7295 }
7296 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7297 && OMP_CLAUSE_LINEAR_STMT (c))
7298 {
7299 gimplify_omp_ctxp = ctx;
7300 push_gimplify_context ();
7301 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
7302 {
7303 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
7304 NULL, NULL);
7305 TREE_SIDE_EFFECTS (bind) = 1;
7306 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
7307 OMP_CLAUSE_LINEAR_STMT (c) = bind;
7308 }
7309 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
7310 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7311 pop_gimplify_context
7312 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
7313 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
7314
7315 gimplify_omp_ctxp = outer_ctx;
7316 }
7317 if (notice_outer)
7318 goto do_notice;
7319 break;
7320
7321 case OMP_CLAUSE_COPYIN:
7322 case OMP_CLAUSE_COPYPRIVATE:
7323 decl = OMP_CLAUSE_DECL (c);
7324 if (error_operand_p (decl))
7325 {
7326 remove = true;
7327 break;
7328 }
7329 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
7330 && !remove
7331 && !omp_check_private (ctx, decl, true))
7332 {
7333 remove = true;
7334 if (is_global_var (decl))
7335 {
7336 if (DECL_THREAD_LOCAL_P (decl))
7337 remove = false;
7338 else if (DECL_HAS_VALUE_EXPR_P (decl))
7339 {
7340 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7341
7342 if (value
7343 && DECL_P (value)
7344 && DECL_THREAD_LOCAL_P (value))
7345 remove = false;
7346 }
7347 }
7348 if (remove)
7349 error_at (OMP_CLAUSE_LOCATION (c),
7350 "copyprivate variable %qE is not threadprivate"
7351 " or private in outer context", DECL_NAME (decl));
7352 }
7353 do_notice:
7354 if (outer_ctx)
7355 omp_notice_variable (outer_ctx, decl, true);
7356 if (check_non_private
7357 && region_type == ORT_WORKSHARE
7358 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7359 || decl == OMP_CLAUSE_DECL (c)
7360 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
7361 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7362 == ADDR_EXPR
7363 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
7364 == POINTER_PLUS_EXPR
7365 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
7366 (OMP_CLAUSE_DECL (c), 0), 0))
7367 == ADDR_EXPR)))))
7368 && omp_check_private (ctx, decl, false))
7369 {
7370 error ("%s variable %qE is private in outer context",
7371 check_non_private, DECL_NAME (decl));
7372 remove = true;
7373 }
7374 break;
7375
7376 case OMP_CLAUSE_IF:
7377 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
7378 && OMP_CLAUSE_IF_MODIFIER (c) != code)
7379 {
7380 const char *p[2];
7381 for (int i = 0; i < 2; i++)
7382 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
7383 {
7384 case OMP_PARALLEL: p[i] = "parallel"; break;
7385 case OMP_TASK: p[i] = "task"; break;
7386 case OMP_TASKLOOP: p[i] = "taskloop"; break;
7387 case OMP_TARGET_DATA: p[i] = "target data"; break;
7388 case OMP_TARGET: p[i] = "target"; break;
7389 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
7390 case OMP_TARGET_ENTER_DATA:
7391 p[i] = "target enter data"; break;
7392 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
7393 default: gcc_unreachable ();
7394 }
7395 error_at (OMP_CLAUSE_LOCATION (c),
7396 "expected %qs %<if%> clause modifier rather than %qs",
7397 p[0], p[1]);
7398 remove = true;
7399 }
7400 /* Fall through. */
7401
7402 case OMP_CLAUSE_FINAL:
7403 OMP_CLAUSE_OPERAND (c, 0)
7404 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
7405 /* Fall through. */
7406
7407 case OMP_CLAUSE_SCHEDULE:
7408 case OMP_CLAUSE_NUM_THREADS:
7409 case OMP_CLAUSE_NUM_TEAMS:
7410 case OMP_CLAUSE_THREAD_LIMIT:
7411 case OMP_CLAUSE_DIST_SCHEDULE:
7412 case OMP_CLAUSE_DEVICE:
7413 case OMP_CLAUSE_PRIORITY:
7414 case OMP_CLAUSE_GRAINSIZE:
7415 case OMP_CLAUSE_NUM_TASKS:
7416 case OMP_CLAUSE_HINT:
7417 case OMP_CLAUSE__CILK_FOR_COUNT_:
7418 case OMP_CLAUSE_ASYNC:
7419 case OMP_CLAUSE_WAIT:
7420 case OMP_CLAUSE_NUM_GANGS:
7421 case OMP_CLAUSE_NUM_WORKERS:
7422 case OMP_CLAUSE_VECTOR_LENGTH:
7423 case OMP_CLAUSE_WORKER:
7424 case OMP_CLAUSE_VECTOR:
7425 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7426 is_gimple_val, fb_rvalue) == GS_ERROR)
7427 remove = true;
7428 break;
7429
7430 case OMP_CLAUSE_GANG:
7431 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
7432 is_gimple_val, fb_rvalue) == GS_ERROR)
7433 remove = true;
7434 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
7435 is_gimple_val, fb_rvalue) == GS_ERROR)
7436 remove = true;
7437 break;
7438
7439 case OMP_CLAUSE_TILE:
7440 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
7441 list = TREE_CHAIN (list))
7442 {
7443 if (gimplify_expr (&TREE_VALUE (list), pre_p, NULL,
7444 is_gimple_val, fb_rvalue) == GS_ERROR)
7445 remove = true;
7446 }
7447 break;
7448
7449 case OMP_CLAUSE_DEVICE_RESIDENT:
7450 remove = true;
7451 break;
7452
7453 case OMP_CLAUSE_NOWAIT:
7454 case OMP_CLAUSE_ORDERED:
7455 case OMP_CLAUSE_UNTIED:
7456 case OMP_CLAUSE_COLLAPSE:
7457 case OMP_CLAUSE_AUTO:
7458 case OMP_CLAUSE_SEQ:
7459 case OMP_CLAUSE_INDEPENDENT:
7460 case OMP_CLAUSE_MERGEABLE:
7461 case OMP_CLAUSE_PROC_BIND:
7462 case OMP_CLAUSE_SAFELEN:
7463 case OMP_CLAUSE_SIMDLEN:
7464 case OMP_CLAUSE_NOGROUP:
7465 case OMP_CLAUSE_THREADS:
7466 case OMP_CLAUSE_SIMD:
7467 break;
7468
7469 case OMP_CLAUSE_DEFAULTMAP:
7470 ctx->target_map_scalars_firstprivate = false;
7471 break;
7472
7473 case OMP_CLAUSE_ALIGNED:
7474 decl = OMP_CLAUSE_DECL (c);
7475 if (error_operand_p (decl))
7476 {
7477 remove = true;
7478 break;
7479 }
7480 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
7481 is_gimple_val, fb_rvalue) == GS_ERROR)
7482 {
7483 remove = true;
7484 break;
7485 }
7486 if (!is_global_var (decl)
7487 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7488 omp_add_variable (ctx, decl, GOVD_ALIGNED);
7489 break;
7490
7491 case OMP_CLAUSE_DEFAULT:
7492 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
7493 break;
7494
7495 default:
7496 gcc_unreachable ();
7497 }
7498
7499 if (remove)
7500 *list_p = OMP_CLAUSE_CHAIN (c);
7501 else
7502 list_p = &OMP_CLAUSE_CHAIN (c);
7503 }
7504
7505 gimplify_omp_ctxp = ctx;
7506 if (struct_map_to_clause)
7507 delete struct_map_to_clause;
7508 }
7509
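/* An illustrative example of the scan above: for

     #pragma omp parallel firstprivate (x) shared (y)

"x" is recorded in the new context as GOVD_FIRSTPRIVATE | GOVD_EXPLICIT and
"y" as GOVD_SHARED | GOVD_EXPLICIT; variables referenced in the body
without an explicit clause are added later via omp_notice_variable.  */
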
7510 /* Return true if DECL is a candidate for shared to firstprivate
7511 optimization. We only consider non-addressable scalars that are
7512 neither references nor too big (at most four pointers in size). */
7513
7514 static bool
7515 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
7516 {
7517 if (TREE_ADDRESSABLE (decl))
7518 return false;
7519 tree type = TREE_TYPE (decl);
7520 if (!is_gimple_reg_type (type)
7521 || TREE_CODE (type) == REFERENCE_TYPE
7522 || TREE_ADDRESSABLE (type))
7523 return false;
7524 /* Don't optimize too large decls, as each thread/task will have
7525 its own. */
7526 HOST_WIDE_INT len = int_size_in_bytes (type);
7527 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
7528 return false;
7529 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7530 return false;
7531 return true;
7532 }
7533
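/* For instance, given

     int n = 64;
     #pragma omp parallel shared (n)
       foo (n);

"n" qualifies: it is a small non-addressable scalar ("foo" is just a
placeholder here).  A variable whose address is taken, a reference, or
anything larger than four pointers does not qualify.  */
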
7534 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
7535 For a decl that satisfies omp_shared_to_firstprivate_optimizable_decl_p,
7536 mark it as GOVD_WRITTEN in outer contexts. */
7537
7538 static void
7539 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
7540 {
7541 for (; ctx; ctx = ctx->outer_context)
7542 {
7543 splay_tree_node n = splay_tree_lookup (ctx->variables,
7544 (splay_tree_key) decl);
7545 if (n == NULL)
7546 continue;
7547 else if (n->value & GOVD_SHARED)
7548 {
7549 n->value |= GOVD_WRITTEN;
7550 return;
7551 }
7552 else if (n->value & GOVD_DATA_SHARE_CLASS)
7553 return;
7554 }
7555 }
7556
7557 /* Helper callback for walk_gimple_seq to discover possible stores
7558 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
7559 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
7560 context. */
7561
7562 static tree
7563 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
7564 {
7565 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7566
7567 *walk_subtrees = 0;
7568 if (!wi->is_lhs)
7569 return NULL_TREE;
7570
7571 tree op = *tp;
7572 do
7573 {
7574 if (handled_component_p (op))
7575 op = TREE_OPERAND (op, 0);
7576 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
7577 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
7578 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
7579 else
7580 break;
7581 }
7582 while (1);
7583 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
7584 return NULL_TREE;
7585
7586 omp_mark_stores (gimplify_omp_ctxp, op);
7587 return NULL_TREE;
7588 }
7589
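/* E.g. for an assignment "s.f = 1", or a folded "*&x = 1", the loop above
strips the component refs and MEM_REF wrappers down to the underlying decl
before checking it.  */
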
7590 /* Helper callback for walk_gimple_seq, the statement-level
7591 counterpart of omp_find_stores_op: avoid walking into nested
7592 OpenMP constructs whose bodies gimplify_adjust_omp_clauses has
7593 already handled. */
7594
7595 static tree
7596 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
7597 bool *handled_ops_p,
7598 struct walk_stmt_info *wi)
7599 {
7600 gimple *stmt = gsi_stmt (*gsi_p);
7601 switch (gimple_code (stmt))
7602 {
7603 /* Don't recurse on OpenMP constructs for which
7604 gimplify_adjust_omp_clauses already handled the bodies,
7605 except handle gimple_omp_for_pre_body. */
7606 case GIMPLE_OMP_FOR:
7607 *handled_ops_p = true;
7608 if (gimple_omp_for_pre_body (stmt))
7609 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
7610 omp_find_stores_stmt, omp_find_stores_op, wi);
7611 break;
7612 case GIMPLE_OMP_PARALLEL:
7613 case GIMPLE_OMP_TASK:
7614 case GIMPLE_OMP_SECTIONS:
7615 case GIMPLE_OMP_SINGLE:
7616 case GIMPLE_OMP_TARGET:
7617 case GIMPLE_OMP_TEAMS:
7618 case GIMPLE_OMP_CRITICAL:
7619 *handled_ops_p = true;
7620 break;
7621 default:
7622 break;
7623 }
7624 return NULL_TREE;
7625 }
7626
7627 struct gimplify_adjust_omp_clauses_data
7628 {
7629 tree *list_p;
7630 gimple_seq *pre_p;
7631 };
7632
7633 /* Splay-tree callback: emit an implicit clause for each variable that
7634 was seen in the context but is not covered by an explicit clause. */
7635
7636 static int
7637 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
7638 {
7639 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
7640 gimple_seq *pre_p
7641 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
7642 tree decl = (tree) n->key;
7643 unsigned flags = n->value;
7644 enum omp_clause_code code;
7645 tree clause;
7646 bool private_debug;
7647
7648 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
7649 return 0;
7650 if ((flags & GOVD_SEEN) == 0)
7651 return 0;
7652 if (flags & GOVD_DEBUG_PRIVATE)
7653 {
7654 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
7655 private_debug = true;
7656 }
7657 else if (flags & GOVD_MAP)
7658 private_debug = false;
7659 else
7660 private_debug
7661 = lang_hooks.decls.omp_private_debug_clause (decl,
7662 !!(flags & GOVD_SHARED));
7663 if (private_debug)
7664 code = OMP_CLAUSE_PRIVATE;
7665 else if (flags & GOVD_MAP)
7666 code = OMP_CLAUSE_MAP;
7667 else if (flags & GOVD_SHARED)
7668 {
7669 if (is_global_var (decl))
7670 {
7671 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7672 while (ctx != NULL)
7673 {
7674 splay_tree_node on
7675 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7676 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7677 | GOVD_PRIVATE | GOVD_REDUCTION
7678 | GOVD_LINEAR | GOVD_MAP)) != 0)
7679 break;
7680 ctx = ctx->outer_context;
7681 }
7682 if (ctx == NULL)
7683 return 0;
7684 }
7685 code = OMP_CLAUSE_SHARED;
7686 }
7687 else if (flags & GOVD_PRIVATE)
7688 code = OMP_CLAUSE_PRIVATE;
7689 else if (flags & GOVD_FIRSTPRIVATE)
7690 code = OMP_CLAUSE_FIRSTPRIVATE;
7691 else if (flags & GOVD_LASTPRIVATE)
7692 code = OMP_CLAUSE_LASTPRIVATE;
7693 else if (flags & GOVD_ALIGNED)
7694 return 0;
7695 else
7696 gcc_unreachable ();
7697
7698 if (((flags & GOVD_LASTPRIVATE)
7699 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
7700 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7701 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7702
7703 clause = build_omp_clause (input_location, code);
7704 OMP_CLAUSE_DECL (clause) = decl;
7705 OMP_CLAUSE_CHAIN (clause) = *list_p;
7706 if (private_debug)
7707 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
7708 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
7709 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
7710 else if (code == OMP_CLAUSE_SHARED
7711 && (flags & GOVD_WRITTEN) == 0
7712 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7713 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
7714 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
7715 {
7716 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
7717 OMP_CLAUSE_DECL (nc) = decl;
7718 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7719 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
7720 OMP_CLAUSE_DECL (clause)
7721 = build_simple_mem_ref_loc (input_location, decl);
7722 OMP_CLAUSE_DECL (clause)
7723 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
7724 build_int_cst (build_pointer_type (char_type_node), 0));
7725 OMP_CLAUSE_SIZE (clause) = size_zero_node;
7726 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7727 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
7728 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
7729 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7730 OMP_CLAUSE_CHAIN (nc) = *list_p;
7731 OMP_CLAUSE_CHAIN (clause) = nc;
7732 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7733 gimplify_omp_ctxp = ctx->outer_context;
7734 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
7735 pre_p, NULL, is_gimple_val, fb_rvalue);
7736 gimplify_omp_ctxp = ctx;
7737 }
7738 else if (code == OMP_CLAUSE_MAP)
7739 {
7740 int kind = (flags & GOVD_MAP_TO_ONLY
7741 ? GOMP_MAP_TO
7742 : GOMP_MAP_TOFROM);
7743 if (flags & GOVD_MAP_FORCE)
7744 kind |= GOMP_MAP_FLAG_FORCE;
7745 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
7746 if (DECL_SIZE (decl)
7747 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7748 {
7749 tree decl2 = DECL_VALUE_EXPR (decl);
7750 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
7751 decl2 = TREE_OPERAND (decl2, 0);
7752 gcc_assert (DECL_P (decl2));
7753 tree mem = build_simple_mem_ref (decl2);
7754 OMP_CLAUSE_DECL (clause) = mem;
7755 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
7756 if (gimplify_omp_ctxp->outer_context)
7757 {
7758 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
7759 omp_notice_variable (ctx, decl2, true);
7760 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
7761 }
7762 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7763 OMP_CLAUSE_MAP);
7764 OMP_CLAUSE_DECL (nc) = decl;
7765 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7766 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
7767 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
7768 else
7769 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
7770 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7771 OMP_CLAUSE_CHAIN (clause) = nc;
7772 }
7773 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7774 && lang_hooks.decls.omp_privatize_by_reference (decl))
7775 {
7776 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
7777 OMP_CLAUSE_SIZE (clause)
7778 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
7779 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7780 gimplify_omp_ctxp = ctx->outer_context;
7781 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
7782 pre_p, NULL, is_gimple_val, fb_rvalue);
7783 gimplify_omp_ctxp = ctx;
7784 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
7785 OMP_CLAUSE_MAP);
7786 OMP_CLAUSE_DECL (nc) = decl;
7787 OMP_CLAUSE_SIZE (nc) = size_zero_node;
7788 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
7789 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
7790 OMP_CLAUSE_CHAIN (clause) = nc;
7791 }
7792 else
7793 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
7794 }
7795 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
7796 {
7797 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
7798 OMP_CLAUSE_DECL (nc) = decl;
7799 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
7800 OMP_CLAUSE_CHAIN (nc) = *list_p;
7801 OMP_CLAUSE_CHAIN (clause) = nc;
7802 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7803 gimplify_omp_ctxp = ctx->outer_context;
7804 lang_hooks.decls.omp_finish_clause (nc, pre_p);
7805 gimplify_omp_ctxp = ctx;
7806 }
7807 *list_p = clause;
7808 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7809 gimplify_omp_ctxp = ctx->outer_context;
7810 lang_hooks.decls.omp_finish_clause (clause, pre_p);
7811 gimplify_omp_ctxp = ctx;
7812 return 0;
7813 }
7814
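/* As an illustration of the callback above: for

     int x = 0;
     #pragma omp parallel
       x++;

an implicit "shared(x)" clause is materialized; because the store to "x"
was discovered by omp_find_stores_*, GOVD_WRITTEN is set and the clause is
not marked OMP_CLAUSE_SHARED_READONLY.  */
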
7815 static void
7816 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
7817 enum tree_code code)
7818 {
7819 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7820 tree c, decl;
7821
7822 if (body)
7823 {
7824 struct gimplify_omp_ctx *octx;
7825 for (octx = ctx; octx; octx = octx->outer_context)
7826 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
7827 break;
7828 if (octx)
7829 {
7830 struct walk_stmt_info wi;
7831 memset (&wi, 0, sizeof (wi));
7832 walk_gimple_seq (body, omp_find_stores_stmt,
7833 omp_find_stores_op, &wi);
7834 }
7835 }
7836 while ((c = *list_p) != NULL)
7837 {
7838 splay_tree_node n;
7839 bool remove = false;
7840
7841 switch (OMP_CLAUSE_CODE (c))
7842 {
7843 case OMP_CLAUSE_PRIVATE:
7844 case OMP_CLAUSE_SHARED:
7845 case OMP_CLAUSE_FIRSTPRIVATE:
7846 case OMP_CLAUSE_LINEAR:
7847 decl = OMP_CLAUSE_DECL (c);
7848 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7849 remove = !(n->value & GOVD_SEEN);
7850 if (! remove)
7851 {
7852 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
7853 if ((n->value & GOVD_DEBUG_PRIVATE)
7854 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
7855 {
7856 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
7857 || ((n->value & GOVD_DATA_SHARE_CLASS)
7858 == GOVD_PRIVATE));
7859 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
7860 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
7861 }
7862 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7863 && (n->value & GOVD_WRITTEN) == 0
7864 && DECL_P (decl)
7865 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7866 OMP_CLAUSE_SHARED_READONLY (c) = 1;
7867 else if (DECL_P (decl)
7868 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7869 && (n->value & GOVD_WRITTEN) != 1)
7870 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7871 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7872 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7873 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7874 }
7875 break;
7876
7877 case OMP_CLAUSE_LASTPRIVATE:
7878 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
7879 accurately reflect the presence of a FIRSTPRIVATE clause. */
7880 decl = OMP_CLAUSE_DECL (c);
7881 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7882 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
7883 = (n->value & GOVD_FIRSTPRIVATE) != 0;
7884 if (omp_no_lastprivate (ctx))
7885 {
7886 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7887 remove = true;
7888 else
7889 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
7890 }
7891 else if (code == OMP_DISTRIBUTE
7892 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7893 {
7894 remove = true;
7895 error_at (OMP_CLAUSE_LOCATION (c),
7896 "same variable used in %<firstprivate%> and "
7897 "%<lastprivate%> clauses on %<distribute%> "
7898 "construct");
7899 }
7900 if (!remove
7901 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7902 && DECL_P (decl)
7903 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
7904 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
7905 break;
7906
7907 case OMP_CLAUSE_ALIGNED:
7908 decl = OMP_CLAUSE_DECL (c);
7909 if (!is_global_var (decl))
7910 {
7911 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7912 remove = n == NULL || !(n->value & GOVD_SEEN);
7913 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
7914 {
7915 struct gimplify_omp_ctx *octx;
7916 if (n != NULL
7917 && (n->value & (GOVD_DATA_SHARE_CLASS
7918 & ~GOVD_FIRSTPRIVATE)))
7919 remove = true;
7920 else
7921 for (octx = ctx->outer_context; octx;
7922 octx = octx->outer_context)
7923 {
7924 n = splay_tree_lookup (octx->variables,
7925 (splay_tree_key) decl);
7926 if (n == NULL)
7927 continue;
7928 if (n->value & GOVD_LOCAL)
7929 break;
7930 /* We have to avoid assigning a shared variable
7931 to itself when trying to add
7932 __builtin_assume_aligned. */
7933 if (n->value & GOVD_SHARED)
7934 {
7935 remove = true;
7936 break;
7937 }
7938 }
7939 }
7940 }
7941 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
7942 {
7943 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7944 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7945 remove = true;
7946 }
7947 break;
7948
7949 case OMP_CLAUSE_MAP:
7950 if (code == OMP_TARGET_EXIT_DATA
7951 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
7952 {
7953 remove = true;
7954 break;
7955 }
7956 decl = OMP_CLAUSE_DECL (c);
7957 if (!DECL_P (decl))
7958 {
7959 if ((ctx->region_type & ORT_TARGET) != 0
7960 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
7961 {
7962 if (TREE_CODE (decl) == INDIRECT_REF
7963 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7964 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7965 == REFERENCE_TYPE))
7966 decl = TREE_OPERAND (decl, 0);
7967 if (TREE_CODE (decl) == COMPONENT_REF)
7968 {
7969 while (TREE_CODE (decl) == COMPONENT_REF)
7970 decl = TREE_OPERAND (decl, 0);
7971 if (DECL_P (decl))
7972 {
7973 n = splay_tree_lookup (ctx->variables,
7974 (splay_tree_key) decl);
7975 if (!(n->value & GOVD_SEEN))
7976 remove = true;
7977 }
7978 }
7979 }
7980 break;
7981 }
7982 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7983 if ((ctx->region_type & ORT_TARGET) != 0
7984 && !(n->value & GOVD_SEEN)
7985 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
7986 && !lookup_attribute ("omp declare target link",
7987 DECL_ATTRIBUTES (decl)))
7988 {
7989 remove = true;
7990 /* For struct element mapping, if the struct is never referenced
7991 in the target block and none of the mappings has the always
7992 modifier, remove all the struct element mappings, which
7993 immediately follow the GOMP_MAP_STRUCT map clause. */
7994 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
7995 {
7996 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
7997 while (cnt--)
7998 OMP_CLAUSE_CHAIN (c)
7999 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
8000 }
8001 }
8002 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
8003 && code == OMP_TARGET_EXIT_DATA)
8004 remove = true;
8005 else if (DECL_SIZE (decl)
8006 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
8007 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
8008 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
8009 && (OMP_CLAUSE_MAP_KIND (c)
8010 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8011 {
8012 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
8013 for these, TREE_CODE (DECL_SIZE (decl)) will always be
8014 INTEGER_CST. */
8015 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
8016
8017 tree decl2 = DECL_VALUE_EXPR (decl);
8018 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8019 decl2 = TREE_OPERAND (decl2, 0);
8020 gcc_assert (DECL_P (decl2));
8021 tree mem = build_simple_mem_ref (decl2);
8022 OMP_CLAUSE_DECL (c) = mem;
8023 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8024 if (ctx->outer_context)
8025 {
8026 omp_notice_variable (ctx->outer_context, decl2, true);
8027 omp_notice_variable (ctx->outer_context,
8028 OMP_CLAUSE_SIZE (c), true);
8029 }
8030 if (((ctx->region_type & ORT_TARGET) != 0
8031 || !ctx->target_firstprivatize_array_bases)
8032 && ((n->value & GOVD_SEEN) == 0
8033 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
8034 {
8035 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8036 OMP_CLAUSE_MAP);
8037 OMP_CLAUSE_DECL (nc) = decl;
8038 OMP_CLAUSE_SIZE (nc) = size_zero_node;
8039 if (ctx->target_firstprivatize_array_bases)
8040 OMP_CLAUSE_SET_MAP_KIND (nc,
8041 GOMP_MAP_FIRSTPRIVATE_POINTER);
8042 else
8043 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
8044 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
8045 OMP_CLAUSE_CHAIN (c) = nc;
8046 c = nc;
8047 }
8048 }
8049 else
8050 {
8051 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8052 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8053 gcc_assert ((n->value & GOVD_SEEN) == 0
8054 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
8055 == 0));
8056 }
8057 break;
8058
8059 case OMP_CLAUSE_TO:
8060 case OMP_CLAUSE_FROM:
8061 case OMP_CLAUSE__CACHE_:
8062 decl = OMP_CLAUSE_DECL (c);
8063 if (!DECL_P (decl))
8064 break;
8065 if (DECL_SIZE (decl)
8066 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
8067 {
8068 tree decl2 = DECL_VALUE_EXPR (decl);
8069 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
8070 decl2 = TREE_OPERAND (decl2, 0);
8071 gcc_assert (DECL_P (decl2));
8072 tree mem = build_simple_mem_ref (decl2);
8073 OMP_CLAUSE_DECL (c) = mem;
8074 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8075 if (ctx->outer_context)
8076 {
8077 omp_notice_variable (ctx->outer_context, decl2, true);
8078 omp_notice_variable (ctx->outer_context,
8079 OMP_CLAUSE_SIZE (c), true);
8080 }
8081 }
8082 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8083 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
8084 break;
8085
8086 case OMP_CLAUSE_REDUCTION:
8087 decl = OMP_CLAUSE_DECL (c);
8088 if (DECL_P (decl)
8089 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
8090 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
8091 break;
8092 case OMP_CLAUSE_COPYIN:
8093 case OMP_CLAUSE_COPYPRIVATE:
8094 case OMP_CLAUSE_IF:
8095 case OMP_CLAUSE_NUM_THREADS:
8096 case OMP_CLAUSE_NUM_TEAMS:
8097 case OMP_CLAUSE_THREAD_LIMIT:
8098 case OMP_CLAUSE_DIST_SCHEDULE:
8099 case OMP_CLAUSE_DEVICE:
8100 case OMP_CLAUSE_SCHEDULE:
8101 case OMP_CLAUSE_NOWAIT:
8102 case OMP_CLAUSE_ORDERED:
8103 case OMP_CLAUSE_DEFAULT:
8104 case OMP_CLAUSE_UNTIED:
8105 case OMP_CLAUSE_COLLAPSE:
8106 case OMP_CLAUSE_FINAL:
8107 case OMP_CLAUSE_MERGEABLE:
8108 case OMP_CLAUSE_PROC_BIND:
8109 case OMP_CLAUSE_SAFELEN:
8110 case OMP_CLAUSE_SIMDLEN:
8111 case OMP_CLAUSE_DEPEND:
8112 case OMP_CLAUSE_PRIORITY:
8113 case OMP_CLAUSE_GRAINSIZE:
8114 case OMP_CLAUSE_NUM_TASKS:
8115 case OMP_CLAUSE_NOGROUP:
8116 case OMP_CLAUSE_THREADS:
8117 case OMP_CLAUSE_SIMD:
8118 case OMP_CLAUSE_HINT:
8119 case OMP_CLAUSE_DEFAULTMAP:
8120 case OMP_CLAUSE_USE_DEVICE_PTR:
8121 case OMP_CLAUSE_IS_DEVICE_PTR:
8122 case OMP_CLAUSE__CILK_FOR_COUNT_:
8123 case OMP_CLAUSE_ASYNC:
8124 case OMP_CLAUSE_WAIT:
8125 case OMP_CLAUSE_DEVICE_RESIDENT:
8126 case OMP_CLAUSE_INDEPENDENT:
8127 case OMP_CLAUSE_NUM_GANGS:
8128 case OMP_CLAUSE_NUM_WORKERS:
8129 case OMP_CLAUSE_VECTOR_LENGTH:
8130 case OMP_CLAUSE_GANG:
8131 case OMP_CLAUSE_WORKER:
8132 case OMP_CLAUSE_VECTOR:
8133 case OMP_CLAUSE_AUTO:
8134 case OMP_CLAUSE_SEQ:
8135 case OMP_CLAUSE_TILE:
8136 break;
8137
8138 default:
8139 gcc_unreachable ();
8140 }
8141
8142 if (remove)
8143 *list_p = OMP_CLAUSE_CHAIN (c);
8144 else
8145 list_p = &OMP_CLAUSE_CHAIN (c);
8146 }
8147
8148 /* Add in any implicit data sharing. */
8149 struct gimplify_adjust_omp_clauses_data data;
8150 data.list_p = list_p;
8151 data.pre_p = pre_p;
8152 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
8153
8154 gimplify_omp_ctxp = ctx->outer_context;
8155 delete_omp_context (ctx);
8156 }
8157
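/* For example, an explicit "private (t)" naming a variable the region body
never references never acquires GOVD_SEEN, so the loop above removes the
clause and the expanded region carries no dead clauses.  */
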
8158 /* Gimplify OACC_CACHE. */
8159
8160 static void
8161 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
8162 {
8163 tree expr = *expr_p;
8164
8165 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
8166 OACC_CACHE);
8167 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
8168 OACC_CACHE);
8169
8170 /* TODO: Do something sensible with this information. */
8171
8172 *expr_p = NULL_TREE;
8173 }
8174
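/* E.g. "#pragma acc cache (a[0:n])" has its clauses scanned and adjusted
like any other construct, but the directive itself is then discarded
(*expr_p set to NULL_TREE above).  */
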
8175 /* Helper function of gimplify_oacc_declare. If required, translate
8176 the map kind in CLAUSE into an 'entry' kind and an 'exit' kind.
8177 The entry kind replaces the one in CLAUSE, while the exit kind is
8178 used in a new omp_clause and returned to the caller. */
8179
8180 static tree
8181 gimplify_oacc_declare_1 (tree clause)
8182 {
8183 HOST_WIDE_INT kind, new_op;
8184 bool ret = false;
8185 tree c = NULL;
8186
8187 kind = OMP_CLAUSE_MAP_KIND (clause);
8188
8189 switch (kind)
8190 {
8191 case GOMP_MAP_ALLOC:
8192 case GOMP_MAP_FORCE_ALLOC:
8193 case GOMP_MAP_FORCE_TO:
8194 new_op = GOMP_MAP_FORCE_DEALLOC;
8195 ret = true;
8196 break;
8197
8198 case GOMP_MAP_FORCE_FROM:
8199 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8200 new_op = GOMP_MAP_FORCE_FROM;
8201 ret = true;
8202 break;
8203
8204 case GOMP_MAP_FORCE_TOFROM:
8205 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
8206 new_op = GOMP_MAP_FORCE_FROM;
8207 ret = true;
8208 break;
8209
8210 case GOMP_MAP_FROM:
8211 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
8212 new_op = GOMP_MAP_FROM;
8213 ret = true;
8214 break;
8215
8216 case GOMP_MAP_TOFROM:
8217 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
8218 new_op = GOMP_MAP_FROM;
8219 ret = true;
8220 break;
8221
8222 case GOMP_MAP_DEVICE_RESIDENT:
8223 case GOMP_MAP_FORCE_DEVICEPTR:
8224 case GOMP_MAP_FORCE_PRESENT:
8225 case GOMP_MAP_LINK:
8226 case GOMP_MAP_POINTER:
8227 case GOMP_MAP_TO:
8228 break;
8229
8230 default:
8231 gcc_unreachable ();
8232 break;
8233 }
8234
8235 if (ret)
8236 {
8237 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
8238 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
8239 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
8240 }
8241
8242 return c;
8243 }
8244
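/* E.g. a GOMP_MAP_TOFROM mapping on an "acc declare" directive is split
here: the entry kind becomes GOMP_MAP_TO and the returned exit clause gets
GOMP_MAP_FROM, so the data is copied in at the declare point and copied
back out on exit.  */
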
8245 /* Gimplify OACC_DECLARE. */
8246
8247 static void
8248 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
8249 {
8250 tree expr = *expr_p;
8251 gomp_target *stmt;
8252 tree clauses, t;
8253
8254 clauses = OACC_DECLARE_CLAUSES (expr);
8255
8256 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
8257
8258 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
8259 {
8260 tree decl = OMP_CLAUSE_DECL (t);
8261
8262 if (TREE_CODE (decl) == MEM_REF)
8263 continue;
8264
8265 if (TREE_CODE (decl) == VAR_DECL
8266 && !is_global_var (decl)
8267 && DECL_CONTEXT (decl) == current_function_decl)
8268 {
8269 tree c = gimplify_oacc_declare_1 (t);
8270 if (c)
8271 {
8272 if (oacc_declare_returns == NULL)
8273 oacc_declare_returns = new hash_map<tree, tree>;
8274
8275 oacc_declare_returns->put (decl, c);
8276 }
8277 }
8278
8279 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
8280 }
8281
8282 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
8283 clauses);
8284
8285 gimplify_seq_add_stmt (pre_p, stmt);
8286
8287 *expr_p = NULL_TREE;
8288 }
8289
8290 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
8291 gimplification of the body, as well as scanning the body for used
8292 variables. We need to do this scan now, because variable-sized
8293 decls will be decomposed during gimplification. */
8294
8295 static void
8296 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
8297 {
8298 tree expr = *expr_p;
8299 gimple *g;
8300 gimple_seq body = NULL;
8301
8302 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
8303 OMP_PARALLEL_COMBINED (expr)
8304 ? ORT_COMBINED_PARALLEL
8305 : ORT_PARALLEL, OMP_PARALLEL);
8306
8307 push_gimplify_context ();
8308
8309 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
8310 if (gimple_code (g) == GIMPLE_BIND)
8311 pop_gimplify_context (g);
8312 else
8313 pop_gimplify_context (NULL);
8314
8315 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
8316 OMP_PARALLEL);
8317
8318 g = gimple_build_omp_parallel (body,
8319 OMP_PARALLEL_CLAUSES (expr),
8320 NULL_TREE, NULL_TREE);
8321 if (OMP_PARALLEL_COMBINED (expr))
8322 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
8323 gimplify_seq_add_stmt (pre_p, g);
8324 *expr_p = NULL_TREE;
8325 }
8326
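/* A sketch of the net effect: "#pragma omp parallel" over a body becomes a
GIMPLE_OMP_PARALLEL statement wrapping the gimplified body, with the clause
list extended by the implicit data sharing computed during the scan of that
body.  */
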
8327 /* Gimplify the contents of an OMP_TASK statement. This involves
8328 gimplification of the body, as well as scanning the body for used
8329 variables. We need to do this scan now, because variable-sized
8330 decls will be decomposed during gimplification. */
8331
8332 static void
8333 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
8334 {
8335 tree expr = *expr_p;
8336 gimple *g;
8337 gimple_seq body = NULL;
8338
8339 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
8340 find_omp_clause (OMP_TASK_CLAUSES (expr),
8341 OMP_CLAUSE_UNTIED)
8342 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
8343
8344 push_gimplify_context ();
8345
8346 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
8347 if (gimple_code (g) == GIMPLE_BIND)
8348 pop_gimplify_context (g);
8349 else
8350 pop_gimplify_context (NULL);
8351
8352 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
8353 OMP_TASK);
8354
8355 g = gimple_build_omp_task (body,
8356 OMP_TASK_CLAUSES (expr),
8357 NULL_TREE, NULL_TREE,
8358 NULL_TREE, NULL_TREE, NULL_TREE);
8359 gimplify_seq_add_stmt (pre_p, g);
8360 *expr_p = NULL_TREE;
8361 }
8362
8363 /* Helper function of gimplify_omp_for: find an OMP_FOR or OMP_SIMD
8364 with non-NULL OMP_FOR_INIT. */
8365
8366 static tree
8367 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
8368 {
8369 *walk_subtrees = 0;
8370 switch (TREE_CODE (*tp))
8371 {
8372 case OMP_FOR:
8373 *walk_subtrees = 1;
8374 /* FALLTHRU */
8375 case OMP_SIMD:
8376 if (OMP_FOR_INIT (*tp) != NULL_TREE)
8377 return *tp;
8378 break;
8379 case BIND_EXPR:
8380 case STATEMENT_LIST:
8381 case OMP_PARALLEL:
8382 *walk_subtrees = 1;
8383 break;
8384 default:
8385 break;
8386 }
8387 return NULL_TREE;
8388 }
8389
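/* An illustrative reading: for a combined construct such as
"#pragma omp for simd", the outer OMP_FOR carries a NULL OMP_FOR_INIT and
the walker above digs out the inner OMP_SIMD that holds the actual loop,
which gimplify_omp_for then operates on.  */
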
8390 /* Gimplify the gross structure of an OMP_FOR statement. */
8391
8392 static enum gimplify_status
8393 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
8394 {
8395 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
8396 enum gimplify_status ret = GS_ALL_DONE;
8397 enum gimplify_status tret;
8398 gomp_for *gfor;
8399 gimple_seq for_body, for_pre_body;
8400 int i;
8401 bitmap has_decl_expr = NULL;
8402 enum omp_region_type ort = ORT_WORKSHARE;
8403
8404 orig_for_stmt = for_stmt = *expr_p;
8405
8406 switch (TREE_CODE (for_stmt))
8407 {
8408 case OMP_FOR:
8409 case CILK_FOR:
8410 case OMP_DISTRIBUTE:
8411 break;
8412 case OACC_LOOP:
8413 ort = ORT_ACC;
8414 break;
8415 case OMP_TASKLOOP:
8416 if (find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
8417 ort = ORT_UNTIED_TASK;
8418 else
8419 ort = ORT_TASK;
8420 break;
8421 case OMP_SIMD:
8422 case CILK_SIMD:
8423 ort = ORT_SIMD;
8424 break;
8425 default:
8426 gcc_unreachable ();
8427 }
8428
8429 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
8430 clause for the IV. */
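/* An illustrative case: "#pragma omp simd linear (i)" where "i" is the
single loop iterator.  The IV receives its starting value from the loop's
own initialization, so copying the outer value in would be redundant.  */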
8431 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8432 {
8433 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
8434 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8435 decl = TREE_OPERAND (t, 0);
8436 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8437 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8438 && OMP_CLAUSE_DECL (c) == decl)
8439 {
8440 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8441 break;
8442 }
8443 }
8444
8445 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8446 {
8447 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
8448 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
8449 find_combined_omp_for, NULL, NULL);
8450 if (inner_for_stmt == NULL_TREE)
8451 {
8452 gcc_assert (seen_error ());
8453 *expr_p = NULL_TREE;
8454 return GS_ERROR;
8455 }
8456 }
8457
8458 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
8459 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
8460 TREE_CODE (for_stmt));
8461
8462 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
8463 gimplify_omp_ctxp->distribute = true;
8464
8465 /* Handle OMP_FOR_INIT. */
8466 for_pre_body = NULL;
8467 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
8468 {
8469 has_decl_expr = BITMAP_ALLOC (NULL);
8470 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
8471 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
8472 == VAR_DECL)
8473 {
8474 t = OMP_FOR_PRE_BODY (for_stmt);
8475 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8476 }
8477 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
8478 {
8479 tree_stmt_iterator si;
8480 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
8481 tsi_next (&si))
8482 {
8483 t = tsi_stmt (si);
8484 if (TREE_CODE (t) == DECL_EXPR
8485 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
8486 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
8487 }
8488 }
8489 }
8490 if (OMP_FOR_PRE_BODY (for_stmt))
8491 {
8492 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
8493 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8494 else
8495 {
8496 struct gimplify_omp_ctx ctx;
8497 memset (&ctx, 0, sizeof (ctx));
8498 ctx.region_type = ORT_NONE;
8499 gimplify_omp_ctxp = &ctx;
8500 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
8501 gimplify_omp_ctxp = NULL;
8502 }
8503 }
8504 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
8505
8506 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
8507 for_stmt = inner_for_stmt;
8508
8509 /* For a taskloop, the start, end and step expressions need to be
8510 gimplified before the taskloop, outside of the taskloop omp context. */
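/* E.g. for "#pragma omp taskloop" over "i = 0; i < limit (); i++", the
call "limit ()" is evaluated into a temporary here and that temporary is
added as an implicit firstprivate clause, so the bound is not re-evaluated
inside the taskloop ("limit" stands for any non-constant expression).  */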
8511 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
8512 {
8513 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8514 {
8515 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8516 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8517 {
8518 TREE_OPERAND (t, 1)
8519 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8520 pre_p, NULL);
8521 tree c = build_omp_clause (input_location,
8522 OMP_CLAUSE_FIRSTPRIVATE);
8523 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8524 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8525 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8526 }
8527
8528 /* Handle OMP_FOR_COND. */
8529 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8530 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
8531 {
8532 TREE_OPERAND (t, 1)
8533 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
8534 gimple_seq_empty_p (for_pre_body)
8535 ? pre_p : &for_pre_body, NULL);
8536 tree c = build_omp_clause (input_location,
8537 OMP_CLAUSE_FIRSTPRIVATE);
8538 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
8539 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8540 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8541 }
8542
8543 /* Handle OMP_FOR_INCR. */
8544 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8545 if (TREE_CODE (t) == MODIFY_EXPR)
8546 {
8547 decl = TREE_OPERAND (t, 0);
8548 t = TREE_OPERAND (t, 1);
8549 tree *tp = &TREE_OPERAND (t, 1);
8550 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
8551 tp = &TREE_OPERAND (t, 0);
8552
8553 if (!is_gimple_constant (*tp))
8554 {
8555 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
8556 ? pre_p : &for_pre_body;
8557 *tp = get_initialized_tmp_var (*tp, seq, NULL);
8558 tree c = build_omp_clause (input_location,
8559 OMP_CLAUSE_FIRSTPRIVATE);
8560 OMP_CLAUSE_DECL (c) = *tp;
8561 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
8562 OMP_FOR_CLAUSES (orig_for_stmt) = c;
8563 }
8564 }
8565 }
8566
8567 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
8568 OMP_TASKLOOP);
8569 }
8570
8571 if (orig_for_stmt != for_stmt)
8572 gimplify_omp_ctxp->combined_loop = true;
8573
8574 for_body = NULL;
8575 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8576 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
8577 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8578 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
8579
8580 tree c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
8581 bool is_doacross = false;
8582 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
8583 {
8584 is_doacross = true;
8585 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
8586 (OMP_FOR_INIT (for_stmt))
8587 * 2);
8588 }
8589 int collapse = 1;
8590 c = find_omp_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
8591 if (c)
8592 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
8593 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
8594 {
8595 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
8596 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
8597 decl = TREE_OPERAND (t, 0);
8598 gcc_assert (DECL_P (decl));
8599 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
8600 || POINTER_TYPE_P (TREE_TYPE (decl)));
8601 if (is_doacross)
8602 {
8603 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
8604 gimplify_omp_ctxp->loop_iter_var.quick_push
8605 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
8606 else
8607 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8608 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
8609 }
8610
8611 /* Make sure the iteration variable is private. */
8612 tree c = NULL_TREE;
8613 tree c2 = NULL_TREE;
8614 if (orig_for_stmt != for_stmt)
8615 /* Do this only on innermost construct for combined ones. */;
8616 else if (ort == ORT_SIMD)
8617 {
8618 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
8619 (splay_tree_key) decl);
8620 omp_is_private (gimplify_omp_ctxp, decl,
8621 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
8622 != 1));
8623 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
8624 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8625 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8626 {
8627 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8628 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
8629 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
8630 if ((has_decl_expr
8631 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8632 || omp_no_lastprivate (gimplify_omp_ctxp))
8633 {
8634 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8635 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8636 }
8637 struct gimplify_omp_ctx *outer
8638 = gimplify_omp_ctxp->outer_context;
8639 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8640 {
8641 if (outer->region_type == ORT_WORKSHARE
8642 && outer->combined_loop)
8643 {
8644 n = splay_tree_lookup (outer->variables,
8645 (splay_tree_key)decl);
8646 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8647 {
8648 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8649 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8650 }
8651 else
8652 {
8653 struct gimplify_omp_ctx *octx = outer->outer_context;
8654 if (octx
8655 && octx->region_type == ORT_COMBINED_PARALLEL
8656 && octx->outer_context
8657 && (octx->outer_context->region_type
8658 == ORT_WORKSHARE)
8659 && octx->outer_context->combined_loop)
8660 {
8661 octx = octx->outer_context;
8662 n = splay_tree_lookup (octx->variables,
8663 (splay_tree_key)decl);
8664 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8665 {
8666 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
8667 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8668 }
8669 }
8670 }
8671 }
8672 }
8673
8674 OMP_CLAUSE_DECL (c) = decl;
8675 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8676 OMP_FOR_CLAUSES (for_stmt) = c;
8677 omp_add_variable (gimplify_omp_ctxp, decl, flags);
8678 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8679 {
8680 if (outer->region_type == ORT_WORKSHARE
8681 && outer->combined_loop)
8682 {
8683 if (outer->outer_context
8684 && (outer->outer_context->region_type
8685 == ORT_COMBINED_PARALLEL))
8686 outer = outer->outer_context;
8687 else if (omp_check_private (outer, decl, false))
8688 outer = NULL;
8689 }
8690 else if (((outer->region_type & ORT_TASK) != 0)
8691 && outer->combined_loop
8692 && !omp_check_private (gimplify_omp_ctxp,
8693 decl, false))
8694 ;
8695 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8696 outer = NULL;
8697 if (outer)
8698 {
8699 n = splay_tree_lookup (outer->variables,
8700 (splay_tree_key)decl);
8701 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8702 {
8703 omp_add_variable (outer, decl,
8704 GOVD_LASTPRIVATE | GOVD_SEEN);
8705 if (outer->region_type == ORT_COMBINED_PARALLEL
8706 && outer->outer_context
8707 && (outer->outer_context->region_type
8708 == ORT_WORKSHARE)
8709 && outer->outer_context->combined_loop)
8710 {
8711 outer = outer->outer_context;
8712 n = splay_tree_lookup (outer->variables,
8713 (splay_tree_key)decl);
8714 if (omp_check_private (outer, decl, false))
8715 outer = NULL;
8716 else if (n == NULL
8717 || ((n->value & GOVD_DATA_SHARE_CLASS)
8718 == 0))
8719 omp_add_variable (outer, decl,
8720 GOVD_LASTPRIVATE
8721 | GOVD_SEEN);
8722 else
8723 outer = NULL;
8724 }
8725 if (outer && outer->outer_context
8726 && (outer->outer_context->region_type
8727 == ORT_COMBINED_TEAMS))
8728 {
8729 outer = outer->outer_context;
8730 n = splay_tree_lookup (outer->variables,
8731 (splay_tree_key)decl);
8732 if (n == NULL
8733 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8734 omp_add_variable (outer, decl,
8735 GOVD_SHARED | GOVD_SEEN);
8736 else
8737 outer = NULL;
8738 }
8739 if (outer && outer->outer_context)
8740 omp_notice_variable (outer->outer_context, decl,
8741 true);
8742 }
8743 }
8744 }
8745 }
8746 else
8747 {
8748 bool lastprivate
8749 = (!has_decl_expr
8750 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
8751 && !omp_no_lastprivate (gimplify_omp_ctxp);
8752 struct gimplify_omp_ctx *outer
8753 = gimplify_omp_ctxp->outer_context;
8754 if (outer && lastprivate)
8755 {
8756 if (outer->region_type == ORT_WORKSHARE
8757 && outer->combined_loop)
8758 {
8759 n = splay_tree_lookup (outer->variables,
8760 (splay_tree_key)decl);
8761 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
8762 {
8763 lastprivate = false;
8764 outer = NULL;
8765 }
8766 else if (outer->outer_context
8767 && (outer->outer_context->region_type
8768 == ORT_COMBINED_PARALLEL))
8769 outer = outer->outer_context;
8770 else if (omp_check_private (outer, decl, false))
8771 outer = NULL;
8772 }
8773 else if (((outer->region_type & ORT_TASK) != 0)
8774 && outer->combined_loop
8775 && !omp_check_private (gimplify_omp_ctxp,
8776 decl, false))
8777 ;
8778 else if (outer->region_type != ORT_COMBINED_PARALLEL)
8779 outer = NULL;
8780 if (outer)
8781 {
8782 n = splay_tree_lookup (outer->variables,
8783 (splay_tree_key)decl);
8784 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8785 {
8786 omp_add_variable (outer, decl,
8787 GOVD_LASTPRIVATE | GOVD_SEEN);
8788 if (outer->region_type == ORT_COMBINED_PARALLEL
8789 && outer->outer_context
8790 && (outer->outer_context->region_type
8791 == ORT_WORKSHARE)
8792 && outer->outer_context->combined_loop)
8793 {
8794 outer = outer->outer_context;
8795 n = splay_tree_lookup (outer->variables,
8796 (splay_tree_key)decl);
8797 if (omp_check_private (outer, decl, false))
8798 outer = NULL;
8799 else if (n == NULL
8800 || ((n->value & GOVD_DATA_SHARE_CLASS)
8801 == 0))
8802 omp_add_variable (outer, decl,
8803 GOVD_LASTPRIVATE
8804 | GOVD_SEEN);
8805 else
8806 outer = NULL;
8807 }
8808 if (outer && outer->outer_context
8809 && (outer->outer_context->region_type
8810 == ORT_COMBINED_TEAMS))
8811 {
8812 outer = outer->outer_context;
8813 n = splay_tree_lookup (outer->variables,
8814 (splay_tree_key)decl);
8815 if (n == NULL
8816 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
8817 omp_add_variable (outer, decl,
8818 GOVD_SHARED | GOVD_SEEN);
8819 else
8820 outer = NULL;
8821 }
8822 if (outer && outer->outer_context)
8823 omp_notice_variable (outer->outer_context, decl,
8824 true);
8825 }
8826 }
8827 }
8828
8829 c = build_omp_clause (input_location,
8830 lastprivate ? OMP_CLAUSE_LASTPRIVATE
8831 : OMP_CLAUSE_PRIVATE);
8832 OMP_CLAUSE_DECL (c) = decl;
8833 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
8834 OMP_FOR_CLAUSES (for_stmt) = c;
8835 omp_add_variable (gimplify_omp_ctxp, decl,
8836 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
8837 | GOVD_EXPLICIT | GOVD_SEEN);
8838 c = NULL_TREE;
8839 }
8840 }
8841 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
8842 omp_notice_variable (gimplify_omp_ctxp, decl, true);
8843 else
8844 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
8845
8846 /* If DECL is not a gimple register, create a temporary variable to act
8847 as an iteration counter. This is valid, since DECL cannot be
8848 modified in the body of the loop. Similarly for any iteration vars
8849 in simd with collapse > 1 where the iterator vars must be
8850 lastprivate. */
8851 if (orig_for_stmt != for_stmt)
8852 var = decl;
8853 else if (!is_gimple_reg (decl)
8854 || (ort == ORT_SIMD
8855 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
8856 {
8857 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
8858 TREE_OPERAND (t, 0) = var;
8859
8860 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
8861
8862 if (ort == ORT_SIMD
8863 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
8864 {
8865 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
8866 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
8867 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
8868 OMP_CLAUSE_DECL (c2) = var;
8869 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
8870 OMP_FOR_CLAUSES (for_stmt) = c2;
8871 omp_add_variable (gimplify_omp_ctxp, var,
8872 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
8873 if (c == NULL_TREE)
8874 {
8875 c = c2;
8876 c2 = NULL_TREE;
8877 }
8878 }
8879 else
8880 omp_add_variable (gimplify_omp_ctxp, var,
8881 GOVD_PRIVATE | GOVD_SEEN);
8882 }
8883 else
8884 var = decl;
8885
8886 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8887 is_gimple_val, fb_rvalue);
8888 ret = MIN (ret, tret);
8889 if (ret == GS_ERROR)
8890 return ret;
8891
8892 /* Handle OMP_FOR_COND. */
8893 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
8894 gcc_assert (COMPARISON_CLASS_P (t));
8895 gcc_assert (TREE_OPERAND (t, 0) == decl);
8896
8897 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8898 is_gimple_val, fb_rvalue);
8899 ret = MIN (ret, tret);
8900
8901 /* Handle OMP_FOR_INCR. */
8902 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
8903 switch (TREE_CODE (t))
8904 {
8905 case PREINCREMENT_EXPR:
8906 case POSTINCREMENT_EXPR:
8907 {
8908 tree decl = TREE_OPERAND (t, 0);
8909 /* c_omp_for_incr_canonicalize_ptr() should have been
8910 called to massage things appropriately. */
8911 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
8912
8913 if (orig_for_stmt != for_stmt)
8914 break;
8915 t = build_int_cst (TREE_TYPE (decl), 1);
8916 if (c)
8917 OMP_CLAUSE_LINEAR_STEP (c) = t;
8918 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
8919 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
8920 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
8921 break;
8922 }
8923
8924 case PREDECREMENT_EXPR:
8925 case POSTDECREMENT_EXPR:
8926 /* c_omp_for_incr_canonicalize_ptr() should have been
8927 called to massage things appropriately. */
8928 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
8929 if (orig_for_stmt != for_stmt)
8930 break;
8931 t = build_int_cst (TREE_TYPE (decl), -1);
8932 if (c)
8933 OMP_CLAUSE_LINEAR_STEP (c) = t;
8934 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
8935 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
8936 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
8937 break;
8938
8939 case MODIFY_EXPR:
8940 gcc_assert (TREE_OPERAND (t, 0) == decl);
8941 TREE_OPERAND (t, 0) = var;
8942
8943 t = TREE_OPERAND (t, 1);
8944 switch (TREE_CODE (t))
8945 {
8946 case PLUS_EXPR:
8947 if (TREE_OPERAND (t, 1) == decl)
8948 {
8949 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
8950 TREE_OPERAND (t, 0) = var;
8951 break;
8952 }
8953
8954 /* Fallthru. */
8955 case MINUS_EXPR:
8956 case POINTER_PLUS_EXPR:
8957 gcc_assert (TREE_OPERAND (t, 0) == decl);
8958 TREE_OPERAND (t, 0) = var;
8959 break;
8960 default:
8961 gcc_unreachable ();
8962 }
8963
8964 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
8965 is_gimple_val, fb_rvalue);
8966 ret = MIN (ret, tret);
8967 if (c)
8968 {
8969 tree step = TREE_OPERAND (t, 1);
8970 tree stept = TREE_TYPE (decl);
8971 if (POINTER_TYPE_P (stept))
8972 stept = sizetype;
8973 step = fold_convert (stept, step);
8974 if (TREE_CODE (t) == MINUS_EXPR)
8975 step = fold_build1 (NEGATE_EXPR, stept, step);
8976 OMP_CLAUSE_LINEAR_STEP (c) = step;
8977 if (step != TREE_OPERAND (t, 1))
8978 {
8979 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
8980 &for_pre_body, NULL,
8981 is_gimple_val, fb_rvalue);
8982 ret = MIN (ret, tret);
8983 }
8984 }
8985 break;
8986
8987 default:
8988 gcc_unreachable ();
8989 }
8990
8991 if (c2)
8992 {
8993 gcc_assert (c);
8994 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
8995 }
8996
8997 if ((var != decl || collapse > 1) && orig_for_stmt == for_stmt)
8998 {
8999 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
9000 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9001 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
9002 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9003 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
9004 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
9005 && OMP_CLAUSE_DECL (c) == decl)
9006 {
9007 if (is_doacross && (collapse == 1 || i >= collapse))
9008 t = var;
9009 else
9010 {
9011 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9012 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9013 gcc_assert (TREE_OPERAND (t, 0) == var);
9014 t = TREE_OPERAND (t, 1);
9015 gcc_assert (TREE_CODE (t) == PLUS_EXPR
9016 || TREE_CODE (t) == MINUS_EXPR
9017 || TREE_CODE (t) == POINTER_PLUS_EXPR);
9018 gcc_assert (TREE_OPERAND (t, 0) == var);
9019 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
9020 is_doacross ? var : decl,
9021 TREE_OPERAND (t, 1));
9022 }
9023 gimple_seq *seq;
9024 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9025 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
9026 else
9027 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
9028 gimplify_assign (decl, t, seq);
9029 }
9030 }
9031 }
9032
9033 BITMAP_FREE (has_decl_expr);
9034
9035 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9036 {
9037 push_gimplify_context ();
9038 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
9039 {
9040 OMP_FOR_BODY (orig_for_stmt)
9041 = build3 (BIND_EXPR, void_type_node, NULL,
9042 OMP_FOR_BODY (orig_for_stmt), NULL);
9043 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
9044 }
9045 }
9046
9047 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
9048 &for_body);
9049
9050 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9051 {
9052 if (gimple_code (g) == GIMPLE_BIND)
9053 pop_gimplify_context (g);
9054 else
9055 pop_gimplify_context (NULL);
9056 }
9057
9058 if (orig_for_stmt != for_stmt)
9059 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9060 {
9061 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9062 decl = TREE_OPERAND (t, 0);
9063 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
9064 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9065 gimplify_omp_ctxp = ctx->outer_context;
9066 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
9067 gimplify_omp_ctxp = ctx;
9068 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
9069 TREE_OPERAND (t, 0) = var;
9070 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9071 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
9072 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
9073 }
9074
9075 gimplify_adjust_omp_clauses (pre_p, for_body,
9076 &OMP_FOR_CLAUSES (orig_for_stmt),
9077 TREE_CODE (orig_for_stmt));
9078
9079 int kind;
9080 switch (TREE_CODE (orig_for_stmt))
9081 {
9082 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
9083 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
9084 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
9085 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
9086 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
9087 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
9088 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
9089 default:
9090 gcc_unreachable ();
9091 }
9092 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
9093 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
9094 for_pre_body);
9095 if (orig_for_stmt != for_stmt)
9096 gimple_omp_for_set_combined_p (gfor, true);
9097 if (gimplify_omp_ctxp
9098 && (gimplify_omp_ctxp->combined_loop
9099 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
9100 && gimplify_omp_ctxp->outer_context
9101 && gimplify_omp_ctxp->outer_context->combined_loop)))
9102 {
9103 gimple_omp_for_set_combined_into_p (gfor, true);
9104 if (gimplify_omp_ctxp->combined_loop)
9105 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
9106 else
9107 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
9108 }
9109
9110 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9111 {
9112 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9113 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
9114 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
9115 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9116 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
9117 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
9118 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9119 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
9120 }
9121
9122 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
9123 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
9124 The outer taskloop computes the number of iterations and the
9125 counts for collapsed loops, and holds the taskloop-specific clauses.
9126 The task construct stands for the effect of data sharing on the
9127 explicit task it creates, and the inner taskloop stands for the
9128 expansion of the static loop inside of the explicit task construct. */
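/* As a rough sketch (not the exact IL), a loop such as

     #pragma omp taskloop grainsize(4) lastprivate(x)
     for (i = 0; i < n; i++) ...

   is therefore split into approximately

     GIMPLE_OMP_FOR <taskloop, grainsize(4), private(i)>    <- outer
       GIMPLE_OMP_TASK <firstprivate(i), shared(x)>
         GIMPLE_OMP_FOR <taskloop, lastprivate(x)>          <- inner
           <loop body>

   matching the clause distribution performed in the switch below. */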
9129 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9130 {
9131 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
9132 tree task_clauses = NULL_TREE;
9133 tree c = *gfor_clauses_ptr;
9134 tree *gtask_clauses_ptr = &task_clauses;
9135 tree outer_for_clauses = NULL_TREE;
9136 tree *gforo_clauses_ptr = &outer_for_clauses;
9137 for (; c; c = OMP_CLAUSE_CHAIN (c))
9138 switch (OMP_CLAUSE_CODE (c))
9139 {
9140 /* These clauses are allowed on the task; move them there. */
9141 case OMP_CLAUSE_SHARED:
9142 case OMP_CLAUSE_FIRSTPRIVATE:
9143 case OMP_CLAUSE_DEFAULT:
9144 case OMP_CLAUSE_IF:
9145 case OMP_CLAUSE_UNTIED:
9146 case OMP_CLAUSE_FINAL:
9147 case OMP_CLAUSE_MERGEABLE:
9148 case OMP_CLAUSE_PRIORITY:
9149 *gtask_clauses_ptr = c;
9150 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9151 break;
9152 case OMP_CLAUSE_PRIVATE:
9153 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
9154 {
9155 /* We want private on the outer taskloop and firstprivate
9156 on the task. */
9157 *gtask_clauses_ptr
9158 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9159 OMP_CLAUSE_FIRSTPRIVATE);
9160 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9161 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9162 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9163 *gforo_clauses_ptr = c;
9164 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9165 }
9166 else
9167 {
9168 *gtask_clauses_ptr = c;
9169 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9170 }
9171 break;
9172 /* These clauses go into outer taskloop clauses. */
9173 case OMP_CLAUSE_GRAINSIZE:
9174 case OMP_CLAUSE_NUM_TASKS:
9175 case OMP_CLAUSE_NOGROUP:
9176 *gforo_clauses_ptr = c;
9177 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9178 break;
9179 /* Taskloop clause we duplicate on both taskloops. */
9180 case OMP_CLAUSE_COLLAPSE:
9181 *gfor_clauses_ptr = c;
9182 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9183 *gforo_clauses_ptr = copy_node (c);
9184 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9185 break;
9186 /* For lastprivate, keep the clause on the inner taskloop, and add
9187 a shared clause on the task. If the same decl is also firstprivate,
9188 also add a firstprivate clause on the inner taskloop. */
9189 case OMP_CLAUSE_LASTPRIVATE:
9190 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
9191 {
9192 /* For taskloop C++ lastprivate IVs, we want:
9193 1) private on outer taskloop
9194 2) firstprivate and shared on task
9195 3) lastprivate on inner taskloop */
9196 *gtask_clauses_ptr
9197 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9198 OMP_CLAUSE_FIRSTPRIVATE);
9199 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9200 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
9201 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9202 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
9203 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9204 OMP_CLAUSE_PRIVATE);
9205 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
9206 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
9207 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
9208 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
9209 }
9210 *gfor_clauses_ptr = c;
9211 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
9212 *gtask_clauses_ptr
9213 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
9214 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
9215 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
9216 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
9217 gtask_clauses_ptr
9218 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
9219 break;
9220 default:
9221 gcc_unreachable ();
9222 }
9223 *gfor_clauses_ptr = NULL_TREE;
9224 *gtask_clauses_ptr = NULL_TREE;
9225 *gforo_clauses_ptr = NULL_TREE;
9226 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
9227 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
9228 NULL_TREE, NULL_TREE, NULL_TREE);
9229 gimple_omp_task_set_taskloop_p (g, true);
9230 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
9231 gomp_for *gforo
9232 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
9233 gimple_omp_for_collapse (gfor),
9234 gimple_omp_for_pre_body (gfor));
9235 gimple_omp_for_set_pre_body (gfor, NULL);
9236 gimple_omp_for_set_combined_p (gforo, true);
9237 gimple_omp_for_set_combined_into_p (gfor, true);
9238 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
9239 {
9240 t = unshare_expr (gimple_omp_for_index (gfor, i));
9241 gimple_omp_for_set_index (gforo, i, t);
9242 t = unshare_expr (gimple_omp_for_initial (gfor, i));
9243 gimple_omp_for_set_initial (gforo, i, t);
9244 gimple_omp_for_set_cond (gforo, i,
9245 gimple_omp_for_cond (gfor, i));
9246 t = unshare_expr (gimple_omp_for_final (gfor, i));
9247 gimple_omp_for_set_final (gforo, i, t);
9248 t = unshare_expr (gimple_omp_for_incr (gfor, i));
9249 gimple_omp_for_set_incr (gforo, i, t);
9250 }
9251 gimplify_seq_add_stmt (pre_p, gforo);
9252 }
9253 else
9254 gimplify_seq_add_stmt (pre_p, gfor);
9255 if (ret != GS_ALL_DONE)
9256 return GS_ERROR;
9257 *expr_p = NULL_TREE;
9258 return GS_ALL_DONE;
9259 }
9260
9261 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
9262 of OMP_TARGET's body. */
9263
9264 static tree
9265 find_omp_teams (tree *tp, int *walk_subtrees, void *)
9266 {
9267 *walk_subtrees = 0;
9268 switch (TREE_CODE (*tp))
9269 {
9270 case OMP_TEAMS:
9271 return *tp;
9272 case BIND_EXPR:
9273 case STATEMENT_LIST:
9274 *walk_subtrees = 1;
9275 break;
9276 default:
9277 break;
9278 }
9279 return NULL_TREE;
9280 }
9281
9282 /* Helper function of optimize_target_teams, determine if the expression
9283 can be computed safely before the target construct on the host. */
9284
9285 static tree
9286 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
9287 {
9288 splay_tree_node n;
9289
9290 if (TYPE_P (*tp))
9291 {
9292 *walk_subtrees = 0;
9293 return NULL_TREE;
9294 }
9295 switch (TREE_CODE (*tp))
9296 {
9297 case VAR_DECL:
9298 case PARM_DECL:
9299 case RESULT_DECL:
9300 *walk_subtrees = 0;
9301 if (error_operand_p (*tp)
9302 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
9303 || DECL_HAS_VALUE_EXPR_P (*tp)
9304 || DECL_THREAD_LOCAL_P (*tp)
9305 || TREE_SIDE_EFFECTS (*tp)
9306 || TREE_THIS_VOLATILE (*tp))
9307 return *tp;
9308 if (is_global_var (*tp)
9309 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
9310 || lookup_attribute ("omp declare target link",
9311 DECL_ATTRIBUTES (*tp))))
9312 return *tp;
9313 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9314 (splay_tree_key) *tp);
9315 if (n == NULL)
9316 {
9317 if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
9318 return NULL_TREE;
9319 return *tp;
9320 }
9321 else if (n->value & GOVD_LOCAL)
9322 return *tp;
9323 else if (n->value & GOVD_FIRSTPRIVATE)
9324 return NULL_TREE;
9325 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9326 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
9327 return NULL_TREE;
9328 return *tp;
9329 case INTEGER_CST:
9330 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9331 return *tp;
9332 return NULL_TREE;
9333 case TARGET_EXPR:
9334 if (TARGET_EXPR_INITIAL (*tp)
9335 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
9336 return *tp;
9337 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
9338 walk_subtrees, NULL);
9339 /* Allow some reasonable subset of integral arithmetic. */
9340 case PLUS_EXPR:
9341 case MINUS_EXPR:
9342 case MULT_EXPR:
9343 case TRUNC_DIV_EXPR:
9344 case CEIL_DIV_EXPR:
9345 case FLOOR_DIV_EXPR:
9346 case ROUND_DIV_EXPR:
9347 case TRUNC_MOD_EXPR:
9348 case CEIL_MOD_EXPR:
9349 case FLOOR_MOD_EXPR:
9350 case ROUND_MOD_EXPR:
9351 case RDIV_EXPR:
9352 case EXACT_DIV_EXPR:
9353 case MIN_EXPR:
9354 case MAX_EXPR:
9355 case LSHIFT_EXPR:
9356 case RSHIFT_EXPR:
9357 case BIT_IOR_EXPR:
9358 case BIT_XOR_EXPR:
9359 case BIT_AND_EXPR:
9360 case NEGATE_EXPR:
9361 case ABS_EXPR:
9362 case BIT_NOT_EXPR:
9363 case NON_LVALUE_EXPR:
9364 CASE_CONVERT:
9365 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
9366 return *tp;
9367 return NULL_TREE;
9368 /* And disallow anything else, except for comparisons. */
9369 default:
9370 if (COMPARISON_CLASS_P (*tp))
9371 return NULL_TREE;
9372 return *tp;
9373 }
9374 }
9375
9376 /* Try to determine if the num_teams and/or thread_limit expressions
9377 can have their values determined already before entering the
9378 target construct.
9379 INTEGER_CSTs trivially can; so can integral decls that are
9380 firstprivate (explicitly or implicitly) or explicitly
9381 map(always, to:) or map(always, tofrom:) on the target region,
9382 as can expressions involving simple arithmetic on those.
9383 Function calls are not OK, nor is dereferencing something, etc.
9384 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
9385 EXPR based on what we find:
9386 0 stands for clause not specified at all, use implementation default;
9387 -1 stands for a value that can't be determined easily before entering
9388 the target construct.
9389 If the teams construct is not present at all, use 1 for num_teams
9390 and 0 for thread_limit (only one team is involved, and the thread
9391 limit is implementation defined). */
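/* For example (a sketch): for

     #pragma omp target
     #pragma omp teams num_teams(4) thread_limit(foo ())

   this adds NUM_TEAMS 4 and THREAD_LIMIT -1 to the target's clauses,
   since a call such as foo () cannot be evaluated safely on the host
   before entering the construct. */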
9392
9393 static void
9394 optimize_target_teams (tree target, gimple_seq *pre_p)
9395 {
9396 tree body = OMP_BODY (target);
9397 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
9398 tree num_teams = integer_zero_node;
9399 tree thread_limit = integer_zero_node;
9400 location_t num_teams_loc = EXPR_LOCATION (target);
9401 location_t thread_limit_loc = EXPR_LOCATION (target);
9402 tree c, *p, expr;
9403 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
9404
9405 if (teams == NULL_TREE)
9406 num_teams = integer_one_node;
9407 else
9408 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
9409 {
9410 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
9411 {
9412 p = &num_teams;
9413 num_teams_loc = OMP_CLAUSE_LOCATION (c);
9414 }
9415 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
9416 {
9417 p = &thread_limit;
9418 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
9419 }
9420 else
9421 continue;
9422 expr = OMP_CLAUSE_OPERAND (c, 0);
9423 if (TREE_CODE (expr) == INTEGER_CST)
9424 {
9425 *p = expr;
9426 continue;
9427 }
9428 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
9429 {
9430 *p = integer_minus_one_node;
9431 continue;
9432 }
9433 *p = expr;
9434 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
9435 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue)
9436 == GS_ERROR)
9437 {
9438 gimplify_omp_ctxp = target_ctx;
9439 *p = integer_minus_one_node;
9440 continue;
9441 }
9442 gimplify_omp_ctxp = target_ctx;
9443 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
9444 OMP_CLAUSE_OPERAND (c, 0) = *p;
9445 }
9446 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
9447 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
9448 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9449 OMP_TARGET_CLAUSES (target) = c;
9450 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
9451 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
9452 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
9453 OMP_TARGET_CLAUSES (target) = c;
9454 }
9455
9456 /* Gimplify the gross structure of several OMP constructs. */
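/* One piece of this, as a sketch: for a standalone data region such as
   "#pragma omp target data", the gimplified body is wrapped below in a
   GIMPLE_TRY_FINALLY whose cleanup calls the matching end-of-region
   builtin, roughly

     try { <body> } finally { GOMP_target_end_data (); }

   so that the region is closed even on abnormal exits. */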
9457
9458 static void
9459 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
9460 {
9461 tree expr = *expr_p;
9462 gimple *stmt;
9463 gimple_seq body = NULL;
9464 enum omp_region_type ort;
9465
9466 switch (TREE_CODE (expr))
9467 {
9468 case OMP_SECTIONS:
9469 case OMP_SINGLE:
9470 ort = ORT_WORKSHARE;
9471 break;
9472 case OMP_TARGET:
9473 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
9474 break;
9475 case OACC_KERNELS:
9476 ort = ORT_ACC_KERNELS;
9477 break;
9478 case OACC_PARALLEL:
9479 ort = ORT_ACC_PARALLEL;
9480 break;
9481 case OACC_DATA:
9482 ort = ORT_ACC_DATA;
9483 break;
9484 case OMP_TARGET_DATA:
9485 ort = ORT_TARGET_DATA;
9486 break;
9487 case OMP_TEAMS:
9488 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
9489 break;
9490 case OACC_HOST_DATA:
9491 ort = ORT_ACC_HOST_DATA;
9492 break;
9493 default:
9494 gcc_unreachable ();
9495 }
9496 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
9497 TREE_CODE (expr));
9498 if (TREE_CODE (expr) == OMP_TARGET)
9499 optimize_target_teams (expr, pre_p);
9500 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
9501 {
9502 push_gimplify_context ();
9503 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
9504 if (gimple_code (g) == GIMPLE_BIND)
9505 pop_gimplify_context (g);
9506 else
9507 pop_gimplify_context (NULL);
9508 if ((ort & ORT_TARGET_DATA) != 0)
9509 {
9510 enum built_in_function end_ix;
9511 switch (TREE_CODE (expr))
9512 {
9513 case OACC_DATA:
9514 case OACC_HOST_DATA:
9515 end_ix = BUILT_IN_GOACC_DATA_END;
9516 break;
9517 case OMP_TARGET_DATA:
9518 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
9519 break;
9520 default:
9521 gcc_unreachable ();
9522 }
9523 tree fn = builtin_decl_explicit (end_ix);
9524 g = gimple_build_call (fn, 0);
9525 gimple_seq cleanup = NULL;
9526 gimple_seq_add_stmt (&cleanup, g);
9527 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
9528 body = NULL;
9529 gimple_seq_add_stmt (&body, g);
9530 }
9531 }
9532 else
9533 gimplify_and_add (OMP_BODY (expr), &body);
9534 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
9535 TREE_CODE (expr));
9536
9537 switch (TREE_CODE (expr))
9538 {
9539 case OACC_DATA:
9540 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
9541 OMP_CLAUSES (expr));
9542 break;
9543 case OACC_KERNELS:
9544 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
9545 OMP_CLAUSES (expr));
9546 break;
9547 case OACC_HOST_DATA:
9548 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
9549 OMP_CLAUSES (expr));
9550 break;
9551 case OACC_PARALLEL:
9552 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
9553 OMP_CLAUSES (expr));
9554 break;
9555 case OMP_SECTIONS:
9556 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
9557 break;
9558 case OMP_SINGLE:
9559 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
9560 break;
9561 case OMP_TARGET:
9562 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
9563 OMP_CLAUSES (expr));
9564 break;
9565 case OMP_TARGET_DATA:
9566 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
9567 OMP_CLAUSES (expr));
9568 break;
9569 case OMP_TEAMS:
9570 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
9571 break;
9572 default:
9573 gcc_unreachable ();
9574 }
9575
9576 gimplify_seq_add_stmt (pre_p, stmt);
9577 *expr_p = NULL_TREE;
9578 }
9579
9580 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
9581 target update constructs. */
9582
9583 static void
9584 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
9585 {
9586 tree expr = *expr_p;
9587 int kind;
9588 gomp_target *stmt;
9589 enum omp_region_type ort = ORT_WORKSHARE;
9590
9591 switch (TREE_CODE (expr))
9592 {
9593 case OACC_ENTER_DATA:
9594 case OACC_EXIT_DATA:
9595 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
9596 ort = ORT_ACC;
9597 break;
9598 case OACC_UPDATE:
9599 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
9600 ort = ORT_ACC;
9601 break;
9602 case OMP_TARGET_UPDATE:
9603 kind = GF_OMP_TARGET_KIND_UPDATE;
9604 break;
9605 case OMP_TARGET_ENTER_DATA:
9606 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
9607 break;
9608 case OMP_TARGET_EXIT_DATA:
9609 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
9610 break;
9611 default:
9612 gcc_unreachable ();
9613 }
9614 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
9615 ort, TREE_CODE (expr));
9616 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
9617 TREE_CODE (expr));
9618 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
9619
9620 gimplify_seq_add_stmt (pre_p, stmt);
9621 *expr_p = NULL_TREE;
9622 }
9623
9624 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
9625 stabilized the lhs of the atomic operation as *ADDR. Return true if
9626 EXPR is this stabilized form. */
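/* E.g. for "#pragma omp atomic" on "x += bar ()", the front end has
   rewritten the lhs into something like *&x with ADDR == &x (a rough
   sketch); this predicate recognizes such *ADDR forms, looking through
   useless type-variant conversions. */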
9627
9628 static bool
9629 goa_lhs_expr_p (tree expr, tree addr)
9630 {
9631 /* Also include casts to other type variants. The C front end is fond
9632 of adding these for e.g. volatile variables. This is like
9633 STRIP_TYPE_NOPS but includes the main variant lookup. */
9634 STRIP_USELESS_TYPE_CONVERSION (expr);
9635
9636 if (TREE_CODE (expr) == INDIRECT_REF)
9637 {
9638 expr = TREE_OPERAND (expr, 0);
9639 while (expr != addr
9640 && (CONVERT_EXPR_P (expr)
9641 || TREE_CODE (expr) == NON_LVALUE_EXPR)
9642 && TREE_CODE (expr) == TREE_CODE (addr)
9643 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
9644 {
9645 expr = TREE_OPERAND (expr, 0);
9646 addr = TREE_OPERAND (addr, 0);
9647 }
9648 if (expr == addr)
9649 return true;
9650 return (TREE_CODE (addr) == ADDR_EXPR
9651 && TREE_CODE (expr) == ADDR_EXPR
9652 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
9653 }
9654 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
9655 return true;
9656 return false;
9657 }
9658
9659 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
9660 expression does not involve the lhs, evaluate it into a temporary.
9661 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
9662 or -1 if an error was encountered. */
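/* For instance (a sketch), when stabilizing the rhs "x + bar ()" of an
   atomic update whose lhs is x: the "x" operand is replaced by LHS_VAR
   and makes this return 1, while "bar ()" does not involve the lhs and
   is therefore evaluated into a temporary in PRE_P. */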
9663
9664 static int
9665 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
9666 tree lhs_var)
9667 {
9668 tree expr = *expr_p;
9669 int saw_lhs;
9670
9671 if (goa_lhs_expr_p (expr, lhs_addr))
9672 {
9673 *expr_p = lhs_var;
9674 return 1;
9675 }
9676 if (is_gimple_val (expr))
9677 return 0;
9678
9679 saw_lhs = 0;
9680 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
9681 {
9682 case tcc_binary:
9683 case tcc_comparison:
9684 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
9685 lhs_var);
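/* FALLTHRU: binary and comparison nodes also stabilize operand 0. */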
9686 case tcc_unary:
9687 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
9688 lhs_var);
9689 break;
9690 case tcc_expression:
9691 switch (TREE_CODE (expr))
9692 {
9693 case TRUTH_ANDIF_EXPR:
9694 case TRUTH_ORIF_EXPR:
9695 case TRUTH_AND_EXPR:
9696 case TRUTH_OR_EXPR:
9697 case TRUTH_XOR_EXPR:
9698 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
9699 lhs_addr, lhs_var);
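/* FALLTHRU: the binary truth codes also stabilize operand 0. */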
9700 case TRUTH_NOT_EXPR:
9701 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
9702 lhs_addr, lhs_var);
9703 break;
9704 case COMPOUND_EXPR:
9705 /* Break out any preevaluations from cp_build_modify_expr. */
9706 for (; TREE_CODE (expr) == COMPOUND_EXPR;
9707 expr = TREE_OPERAND (expr, 1))
9708 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
9709 *expr_p = expr;
9710 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
9711 default:
9712 break;
9713 }
9714 break;
9715 default:
9716 break;
9717 }
9718
9719 if (saw_lhs == 0)
9720 {
9721 enum gimplify_status gs;
9722 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
9723 if (gs != GS_ALL_DONE)
9724 saw_lhs = -1;
9725 }
9726
9727 return saw_lhs;
9728 }
9729
9730 /* Gimplify an OMP_ATOMIC statement. */
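/* Schematically (a sketch, omitting details), "#pragma omp atomic" on
   "x += 1" becomes

     tmp = GIMPLE_OMP_ATOMIC_LOAD <&x>
     ... gimplified statements computing tmp + 1 ...
     GIMPLE_OMP_ATOMIC_STORE <tmp + 1>

   with lhs occurrences in the rhs replaced by the temporary via
   goa_stabilize_expr above. */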
9731
9732 static enum gimplify_status
9733 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
9734 {
9735 tree addr = TREE_OPERAND (*expr_p, 0);
9736 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
9737 ? NULL : TREE_OPERAND (*expr_p, 1);
9738 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
9739 tree tmp_load;
9740 gomp_atomic_load *loadstmt;
9741 gomp_atomic_store *storestmt;
9742
9743 tmp_load = create_tmp_reg (type);
9744 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
9745 return GS_ERROR;
9746
9747 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
9748 != GS_ALL_DONE)
9749 return GS_ERROR;
9750
9751 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
9752 gimplify_seq_add_stmt (pre_p, loadstmt);
9753 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
9754 != GS_ALL_DONE)
9755 return GS_ERROR;
9756
9757 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
9758 rhs = tmp_load;
9759 storestmt = gimple_build_omp_atomic_store (rhs);
9760 gimplify_seq_add_stmt (pre_p, storestmt);
9761 if (OMP_ATOMIC_SEQ_CST (*expr_p))
9762 {
9763 gimple_omp_atomic_set_seq_cst (loadstmt);
9764 gimple_omp_atomic_set_seq_cst (storestmt);
9765 }
9766 switch (TREE_CODE (*expr_p))
9767 {
9768 case OMP_ATOMIC_READ:
9769 case OMP_ATOMIC_CAPTURE_OLD:
9770 *expr_p = tmp_load;
9771 gimple_omp_atomic_set_need_value (loadstmt);
9772 break;
9773 case OMP_ATOMIC_CAPTURE_NEW:
9774 *expr_p = rhs;
9775 gimple_omp_atomic_set_need_value (storestmt);
9776 break;
9777 default:
9778 *expr_p = NULL;
9779 break;
9780 }
9781
9782 return GS_ALL_DONE;
9783 }
9784
9785 /* Gimplify a TRANSACTION_EXPR. This involves gimplifying the body
9786 and adding some EH bits. */
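/* Schematically (a sketch), "__transaction_relaxed { body }" becomes a
   GIMPLE_TRANSACTION tuple wrapping the gimplified body with the
   GTMA_IS_RELAXED subcode bit set; a plain "__transaction_atomic" body
   gets a zero subcode unless it is an outer transaction. */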
9787
9788 static enum gimplify_status
9789 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
9790 {
9791 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
9792 gimple *body_stmt;
9793 gtransaction *trans_stmt;
9794 gimple_seq body = NULL;
9795 int subcode = 0;
9796
9797 /* Wrap the transaction body in a BIND_EXPR so we have a context
9798 in which to put decls for OMP. */
9799 if (TREE_CODE (tbody) != BIND_EXPR)
9800 {
9801 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
9802 TREE_SIDE_EFFECTS (bind) = 1;
9803 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
9804 TRANSACTION_EXPR_BODY (expr) = bind;
9805 }
9806
9807 push_gimplify_context ();
9808 temp = voidify_wrapper_expr (*expr_p, NULL);
9809
9810 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
9811 pop_gimplify_context (body_stmt);
9812
9813 trans_stmt = gimple_build_transaction (body);
9814 if (TRANSACTION_EXPR_OUTER (expr))
9815 subcode = GTMA_IS_OUTER;
9816 else if (TRANSACTION_EXPR_RELAXED (expr))
9817 subcode = GTMA_IS_RELAXED;
9818 gimple_transaction_set_subcode (trans_stmt, subcode);
9819
9820 gimplify_seq_add_stmt (pre_p, trans_stmt);
9821
9822 if (temp)
9823 {
9824 *expr_p = temp;
9825 return GS_OK;
9826 }
9827
9828 *expr_p = NULL_TREE;
9829 return GS_ALL_DONE;
9830 }
9831
9832 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
9833 is the OMP_BODY of the original EXPR (which has already been
9834 gimplified, so it is no longer present in EXPR).
9835
9836 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
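/* For a doacross nest such as (a sketch)

     #pragma omp for ordered(2)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
         {
           #pragma omp ordered depend(sink: i-1, j)
           ...
           #pragma omp ordered depend(source)
         }

   the depend(sink:) list must name the loop iteration variables in
   order and in full, which is what the checks below diagnose. */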
9837
9838 static gimple *
9839 gimplify_omp_ordered (tree expr, gimple_seq body)
9840 {
9841 tree c, decls;
9842 int failures = 0;
9843 unsigned int i;
9844 tree source_c = NULL_TREE;
9845 tree sink_c = NULL_TREE;
9846
9847 if (gimplify_omp_ctxp)
9848 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
9849 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9850 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
9851 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
9852 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
9853 {
9854 error_at (OMP_CLAUSE_LOCATION (c),
9855 "%<ordered%> construct with %<depend%> clause must be "
9856 "closely nested inside a loop with %<ordered%> clause "
9857 "with a parameter");
9858 failures++;
9859 }
9860 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9861 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9862 {
9863 bool fail = false;
9864 for (decls = OMP_CLAUSE_DECL (c), i = 0;
9865 decls && TREE_CODE (decls) == TREE_LIST;
9866 decls = TREE_CHAIN (decls), ++i)
9867 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
9868 continue;
9869 else if (TREE_VALUE (decls)
9870 != gimplify_omp_ctxp->loop_iter_var[2 * i])
9871 {
9872 error_at (OMP_CLAUSE_LOCATION (c),
9873 "variable %qE is not an iteration "
9874 "of outermost loop %d, expected %qE",
9875 TREE_VALUE (decls), i + 1,
9876 gimplify_omp_ctxp->loop_iter_var[2 * i]);
9877 fail = true;
9878 failures++;
9879 }
9880 else
9881 TREE_VALUE (decls)
9882 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
9883 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
9884 {
9885 error_at (OMP_CLAUSE_LOCATION (c),
9886 "number of variables in %<depend(sink)%> "
9887 "clause does not match number of "
9888 "iteration variables");
9889 failures++;
9890 }
9891 sink_c = c;
9892 }
9893 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9894 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9895 {
9896 if (source_c)
9897 {
9898 error_at (OMP_CLAUSE_LOCATION (c),
9899 "more than one %<depend(source)%> clause on an "
9900 "%<ordered%> construct");
9901 failures++;
9902 }
9903 else
9904 source_c = c;
9905 }
9906 if (source_c && sink_c)
9907 {
9908 error_at (OMP_CLAUSE_LOCATION (source_c),
9909 "%<depend(source)%> clause specified together with "
9910 "%<depend(sink:)%> clauses on the same construct");
9911 failures++;
9912 }
9913
9914 if (failures)
9915 return gimple_build_nop ();
9916 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
9917 }
9918
9919 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
9920 expression produces a value to be used as an operand inside a GIMPLE
9921 statement, the value will be stored back in *EXPR_P. This value will
9922 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
9923 an SSA_NAME. The corresponding sequence of GIMPLE statements is
9924 emitted in PRE_P and POST_P.
9925
9926 Additionally, this process may overwrite parts of the input
9927 expression during gimplification. Ideally, it should be
9928 possible to do non-destructive gimplification.
9929
9930 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
9931 the expression needs to evaluate to a value to be used as
9932 an operand in a GIMPLE statement, this value will be stored in
9933 *EXPR_P on exit. This happens when the caller specifies one
9934 of fb_lvalue or fb_rvalue fallback flags.
9935
9936 PRE_P will contain the sequence of GIMPLE statements corresponding
9937 to the evaluation of EXPR and all the side-effects that must
9938 be executed before the main expression. On exit, the last
9939 statement of PRE_P is the core statement being gimplified. For
9940 instance, when gimplifying 'if (++a)' the last statement in
9941 PRE_P will be 'if (t.1)' where t.1 is the result of
9942 pre-incrementing 'a'.
9943
9944 POST_P will contain the sequence of GIMPLE statements corresponding
9945 to the evaluation of all the side-effects that must be executed
9946 after the main expression. If this is NULL, the post
9947 side-effects are stored at the end of PRE_P.
9948
9949 The reason why the output is split in two is to handle post
9950 side-effects explicitly. In some cases, an expression may have
9951 inner and outer post side-effects which need to be emitted in
9952 an order different from the one given by the recursive
9953 traversal. For instance, for the expression (*p--)++ the post
9954 side-effects of '--' must actually occur *after* the post
9955 side-effects of '++'. However, gimplification will first visit
9956 the inner expression, so if a separate POST sequence was not
9957 used, the resulting sequence would be:
9958
9959 1 t.1 = *p
9960 2 p = p - 1
9961 3 t.2 = t.1 + 1
9962 4 *p = t.2
9963
9964 However, the post-decrement operation in line #2 must not be
9965 evaluated until after the store to *p at line #4, so the
9966 correct sequence should be:
9967
9968 1 t.1 = *p
9969 2 t.2 = t.1 + 1
9970 3 *p = t.2
9971 4 p = p - 1
9972
9973 So, by specifying a separate post queue, it is possible
9974 to emit the post side-effects in the correct order.
9975 If POST_P is NULL, an internal queue will be used. Before
9976 returning to the caller, the sequence POST_P is appended to
9977 the main output sequence PRE_P.
9978
9979 GIMPLE_TEST_F points to a function that takes a tree T and
9980 returns nonzero if T is in the GIMPLE form requested by the
9981 caller. The GIMPLE predicates are in gimple.c.
9982
9983 FALLBACK tells the function what sort of a temporary we want if
9984 gimplification cannot produce an expression that complies with
9985 GIMPLE_TEST_F.
9986
9987 fb_none means that no temporary should be generated
9988 fb_rvalue means that an rvalue is OK to generate
9989 fb_lvalue means that an lvalue is OK to generate
9990 fb_either means that either is OK, but an lvalue is preferable.
9991 fb_mayfail means that gimplification may fail (in which case
9992 GS_ERROR will be returned)
9993
9994 The return value is either GS_ERROR or GS_ALL_DONE, since this
9995 function iterates until EXPR is completely gimplified or an error
9996 occurs. */
9997
9998 enum gimplify_status
9999 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
10000 bool (*gimple_test_f) (tree), fallback_t fallback)
10001 {
10002 tree tmp;
10003 gimple_seq internal_pre = NULL;
10004 gimple_seq internal_post = NULL;
10005 tree save_expr;
10006 bool is_statement;
10007 location_t saved_location;
10008 enum gimplify_status ret;
10009 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
10010
10011 save_expr = *expr_p;
10012 if (save_expr == NULL_TREE)
10013 return GS_ALL_DONE;
10014
10015 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
10016 is_statement = gimple_test_f == is_gimple_stmt;
10017 if (is_statement)
10018 gcc_assert (pre_p);
10019
10020 /* Consistency checks. */
10021 if (gimple_test_f == is_gimple_reg)
10022 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
10023 else if (gimple_test_f == is_gimple_val
10024 || gimple_test_f == is_gimple_call_addr
10025 || gimple_test_f == is_gimple_condexpr
10026 || gimple_test_f == is_gimple_mem_rhs
10027 || gimple_test_f == is_gimple_mem_rhs_or_call
10028 || gimple_test_f == is_gimple_reg_rhs
10029 || gimple_test_f == is_gimple_reg_rhs_or_call
10030 || gimple_test_f == is_gimple_asm_val
10031 || gimple_test_f == is_gimple_mem_ref_addr)
10032 gcc_assert (fallback & fb_rvalue);
10033 else if (gimple_test_f == is_gimple_min_lval
10034 || gimple_test_f == is_gimple_lvalue)
10035 gcc_assert (fallback & fb_lvalue);
10036 else if (gimple_test_f == is_gimple_addressable)
10037 gcc_assert (fallback & fb_either);
10038 else if (gimple_test_f == is_gimple_stmt)
10039 gcc_assert (fallback == fb_none);
10040 else
10041 {
10042 /* We should have recognized the GIMPLE_TEST_F predicate to
10043 know what kind of fallback to use in case a temporary is
10044 needed to hold the value or address of *EXPR_P. */
10045 gcc_unreachable ();
10046 }
10047
10048 /* We used to check the predicate here and return immediately if it
10049 succeeds. This is wrong; the design is for gimplification to be
10050 idempotent, and for the predicates to only test for valid forms, not
10051 whether they are fully simplified. */
10052 if (pre_p == NULL)
10053 pre_p = &internal_pre;
10054
10055 if (post_p == NULL)
10056 post_p = &internal_post;
10057
10058 /* Remember the last statements added to PRE_P and POST_P. Every
10059 new statement added by the gimplification helpers needs to be
10060 annotated with location information. To centralize the
10061 responsibility, we remember the last statement that had been
10062 added to both queues before gimplifying *EXPR_P. If
10063 gimplification produces new statements in PRE_P and POST_P, those
10064 statements will be annotated with the same location information
10065 as *EXPR_P. */
10066 pre_last_gsi = gsi_last (*pre_p);
10067 post_last_gsi = gsi_last (*post_p);
10068
10069 saved_location = input_location;
10070 if (save_expr != error_mark_node
10071 && EXPR_HAS_LOCATION (*expr_p))
10072 input_location = EXPR_LOCATION (*expr_p);
10073
10074 /* Loop over the specific gimplifiers until the toplevel node
10075 remains the same. */
10076 do
10077 {
10078 /* Strip away as many useless type conversions as possible
10079 at the toplevel. */
10080 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
10081
10082 /* Remember the expr. */
10083 save_expr = *expr_p;
10084
10085 /* Die, die, die, my darling. */
10086 if (save_expr == error_mark_node
10087 || (TREE_TYPE (save_expr)
10088 && TREE_TYPE (save_expr) == error_mark_node))
10089 {
10090 ret = GS_ERROR;
10091 break;
10092 }
10093
10094 /* Do any language-specific gimplification. */
10095 ret = ((enum gimplify_status)
10096 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
10097 if (ret == GS_OK)
10098 {
10099 if (*expr_p == NULL_TREE)
10100 break;
10101 if (*expr_p != save_expr)
10102 continue;
10103 }
10104 else if (ret != GS_UNHANDLED)
10105 break;
10106
10107 /* Make sure that all the cases set 'ret' appropriately. */
10108 ret = GS_UNHANDLED;
10109 switch (TREE_CODE (*expr_p))
10110 {
10111 /* First deal with the special cases. */
10112
10113 case POSTINCREMENT_EXPR:
10114 case POSTDECREMENT_EXPR:
10115 case PREINCREMENT_EXPR:
10116 case PREDECREMENT_EXPR:
10117 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
10118 fallback != fb_none,
10119 TREE_TYPE (*expr_p));
10120 break;
10121
10122 case VIEW_CONVERT_EXPR:
10123 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
10124 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
10125 {
10126 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10127 post_p, is_gimple_val, fb_rvalue);
10128 recalculate_side_effects (*expr_p);
10129 break;
10130 }
10131 /* Fallthru. */
10132
10133 case ARRAY_REF:
10134 case ARRAY_RANGE_REF:
10135 case REALPART_EXPR:
10136 case IMAGPART_EXPR:
10137 case COMPONENT_REF:
10138 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
10139 fallback ? fallback : fb_rvalue);
10140 break;
10141
10142 case COND_EXPR:
10143 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
10144
10145 /* C99 code may assign to an array in a structure value of a
10146 conditional expression, and this has undefined behavior
10147 only on execution, so create a temporary if an lvalue is
10148 required. */
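/* E.g. "(c ? s1 : s2).a[0] = 1;" is such code; the temporary gives the
   assignment something addressable to act on (a sketch). */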
10149 if (fallback == fb_lvalue)
10150 {
10151 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10152 mark_addressable (*expr_p);
10153 ret = GS_OK;
10154 }
10155 break;
10156
10157 case CALL_EXPR:
10158 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
10159
10160 /* C99 code may assign to an array in a structure returned
10161 from a function, and this has undefined behavior only on
10162 execution, so create a temporary if an lvalue is
10163 required. */
10164 if (fallback == fb_lvalue)
10165 {
10166 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10167 mark_addressable (*expr_p);
10168 ret = GS_OK;
10169 }
10170 break;
10171
10172 case TREE_LIST:
10173 gcc_unreachable ();
10174
10175 case COMPOUND_EXPR:
10176 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
10177 break;
10178
10179 case COMPOUND_LITERAL_EXPR:
10180 ret = gimplify_compound_literal_expr (expr_p, pre_p,
10181 gimple_test_f, fallback);
10182 break;
10183
10184 case MODIFY_EXPR:
10185 case INIT_EXPR:
10186 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
10187 fallback != fb_none);
10188 break;
10189
10190 case TRUTH_ANDIF_EXPR:
10191 case TRUTH_ORIF_EXPR:
10192 {
10193 /* Preserve the original type of the expression and the
10194 source location of the outer expression. */
10195 tree org_type = TREE_TYPE (*expr_p);
10196 *expr_p = gimple_boolify (*expr_p);
10197 *expr_p = build3_loc (input_location, COND_EXPR,
10198 org_type, *expr_p,
10199 fold_convert_loc
10200 (input_location,
10201 org_type, boolean_true_node),
10202 fold_convert_loc
10203 (input_location,
10204 org_type, boolean_false_node));
10205 ret = GS_OK;
10206 break;
10207 }
10208
10209 case TRUTH_NOT_EXPR:
10210 {
10211 tree type = TREE_TYPE (*expr_p);
10212 /* The parsers are careful to generate TRUTH_NOT_EXPR
10213 only with operands that are always zero or one.
10214 We do not fold here but handle the only interesting case
10215 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
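/* E.g. after boolification, "!b" becomes "~b" when the operand's
   boolean type has precision 1, and "b ^ 1" for wider boolean types
   (a sketch). */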
10216 *expr_p = gimple_boolify (*expr_p);
10217 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
10218 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
10219 TREE_TYPE (*expr_p),
10220 TREE_OPERAND (*expr_p, 0));
10221 else
10222 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
10223 TREE_TYPE (*expr_p),
10224 TREE_OPERAND (*expr_p, 0),
10225 build_int_cst (TREE_TYPE (*expr_p), 1));
10226 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
10227 *expr_p = fold_convert_loc (input_location, type, *expr_p);
10228 ret = GS_OK;
10229 break;
10230 }
10231
10232 case ADDR_EXPR:
10233 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
10234 break;
10235
10236 case ANNOTATE_EXPR:
10237 {
10238 tree cond = TREE_OPERAND (*expr_p, 0);
10239 tree kind = TREE_OPERAND (*expr_p, 1);
10240 tree type = TREE_TYPE (cond);
10241 if (!INTEGRAL_TYPE_P (type))
10242 {
10243 *expr_p = cond;
10244 ret = GS_OK;
10245 break;
10246 }
10247 tree tmp = create_tmp_var (type);
10248 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
10249 gcall *call
10250 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
10251 gimple_call_set_lhs (call, tmp);
10252 gimplify_seq_add_stmt (pre_p, call);
10253 *expr_p = tmp;
10254 ret = GS_ALL_DONE;
10255 break;
10256 }
10257
10258 case VA_ARG_EXPR:
10259 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
10260 break;
10261
10262 CASE_CONVERT:
10263 if (IS_EMPTY_STMT (*expr_p))
10264 {
10265 ret = GS_ALL_DONE;
10266 break;
10267 }
10268
10269 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
10270 || fallback == fb_none)
10271 {
10272 /* Just strip a conversion to void (or in void context) and
10273 try again. */
10274 *expr_p = TREE_OPERAND (*expr_p, 0);
10275 ret = GS_OK;
10276 break;
10277 }
10278
10279 ret = gimplify_conversion (expr_p);
10280 if (ret == GS_ERROR)
10281 break;
10282 if (*expr_p != save_expr)
10283 break;
10284 /* FALLTHRU */
10285
10286 case FIX_TRUNC_EXPR:
10287 /* unary_expr: ... | '(' cast ')' val | ... */
10288 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10289 is_gimple_val, fb_rvalue);
10290 recalculate_side_effects (*expr_p);
10291 break;
10292
10293 case INDIRECT_REF:
10294 {
10295 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
10296 bool notrap = TREE_THIS_NOTRAP (*expr_p);
10297 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
10298
10299 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
10300 if (*expr_p != save_expr)
10301 {
10302 ret = GS_OK;
10303 break;
10304 }
10305
10306 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10307 is_gimple_reg, fb_rvalue);
10308 if (ret == GS_ERROR)
10309 break;
10310
10311 recalculate_side_effects (*expr_p);
10312 *expr_p = fold_build2_loc (input_location, MEM_REF,
10313 TREE_TYPE (*expr_p),
10314 TREE_OPERAND (*expr_p, 0),
10315 build_int_cst (saved_ptr_type, 0));
10316 TREE_THIS_VOLATILE (*expr_p) = volatilep;
10317 TREE_THIS_NOTRAP (*expr_p) = notrap;
10318 ret = GS_OK;
10319 break;
10320 }
10321
10322 /* We arrive here through the various re-gimplification paths. */
10323 case MEM_REF:
10324 /* First try re-folding the whole thing. */
10325 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
10326 TREE_OPERAND (*expr_p, 0),
10327 TREE_OPERAND (*expr_p, 1));
10328 if (tmp)
10329 {
10330 REF_REVERSE_STORAGE_ORDER (tmp)
10331 = REF_REVERSE_STORAGE_ORDER (*expr_p);
10332 *expr_p = tmp;
10333 recalculate_side_effects (*expr_p);
10334 ret = GS_OK;
10335 break;
10336 }
10337 /* Avoid re-gimplifying the address operand if it is already
10338 in suitable form. Re-gimplifying would mark the address
10339 operand addressable. Always gimplify when not in SSA form
10340 as we still may have to gimplify decls with value-exprs. */
10341 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
10342 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
10343 {
10344 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10345 is_gimple_mem_ref_addr, fb_rvalue);
10346 if (ret == GS_ERROR)
10347 break;
10348 }
10349 recalculate_side_effects (*expr_p);
10350 ret = GS_ALL_DONE;
10351 break;
10352
10353 /* Constants need not be gimplified. */
10354 case INTEGER_CST:
10355 case REAL_CST:
10356 case FIXED_CST:
10357 case STRING_CST:
10358 case COMPLEX_CST:
10359 case VECTOR_CST:
10360 /* Drop the overflow flag on constants; we do not want
10361 that in the GIMPLE IL. */
10362 if (TREE_OVERFLOW_P (*expr_p))
10363 *expr_p = drop_tree_overflow (*expr_p);
10364 ret = GS_ALL_DONE;
10365 break;
10366
10367 case CONST_DECL:
10368 /* If we require an lvalue, such as for ADDR_EXPR, retain the
10369 CONST_DECL node. Otherwise the decl is replaceable by its
10370 value. */
10371 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
10372 if (fallback & fb_lvalue)
10373 ret = GS_ALL_DONE;
10374 else
10375 {
10376 *expr_p = DECL_INITIAL (*expr_p);
10377 ret = GS_OK;
10378 }
10379 break;
10380
10381 case DECL_EXPR:
10382 ret = gimplify_decl_expr (expr_p, pre_p);
10383 break;
10384
10385 case BIND_EXPR:
10386 ret = gimplify_bind_expr (expr_p, pre_p);
10387 break;
10388
10389 case LOOP_EXPR:
10390 ret = gimplify_loop_expr (expr_p, pre_p);
10391 break;
10392
10393 case SWITCH_EXPR:
10394 ret = gimplify_switch_expr (expr_p, pre_p);
10395 break;
10396
10397 case EXIT_EXPR:
10398 ret = gimplify_exit_expr (expr_p);
10399 break;
10400
10401 case GOTO_EXPR:
10402 /* If the target is not a LABEL_DECL, then it is a computed jump
10403 and the target needs to be gimplified. */
10404 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
10405 {
10406 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
10407 NULL, is_gimple_val, fb_rvalue);
10408 if (ret == GS_ERROR)
10409 break;
10410 }
10411 gimplify_seq_add_stmt (pre_p,
10412 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
10413 ret = GS_ALL_DONE;
10414 break;
10415
10416 case PREDICT_EXPR:
10417 gimplify_seq_add_stmt (pre_p,
10418 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
10419 PREDICT_EXPR_OUTCOME (*expr_p)));
10420 ret = GS_ALL_DONE;
10421 break;
10422
10423 case LABEL_EXPR:
10424 ret = GS_ALL_DONE;
10425 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
10426 == current_function_decl);
10427 gimplify_seq_add_stmt (pre_p,
10428 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
10429 break;
10430
10431 case CASE_LABEL_EXPR:
10432 ret = gimplify_case_label_expr (expr_p, pre_p);
10433 break;
10434
10435 case RETURN_EXPR:
10436 ret = gimplify_return_expr (*expr_p, pre_p);
10437 break;
10438
10439 case CONSTRUCTOR:
10440 /* Don't reduce this in place; let gimplify_init_constructor work its
10441 magic. But if we're just elaborating this for side effects, just
10442 gimplify any element that has side effects. */
10443 if (fallback == fb_none)
10444 {
10445 unsigned HOST_WIDE_INT ix;
10446 tree val;
10447 tree temp = NULL_TREE;
10448 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
10449 if (TREE_SIDE_EFFECTS (val))
10450 append_to_statement_list (val, &temp);
10451
10452 *expr_p = temp;
10453 ret = temp ? GS_OK : GS_ALL_DONE;
10454 }
10455 /* C99 code may assign to an array in a constructed
10456 structure or union, and this has undefined behavior only
10457 on execution, so create a temporary if an lvalue is
10458 required. */
10459 else if (fallback == fb_lvalue)
10460 {
10461 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
10462 mark_addressable (*expr_p);
10463 ret = GS_OK;
10464 }
10465 else
10466 ret = GS_ALL_DONE;
10467 break;
10468
10469 /* The following are special cases that are not handled by the
10470 original GIMPLE grammar. */
10471
10472 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
10473 eliminated. */
10474 case SAVE_EXPR:
10475 ret = gimplify_save_expr (expr_p, pre_p, post_p);
10476 break;
10477
10478 case BIT_FIELD_REF:
10479 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10480 post_p, is_gimple_lvalue, fb_either);
10481 recalculate_side_effects (*expr_p);
10482 break;
10483
10484 case TARGET_MEM_REF:
10485 {
10486 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
10487
10488 if (TMR_BASE (*expr_p))
10489 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
10490 post_p, is_gimple_mem_ref_addr, fb_either);
10491 if (TMR_INDEX (*expr_p))
10492 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
10493 post_p, is_gimple_val, fb_rvalue);
10494 if (TMR_INDEX2 (*expr_p))
10495 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
10496 post_p, is_gimple_val, fb_rvalue);
10497 /* TMR_STEP and TMR_OFFSET are always integer constants. */
10498 ret = MIN (r0, r1);
10499 }
10500 break;
10501
10502 case NON_LVALUE_EXPR:
10503 /* This should have been stripped above. */
10504 gcc_unreachable ();
10505
10506 case ASM_EXPR:
10507 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
10508 break;
10509
10510 case TRY_FINALLY_EXPR:
10511 case TRY_CATCH_EXPR:
10512 {
10513 gimple_seq eval, cleanup;
10514 gtry *try_;
10515
10516 /* Calls to destructors are generated automatically in the FINALLY/CATCH
10517 block. They should have UNKNOWN_LOCATION as their location. However,
10518 gimplify_call_expr will reset these call stmts to input_location
10519 if it finds a stmt's location is unknown. To prevent this resetting
10520 for destructors, we set input_location to unknown.
10521 Note that this only affects the destructor calls in the FINALLY/CATCH
10522 block, and input_location will automatically be reset to its original
10523 value by the end of gimplify_expr. */
10524 input_location = UNKNOWN_LOCATION;
10525 eval = cleanup = NULL;
10526 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
10527 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
10528 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
10529 if (gimple_seq_empty_p (cleanup))
10530 {
10531 gimple_seq_add_seq (pre_p, eval);
10532 ret = GS_ALL_DONE;
10533 break;
10534 }
10535 try_ = gimple_build_try (eval, cleanup,
10536 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
10537 ? GIMPLE_TRY_FINALLY
10538 : GIMPLE_TRY_CATCH);
10539 if (EXPR_HAS_LOCATION (save_expr))
10540 gimple_set_location (try_, EXPR_LOCATION (save_expr));
10541 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
10542 gimple_set_location (try_, saved_location);
10543 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
10544 gimple_try_set_catch_is_cleanup (try_,
10545 TRY_CATCH_IS_CLEANUP (*expr_p));
10546 gimplify_seq_add_stmt (pre_p, try_);
10547 ret = GS_ALL_DONE;
10548 break;
10549 }
10550
10551 case CLEANUP_POINT_EXPR:
10552 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
10553 break;
10554
10555 case TARGET_EXPR:
10556 ret = gimplify_target_expr (expr_p, pre_p, post_p);
10557 break;
10558
10559 case CATCH_EXPR:
10560 {
10561 gimple *c;
10562 gimple_seq handler = NULL;
10563 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
10564 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
10565 gimplify_seq_add_stmt (pre_p, c);
10566 ret = GS_ALL_DONE;
10567 break;
10568 }
10569
10570 case EH_FILTER_EXPR:
10571 {
10572 gimple *ehf;
10573 gimple_seq failure = NULL;
10574
10575 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
10576 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
10577 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
10578 gimplify_seq_add_stmt (pre_p, ehf);
10579 ret = GS_ALL_DONE;
10580 break;
10581 }
10582
10583 case OBJ_TYPE_REF:
10584 {
10585 enum gimplify_status r0, r1;
10586 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
10587 post_p, is_gimple_val, fb_rvalue);
10588 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
10589 post_p, is_gimple_val, fb_rvalue);
10590 TREE_SIDE_EFFECTS (*expr_p) = 0;
10591 ret = MIN (r0, r1);
10592 }
10593 break;
10594
10595 case LABEL_DECL:
10596 /* We get here when taking the address of a label. We mark
10597 the label as "forced", meaning it can never be removed and
10598 it is a potential target for any computed goto. */
10599 FORCED_LABEL (*expr_p) = 1;
10600 ret = GS_ALL_DONE;
10601 break;
10602
10603 case STATEMENT_LIST:
10604 ret = gimplify_statement_list (expr_p, pre_p);
10605 break;
10606
10607 case WITH_SIZE_EXPR:
10608 {
10609 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10610 post_p == &internal_post ? NULL : post_p,
10611 gimple_test_f, fallback);
10612 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10613 is_gimple_val, fb_rvalue);
10614 ret = GS_ALL_DONE;
10615 }
10616 break;
10617
10618 case VAR_DECL:
10619 case PARM_DECL:
10620 ret = gimplify_var_or_parm_decl (expr_p);
10621 break;
10622
10623 case RESULT_DECL:
10624 /* When within an OMP context, notice uses of variables. */
10625 if (gimplify_omp_ctxp)
10626 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
10627 ret = GS_ALL_DONE;
10628 break;
10629
10630 case SSA_NAME:
10631 /* Allow callbacks into the gimplifier during optimization. */
10632 ret = GS_ALL_DONE;
10633 break;
10634
10635 case OMP_PARALLEL:
10636 gimplify_omp_parallel (expr_p, pre_p);
10637 ret = GS_ALL_DONE;
10638 break;
10639
10640 case OMP_TASK:
10641 gimplify_omp_task (expr_p, pre_p);
10642 ret = GS_ALL_DONE;
10643 break;
10644
10645 case OMP_FOR:
10646 case OMP_SIMD:
10647 case CILK_SIMD:
10648 case CILK_FOR:
10649 case OMP_DISTRIBUTE:
10650 case OMP_TASKLOOP:
10651 case OACC_LOOP:
10652 ret = gimplify_omp_for (expr_p, pre_p);
10653 break;
10654
10655 case OACC_CACHE:
10656 gimplify_oacc_cache (expr_p, pre_p);
10657 ret = GS_ALL_DONE;
10658 break;
10659
10660 case OACC_DECLARE:
10661 gimplify_oacc_declare (expr_p, pre_p);
10662 ret = GS_ALL_DONE;
10663 break;
10664
10665 case OACC_HOST_DATA:
10666 case OACC_DATA:
10667 case OACC_KERNELS:
10668 case OACC_PARALLEL:
10669 case OMP_SECTIONS:
10670 case OMP_SINGLE:
10671 case OMP_TARGET:
10672 case OMP_TARGET_DATA:
10673 case OMP_TEAMS:
10674 gimplify_omp_workshare (expr_p, pre_p);
10675 ret = GS_ALL_DONE;
10676 break;
10677
10678 case OACC_ENTER_DATA:
10679 case OACC_EXIT_DATA:
10680 case OACC_UPDATE:
10681 case OMP_TARGET_UPDATE:
10682 case OMP_TARGET_ENTER_DATA:
10683 case OMP_TARGET_EXIT_DATA:
10684 gimplify_omp_target_update (expr_p, pre_p);
10685 ret = GS_ALL_DONE;
10686 break;
10687
10688 case OMP_SECTION:
10689 case OMP_MASTER:
10690 case OMP_TASKGROUP:
10691 case OMP_ORDERED:
10692 case OMP_CRITICAL:
10693 {
10694 gimple_seq body = NULL;
10695 gimple *g;
10696
10697 gimplify_and_add (OMP_BODY (*expr_p), &body);
10698 switch (TREE_CODE (*expr_p))
10699 {
10700 case OMP_SECTION:
10701 g = gimple_build_omp_section (body);
10702 break;
10703 case OMP_MASTER:
10704 g = gimple_build_omp_master (body);
10705 break;
10706 case OMP_TASKGROUP:
10707 {
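/* Arrange for GOMP_taskgroup_end to run however the body is left,
   by making the call the cleanup of a GIMPLE_TRY_FINALLY wrapped
   around the gimplified body, and rebuild the taskgroup around
   that.  */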
10708 gimple_seq cleanup = NULL;
10709 tree fn
10710 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
10711 g = gimple_build_call (fn, 0);
10712 gimple_seq_add_stmt (&cleanup, g);
10713 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10714 body = NULL;
10715 gimple_seq_add_stmt (&body, g);
10716 g = gimple_build_omp_taskgroup (body);
10717 }
10718 break;
10719 case OMP_ORDERED:
10720 g = gimplify_omp_ordered (*expr_p, body);
10721 break;
10722 case OMP_CRITICAL:
10723 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
10724 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
10725 gimplify_adjust_omp_clauses (pre_p, body,
10726 &OMP_CRITICAL_CLAUSES (*expr_p),
10727 OMP_CRITICAL);
10728 g = gimple_build_omp_critical (body,
10729 OMP_CRITICAL_NAME (*expr_p),
10730 OMP_CRITICAL_CLAUSES (*expr_p));
10731 break;
10732 default:
10733 gcc_unreachable ();
10734 }
10735 gimplify_seq_add_stmt (pre_p, g);
10736 ret = GS_ALL_DONE;
10737 break;
10738 }
10739
10740 case OMP_ATOMIC:
10741 case OMP_ATOMIC_READ:
10742 case OMP_ATOMIC_CAPTURE_OLD:
10743 case OMP_ATOMIC_CAPTURE_NEW:
10744 ret = gimplify_omp_atomic (expr_p, pre_p);
10745 break;
10746
10747 case TRANSACTION_EXPR:
10748 ret = gimplify_transaction (expr_p, pre_p);
10749 break;
10750
10751 case TRUTH_AND_EXPR:
10752 case TRUTH_OR_EXPR:
10753 case TRUTH_XOR_EXPR:
10754 {
10755 tree orig_type = TREE_TYPE (*expr_p);
10756 tree new_type, xop0, xop1;
10757 *expr_p = gimple_boolify (*expr_p);
10758 new_type = TREE_TYPE (*expr_p);
10759 if (!useless_type_conversion_p (orig_type, new_type))
10760 {
10761 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
10762 ret = GS_OK;
10763 break;
10764 }
10765
10766 /* Boolified binary truth expressions are semantically equivalent
10767 to bitwise binary expressions. Canonicalize them to the
10768 bitwise variant. */
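/* For example, once both operands are known to be boolean,

     TRUTH_AND_EXPR <a, b>

   is rewritten in place as

     BIT_AND_EXPR <a, b>

   which computes the same value, each operand being 0 or 1.  */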
10769 switch (TREE_CODE (*expr_p))
10770 {
10771 case TRUTH_AND_EXPR:
10772 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
10773 break;
10774 case TRUTH_OR_EXPR:
10775 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
10776 break;
10777 case TRUTH_XOR_EXPR:
10778 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
10779 break;
10780 default:
10781 break;
10782 }
10783 /* Now make sure the operands have types compatible with the
10784 expression's new_type. */
10785 xop0 = TREE_OPERAND (*expr_p, 0);
10786 xop1 = TREE_OPERAND (*expr_p, 1);
10787 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
10788 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
10789 new_type,
10790 xop0);
10791 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
10792 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
10793 new_type,
10794 xop1);
10795 /* Continue classified as tcc_binary. */
10796 goto expr_2;
10797 }
10798
10799 case FMA_EXPR:
10800 case VEC_COND_EXPR:
10801 case VEC_PERM_EXPR:
10802 /* Classified as tcc_expression. */
10803 goto expr_3;
10804
10805 case POINTER_PLUS_EXPR:
10806 {
10807 enum gimplify_status r0, r1;
10808 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10809 post_p, is_gimple_val, fb_rvalue);
10810 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10811 post_p, is_gimple_val, fb_rvalue);
10812 recalculate_side_effects (*expr_p);
10813 ret = MIN (r0, r1);
10814 break;
10815 }
10816
10817 case CILK_SYNC_STMT:
10818 {
10819 if (!fn_contains_cilk_spawn_p (cfun))
10820 {
10821 error_at (EXPR_LOCATION (*expr_p),
10822 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
10823 ret = GS_ERROR;
10824 }
10825 else
10826 {
10827 gimplify_cilk_sync (expr_p, pre_p);
10828 ret = GS_ALL_DONE;
10829 }
10830 break;
10831 }
10832
10833 default:
10834 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
10835 {
10836 case tcc_comparison:
10837 /* Handle comparison of objects of non-scalar mode aggregates
10838 with a call to memcmp. It would be nice to only have to do
10839 this for variable-sized objects, but then we'd have to allow
10840 the same nest of reference nodes we allow for MODIFY_EXPR and
10841 that's too complex.
10842
10843 Compare scalar mode aggregates as scalar mode values. Using
10844 memcmp for them would be very inefficient at best, and is
10845 plain wrong if bitfields are involved. */
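/* So, roughly: an equality test of two BLKmode aggregates becomes
   something like

     memcmp (&a, &b, sizeof (a)) == 0

   while an aggregate small enough to fit in an integer mode is
   compared as that integer value instead.  */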
10846 {
10847 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
10848
10849 /* Vector comparisons need no boolification. */
10850 if (TREE_CODE (type) == VECTOR_TYPE)
10851 goto expr_2;
10852 else if (!AGGREGATE_TYPE_P (type))
10853 {
10854 tree org_type = TREE_TYPE (*expr_p);
10855 *expr_p = gimple_boolify (*expr_p);
10856 if (!useless_type_conversion_p (org_type,
10857 TREE_TYPE (*expr_p)))
10858 {
10859 *expr_p = fold_convert_loc (input_location,
10860 org_type, *expr_p);
10861 ret = GS_OK;
10862 }
10863 else
10864 goto expr_2;
10865 }
10866 else if (TYPE_MODE (type) != BLKmode)
10867 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
10868 else
10869 ret = gimplify_variable_sized_compare (expr_p);
10870
10871 break;
10872 }
10873
10874 /* If *EXPR_P does not need to be special-cased, handle it
10875 according to its class. */
10876 case tcc_unary:
10877 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10878 post_p, is_gimple_val, fb_rvalue);
10879 break;
10880
10881 case tcc_binary:
10882 expr_2:
10883 {
10884 enum gimplify_status r0, r1;
10885
10886 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10887 post_p, is_gimple_val, fb_rvalue);
10888 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10889 post_p, is_gimple_val, fb_rvalue);
10890
10891 ret = MIN (r0, r1);
10892 break;
10893 }
10894
10895 expr_3:
10896 {
10897 enum gimplify_status r0, r1, r2;
10898
10899 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
10900 post_p, is_gimple_val, fb_rvalue);
10901 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
10902 post_p, is_gimple_val, fb_rvalue);
10903 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
10904 post_p, is_gimple_val, fb_rvalue);
10905
10906 ret = MIN (MIN (r0, r1), r2);
10907 break;
10908 }
10909
10910 case tcc_declaration:
10911 case tcc_constant:
10912 ret = GS_ALL_DONE;
10913 goto dont_recalculate;
10914
10915 default:
10916 gcc_unreachable ();
10917 }
10918
10919 recalculate_side_effects (*expr_p);
10920
10921 dont_recalculate:
10922 break;
10923 }
10924
10925 gcc_assert (*expr_p || ret != GS_OK);
10926 }
10927 while (ret == GS_OK);
10928
10929 /* If we encountered an error_mark somewhere nested inside, either
10930 stub out the statement or propagate the error back out. */
10931 if (ret == GS_ERROR)
10932 {
10933 if (is_statement)
10934 *expr_p = NULL;
10935 goto out;
10936 }
10937
10938 /* This was only valid as a return value from the langhook, which
10939 we handled. Make sure it doesn't escape from any other context. */
10940 gcc_assert (ret != GS_UNHANDLED);
10941
10942 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
10943 {
10944 /* We aren't looking for a value, and we don't have a valid
10945 statement. If it doesn't have side-effects, throw it away. */
10946 if (!TREE_SIDE_EFFECTS (*expr_p))
10947 *expr_p = NULL;
10948 else if (!TREE_THIS_VOLATILE (*expr_p))
10949 {
10950 /* This is probably a _REF that contains something nested that
10951 has side effects. Recurse through the operands to find it. */
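/* E.g. the statement "a[i++].f;" yields no value, but the "i++"
   buried in the ARRAY_REF must still execute; gimplifying the
   operands one by one extracts it.  */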
10952 enum tree_code code = TREE_CODE (*expr_p);
10953
10954 switch (code)
10955 {
10956 case COMPONENT_REF:
10957 case REALPART_EXPR:
10958 case IMAGPART_EXPR:
10959 case VIEW_CONVERT_EXPR:
10960 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10961 gimple_test_f, fallback);
10962 break;
10963
10964 case ARRAY_REF:
10965 case ARRAY_RANGE_REF:
10966 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
10967 gimple_test_f, fallback);
10968 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
10969 gimple_test_f, fallback);
10970 break;
10971
10972 default:
10973 /* Anything else with side-effects must be converted to
10974 a valid statement before we get here. */
10975 gcc_unreachable ();
10976 }
10977
10978 *expr_p = NULL;
10979 }
10980 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
10981 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
10982 {
10983 /* Historically, the compiler has treated a bare reference
10984 to a non-BLKmode volatile lvalue as forcing a load. */
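/* E.g. given "volatile int v;", the expression statement "v;" is
   kept as a load into an artificial temporary (named "vol" below)
   rather than being thrown away.  */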
10985 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
10986
10987 /* Normally, we do not want to create a temporary for a
10988 TREE_ADDRESSABLE type because such a type should not be
10989 copied by bitwise-assignment. However, we make an
10990 exception here, as all we are doing is ensuring that we read
10991 the bytes that make up the object. We use
10992 create_tmp_var_raw because create_tmp_var will abort when
10993 given a TREE_ADDRESSABLE type. */
10994 tree tmp = create_tmp_var_raw (type, "vol");
10995 gimple_add_tmp_var (tmp);
10996 gimplify_assign (tmp, *expr_p, pre_p);
10997 *expr_p = NULL;
10998 }
10999 else
11000 /* We can't do anything useful with a volatile reference to
11001 an incomplete type, so just throw it away. Likewise for
11002 a BLKmode type, since any implicit inner load should
11003 already have been turned into an explicit one by the
11004 gimplification process. */
11005 *expr_p = NULL;
11006 }
11007
11008 /* If we are gimplifying at the statement level, we're done. Tack
11009 everything together and return. */
11010 if (fallback == fb_none || is_statement)
11011 {
11012 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
11013 it out for GC to reclaim it. */
11014 *expr_p = NULL_TREE;
11015
11016 if (!gimple_seq_empty_p (internal_pre)
11017 || !gimple_seq_empty_p (internal_post))
11018 {
11019 gimplify_seq_add_seq (&internal_pre, internal_post);
11020 gimplify_seq_add_seq (pre_p, internal_pre);
11021 }
11022
11023 /* The result of gimplifying *EXPR_P is going to be the last few
11024 statements in *PRE_P and *POST_P. Add location information
11025 to all the statements that were added by the gimplification
11026 helpers. */
11027 if (!gimple_seq_empty_p (*pre_p))
11028 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
11029
11030 if (!gimple_seq_empty_p (*post_p))
11031 annotate_all_with_location_after (*post_p, post_last_gsi,
11032 input_location);
11033
11034 goto out;
11035 }
11036
11037 #ifdef ENABLE_GIMPLE_CHECKING
11038 if (*expr_p)
11039 {
11040 enum tree_code code = TREE_CODE (*expr_p);
11041 /* These expressions should already be in gimple IR form. */
11042 gcc_assert (code != MODIFY_EXPR
11043 && code != ASM_EXPR
11044 && code != BIND_EXPR
11045 && code != CATCH_EXPR
11046 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
11047 && code != EH_FILTER_EXPR
11048 && code != GOTO_EXPR
11049 && code != LABEL_EXPR
11050 && code != LOOP_EXPR
11051 && code != SWITCH_EXPR
11052 && code != TRY_FINALLY_EXPR
11053 && code != OACC_PARALLEL
11054 && code != OACC_KERNELS
11055 && code != OACC_DATA
11056 && code != OACC_HOST_DATA
11057 && code != OACC_DECLARE
11058 && code != OACC_UPDATE
11059 && code != OACC_ENTER_DATA
11060 && code != OACC_EXIT_DATA
11061 && code != OACC_CACHE
11062 && code != OMP_CRITICAL
11063 && code != OMP_FOR
11064 && code != OACC_LOOP
11065 && code != OMP_MASTER
11066 && code != OMP_TASKGROUP
11067 && code != OMP_ORDERED
11068 && code != OMP_PARALLEL
11069 && code != OMP_SECTIONS
11070 && code != OMP_SECTION
11071 && code != OMP_SINGLE);
11072 }
11073 #endif
11074
11075 /* Otherwise we're gimplifying a subexpression, so the resulting
11076 value is interesting. If it's a valid operand that matches
11077 GIMPLE_TEST_F, we're done. Unless we are handling some
11078 post-effects internally; if that's the case, we need to copy into
11079 a temporary before adding the post-effects to POST_P. */
11080 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
11081 goto out;
11082
11083 /* Otherwise, we need to create a new temporary for the gimplified
11084 expression. */
11085
11086 /* We can't return an lvalue if we have an internal postqueue. The
11087 object the lvalue refers to would (probably) be modified by the
11088 postqueue; we need to copy the value out first, which means an
11089 rvalue. */
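/* E.g. for *EXPR_P of the form "a.b" this emits roughly
   "tmp = &a.b" into *PRE_P and rewrites *EXPR_P as "*tmp".  */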
11090 if ((fallback & fb_lvalue)
11091 && gimple_seq_empty_p (internal_post)
11092 && is_gimple_addressable (*expr_p))
11093 {
11094 /* An lvalue will do. Take the address of the expression, store it
11095 in a temporary, and replace the expression with an INDIRECT_REF of
11096 that temporary. */
11097 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
11098 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
11099 *expr_p = build_simple_mem_ref (tmp);
11100 }
11101 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
11102 {
11103 /* An rvalue will do. Assign the gimplified expression into a
11104 new temporary TMP and replace the original expression with
11105 TMP. First, make sure that the expression has a type so that
11106 it can be assigned into a temporary. */
11107 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
11108 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
11109 }
11110 else
11111 {
11112 #ifdef ENABLE_GIMPLE_CHECKING
11113 if (!(fallback & fb_mayfail))
11114 {
11115 fprintf (stderr, "gimplification failed:\n");
11116 print_generic_expr (stderr, *expr_p, 0);
11117 debug_tree (*expr_p);
11118 internal_error ("gimplification failed");
11119 }
11120 #endif
11121 gcc_assert (fallback & fb_mayfail);
11122
11123 /* If this is an asm statement, and the user asked for the
11124 impossible, don't die. Fail and let gimplify_asm_expr
11125 issue an error. */
11126 ret = GS_ERROR;
11127 goto out;
11128 }
11129
11130 /* Make sure the temporary matches our predicate. */
11131 gcc_assert ((*gimple_test_f) (*expr_p));
11132
11133 if (!gimple_seq_empty_p (internal_post))
11134 {
11135 annotate_all_with_location (internal_post, input_location);
11136 gimplify_seq_add_seq (pre_p, internal_post);
11137 }
11138
11139 out:
11140 input_location = saved_location;
11141 return ret;
11142 }
11143
11144 /* Look through TYPE for variable-sized objects and gimplify each such
11145 size that we find. Add to LIST_P any statements generated. */
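/* E.g. for a C99 VLA "int a[n];", TYPE_SIZE of the array type is a
   run-time expression involving "n"; the statements that evaluate
   it into temporaries are added to LIST_P.  */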
11146
11147 void
11148 gimplify_type_sizes (tree type, gimple_seq *list_p)
11149 {
11150 tree field, t;
11151
11152 if (type == NULL || type == error_mark_node)
11153 return;
11154
11155 /* We first do the main variant, then copy into any other variants. */
11156 type = TYPE_MAIN_VARIANT (type);
11157
11158 /* Avoid infinite recursion. */
11159 if (TYPE_SIZES_GIMPLIFIED (type))
11160 return;
11161
11162 TYPE_SIZES_GIMPLIFIED (type) = 1;
11163
11164 switch (TREE_CODE (type))
11165 {
11166 case INTEGER_TYPE:
11167 case ENUMERAL_TYPE:
11168 case BOOLEAN_TYPE:
11169 case REAL_TYPE:
11170 case FIXED_POINT_TYPE:
11171 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
11172 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
11173
11174 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11175 {
11176 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
11177 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
11178 }
11179 break;
11180
11181 case ARRAY_TYPE:
11182 /* These types may not have declarations, so handle them here. */
11183 gimplify_type_sizes (TREE_TYPE (type), list_p);
11184 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
11185 /* Ensure VLA bounds aren't removed: at -O0 they should be variables
11186 with assigned stack slots, and at -O1+ with -g they should be
11187 tracked by VTA. */
11188 if (!(TYPE_NAME (type)
11189 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
11190 && DECL_IGNORED_P (TYPE_NAME (type)))
11191 && TYPE_DOMAIN (type)
11192 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
11193 {
11194 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
11195 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11196 DECL_IGNORED_P (t) = 0;
11197 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
11198 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
11199 DECL_IGNORED_P (t) = 0;
11200 }
11201 break;
11202
11203 case RECORD_TYPE:
11204 case UNION_TYPE:
11205 case QUAL_UNION_TYPE:
11206 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
11207 if (TREE_CODE (field) == FIELD_DECL)
11208 {
11209 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
11210 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
11211 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
11212 gimplify_type_sizes (TREE_TYPE (field), list_p);
11213 }
11214 break;
11215
11216 case POINTER_TYPE:
11217 case REFERENCE_TYPE:
11218 /* We used to recurse on the pointed-to type here, which turned out to
11219 be incorrect because its definition might refer to variables not
11220 yet initialized at this point if a forward declaration is involved.
11221
11222 It was actually useful for anonymous pointed-to types to ensure
11223 that the sizes evaluation dominates every possible later use of the
11224 values. Restricting to such types here would be safe since there
11225 is no possible forward declaration around, but would introduce an
11226 undesirable middle-end semantic to anonymity. We then defer to
11227 front-ends the responsibility of ensuring that the sizes are
11228 evaluated both early and late enough, e.g. by attaching artificial
11229 type declarations to the tree. */
11230 break;
11231
11232 default:
11233 break;
11234 }
11235
11236 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
11237 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
11238
11239 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
11240 {
11241 TYPE_SIZE (t) = TYPE_SIZE (type);
11242 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
11243 TYPE_SIZES_GIMPLIFIED (t) = 1;
11244 }
11245 }
11246
11247 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
11248 a size or position, has had all of its SAVE_EXPRs evaluated.
11249 We add any required statements to *STMT_P. */
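/* E.g. a DECL_SIZE of the form "n * 8" is unshared and gimplified
   so that *EXPR_P ends up a simple gimple value: a constant, or a
   temporary computed by statements added to *STMT_P.  */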
11250
11251 void
11252 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
11253 {
11254 tree expr = *expr_p;
11255
11256 /* We don't do anything if the value isn't there, is constant, or
11257 contains a PLACEHOLDER_EXPR, nor if it's already a VAR_DECL. A
11258 VAR_DECL from another function would normally be replaced with a new
11259 variable by the gimplifier, but that would cause problems when the
11260 type comes from outside the function, so it's OK to leave it here. */
11261 if (is_gimple_sizepos (expr))
11262 return;
11263
11264 *expr_p = unshare_expr (expr);
11265
11266 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
11267 }
11268
11269 /* Gimplify the body of FNDECL and return a GIMPLE_BIND node containing
11270 the sequence of corresponding GIMPLE statements. If DO_PARMS is
11271 true, also gimplify the parameters. */
11272
11273 gbind *
11274 gimplify_body (tree fndecl, bool do_parms)
11275 {
11276 location_t saved_location = input_location;
11277 gimple_seq parm_stmts, seq;
11278 gimple *outer_stmt;
11279 gbind *outer_bind;
11280 struct cgraph_node *cgn;
11281
11282 timevar_push (TV_TREE_GIMPLIFY);
11283
11284 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
11285 gimplification. */
11286 default_rtl_profile ();
11287
11288 gcc_assert (gimplify_ctxp == NULL);
11289 push_gimplify_context ();
11290
11291 if (flag_openacc || flag_openmp)
11292 {
11293 gcc_assert (gimplify_omp_ctxp == NULL);
11294 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
11295 gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
11296 }
11297
11298 /* Unshare most shared trees in the body and in that of any nested functions.
11299 It would seem we don't have to do this for nested functions because
11300 they are supposed to be output first, with the outer function gimplified
11301 afterward, but the g++ front end doesn't always do it that way. */
11302 unshare_body (fndecl);
11303 unvisit_body (fndecl);
11304
11305 cgn = cgraph_node::get (fndecl);
11306 if (cgn && cgn->origin)
11307 nonlocal_vlas = new hash_set<tree>;
11308
11309 /* Make sure input_location isn't set to something weird. */
11310 input_location = DECL_SOURCE_LOCATION (fndecl);
11311
11312 /* Resolve callee-copies. This has to be done before processing
11313 the body so that DECL_VALUE_EXPR gets processed correctly. */
11314 parm_stmts = do_parms ? gimplify_parameters () : NULL;
11315
11316 /* Gimplify the function's body. */
11317 seq = NULL;
11318 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
11319 outer_stmt = gimple_seq_first_stmt (seq);
11320 if (!outer_stmt)
11321 {
11322 outer_stmt = gimple_build_nop ();
11323 gimplify_seq_add_stmt (&seq, outer_stmt);
11324 }
11325
11326 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
11327 not the case, wrap everything in a GIMPLE_BIND to make it so. */
11328 if (gimple_code (outer_stmt) == GIMPLE_BIND
11329 && gimple_seq_first (seq) == gimple_seq_last (seq))
11330 outer_bind = as_a <gbind *> (outer_stmt);
11331 else
11332 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
11333
11334 DECL_SAVED_TREE (fndecl) = NULL_TREE;
11335
11336 /* If we had callee-copies statements, insert them at the beginning
11337 of the function and clear DECL_HAS_VALUE_EXPR_P on the parameters. */
11338 if (!gimple_seq_empty_p (parm_stmts))
11339 {
11340 tree parm;
11341
11342 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
11343 gimple_bind_set_body (outer_bind, parm_stmts);
11344
11345 for (parm = DECL_ARGUMENTS (current_function_decl);
11346 parm; parm = DECL_CHAIN (parm))
11347 if (DECL_HAS_VALUE_EXPR_P (parm))
11348 {
11349 DECL_HAS_VALUE_EXPR_P (parm) = 0;
11350 DECL_IGNORED_P (parm) = 0;
11351 }
11352 }
11353
11354 if (nonlocal_vlas)
11355 {
11356 if (nonlocal_vla_vars)
11357 {
11358 /* tree-nested.c may later call declare_vars (..., true),
11359 which relies on the BLOCK_VARS chain being the tail of the
11360 gimple_bind_vars chain. Ensure we don't violate that
11361 assumption. */
11362 if (gimple_bind_block (outer_bind)
11363 == DECL_INITIAL (current_function_decl))
11364 declare_vars (nonlocal_vla_vars, outer_bind, true);
11365 else
11366 BLOCK_VARS (DECL_INITIAL (current_function_decl))
11367 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
11368 nonlocal_vla_vars);
11369 nonlocal_vla_vars = NULL_TREE;
11370 }
11371 delete nonlocal_vlas;
11372 nonlocal_vlas = NULL;
11373 }
11374
11375 if ((flag_openacc || flag_openmp || flag_openmp_simd)
11376 && gimplify_omp_ctxp)
11377 {
11378 delete_omp_context (gimplify_omp_ctxp);
11379 gimplify_omp_ctxp = NULL;
11380 }
11381
11382 pop_gimplify_context (outer_bind);
11383 gcc_assert (gimplify_ctxp == NULL);
11384
11385 if (flag_checking && !seen_error ())
11386 verify_gimple_in_seq (gimple_bind_body (outer_bind));
11387
11388 timevar_pop (TV_TREE_GIMPLIFY);
11389 input_location = saved_location;
11390
11391 return outer_bind;
11392 }
11393
11394 typedef char *char_p; /* For DEF_VEC_P. */
11395
11396 /* Return whether we should exclude FNDECL from instrumentation. */
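/* Matching is by substring (strstr), not exact name: e.g.
   -finstrument-functions-exclude-function-list=vec_ excludes every
   function whose printable name contains "vec_".  */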
11397
11398 static bool
11399 flag_instrument_functions_exclude_p (tree fndecl)
11400 {
11401 vec<char_p> *v;
11402
11403 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
11404 if (v && v->length () > 0)
11405 {
11406 const char *name;
11407 int i;
11408 char *s;
11409
11410 name = lang_hooks.decl_printable_name (fndecl, 0);
11411 FOR_EACH_VEC_ELT (*v, i, s)
11412 if (strstr (name, s) != NULL)
11413 return true;
11414 }
11415
11416 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
11417 if (v && v->length () > 0)
11418 {
11419 const char *name;
11420 int i;
11421 char *s;
11422
11423 name = DECL_SOURCE_FILE (fndecl);
11424 FOR_EACH_VEC_ELT (*v, i, s)
11425 if (strstr (name, s) != NULL)
11426 return true;
11427 }
11428
11429 return false;
11430 }
11431
11432 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
11433 node for the function we want to gimplify.
11434
11435 Return the sequence of GIMPLE statements corresponding to the body
11436 of FNDECL. */
11437
11438 void
11439 gimplify_function_tree (tree fndecl)
11440 {
11441 tree parm, ret;
11442 gimple_seq seq;
11443 gbind *bind;
11444
11445 gcc_assert (!gimple_body (fndecl));
11446
11447 if (DECL_STRUCT_FUNCTION (fndecl))
11448 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
11449 else
11450 push_struct_function (fndecl);
11451
11452 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
11453 if necessary. */
11454 cfun->curr_properties |= PROP_gimple_lva;
11455
11456 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
11457 {
11458 /* Preliminarily mark non-addressed complex and vector variables as
11459 eligible for promotion to gimple registers. We'll transform their
11460 uses as we find them. */
11461 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
11462 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
11463 && !TREE_THIS_VOLATILE (parm)
11464 && !needs_to_live_in_memory (parm))
11465 DECL_GIMPLE_REG_P (parm) = 1;
11466 }
11467
11468 ret = DECL_RESULT (fndecl);
11469 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
11470 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
11471 && !needs_to_live_in_memory (ret))
11472 DECL_GIMPLE_REG_P (ret) = 1;
11473
11474 bind = gimplify_body (fndecl, true);
11475
11476 /* The tree body of the function is no longer needed, replace it
11477 with the new GIMPLE body. */
11478 seq = NULL;
11479 gimple_seq_add_stmt (&seq, bind);
11480 gimple_set_body (fndecl, seq);
11481
11482 /* If we're instrumenting function entry/exit, then prepend the call to
11483 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
11484 catch the exit hook. */
11485 /* ??? Add some way to ignore exceptions for this TFE. */
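/* The instrumented function then has roughly the shape

     t = __builtin_return_address (0);
     __cyg_profile_func_enter (&this_fn, t);
     try
       {
         <original body>
       }
     finally
       {
         t2 = __builtin_return_address (0);
         __cyg_profile_func_exit (&this_fn, t2);
       }
*/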
11486 if (flag_instrument_function_entry_exit
11487 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
11488 && !flag_instrument_functions_exclude_p (fndecl))
11489 {
11490 tree x;
11491 gbind *new_bind;
11492 gimple *tf;
11493 gimple_seq cleanup = NULL, body = NULL;
11494 tree tmp_var;
11495 gcall *call;
11496
11497 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
11498 call = gimple_build_call (x, 1, integer_zero_node);
11499 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11500 gimple_call_set_lhs (call, tmp_var);
11501 gimplify_seq_add_stmt (&cleanup, call);
11502 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
11503 call = gimple_build_call (x, 2,
11504 build_fold_addr_expr (current_function_decl),
11505 tmp_var);
11506 gimplify_seq_add_stmt (&cleanup, call);
11507 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
11508
11509 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
11510 call = gimple_build_call (x, 1, integer_zero_node);
11511 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
11512 gimple_call_set_lhs (call, tmp_var);
11513 gimplify_seq_add_stmt (&body, call);
11514 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
11515 call = gimple_build_call (x, 2,
11516 build_fold_addr_expr (current_function_decl),
11517 tmp_var);
11518 gimplify_seq_add_stmt (&body, call);
11519 gimplify_seq_add_stmt (&body, tf);
11520 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
11521 /* Clear the block for BIND, since it is no longer directly inside
11522 the function, but within a try block. */
11523 gimple_bind_set_block (bind, NULL);
11524
11525 /* Replace the current function body with the body
11526 wrapped in the try/finally TF. */
11527 seq = NULL;
11528 gimple_seq_add_stmt (&seq, new_bind);
11529 gimple_set_body (fndecl, seq);
11530 bind = new_bind;
11531 }
11532
11533 if ((flag_sanitize & SANITIZE_THREAD) != 0
11534 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
11535 {
11536 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
11537 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
11538 gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
11539 /* Clear the block for BIND, since it is no longer directly inside
11540 the function, but within a try block. */
11541 gimple_bind_set_block (bind, NULL);
11542 /* Replace the current function body with the body
11543 wrapped in the try/finally TF. */
11544 seq = NULL;
11545 gimple_seq_add_stmt (&seq, new_bind);
11546 gimple_set_body (fndecl, seq);
11547 }
11548
11549 DECL_SAVED_TREE (fndecl) = NULL_TREE;
11550 cfun->curr_properties |= PROP_gimple_any;
11551
11552 pop_cfun ();
11553
11554 dump_function (TDI_generic, fndecl);
11555 }
11556
11557 /* Return a dummy expression of type TYPE in order to keep going after an
11558 error. */
11559
11560 static tree
11561 dummy_object (tree type)
11562 {
11563 tree t = build_int_cst (build_pointer_type (type), 0);
11564 return build2 (MEM_REF, type, t, t);
11565 }
11566
11567 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
11568 builtin function, but a very special sort of operator. */
11569
11570 enum gimplify_status
11571 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
11572 gimple_seq *post_p ATTRIBUTE_UNUSED)
11573 {
11574 tree promoted_type, have_va_type;
11575 tree valist = TREE_OPERAND (*expr_p, 0);
11576 tree type = TREE_TYPE (*expr_p);
11577 tree t, tag, aptag;
11578 location_t loc = EXPR_LOCATION (*expr_p);
11579
11580 /* Verify that valist is of the proper type. */
11581 have_va_type = TREE_TYPE (valist);
11582 if (have_va_type == error_mark_node)
11583 return GS_ERROR;
11584 have_va_type = targetm.canonical_va_list_type (have_va_type);
11585
11586 if (have_va_type == NULL_TREE)
11587 {
11588 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
11589 return GS_ERROR;
11590 }
11591
11592 /* Generate a diagnostic for requesting data of a type that cannot
11593 be passed through `...' due to type promotion at the call site. */
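/* E.g. "va_arg (ap, short)" can never be satisfied: a short
   argument is promoted to int at the call site, so only
   "va_arg (ap, int)" could retrieve it.  */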
11594 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
11595 != type)
11596 {
11597 static bool gave_help;
11598 bool warned;
11599 /* Use the expansion point to handle cases such as passing bool (defined
11600 in a system header) through `...'. */
11601 source_location xloc
11602 = expansion_point_location_if_in_system_header (loc);
11603
11604 /* Unfortunately, this is merely undefined, rather than a constraint
11605 violation, so we cannot make this an error. If this call is never
11606 executed, the program is still strictly conforming. */
11607 warned = warning_at (xloc, 0,
11608 "%qT is promoted to %qT when passed through %<...%>",
11609 type, promoted_type);
11610 if (!gave_help && warned)
11611 {
11612 gave_help = true;
11613 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
11614 promoted_type, type);
11615 }
11616
11617 /* We can, however, treat "undefined" any way we please.
11618 Emit a trap to encourage the user to fix the program. */
11619 if (warned)
11620 inform (xloc, "if this code is reached, the program will abort");
11621 /* Before the abort, allow the evaluation of the va_list
11622 expression to exit or longjmp. */
11623 gimplify_and_add (valist, pre_p);
11624 t = build_call_expr_loc (loc,
11625 builtin_decl_implicit (BUILT_IN_TRAP), 0);
11626 gimplify_and_add (t, pre_p);
11627
11628 /* This is dead code, but go ahead and finish so that the
11629 mode of the result comes out right. */
11630 *expr_p = dummy_object (type);
11631 return GS_ALL_DONE;
11632 }
11633
11634 tag = build_int_cst (build_pointer_type (type), 0);
11635 aptag = build_int_cst (TREE_TYPE (valist), 0);
11636
11637 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
11638 valist, tag, aptag);
11639
11640 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
11641 needs to be expanded. */
11642 cfun->curr_properties &= ~PROP_gimple_lva;
11643
11644 return GS_OK;
11645 }
11646
11647 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
11648
11649 DST/SRC are the destination and source respectively. You can pass
11650 ungimplified trees in DST or SRC, in which case they will be
11651 converted to a gimple operand if necessary.
11652
11653 This function returns the newly created GIMPLE_ASSIGN tuple. */
11654
11655 gimple *
11656 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
11657 {
11658 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11659 gimplify_and_add (t, seq_p);
11660 ggc_free (t);
11661 return gimple_seq_last_stmt (*seq_p);
11662 }
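/* For example, a caller can emit "x = y + z" with

     gimplify_assign (x, build2 (PLUS_EXPR, TREE_TYPE (x), y, z),
                      &seq);

   the ungimplified RHS is reduced to a valid gimple operand before
   the assignment is appended to SEQ.  */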
11663
11664 inline hashval_t
11665 gimplify_hasher::hash (const elt_t *p)
11666 {
11667 tree t = p->val;
11668 return iterative_hash_expr (t, 0);
11669 }
11670
11671 inline bool
11672 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
11673 {
11674 tree t1 = p1->val;
11675 tree t2 = p2->val;
11676 enum tree_code code = TREE_CODE (t1);
11677
11678 if (TREE_CODE (t2) != code
11679 || TREE_TYPE (t1) != TREE_TYPE (t2))
11680 return false;
11681
11682 if (!operand_equal_p (t1, t2, 0))
11683 return false;
11684
11685 /* Only allow them to compare equal if they also hash equal; otherwise
11686 results are nondeterministic, and we fail bootstrap comparison. */
11687 gcc_checking_assert (hash (p1) == hash (p2));
11688
11689 return true;
11690 }