]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/gimplify.c
* tree-flow.h (struct omp_region): Move to omp-low.c.
[thirdparty/gcc.git] / gcc / gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "tree-iterator.h"
30 #include "tree-inline.h"
31 #include "tree-pretty-print.h"
32 #include "langhooks.h"
33 #include "tree-ssa.h"
34 #include "cgraph.h"
35 #include "timevar.h"
36 #include "hashtab.h"
37 #include "flags.h"
38 #include "function.h"
39 #include "ggc.h"
40 #include "diagnostic-core.h"
41 #include "target.h"
42 #include "pointer-set.h"
43 #include "splay-tree.h"
44 #include "vec.h"
45 #include "omp-low.h"
46
47 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
48 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
49 #include "tree-mudflap.h"
50 #include "expr.h"
51 #include "tm_p.h"
52
/* Per-variable flags recorded in the VARIABLES splay tree of a
   gimplify_omp_ctx, describing how a DECL is used and data-shared
   inside an OMP region.  Values are powers of two and are OR'ed
   together into a single bitmask per variable.  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,
  GOVD_MAP_TO_ONLY = 8192,
  /* Mask selecting the bits that describe the data-sharing class of a
     variable.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
73
74
/* Kind of OMP region currently being gimplified.  Note that some
   values are tested as bit masks (e.g. (region_type & ORT_TASK) in
   new_omp_context), so the numeric values are not arbitrary.  */
enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_TARGET_DATA = 16,
  ORT_TARGET = 32
};
87
/* State tracked for one OMP region during gimplification.  Contexts
   nest via OUTER_CONTEXT, forming a stack rooted at gimplify_omp_ctxp.  */
struct gimplify_omp_ctx
{
  /* Enclosing OMP context, or NULL at the outermost region.  */
  struct gimplify_omp_ctx *outer_context;
  /* Map from DECL (keyed by DECL_UID, see splay_tree_compare_decl_uid)
     to a bitmask of gimplify_omp_var_data flags.  */
  splay_tree variables;
  /* Set of types already privatized in this context.  */
  struct pointer_set_t *privatized_types;
  /* Source location of the region's directive (input_location at the
     time the context was created).  */
  location_t location;
  /* Effective default() clause kind for this region.  */
  enum omp_clause_default_kind default_kind;
  /* What kind of region this context represents.  */
  enum omp_region_type region_type;
  /* True if this is a combined loop construct — TODO(review): confirm
     exact meaning against the users of this field (not visible here).  */
  bool combined_loop;
};
98
/* The innermost gimplification context and the innermost OMP context.
   Both behave as stacks: contexts are linked through their
   prev_context / outer_context pointers (see push_gimplify_context and
   new_omp_context).  */
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;


/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
105
/* Mark X addressable.  Unlike the langhook we expect X to be in gimple
   form and we don't do any syntax checking.  */

void
mark_addressable (tree x)
{
  /* Strip handled component references (and one MEM_REF wrapping an
     ADDR_EXPR) to reach the base object.  */
  while (handled_component_p (x))
    x = TREE_OPERAND (x, 0);
  if (TREE_CODE (x) == MEM_REF
      && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
    x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
  /* Only variables, parameters and the return slot can usefully be
     marked addressable; anything else is silently ignored.  */
  if (TREE_CODE (x) != VAR_DECL
      && TREE_CODE (x) != PARM_DECL
      && TREE_CODE (x) != RESULT_DECL)
    return;
  TREE_ADDRESSABLE (x) = 1;

  /* Also mark the artificial SSA_NAME that points to the partition of X.  */
  if (TREE_CODE (x) == VAR_DECL
      && !DECL_EXTERNAL (x)
      && !TREE_STATIC (x)
      && cfun->gimple_df != NULL
      && cfun->gimple_df->decls_to_pointers != NULL)
    {
      void *namep
	= pointer_map_contains (cfun->gimple_df->decls_to_pointers, x);
      if (namep)
	TREE_ADDRESSABLE (*(tree *)namep) = 1;
    }
}
136
137 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
138 *SEQ_P is NULL, a new sequence is allocated. This function is
139 similar to gimple_seq_add_stmt, but does not scan the operands.
140 During gimplification, we need to manipulate statement sequences
141 before the def/use vectors have been constructed. */
142
143 void
144 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
145 {
146 gimple_stmt_iterator si;
147
148 if (gs == NULL)
149 return;
150
151 si = gsi_last (*seq_p);
152 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
153 }
154
/* Shorter alias name for the above function for use in gimplify.c
   only.  Appends GS to *SEQ_P without operand scanning.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
163
164 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
165 NULL, a new sequence is allocated. This function is
166 similar to gimple_seq_add_seq, but does not scan the operands.
167 During gimplification, we need to manipulate statement sequences
168 before the def/use vectors have been constructed. */
169
170 static void
171 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
172 {
173 gimple_stmt_iterator si;
174
175 if (src == NULL)
176 return;
177
178 si = gsi_last (*dst_p);
179 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
180 }
181
/* Set up a context for the gimplifier.  C is caller-provided storage;
   it is zeroed here and pushed onto the context stack, becoming the
   new innermost gimplify_ctxp.  */

void
push_gimplify_context (struct gimplify_ctx *c)
{
  memset (c, '\0', sizeof (*c));
  /* Link into the stack of contexts.  */
  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
}
191
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* The bind stack must be empty by now: every pushed GIMPLE_BIND
     should have been popped.  */
  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  /* Restore the enclosing context.  */
  gimplify_ctxp = c->prev_context;

  /* Hand the accumulated temporaries either to BODY's bind or to the
     function's local declarations.  */
  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  if (c->temp_htab.is_created ())
    c->temp_htab.dispose ();
}
217
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gimple gimple_bind)
{
  /* Reserve a few slots up front to avoid repeated reallocation.  */
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
}
226
/* Pop the innermost element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
234
/* Return the innermost (most recently pushed) element of the stack of
   bindings.  */

gimple
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
242
/* Return the stack of bindings created during gimplification.  Note
   this returns the vec by value (a shallow copy of the control
   structure), not a reference.  */

vec<gimple>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
250
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.
   The counter is maintained by gimple_push_condition /
   gimple_pop_condition.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
259
/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* Entering the outermost conditional: no conditional cleanups may be
     pending from a previous one.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
271
/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue *PRE_P.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      /* Flush accumulated cleanups now that we are unconditional again.  */
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
287
288 /* A stable comparison routine for use with splay trees and DECLs. */
289
290 static int
291 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
292 {
293 tree a = (tree) xa;
294 tree b = (tree) xb;
295
296 return DECL_UID (a) - DECL_UID (b);
297 }
298
/* Create a new omp construct that deals with variable remapping.
   The new context is linked to (but not installed as) the current
   gimplify_omp_ctxp; REGION_TYPE records what kind of region it is.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  /* Variables are keyed by DECL_UID for a stable ordering.  */
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = pointer_set_create ();
  c->location = input_location;
  c->region_type = region_type;
  /* Task-like regions (ORT_TASK bit set) default to "unspecified"
     data sharing; everything else defaults to shared.  */
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}
319
/* Destroy an omp construct that deals with variable remapping.
   Releases the splay tree, the privatized-types set, and C itself.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  pointer_set_destroy (c->privatized_types);
  XDELETE (c);
}
329
/* Forward declarations for OMP helpers defined later in this file.  */
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
332
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference (the possibly-rewritten tree is discarded).  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
342
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember where the sequence ended before gimplification.  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* The sequence was non-empty before: the first statement produced
	 for T is the one immediately after the old last statement.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* The sequence was empty before, so everything in it came from T.  */
    return gimple_seq_first_stmt (*seq_p);
}
362
/* Strip a source-file-style suffix from NAME (of length LEN) in place.
   Rather than knowing the suffixes used by every front end, we strip a
   trailing period followed by one to six characters (Java, for
   instance, uses ".class").  If no such suffix exists, NAME is left
   untouched.  */

static inline void
remove_suffix (char *name, int len)
{
  int pos;

  /* Try dot positions from nearest the end outward; truncate at the
     first dot found and stop.  */
  for (pos = 2; pos < 8 && len > pos; pos++)
    if (name[len - pos] == '.')
      {
	name[len - pos] = '\0';
	return;
      }
}
382
/* Create a new temporary name with PREFIX.  Return an identifier.  */

/* Monotonically increasing counter making each generated name unique;
   kept in GC roots (GTY) so precompiled-header save/restore preserves
   it.  */
static GTY(()) unsigned int tmp_var_id_num;

tree
create_tmp_var_name (const char *prefix)
{
  char *tmp_name;

  if (prefix)
    {
      /* Work on a stack copy: strip any file-style suffix and
	 characters not valid in symbol names.  */
      char *preftmp = ASTRDUP (prefix);

      remove_suffix (preftmp, strlen (preftmp));
      clean_symbol_name (preftmp);

      prefix = preftmp;
    }

  /* Fall back to "T" when no prefix was supplied.  */
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
  return get_identifier (tmp_name);
}
405
/* Create a new temporary variable declaration of type TYPE.
   Do NOT push it into the current binding.  PREFIX, if non-NULL,
   seeds the generated name; otherwise the decl is anonymous.  */

tree
create_tmp_var_raw (tree type, const char *prefix)
{
  tree tmp_var;

  tmp_var = build_decl (input_location,
			VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
			type);

  /* The variable was declared by the compiler.  */
  DECL_ARTIFICIAL (tmp_var) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (tmp_var) = 1;

  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  /* A local, non-static definition that is considered used.  */
  DECL_EXTERNAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}
432
/* Create a new temporary variable declaration of type TYPE.  DO push the
   variable into the current binding.  Further, assume that this is called
   only from gimplification or optimization, at which point the creation of
   certain types are bugs.  */

tree
create_tmp_var (tree type, const char *prefix)
{
  tree tmp_var;

  /* We don't allow types that are addressable (meaning we can't make copies),
     or incomplete.  We also used to reject every variable size objects here,
     but now support those for which a constant upper bound can be obtained.
     The processing for variable sizes is performed in gimple_add_tmp_var,
     point at which it really matters and possibly reached via paths not going
     through this function, e.g. after direct calls to create_tmp_var_raw.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  tmp_var = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var (tmp_var);
  return tmp_var;
}
455
/* Create a new temporary variable declaration of type TYPE by calling
   create_tmp_var and if TYPE is a vector or a complex number, mark the new
   temporary as a gimple register (so its components can be treated as
   registers rather than forcing the aggregate into memory).  */

tree
create_tmp_reg (tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var (type, prefix);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}
472
/* Returns true iff T is a valid RHS for an assignment to a renamed
   user -- or front-end generated artificial -- variable.  Any tree
   code with a defined gimple RHS class qualifies.  */

static bool
is_gimple_reg_rhs (tree t)
{
  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
}
481
482 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
483 LHS, or for a call argument. */
484
485 static bool
486 is_gimple_mem_rhs (tree t)
487 {
488 /* If we're dealing with a renamable type, either source or dest must be
489 a renamed variable. */
490 if (is_gimple_reg_type (TREE_TYPE (t)))
491 return is_gimple_val (t);
492 else
493 return is_gimple_val (t) || is_gimple_lvalue (t);
494 }
495
496 /* Return true if T is a CALL_EXPR or an expression that can be
497 assigned to a temporary. Note that this predicate should only be
498 used during gimplification. See the rationale for this in
499 gimplify_modify_expr. */
500
501 static bool
502 is_gimple_reg_rhs_or_call (tree t)
503 {
504 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
505 || TREE_CODE (t) == CALL_EXPR);
506 }
507
508 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
509 this predicate should only be used during gimplification. See the
510 rationale for this in gimplify_modify_expr. */
511
512 static bool
513 is_gimple_mem_rhs_or_call (tree t)
514 {
515 /* If we're dealing with a renamable type, either source or dest must be
516 a renamed variable. */
517 if (is_gimple_reg_type (TREE_TYPE (t)))
518 return is_gimple_val (t);
519 else
520 return (is_gimple_val (t) || is_gimple_lvalue (t)
521 || TREE_CODE (t) == CALL_EXPR);
522 }
523
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  IS_FORMAL
   additionally marks complex/vector temporaries as gimple registers.  */

static inline tree
create_tmp_from_val (tree val, bool is_formal)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (is_formal
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}
539
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary (cached in the per-context hash table
   keyed on VAL).  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val, is_formal);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      /* Look VAL up in the context's hash table, creating the table
	 lazily on first use.  */
      elt.val = val;
      if (!gimplify_ctxp->temp_htab.is_created ())
	gimplify_ctxp->temp_htab.create (1000);
      slot = gimplify_ctxp->temp_htab.find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First time we see VAL: create a temporary and cache it.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val, is_formal);
	  *slot = elt_p;
	}
      else
	{
	  /* Reuse the temporary created for an identical VAL earlier.  */
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
580
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL, store it into a temporary (an SSA name when gimplifying into
   SSA form and the type permits, otherwise a VAR_DECL from
   lookup_tmp_var), emit the initialization into *PRE_P, and return the
   temporary.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  /* The INIT_EXPR node itself is no longer needed once gimplified.  */
  ggc_free (mod);

  return t;
}
610
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}
628
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  Unlike get_formal_tmp_var, the temporary
   is never reused for equal values.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
637
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  SCOPE must be a
   GIMPLE_BIND; VARS is a DECL_CHAIN list that is reversed and spliced
   onto the bind's variable list.  */

void
declare_vars (tree vars, gimple scope, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gcc_assert (gimple_code (scope) == GIMPLE_BIND);

      /* After nreverse, TEMPS is the new head of the list and LAST
	 (the original head) is its tail.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* Splice TEMPS in front of the bind's existing vars by
	     chaining the old list after the tail of TEMPS.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
677
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  /* A negative result means no upper bound could be determined.  */
  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
701
/* Push the temporary variable TMP into the current binding.  TMP must
   not already be chained anywhere or have been seen in a BIND_EXPR.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* During gimplification, record TMP on the context's list; it will
	 be declared when the context is popped.  */
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Workshare and simd regions don't own locals; walk out to the
	     enclosing region that does.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
747
748 /* Determine whether to assign a location to the statement GS. */
749
750 static bool
751 should_carry_location_p (gimple gs)
752 {
753 /* Don't emit a line note for a label. We particularly don't want to
754 emit one for the break label, since it doesn't actually correspond
755 to the beginning of the loop/switch. */
756 if (gimple_code (gs) == GIMPLE_LABEL)
757 return false;
758
759 return true;
760 }
761
/* Return true if a location should not be emitted for this statement
   by annotate_one_with_location.  The flag lives in pass-local-flag 1
   (GF_PLF_1); see gimple_set_do_not_emit_location.  */

static inline bool
gimple_do_not_emit_location_p (gimple g)
{
  return gimple_plf (g, GF_PLF_1);
}
770
/* Mark statement G so a location will not be emitted by
   annotate_one_with_location.  */

static inline void
gimple_set_do_not_emit_location (gimple g)
{
  /* The PLF flags are initialized to 0 when a new tuple is created,
     so no need to initialize it anywhere.  */
  gimple_set_plf (g, GF_PLF_1, true);
}
781
782 /* Set the location for gimple statement GS to LOCATION. */
783
784 static void
785 annotate_one_with_location (gimple gs, location_t location)
786 {
787 if (!gimple_has_location (gs)
788 && !gimple_do_not_emit_location_p (gs)
789 && should_carry_location_p (gs))
790 gimple_set_location (gs, location);
791 }
792
/* Set LOCATION for all the statements after iterator GSI in sequence
   SEQ.  If GSI is pointing to the end of the sequence, start with the
   first statement in SEQ.  */

static void
annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
				  location_t location)
{
  /* Position on the first statement to annotate: either the start of
     the sequence or the statement following GSI.  */
  if (gsi_end_p (gsi))
    gsi = gsi_start (seq);
  else
    gsi_next (&gsi);

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    annotate_one_with_location (gsi_stmt (gsi), location);
}
809
810 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
811
812 void
813 annotate_all_with_location (gimple_seq stmt_p, location_t location)
814 {
815 gimple_stmt_iterator i;
816
817 if (gimple_seq_empty_p (stmt_p))
818 return;
819
820 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
821 {
822 gimple gs = gsi_stmt (i);
823 annotate_one_with_location (gs, location);
824 }
825 }
826 \f
827 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
828 nodes that are referenced more than once in GENERIC functions. This is
829 necessary because gimplification (translation into GIMPLE) is performed
830 by modifying tree nodes in-place, so gimplication of a shared node in a
831 first context could generate an invalid GIMPLE form in a second context.
832
833 This is achieved with a simple mark/copy/unmark algorithm that walks the
834 GENERIC representation top-down, marks nodes with TREE_VISITED the first
835 time it encounters them, duplicates them if they already have TREE_VISITED
836 set, and finally removes the TREE_VISITED marks it has set.
837
838 The algorithm works only at the function level, i.e. it generates a GENERIC
839 representation of a function with no nodes shared within the function when
840 passed a GENERIC function (except for nodes that are allowed to be shared).
841
842 At the global level, it is also necessary to unshare tree nodes that are
843 referenced in more than one function, for the same aforementioned reason.
844 This requires some cooperation from the front-end. There are 2 strategies:
845
846 1. Manual unsharing. The front-end needs to call unshare_expr on every
847 expression that might end up being shared across functions.
848
849 2. Deep unsharing. This is an extension of regular unsharing. Instead
850 of calling unshare_expr on expressions that might be shared across
851 functions, the front-end pre-marks them with TREE_VISITED. This will
852 ensure that they are unshared on the first reference within functions
853 when the regular unsharing algorithm runs. The counterpart is that
854 this algorithm must look deeper than for manual unsharing, which is
855 specified by LANG_HOOKS_DEEP_UNSHARING.
856
857 If there are only few specific cases of node sharing across functions, it is
858 probably easier for a front-end to unshare the expressions manually. On the
859 contrary, if the expressions generated at the global level are as widespread
860 as expressions generated within functions, deep unsharing is very likely the
861 way to go. */
862
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  The
     pointer set records which of these nodes have been seen already.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
	;  /* First visit: fall through and walk the subtrees.  */
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
906
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  The copy made by mostly_copy_tree_r is fresh, so its
     subtrees need no further unsharing.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
945
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified (it is the pointer set used for
   deep unsharing, or NULL).  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
954
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions (found via the cgraph nested-function chain).  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  struct pointer_set_t *visited
    = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;

  /* The saved tree plus the size expressions of the result decl can all
     contain shared nodes.  */
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  if (visited)
    pointer_set_destroy (visited);

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->symbol.decl);
}
978
/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered
   (an unvisited node implies its subtree was never marked either).  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}
997
/* Unmark the visited trees rooted at *TP (clears TREE_VISITED set by a
   prior copy_if_shared pass).  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
1005
/* Likewise, but mark all trees as not visited.  Mirrors unshare_body:
   clears the marks on the same three roots and recurses into nested
   functions.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_get_node (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->symbol.decl);
}
1021
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
1032
1033 /* Worker for unshare_expr_without_location. */
1034
1035 static tree
1036 prune_expr_location (tree *tp, int *walk_subtrees, void *)
1037 {
1038 if (EXPR_P (*tp))
1039 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1040 else
1041 *walk_subtrees = 0;
1042 return NULL_TREE;
1043 }
1044
1045 /* Similar to unshare_expr but also prune all expression locations
1046 from EXPR. */
1047
1048 tree
1049 unshare_expr_without_location (tree expr)
1050 {
1051 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1052 if (EXPR_P (expr))
1053 walk_tree (&expr, prune_expr_location, NULL, NULL);
1054 return expr;
1055 }
1056 \f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   TEMP, if non-NULL, is an INIT_EXPR/MODIFY_EXPR whose RHS is WRAPPER;
   the assignment is pushed down onto the innermost value-producing
   statement instead of creating a new temporary.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Every wrapper traversed is
	 voidified on the way down, since its value now flows through
	 the temporary instead.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      /* Three outcomes: no value-producing statement (drop TEMP), push the
	 caller-provided assignment down onto it, or wrap it in an INIT_EXPR
	 to a fresh "retval" temporary.  */
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1153
1154 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1155 a temporary through which they communicate. */
1156
1157 static void
1158 build_stack_save_restore (gimple *save, gimple *restore)
1159 {
1160 tree tmp_var;
1161
1162 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1163 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1164 gimple_call_set_lhs (*save, tmp_var);
1165
1166 *restore
1167 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1168 1, tmp_var);
1169 }
1170
/* Gimplify a BIND_EXPR.  Just voidify and recurse.

   *EXPR_P is replaced by the voidified temporary (GS_OK) or NULL_TREE
   (GS_ALL_DONE); the resulting GIMPLE_BIND is appended to PRE_P.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body, cleanup;
  gimple stack_save;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  /* Reset save_stack so we can tell whether anything inside this body
     (e.g. a VLA) requested a stack save; restored below.  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  cleanup = NULL;
  stack_save = NULL;
  if (gimplify_ctxp->save_stack)
    {
      gimple stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  Note that mudflap depends on the
	 format of the emitted code: see mx_register_decls().  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  /* An empty CONSTRUCTOR with TREE_THIS_VOLATILE set is GIMPLE's
	     representation of a variable clobber.  */
	  tree clobber = build_constructor (TREE_TYPE (t),
					    NULL);
	  TREE_THIS_VOLATILE (clobber) = 1;
	  gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
	}
    }

  if (cleanup)
    {
      gimple gs;
      gimple_seq new_body;

      /* Wrap the body in a try/finally so the stack restore and clobbers
	 run on every exit path.  */
      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
	  		     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1291
1292 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1293 GIMPLE value, it is assigned to a new temporary and the statement is
1294 re-written to return the temporary.
1295
1296 PRE_P points to the sequence where side effects that must happen before
1297 STMT should be stored. */
1298
1299 static enum gimplify_status
1300 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1301 {
1302 gimple ret;
1303 tree ret_expr = TREE_OPERAND (stmt, 0);
1304 tree result_decl, result;
1305
1306 if (ret_expr == error_mark_node)
1307 return GS_ERROR;
1308
1309 if (!ret_expr
1310 || TREE_CODE (ret_expr) == RESULT_DECL
1311 || ret_expr == error_mark_node)
1312 {
1313 gimple ret = gimple_build_return (ret_expr);
1314 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1315 gimplify_seq_add_stmt (pre_p, ret);
1316 return GS_ALL_DONE;
1317 }
1318
1319 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1320 result_decl = NULL_TREE;
1321 else
1322 {
1323 result_decl = TREE_OPERAND (ret_expr, 0);
1324
1325 /* See through a return by reference. */
1326 if (TREE_CODE (result_decl) == INDIRECT_REF)
1327 result_decl = TREE_OPERAND (result_decl, 0);
1328
1329 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1330 || TREE_CODE (ret_expr) == INIT_EXPR)
1331 && TREE_CODE (result_decl) == RESULT_DECL);
1332 }
1333
1334 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1335 Recall that aggregate_value_p is FALSE for any aggregate type that is
1336 returned in registers. If we're returning values in registers, then
1337 we don't want to extend the lifetime of the RESULT_DECL, particularly
1338 across another call. In addition, for those aggregates for which
1339 hard_function_value generates a PARALLEL, we'll die during normal
1340 expansion of structure assignments; there's special code in expand_return
1341 to handle this case that does not exist in expand_expr. */
1342 if (!result_decl)
1343 result = NULL_TREE;
1344 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1345 {
1346 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1347 {
1348 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1349 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1350 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1351 should be effectively allocated by the caller, i.e. all calls to
1352 this function must be subject to the Return Slot Optimization. */
1353 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1354 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1355 }
1356 result = result_decl;
1357 }
1358 else if (gimplify_ctxp->return_temp)
1359 result = gimplify_ctxp->return_temp;
1360 else
1361 {
1362 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1363
1364 /* ??? With complex control flow (usually involving abnormal edges),
1365 we can wind up warning about an uninitialized value for this. Due
1366 to how this variable is constructed and initialized, this is never
1367 true. Give up and never warn. */
1368 TREE_NO_WARNING (result) = 1;
1369
1370 gimplify_ctxp->return_temp = result;
1371 }
1372
1373 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1374 Then gimplify the whole thing. */
1375 if (result != result_decl)
1376 TREE_OPERAND (ret_expr, 0) = result;
1377
1378 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1379
1380 ret = gimple_build_return (result);
1381 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1382 gimplify_seq_add_stmt (pre_p, ret);
1383
1384 return GS_ALL_DONE;
1385 }
1386
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  Note that mudflap depends on the format
     of the emitted code: see mx_register_decls().  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The indirection through the alloca'd pointer can never trap.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* Allocate the object with __builtin_alloca_with_align, using the
     decl's size in bytes and its required alignment.  */
  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
1431
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.

   The DECL_EXPR itself is consumed (*STMT_P is set to NULL_TREE); any
   generated statements go onto SEQ_P.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Treat as a VLA any decl whose size is not a compile-time constant,
	 and also large fixed-size decls under -fstack-check=generic.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR statement;
		 the original initial value is no longer needed.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1495
1496 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1497 and replacing the LOOP_EXPR with goto, but if the loop contains an
1498 EXIT_EXPR, we need to append a label for it to jump to. */
1499
1500 static enum gimplify_status
1501 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1502 {
1503 tree saved_label = gimplify_ctxp->exit_label;
1504 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1505
1506 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1507
1508 gimplify_ctxp->exit_label = NULL_TREE;
1509
1510 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1511
1512 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1513
1514 if (gimplify_ctxp->exit_label)
1515 gimplify_seq_add_stmt (pre_p,
1516 gimple_build_label (gimplify_ctxp->exit_label));
1517
1518 gimplify_ctxp->exit_label = saved_label;
1519
1520 *expr_p = NULL;
1521 return GS_ALL_DONE;
1522 }
1523
1524 /* Gimplify a statement list onto a sequence. These may be created either
1525 by an enlightened front-end, or by shortcut_cond_expr. */
1526
1527 static enum gimplify_status
1528 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1529 {
1530 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1531
1532 tree_stmt_iterator i = tsi_start (*expr_p);
1533
1534 while (!tsi_end_p (i))
1535 {
1536 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1537 tsi_delink (&i);
1538 }
1539
1540 if (temp)
1541 {
1542 *expr_p = temp;
1543 return GS_OK;
1544 }
1545
1546 return GS_ALL_DONE;
1547 }
1548 \f
1549 /* Compare two case labels. Because the front end should already have
1550 made sure that case ranges do not overlap, it is enough to only compare
1551 the CASE_LOW values of each case label. */
1552
1553 static int
1554 compare_case_labels (const void *p1, const void *p2)
1555 {
1556 const_tree const case1 = *(const_tree const*)p1;
1557 const_tree const case2 = *(const_tree const*)p2;
1558
1559 /* The 'default' case label always goes first. */
1560 if (!CASE_LOW (case1))
1561 return -1;
1562 else if (!CASE_LOW (case2))
1563 return 1;
1564 else
1565 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1566 }
1567
1568 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1569
1570 void
1571 sort_case_labels (vec<tree> label_vec)
1572 {
1573 label_vec.qsort (compare_case_labels);
1574 }
1575 \f
/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.

   LABELS is a vector that contains all case labels to look at.

   INDEX_TYPE is the type of the switch index expression.  Case labels
   in LABELS are discarded if their values are not in the value range
   covered by INDEX_TYPE.  The remaining case label values are folded
   to INDEX_TYPE.

   If a default case exists in LABELS, it is removed from LABELS and
   returned in DEFAULT_CASEP.  If no default case exists, but the
   case labels already cover the whole range of INDEX_TYPE, a default
   case is returned pointing to one of the existing case labels.
   Otherwise DEFAULT_CASEP is set to NULL_TREE.

   DEFAULT_CASEP may be NULL, in which case the above comment doesn't
   apply and no action is taken regardless of whether a default case is
   found or not.  */

void
preprocess_case_label_vec_for_gimple (vec<tree> labels,
				      tree index_type,
				      tree *default_casep)
{
  tree min_value, max_value;
  tree default_case = NULL_TREE;
  size_t i, len;

  i = 0;
  min_value = TYPE_MIN_VALUE (index_type);
  max_value = TYPE_MAX_VALUE (index_type);
  while (i < labels.length ())
    {
      tree elt = labels[i];
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      bool remove_element = FALSE;

      if (low)
	{
	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);

	  /* This is a non-default case label, i.e. it has a value.

	     See if the case label is reachable within the range of
	     the index type.  Remove out-of-range case values.  Turn
	     case ranges into a canonical form (high > low strictly)
	     and convert the case label values to the index type.

	     NB: The type of gimple_switch_index() may be the promoted
	     type, but the case labels retain the original type.  */

	  if (high)
	    {
	      /* This is a case range.  Discard empty ranges.
		 If the bounds or the range are equal, turn this
		 into a simple (one-value) case.  */
	      int cmp = tree_int_cst_compare (high, low);
	      if (cmp < 0)
		remove_element = TRUE;
	      else if (cmp == 0)
		high = NULL_TREE;
	    }

	  if (! high)
	    {
	      /* If the simple case value is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (low, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		low = fold_convert (index_type, low);
	    }
	  else
	    {
	      /* If the entire case range is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (high, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		{
		  /* If the lower bound is less than the index type's
		     minimum value, truncate the range bounds.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (low, min_value) < 0)
		    low = min_value;
		  low = fold_convert (index_type, low);

		  /* If the upper bound is greater than the index type's
		     maximum value, truncate the range bounds.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (high, max_value) > 0)
		    high = max_value;
		  high = fold_convert (index_type, high);

		  /* We may have folded a case range to a one-value case.  */
		  if (tree_int_cst_equal (low, high))
		    high = NULL_TREE;
		}
	    }

	  CASE_LOW (elt) = low;
	  CASE_HIGH (elt) = high;
	}
      else
	{
	  gcc_assert (!default_case);
	  default_case = elt;
	  /* The default case must be passed separately to the
	     gimple_build_switch routine.  But if DEFAULT_CASEP
	     is NULL, we do not remove the default case (it would
	     be completely lost).  */
	  if (default_casep)
	    remove_element = TRUE;
	}

      if (remove_element)
	labels.ordered_remove (i);
      else
	i++;
    }
  /* After the loop, I is the number of labels that survived.  */
  len = i;

  if (!labels.is_empty ())
    sort_case_labels (labels);

  if (default_casep && !default_case)
    {
      /* If the switch has no default label, add one, so that we jump
	 around the switch body.  If the labels already cover the whole
	 range of the switch index_type, add the default label pointing
	 to one of the existing labels.  */
      if (len
	  && TYPE_MIN_VALUE (index_type)
	  && TYPE_MAX_VALUE (index_type)
	  && tree_int_cst_equal (CASE_LOW (labels[0]),
				 TYPE_MIN_VALUE (index_type)))
	{
	  tree low, high = CASE_HIGH (labels[len - 1]);
	  if (!high)
	    high = CASE_LOW (labels[len - 1]);
	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
	    {
	      /* Walk consecutive label pairs and verify there is no gap:
		 each label's low bound must be exactly one more than the
		 previous label's high bound (double-word arithmetic via
		 the LOW/HIGH halves of the INTEGER_CSTs).  */
	      for (i = 1; i < len; i++)
		{
		  high = CASE_LOW (labels[i]);
		  low = CASE_HIGH (labels[i - 1]);
		  if (!low)
		    low = CASE_LOW (labels[i - 1]);
		  if ((TREE_INT_CST_LOW (low) + 1
		       != TREE_INT_CST_LOW (high))
		      || (TREE_INT_CST_HIGH (low)
			  + (TREE_INT_CST_LOW (high) == 0)
			  != TREE_INT_CST_HIGH (high)))
		    break;
		}
	      if (i == len)
		{
		  /* No gaps: the labels cover the whole range, so any
		     existing label can serve as the default target.  */
		  tree label = CASE_LABEL (labels[0]);
		  default_case = build_case_label (NULL_TREE, NULL_TREE,
						   label);
		}
	    }
	}
    }

  if (default_casep)
    *default_casep = default_case;
}
1750 \f
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.

   The switch condition is gimplified to a GIMPLE value, the body is
   gimplified with a fresh case-label vector, and the resulting
   GIMPLE_SWITCH plus body are appended to PRE_P.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Gimplifying the body pushes each CASE_LABEL_EXPR it meets onto
	 gimplify_ctxp->case_labels (see gimplify_case_label_expr).  */
      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      if (!default_case)
	{
	  /* No default label and the cases do not cover the index range:
	     synthesize a default that falls through past the body.  */
	  gimple new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
					   default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1814
1815 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
1816
1817 static enum gimplify_status
1818 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1819 {
1820 struct gimplify_ctx *ctxp;
1821 gimple gimple_label;
1822
1823 /* Invalid OpenMP programs can play Duff's Device type games with
1824 #pragma omp parallel. At least in the C front end, we don't
1825 detect such invalid branches until after gimplification. */
1826 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1827 if (ctxp->case_labels.exists ())
1828 break;
1829
1830 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1831 ctxp->case_labels.safe_push (*expr_p);
1832 gimplify_seq_add_stmt (pre_p, gimple_label);
1833
1834 return GS_ALL_DONE;
1835 }
1836
1837 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1838 if necessary. */
1839
1840 tree
1841 build_and_jump (tree *label_p)
1842 {
1843 if (label_p == NULL)
1844 /* If there's nowhere to jump, just fall through. */
1845 return NULL_TREE;
1846
1847 if (*label_p == NULL_TREE)
1848 {
1849 tree label = create_artificial_label (UNKNOWN_LOCATION);
1850 *label_p = label;
1851 }
1852
1853 return build1 (GOTO_EXPR, void_type_node, *label_p);
1854 }
1855
1856 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1857 This also involves building a label to jump to and communicating it to
1858 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1859
1860 static enum gimplify_status
1861 gimplify_exit_expr (tree *expr_p)
1862 {
1863 tree cond = TREE_OPERAND (*expr_p, 0);
1864 tree expr;
1865
1866 expr = build_and_jump (&gimplify_ctxp->exit_label);
1867 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1868 *expr_p = expr;
1869
1870 return GS_OK;
1871 }
1872
1873 /* A helper function to be called via walk_tree. Mark all labels under *TP
1874 as being forced. To be called for DECL_INITIAL of static variables. */
1875
1876 tree
1877 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1878 {
1879 if (TYPE_P (*tp))
1880 *walk_subtrees = 0;
1881 if (TREE_CODE (*tp) == LABEL_DECL)
1882 FORCED_LABEL (*tp) = 1;
1883
1884 return NULL_TREE;
1885 }
1886
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral references, get_unwidened finds the narrower mode a
     bit-field can be read in; otherwise the field's declared type is
     canonical.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1937
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.

   Each guard below bails out (leaving *EXPR_P untouched) when the
   transformation does not apply.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
1991
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.

   Steps, in order: strip redundant inner conversions, drop the outer one
   if useless, canonicalize COMPONENT_REF/ADDR_EXPR operands, and finally
   force a VIEW_CONVERT_EXPR for conversions to non-register types.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  return GS_OK;
}
2034
/* Nonlocal VLAs seen in the current function.  Used by
   gimplify_var_or_parm_decl to give each referenced nonlocal VLA a
   local debug-only copy exactly once.  */
static struct pointer_set_t *nonlocal_vlas;
2037
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  Returns
   GS_ERROR only for a leaked duplicate local declaration (see below),
   otherwise GS_ALL_DONE when no substitution took place.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OpenMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  A nonlocal VLA is recognized by a
	 non-constant size whose value expression dereferences a VAR_DECL
	 (the saved pointer) and which belongs to an enclosing function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip out of workshare/simd regions; only add the debug copy
	     when we are not inside any other OpenMP region.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  /* pointer_set_insert returns nonzero if DECL was already in the
	     set, so each VLA gets at most one debug copy.  */
	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
	    {
	      tree copy = copy_node (decl), block;

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      /* Chain the copy onto the outermost block of the current
		 function so the debugger can see it.  */
	      block = DECL_INITIAL (current_function_decl);
	      DECL_CHAIN (copy) = BLOCK_VARS (block);
	      BLOCK_VARS (block) = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
2105
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.

   FALLBACK controls what kind of temporary gimplify_expr may create.
   Returns the minimum (i.e. most pessimistic) status over all the
   sub-gimplifications performed.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  vec<tree> expr_stack;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  expr_stack.create (10);

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      /* Only store the bound when it is not invariant; invariant
		 bounds are recomputed on demand.  */
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  Operand 3 stores the size in units of
		 that alignment.  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  Operand 2 stores the
		 offset in units of DECL_OFFSET_ALIGN.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  Walking pops the
     stack, i.e. proceeds from innermost ref to outermost.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
2309
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.

   ARITH_TYPE is the type the computation should be performed in.

   Returns GS_ALL_DONE for a fully-handled postfix expression,
   GS_OK when a MODIFY_EXPR was built for further gimplification,
   or GS_ERROR if gimplifying an operand failed.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  We collect
     them in a local queue and splice it into ORIG_POST_P at the end.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Snapshot the pre-increment value in a temporary; that temporary
	 becomes the value of the whole expression.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  A decrement is
     expressed as adding the negated offset.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2407
2408 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2409
2410 static void
2411 maybe_with_size_expr (tree *expr_p)
2412 {
2413 tree expr = *expr_p;
2414 tree type = TREE_TYPE (expr);
2415 tree size;
2416
2417 /* If we've already wrapped this or the type is error_mark_node, we can't do
2418 anything. */
2419 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2420 || type == error_mark_node)
2421 return;
2422
2423 /* If the size isn't known or is a constant, we have nothing to do. */
2424 size = TYPE_SIZE_UNIT (type);
2425 if (!size || TREE_CODE (size) == INTEGER_CST)
2426 return;
2427
2428 /* Otherwise, make a WITH_SIZE_EXPR. */
2429 size = unshare_expr (size);
2430 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2431 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2432 }
2433
2434 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2435 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2436 the CALL_EXPR. */
2437
2438 static enum gimplify_status
2439 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2440 {
2441 bool (*test) (tree);
2442 fallback_t fb;
2443
2444 /* In general, we allow lvalues for function arguments to avoid
2445 extra overhead of copying large aggregates out of even larger
2446 aggregates into temporaries only to copy the temporaries to
2447 the argument list. Make optimizers happy by pulling out to
2448 temporaries those types that fit in registers. */
2449 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2450 test = is_gimple_val, fb = fb_rvalue;
2451 else
2452 {
2453 test = is_gimple_lvalue, fb = fb_either;
2454 /* Also strip a TARGET_EXPR that would force an extra copy. */
2455 if (TREE_CODE (*arg_p) == TARGET_EXPR)
2456 {
2457 tree init = TARGET_EXPR_INITIAL (*arg_p);
2458 if (init
2459 && !VOID_TYPE_P (TREE_TYPE (init)))
2460 *arg_p = init;
2461 }
2462 }
2463
2464 /* If this is a variable sized type, we must remember the size. */
2465 maybe_with_size_expr (arg_p);
2466
2467 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2468 /* Make sure arguments have the same location as the function call
2469 itself. */
2470 protected_set_expr_location (*arg_p, call_location);
2471
2472 /* There is a sequence point before a function call. Side effects in
2473 the argument list must occur before the actual call. So, when
2474 gimplifying arguments, force gimplify_expr to use an internal
2475 post queue which is then appended to the end of PRE_P. */
2476 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2477 }
2478
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  When
   WANT_VALUE is false the call is emitted into PRE_P as a GIMPLE_CALL
   and *EXPR_P is cleared; otherwise *EXPR_P remains a gimplified
   CALL_EXPR for gimplify_modify_expr to consume.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gimple call;
  bool builtin_va_start_p = FALSE;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_VA_START:
        {
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}
      case BUILT_IN_LINE:
	{
	  /* __builtin_LINE () folds to the call's source line number.  */
	  expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
	  *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
	  return GS_OK;
	}
      case BUILT_IN_FILE:
	{
	  /* __builtin_FILE () folds to the call's source file name.  */
	  expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
	  *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
	  return GS_OK;
	}
      case BUILT_IN_FUNCTION:
	{
	  /* __builtin_FUNCTION () folds to the enclosing function name.  */
	  const char *function;
	  function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
	  *expr_p = build_string_literal (strlen (function) + 1, function);
	  return GS_OK;
	}
      default:
        ;
      }
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Advance P past the named parameters; afterwards P == NULL_TREE
     means argument I and beyond are unnamed (variadic).  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Finally, gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      /* Don't fold stmts inside of target construct.  We'll do it
	 during omplower pass instead.  */
      struct gimplify_omp_ctx *ctx;
      for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
	if (ctx->region_type == ORT_TARGET)
	  break;
      if (ctx == NULL)
	fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
2729
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate becomes a two-way jump.
	 build_and_jump creates the label lazily through the pointer.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If a local fallthrough label was created above, emit it here so
     control rejoins after the short-circuited subexpression.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2839
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.
   Returns the rewritten statement tree (which may be EXPR itself if
   no short-circuit operators remain after the simple transforms).  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_HERE (expr);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_HERE (expr);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_HERE (expr));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_HERE (expr));

  /* Assemble the rewritten statement sequence: condition jumps, then
     arm, optional jump over the else arm, else label + arm, end label.  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
3016
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.
   Returns EXPR itself (possibly with its type or operands rewritten in
   place) or a conversion of EXPR to boolean_type_node.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* Special-case `__builtin_expect (...) != 0' so that the truth value
     inside the builtin's first argument is also boolified.  */
  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
         if x is truth_value_p.  */
      if (fn
          && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
          && call_expr_nargs (call) == 2)
        {
          tree arg = CALL_EXPR_ARG (call, 0);
          if (arg)
            {
              /* Skip the (long) cast the front end wraps around the
                 condition, then boolify the underlying truth value and
                 convert it back to the call's argument type.  */
              if (TREE_CODE (arg) == NOP_EXPR
                  && TREE_TYPE (arg) == TREE_TYPE (call))
                arg = TREE_OPERAND (arg, 0);
              if (truth_value_p (TREE_CODE (arg)))
                {
                  arg = gimple_boolify (arg);
                  CALL_EXPR_ARG (call, 0)
                    = fold_convert_loc (loc, TREE_TYPE (call), arg);
                }
            }
        }
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
        TREE_TYPE (expr) = boolean_type_node;
      return expr;

    default:
      if (COMPARISON_CLASS_P (expr))
        {
          /* These expressions always produce boolean results.  */
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        }
      /* Other expressions that get here must have boolean values, but
         might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
3089
/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  Returns the
   weakest gimplify_status of the three operand gimplifications.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  /* Force the condition into boolean form first.  */
  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.
     Demote the short-circuit forms to their unconditional counterparts;
     this is safe here because the whole expression has no side effects.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  /* Both arms are evaluated unconditionally, so plain gimple values
     suffice for them.  */
  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}
3120
3121 /* Return true if evaluating EXPR could trap.
3122 EXPR is GENERIC, while tree_could_trap_p can be called
3123 only on GIMPLE. */
3124
3125 static bool
3126 generic_expr_could_trap_p (tree expr)
3127 {
3128 unsigned i, n;
3129
3130 if (!expr || is_gimple_val (expr))
3131 return false;
3132
3133 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3134 return true;
3135
3136 n = TREE_OPERAND_LENGTH (expr);
3137 for (i = 0; i < n; i++)
3138 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3139 return true;
3140
3141 return false;
3142 }
3143
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else			or	else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  FALLBACK describes what kinds of values
   the caller can accept for the result.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gimple gimple_cond;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is just a goto to a local label, reuse that label as the
     branch target instead of emitting a fresh label plus a jump.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  /* Split the boolean condition into a comparison code and its two
     operands, then emit the GIMPLE_COND branching to the labels.  */
  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);

  gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
				   label_false);

  gimplify_seq_add_stmt (&seq, gimple_cond);
  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3384
3385 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3386 to be marked addressable.
3387
3388 We cannot rely on such an expression being directly markable if a temporary
3389 has been created by the gimplification. In this case, we create another
3390 temporary and initialize it with a copy, which will become a store after we
3391 mark it addressable. This can happen if the front-end passed us something
3392 that it could not mark addressable yet, like a Fortran pass-by-reference
3393 parameter (int) floatvar. */
3394
3395 static void
3396 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3397 {
3398 while (handled_component_p (*expr_p))
3399 expr_p = &TREE_OPERAND (*expr_p, 0);
3400 if (is_gimple_reg (*expr_p))
3401 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3402 }
3403
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.  SIZE is the byte count passed to memcpy,
   WANT_VALUE is true when the value of the assignment is itself used,
   and SEQ_P receives the generated statements.  Always returns
   GS_ALL_DONE; on success *EXPR_P is the replacement value (or NULL
   when the assignment's value is unused).  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
                                gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gimple gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* memcpy returns the destination pointer; dereference it to yield
         the assignment's value.  */
      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
3449
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  SIZE is the byte count
   passed to memset, WANT_VALUE is true when the value of the assignment
   is itself used, and SEQ_P receives the generated statements.  Always
   returns GS_ALL_DONE.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
                                gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gimple gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
              && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  /* Zero-fill the destination: memset (&to, 0, size).  */
  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* memset returns the destination pointer; dereference it to yield
         the assignment's value.  */
      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
3496
3497 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3498 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3499 assignment. Return non-null if we detect a potential overlap. */
3500
/* Bundle of lhs information threaded through walk_tree as the XDATA
   argument of gimplify_init_ctor_preeval_1.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
3510
/* walk_tree callback for gimplify_init_ctor_preeval.  *TP is a subtree
   of a constructor value and XDATA points to a
   gimplify_init_ctor_preeval_data describing the lhs.  Returns *TP (a
   non-null tree, which stops the walk) when a potential overlap with
   the lhs is detected, NULL otherwise.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Check each pointer-typed parameter of the callee against the
         lhs alias set.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
        if (POINTER_TYPE_P (TREE_VALUE (type))
            && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
            && alias_sets_conflict_p (data->lhs_alias_set,
                                      get_alias_set
                                      (TREE_TYPE (TREE_VALUE (type)))))
          return t;
    }

  /* Types and decls have no interesting subtrees; prune the walk.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
3552
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  New statements go to PRE_P/POST_P.  On
   gimplification failure *EXPR_P is set to NULL.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                            struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
         the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
        gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
3624
3625 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3626 a RANGE_EXPR in a CONSTRUCTOR for an array.
3627
3628 var = lower;
3629 loop_entry:
3630 object[var] = value;
3631 if (var == upper)
3632 goto loop_exit;
3633 var = var + 1;
3634 goto loop_entry;
3635 loop_exit:
3636
3637 We increment var _after_ the loop exit check because we might otherwise
3638 fail if upper == TYPE_MAX_VALUE (type for upper).
3639
3640 Note that we never have to deal with SAVE_EXPRs here, because this has
3641 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3642
3643 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3644 gimple_seq *, bool);
3645
/* Emit, into PRE_P, the loop sketched in the comment above: assign
   VALUE to OBJECT[index] for every index in [LOWER, UPPER].
   ARRAY_ELT_TYPE is the element type used for the ARRAY_REF; CLEARED
   is passed through to gimplify_init_ctor_eval for nested
   constructors.  */

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
                               tree value, tree array_elt_type,
                               gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type, NULL);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
                 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
                             pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
                         gimple_build_cond (EQ_EXPR, var, upper,
                                            loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
                fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
3699
3700 /* Return true if FDECL is accessing a field that is zero sized. */
3701
3702 static bool
3703 zero_sized_field_decl (const_tree fdecl)
3704 {
3705 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3706 && integer_zerop (DECL_SIZE (fdecl)))
3707 return true;
3708 return false;
3709 }
3710
3711 /* Return true if TYPE is zero sized. */
3712
3713 static bool
3714 zero_sized_type (const_tree type)
3715 {
3716 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3717 && integer_zerop (TYPE_SIZE (type)))
3718 return true;
3719 return false;
3720 }
3721
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Generated statements go to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
                         gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
        continue;

      /* If the whole object was already zeroed, skip elements whose
         initializer is zero anyway.  */
      if (cleared && initializer_zerop (value))
        continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
         so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
         happen with calls to functions returning a zero-sized type, which
         we shouldn't discard.  As a number of downstream passes don't
         expect sets of zero-sized fields, we rely on the gimplification of
         the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
        continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
         whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
        {
          tree lower = TREE_OPERAND (purpose, 0);
          tree upper = TREE_OPERAND (purpose, 1);

          /* If the lower bound is equal to upper, just treat it as if
             upper was the index.  */
          if (simple_cst_equal (lower, upper))
            purpose = upper;
          else
            {
              gimplify_init_ctor_eval_range (object, lower, upper, value,
                                             array_elt_type, pre_p, cleared);
              continue;
            }
        }

      if (array_elt_type)
        {
          /* Do not use bitsizetype for ARRAY_REF indices.  */
          if (TYPE_DOMAIN (TREE_TYPE (object)))
            purpose
              = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
                              purpose);
          cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
                         purpose, NULL_TREE, NULL_TREE);
        }
      else
        {
          gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
          cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
                         unshare_expr (object), purpose, NULL_TREE);
        }

      /* Nested non-vector constructors recurse; everything else becomes
         a direct INIT_EXPR of the element reference.  */
      if (TREE_CODE (value) == CONSTRUCTOR
          && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
        gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
                                 pre_p, cleared);
      else
        {
          tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
          gimplify_and_add (init, pre_p);
          ggc_free (init);
        }
    }
}
3810
3811 /* Return the appropriate RHS predicate for this LHS. */
3812
3813 gimple_predicate
3814 rhs_predicate_for (tree lhs)
3815 {
3816 if (is_gimple_reg (lhs))
3817 return is_gimple_reg_rhs_or_call;
3818 else
3819 return is_gimple_mem_rhs_or_call;
3820 }
3821
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  GIMPLE_TEST_F and FALLBACK describe what the caller
   can accept, so that a matching initializer can be substituted
   directly.  Returns GS_OK with *EXPR_P replaced by either the
   literal's initializer or its decl.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
                                bool (*gimple_test_f) (tree),
                                fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
           && init
           && (fallback & fb_lvalue) == 0
           && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
3877
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  ORIG_CTOR itself is
   never modified: the constructor and its element vector are copied
   lazily, on the first element that actually changes.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      /* Recurse into nested constructors, and replace a compound
         literal by its constructor initializer when that is safe.  */
      if (TREE_CODE (value) == CONSTRUCTOR)
        newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
        {
          tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
          tree decl = DECL_EXPR_DECL (decl_s);
          tree init = DECL_INITIAL (decl);

          /* Only substitute when neither the literal nor its decl is
             addressable, so dropping the decl cannot be observed.  */
          if (!TREE_ADDRESSABLE (value)
              && !TREE_ADDRESSABLE (decl)
              && init
              && TREE_CODE (init) == CONSTRUCTOR)
            newval = optimize_compound_literals_in_ctor (init);
        }
      if (newval == value)
        continue;

      /* First change: make a private copy of the constructor and its
         element vector before editing.  */
      if (ctor == orig_ctor)
        {
          ctor = copy_node (orig_ctor);
          CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
          elts = CONSTRUCTOR_ELTS (ctor);
        }
      (*elts)[idx].value = newval;
    }
  return ctor;
}
3919
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.

   EXPR_P points to the MODIFY_EXPR/INIT_EXPR whose RHS is the
   CONSTRUCTOR; PRE_P and POST_P receive side-effect statements; if
   WANT_VALUE, *EXPR_P is left as the initialized object so callers can
   use the value.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* Gimplify the LHS first, unless we are only probing whether a
     temporary would be needed -- in that mode nothing may be emitted.  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* Replace embedded compound literals in the RHS constructor by their
     initializers before looking at the elements.  */
  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    /* Promotion would avoid creating a temporary, so in probing
	       mode report that gimplifying would need one.  */
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p)
	  /* If the constructor isn't complete, clear the whole object
	     beforehand.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    /* Missing parts default to zero.  */
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    /* Don't reduce an initializer constant even if we can't
	       make a VECTOR_CST.  It won't do anything for us, and it'll
	       prevent us from representing it as a single constant.  */
	    if (initializer_constant_valid_p (ctor, type))
	      break;

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	/* If the LHS is not a gimple register, force the CONSTRUCTOR
	   into a formal temporary first.  */
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      /* The value of the initialization is the initialized object.  */
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gimple init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
4280
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Thin wrapper giving RHS-only callers a distinctly named entry
     point; the actual folding lives in gimple_fold_indirect_ref.  */
  return gimple_fold_indirect_ref (t);
}
4293
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   *EXPR_P is the whole MODIFY_EXPR/INIT_EXPR; FROM_P and TO_P point at
   its RHS and LHS operands respectively.  Side effects go to PRE_P and
   POST_P.  WANT_VALUE is true when the caller needs the value of the
   assignment itself.  Returns GS_UNHANDLED when no simplification
   applied, so the caller proceeds with normal gimplification.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		/* Preserve the volatility of the original indirection,
		   rebuilding the reference if needed.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere?  I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr.  */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* Rewrite "lhs = a ? b : c" into "a ? (lhs = b) : (lhs = c)",
		 skipping arms that are already void.  */
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV.  */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* Make the container void and move the assignment inside it.  */
	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
4575
4576
4577 /* Return true if T looks like a valid GIMPLE statement. */
4578
4579 static bool
4580 is_gimple_stmt (tree t)
4581 {
4582 const enum tree_code code = TREE_CODE (t);
4583
4584 switch (code)
4585 {
4586 case NOP_EXPR:
4587 /* The only valid NOP_EXPR is the empty statement. */
4588 return IS_EMPTY_STMT (t);
4589
4590 case BIND_EXPR:
4591 case COND_EXPR:
4592 /* These are only valid if they're void. */
4593 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4594
4595 case SWITCH_EXPR:
4596 case GOTO_EXPR:
4597 case RETURN_EXPR:
4598 case LABEL_EXPR:
4599 case CASE_LABEL_EXPR:
4600 case TRY_CATCH_EXPR:
4601 case TRY_FINALLY_EXPR:
4602 case EH_FILTER_EXPR:
4603 case CATCH_EXPR:
4604 case ASM_EXPR:
4605 case STATEMENT_LIST:
4606 case OMP_PARALLEL:
4607 case OMP_FOR:
4608 case OMP_SIMD:
4609 case OMP_DISTRIBUTE:
4610 case OMP_SECTIONS:
4611 case OMP_SECTION:
4612 case OMP_SINGLE:
4613 case OMP_MASTER:
4614 case OMP_TASKGROUP:
4615 case OMP_ORDERED:
4616 case OMP_CRITICAL:
4617 case OMP_TASK:
4618 /* These are always void. */
4619 return true;
4620
4621 case CALL_EXPR:
4622 case MODIFY_EXPR:
4623 case PREDICT_EXPR:
4624 /* These are valid regardless of their type. */
4625 return true;
4626
4627 default:
4628 return false;
4629 }
4630 }
4631
4632
4633 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4634 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4635 DECL_GIMPLE_REG_P set.
4636
4637 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4638 other, unmodified part of the complex object just before the total store.
4639 As a consequence, if the object is still uninitialized, an undefined value
4640 will be loaded into a register, which may result in a spurious exception
4641 if the register is floating-point and the value happens to be a signaling
4642 NaN for example. Then the fully-fledged complex operations lowering pass
4643 followed by a DCE pass are necessary in order to fix things up. */
4644
4645 static enum gimplify_status
4646 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4647 bool want_value)
4648 {
4649 enum tree_code code, ocode;
4650 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4651
4652 lhs = TREE_OPERAND (*expr_p, 0);
4653 rhs = TREE_OPERAND (*expr_p, 1);
4654 code = TREE_CODE (lhs);
4655 lhs = TREE_OPERAND (lhs, 0);
4656
4657 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4658 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4659 TREE_NO_WARNING (other) = 1;
4660 other = get_formal_tmp_var (other, pre_p);
4661
4662 realpart = code == REALPART_EXPR ? rhs : other;
4663 imagpart = code == REALPART_EXPR ? other : rhs;
4664
4665 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4666 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4667 else
4668 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4669
4670 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4671 *expr_p = (want_value) ? rhs : NULL_TREE;
4672
4673 return GS_ALL_DONE;
4674 }
4675
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value
		  && (TREE_CODE (*to_p) == VAR_DECL
		      || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* Gimplify the LHS into a valid lvalue.  */
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
  if (!gimplify_ctxp->into_ssa
      && TREE_CODE (*from_p) == VAR_DECL
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p))
    {
      /* Give the artificial temporary a name derived from the LHS and
	 record the LHS as its debug expression.  */
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  /* A volatile LHS is not re-read below to produce the wanted value, so
     evaluate the RHS into a temporary first.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
      CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
      STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
      assign = gimple_build_call_from_tree (*from_p);
      gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
      notice_special_calls (assign);
      if (!gimple_call_noreturn_p (assign))
	gimple_call_set_lhs (assign, *to_p);
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  /* Don't fold stmts inside of target construct.  We'll do it
     during omplower pass instead.  */
  struct gimplify_omp_ctx *ctx;
  for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
    if (ctx->region_type == ORT_TARGET)
      break;
  if (ctx == NULL)
    fold_stmt (&gsi);

  if (want_value)
    {
      /* For a volatile LHS, hand back the temporary holding the RHS
	 rather than re-reading the LHS.  */
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
4874
4875 /* Gimplify a comparison between two variable-sized objects. Do this
4876 with a call to BUILT_IN_MEMCMP. */
4877
4878 static enum gimplify_status
4879 gimplify_variable_sized_compare (tree *expr_p)
4880 {
4881 location_t loc = EXPR_LOCATION (*expr_p);
4882 tree op0 = TREE_OPERAND (*expr_p, 0);
4883 tree op1 = TREE_OPERAND (*expr_p, 1);
4884 tree t, arg, dest, src, expr;
4885
4886 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4887 arg = unshare_expr (arg);
4888 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4889 src = build_fold_addr_expr_loc (loc, op1);
4890 dest = build_fold_addr_expr_loc (loc, op0);
4891 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4892 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4893
4894 expr
4895 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4896 SET_EXPR_LOCATION (expr, loc);
4897 *expr_p = expr;
4898
4899 return GS_OK;
4900 }
4901
4902 /* Gimplify a comparison between two aggregate objects of integral scalar
4903 mode as a comparison between the bitwise equivalent scalar values. */
4904
4905 static enum gimplify_status
4906 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4907 {
4908 location_t loc = EXPR_LOCATION (*expr_p);
4909 tree op0 = TREE_OPERAND (*expr_p, 0);
4910 tree op1 = TREE_OPERAND (*expr_p, 1);
4911
4912 tree type = TREE_TYPE (op0);
4913 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4914
4915 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4916 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4917
4918 *expr_p
4919 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4920
4921 return GS_OK;
4922 }
4923
4924 /* Gimplify an expression sequence. This function gimplifies each
4925 expression and rewrites the original expression with the last
4926 expression of the sequence in GIMPLE form.
4927
4928 PRE_P points to the list where the side effects for all the
4929 expressions in the sequence will be emitted.
4930
4931 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4932
4933 static enum gimplify_status
4934 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4935 {
4936 tree t = *expr_p;
4937
4938 do
4939 {
4940 tree *sub_p = &TREE_OPERAND (t, 0);
4941
4942 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4943 gimplify_compound_expr (sub_p, pre_p, false);
4944 else
4945 gimplify_stmt (sub_p, pre_p);
4946
4947 t = TREE_OPERAND (t, 1);
4948 }
4949 while (TREE_CODE (t) == COMPOUND_EXPR);
4950
4951 *expr_p = t;
4952 if (want_value)
4953 return GS_OK;
4954 else
4955 {
4956 gimplify_stmt (expr_p, pre_p);
4957 return GS_ALL_DONE;
4958 }
4959 }
4960
4961 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4962 gimplify. After gimplification, EXPR_P will point to a new temporary
4963 that holds the original value of the SAVE_EXPR node.
4964
4965 PRE_P points to the list where side effects that must happen before
4966 *EXPR_P should be stored. */
4967
4968 static enum gimplify_status
4969 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4970 {
4971 enum gimplify_status ret = GS_ALL_DONE;
4972 tree val;
4973
4974 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4975 val = TREE_OPERAND (*expr_p, 0);
4976
4977 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4978 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4979 {
4980 /* The operand may be a void-valued expression such as SAVE_EXPRs
4981 generated by the Java frontend for class initialization. It is
4982 being executed only for its side-effects. */
4983 if (TREE_TYPE (val) == void_type_node)
4984 {
4985 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4986 is_gimple_stmt, fb_none);
4987 val = NULL;
4988 }
4989 else
4990 val = get_initialized_tmp_var (val, pre_p, post_p);
4991
4992 TREE_OPERAND (*expr_p, 0) = val;
4993 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4994 }
4995
4996 *expr_p = val;
4997
4998 return ret;
4999 }
5000
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

	unary_expr
		: ...
		| '&' varname
		...

   into a gimple-valid form, dispatching on the code of the operand.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   Returns GS_OK when the expression was rewritten and should be
   revisited, or GS_ERROR if gimplifying the operand failed.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	/* '&*ptr' collapses to 'ptr', converted back to the ADDR_EXPR's
	   type when the pointer types are not trivially compatible.  */
	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      /* &VIEW_CONVERT (x) becomes (T *) &x.  */
      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  If so, fall back to the
	 '&*ptr' simplification above.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
5110
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.

   The outputs, inputs, clobbers and labels chains of the ASM_EXPR are
   unchained into vectors and a GIMPLE_ASM is appended to PRE_P, unless
   an error was diagnosed, in which case GS_ERROR is returned and
   nothing is emitted.  In/out ("+") output operands are split into a
   separate output and a matching numbered input.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;	/* Output constraint strings, for
				   parse_input_constraint below.  */
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gimple stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      /* Save the successor now: LINK is unchained from the list below.  */
      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* A memory-only operand will be referenced by address.  */
      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass: compute the buffer size needed once each
		     register alternative is replaced by the operand
		     number.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: build the replacement constraint.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Temporarily prepend '=' so the alternative can be
			 re-parsed as an output constraint.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  /* Register alternative: refer to output I.  */
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  /* Non-register alternative: copy unchanged.  */
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* Memory-only in/out: keep the constraint minus the '+'.  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  /* Chain the synthesized input onto ASM_INPUTS so the input
	     loop below picks it up.  */
	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  /* Note: I keeps counting up from the outputs, so operand numbers in
     the diagnostics below span the whole asm.  */
  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Pre/post increment/decrement have no address; reject them.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
		input_location = EXPR_LOCATION (TREE_VALUE (link));
	      error ("memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Clobbers and labels need no gimplification; just unchain them into
     their vectors.  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
5357
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   Returns GS_OK with *EXPR_P set to the wrapper temporary when
   voidify_wrapper_expr produced one, otherwise GS_ALL_DONE with
   *EXPR_P cleared.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  /* Gimplify the body; any cleanups it contains show up as
     GIMPLE_WITH_CLEANUP_EXPR markers in BODY_SEQUENCE.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  /* Restore the saved state.  */
  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Convert each WCE marker into a GIMPLE_TRY protecting everything
     that follows it in the sequence.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The WCE is the last statement: nothing needs protecting,
		 so just run the cleanup inline (unless it is EH-only).
		 Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gimple gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      /* The statements after the WCE become the protected body.  */
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the new try body for further
		 WCE markers.  */
	      iter = gsi_start (gtry->gimple_try.eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5448
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   VAR is the variable the cleanup protects (used only to suppress a
   bogus uninitialized-use warning in the conditional case).  The WCE
   marker is appended to *PRE_P, or — in a conditional context — to the
   gimplify context's conditional_cleanups sequence.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
  gimple wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
      gimple ffalse = gimple_build_assign (flag, boolean_false_node);
      gimple ftrue = gimple_build_assign (flag, boolean_true_node);

      /* Guard the cleanup with the flag.  */
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);

      /* flag = 0 and the WCE go before the condition; flag = 1 goes at
	 the point of initialization.  */
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
      gimplify_seq_add_stmt (pre_p, ftrue);

      /* Because of this manipulation, and the EH edges that jump
	 threading cannot redirect, the temporary (VAR) will appear
	 to be used uninitialized.  Don't warn.  */
      TREE_NO_WARNING (var) = 1;
    }
  else
    {
      /* Unconditional case: just wrap the gimplified cleanup in a WCE
	 marker, honoring EH_ONLY.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
5511
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.
   The slot temporary is registered (or set up as a VLA), the initializer
   is gimplified into *PRE_P, any cleanup is pushed, and *EXPR_P is
   replaced by the slot.  The initializer is cleared afterwards so the
   TARGET_EXPR is expanded only once.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise build "temp = init" and gimplify that instead;
	     the INIT_EXPR shell can be returned to the GC afterwards.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      /* Non-NULL only in the void-initializer case above.  */
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    /* Defer pushing so a clobber can be merged in below.  */
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp)
	  && flag_stack_reuse == SR_ALL)
	{
	  tree clobber = build_constructor (TREE_TYPE (temp),
					    NULL);
	  TREE_THIS_VOLATILE (clobber) = true;
	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	  if (cleanup)
	    cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
			      clobber);
	  else
	    cleanup = clobber;
	}

      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
5599
5600 /* Gimplification of expression trees. */
5601
5602 /* Gimplify an expression which appears at statement context. The
5603 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5604 NULL, a new sequence is allocated.
5605
5606 Return true if we actually added a statement to the queue. */
5607
5608 bool
5609 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5610 {
5611 gimple_seq_node last;
5612
5613 last = gimple_seq_last (*seq_p);
5614 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5615 return last != gimple_seq_last (*seq_p);
5616 }
5617
5618 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5619 to CTX. If entries already exist, force them to be some flavor of private.
5620 If there is no enclosing parallel, do nothing. */
5621
5622 void
5623 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5624 {
5625 splay_tree_node n;
5626
5627 if (decl == NULL || !DECL_P (decl))
5628 return;
5629
5630 do
5631 {
5632 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5633 if (n != NULL)
5634 {
5635 if (n->value & GOVD_SHARED)
5636 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5637 else if (n->value & GOVD_MAP)
5638 n->value |= GOVD_MAP_TO_ONLY;
5639 else
5640 return;
5641 }
5642 else if (ctx->region_type == ORT_TARGET)
5643 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5644 else if (ctx->region_type != ORT_WORKSHARE
5645 && ctx->region_type != ORT_SIMD
5646 && ctx->region_type != ORT_TARGET_DATA)
5647 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5648
5649 ctx = ctx->outer_context;
5650 }
5651 while (ctx);
5652 }
5653
5654 /* Similarly for each of the type sizes of TYPE. */
5655
5656 static void
5657 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5658 {
5659 if (type == NULL || type == error_mark_node)
5660 return;
5661 type = TYPE_MAIN_VARIANT (type);
5662
5663 if (pointer_set_insert (ctx->privatized_types, type))
5664 return;
5665
5666 switch (TREE_CODE (type))
5667 {
5668 case INTEGER_TYPE:
5669 case ENUMERAL_TYPE:
5670 case BOOLEAN_TYPE:
5671 case REAL_TYPE:
5672 case FIXED_POINT_TYPE:
5673 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5674 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5675 break;
5676
5677 case ARRAY_TYPE:
5678 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5679 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5680 break;
5681
5682 case RECORD_TYPE:
5683 case UNION_TYPE:
5684 case QUAL_UNION_TYPE:
5685 {
5686 tree field;
5687 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5688 if (TREE_CODE (field) == FIELD_DECL)
5689 {
5690 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5691 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5692 }
5693 }
5694 break;
5695
5696 case POINTER_TYPE:
5697 case REFERENCE_TYPE:
5698 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5699 break;
5700
5701 default:
5702 break;
5703 }
5704
5705 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5706 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5707 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5708 }
5709
/* Add an entry for DECL in the OpenMP context CTX with FLAGS (a mask of
   gimplify_omp_var_data bits).  An existing non-ALIGNED entry has FLAGS
   merged into it; variable-sized variables additionally get their
   pointer replacement variable and type parameters registered.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl))
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && n->value != GOVD_ALIGNED)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  */
      nflags = n->value | flags;
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  nflags = flags & GOVD_MAP
		   ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
		   : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The replacement pointer lives in DECL's DECL_VALUE_EXPR,
	     which must be of the form *ptr.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (TREE_CODE (t) != INTEGER_CST)
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* N is non-NULL here only for a pre-existing GOVD_ALIGNED entry.  */
  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
5810
5811 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5812 This just prints out diagnostics about threadprivate variable uses
5813 in untied tasks. If DECL2 is non-NULL, prevent this warning
5814 on that variable. */
5815
5816 static bool
5817 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5818 tree decl2)
5819 {
5820 splay_tree_node n;
5821 struct gimplify_omp_ctx *octx;
5822
5823 for (octx = ctx; octx; octx = octx->outer_context)
5824 if (octx->region_type == ORT_TARGET)
5825 {
5826 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5827 if (n == NULL)
5828 {
5829 error ("threadprivate variable %qE used in target region",
5830 DECL_NAME (decl));
5831 error_at (octx->location, "enclosing target region");
5832 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5833 }
5834 if (decl2)
5835 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5836 }
5837
5838 if (ctx->region_type != ORT_UNTIED_TASK)
5839 return false;
5840 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5841 if (n == NULL)
5842 {
5843 error ("threadprivate variable %qE used in untied task",
5844 DECL_NAME (decl));
5845 error_at (ctx->location, "enclosing task");
5846 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5847 }
5848 if (decl2)
5849 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5850 return false;
5851 }
5852
/* Record the fact that DECL was used within the OpenMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  /* Only real uses mark the variable as seen; default(none) probing
     passes a zero flag set.  */
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  /* A global whose value expression is based on a threadprivate
	     base (e.g. emutls) is handled like the threadprivate itself;
	     VALUE is pre-inserted to suppress a duplicate diagnostic.  */
	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (ctx->region_type == ORT_TARGET)
    {
      /* In a target region every referenced variable is implicitly
	 mapped; complain if its type cannot be mapped.  */
      if (n == NULL)
	{
	  if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
	    {
	      error ("%qD referenced in target region does not have "
		     "a mappable type", decl);
	      omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
	    }
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | flags);
	}
      else
	n->value |= flags;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      goto do_outer;
    }

  if (n == NULL)
    {
      /* First time DECL is noticed in CTX: compute an implicit
	 data-sharing class from the applicable default clause.  */
      enum omp_clause_default_kind default_kind, kind;
      struct gimplify_omp_ctx *octx;

      /* Worksharing, simd and target data constructs do not determine
	 sharing themselves; defer to the enclosing context.  */
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_TARGET_DATA)
	goto do_outer;

      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
	 remapped firstprivate instead of shared.  To some extent this is
	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
      default_kind = ctx->default_kind;
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
	default_kind = kind;

      switch (default_kind)
	{
	case OMP_CLAUSE_DEFAULT_NONE:
	  /* default(none): DECL had to appear on an explicit clause;
	     report against the kind of enclosing construct, then fall
	     through and treat it as shared so we only complain once.  */
	  if ((ctx->region_type & ORT_TASK) != 0)
	    {
	      error ("%qE not specified in enclosing task",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing task");
	    }
	  else if (ctx->region_type == ORT_TEAMS)
	    {
	      error ("%qE not specified in enclosing teams construct",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing teams construct");
	    }
	  else
	    {
	      error ("%qE not specified in enclosing parallel",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing parallel");
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_DEFAULT_SHARED:
	  flags |= GOVD_SHARED;
	  break;
	case OMP_CLAUSE_DEFAULT_PRIVATE:
	  flags |= GOVD_PRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
	  flags |= GOVD_FIRSTPRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
	  gcc_assert ((ctx->region_type & ORT_TASK) != 0);
	  if (ctx->outer_context)
	    omp_notice_variable (ctx->outer_context, decl, in_code);
	  /* Walk outward: if DECL is non-shared in some enclosing context
	     before (and including) the innermost parallel/teams, the task
	     must capture it firstprivate.  */
	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
		continue;
	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  break;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		break;
	    }
	  if (flags & GOVD_FIRSTPRIVATE)
	    break;
	  /* An orphaned task (no enclosing parallel/teams at all) still
	     captures function-local variables firstprivate.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!is_global_var (decl)
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    {
	      flags |= GOVD_FIRSTPRIVATE;
	      break;
	    }
	  flags |= GOVD_SHARED;
	  break;
	default:
	  gcc_unreachable ();
	}

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* First real use of a variable-sized DECL: also mark the pointer
	 behind its DECL_VALUE_EXPR (always of the form *ptr here) as
	 seen, so the pointer gets remapped too.  */
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
6033
6034 /* Verify that DECL is private within CTX. If there's specific information
6035 to the contrary in the innermost scope, generate an error. */
6036
6037 static bool
6038 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, bool simd)
6039 {
6040 splay_tree_node n;
6041
6042 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6043 if (n != NULL)
6044 {
6045 if (n->value & GOVD_SHARED)
6046 {
6047 if (ctx == gimplify_omp_ctxp)
6048 {
6049 if (simd)
6050 error ("iteration variable %qE is predetermined linear",
6051 DECL_NAME (decl));
6052 else
6053 error ("iteration variable %qE should be private",
6054 DECL_NAME (decl));
6055 n->value = GOVD_PRIVATE;
6056 return true;
6057 }
6058 else
6059 return false;
6060 }
6061 else if ((n->value & GOVD_EXPLICIT) != 0
6062 && (ctx == gimplify_omp_ctxp
6063 || (ctx->region_type == ORT_COMBINED_PARALLEL
6064 && gimplify_omp_ctxp->outer_context == ctx)))
6065 {
6066 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
6067 error ("iteration variable %qE should not be firstprivate",
6068 DECL_NAME (decl));
6069 else if ((n->value & GOVD_REDUCTION) != 0)
6070 error ("iteration variable %qE should not be reduction",
6071 DECL_NAME (decl));
6072 else if (simd && (n->value & GOVD_LASTPRIVATE) != 0)
6073 error ("iteration variable %qE should not be lastprivate",
6074 DECL_NAME (decl));
6075 else if (simd && (n->value & GOVD_PRIVATE) != 0)
6076 error ("iteration variable %qE should not be private",
6077 DECL_NAME (decl));
6078 else if (simd && (n->value & GOVD_LINEAR) != 0)
6079 error ("iteration variable %qE is predetermined linear",
6080 DECL_NAME (decl));
6081 }
6082 return (ctx == gimplify_omp_ctxp
6083 || (ctx->region_type == ORT_COMBINED_PARALLEL
6084 && gimplify_omp_ctxp->outer_context == ctx));
6085 }
6086
6087 if (ctx->region_type != ORT_WORKSHARE
6088 && ctx->region_type != ORT_SIMD)
6089 return false;
6090 else if (ctx->outer_context)
6091 return omp_is_private (ctx->outer_context, decl, simd);
6092 return false;
6093 }
6094
6095 /* Return true if DECL is private within a parallel region
6096 that binds to the current construct's context or in parallel
6097 region's REDUCTION clause. */
6098
6099 static bool
6100 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
6101 {
6102 splay_tree_node n;
6103
6104 do
6105 {
6106 ctx = ctx->outer_context;
6107 if (ctx == NULL)
6108 return !(is_global_var (decl)
6109 /* References might be private, but might be shared too. */
6110 || lang_hooks.decls.omp_privatize_by_reference (decl));
6111
6112 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
6113 continue;
6114
6115 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6116 if (n != NULL)
6117 return (n->value & GOVD_SHARED) == 0;
6118 }
6119 while (ctx->region_type == ORT_WORKSHARE
6120 || ctx->region_type == ORT_SIMD);
6121 return false;
6122 }
6123
/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
   and previous omp contexts.  */

static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  struct gimplify_ctx gctx;
  tree c;

  /* Push a fresh context for this construct; it becomes the global
     gimplify_omp_ctxp only at the end of this function, after all
     clause operands have been gimplified in the OUTER context.  */
  ctx = new_omp_context (region_type);
  outer_ctx = ctx->outer_context;

  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      /* Clause name used in the "is private in outer context"
	 diagnostic, or NULL if that check doesn't apply.  */
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      /* Private construction needs the outer value (e.g. C++
		 default ctor referencing the original); note it.  */
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "lastprivate";
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "reduction";
	  goto do_add;
	case OMP_CLAUSE_LINEAR:
	  /* The linear step is an expression; gimplify it first.  */
	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
	  goto do_add;

	case OMP_CLAUSE_MAP:
	  if (OMP_CLAUSE_SIZE (c)
	      && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
				NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    {
	      /* Array sections etc.: gimplify the address expression but
		 record no mapping for it in the context.  */
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
				 NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  flags = GOVD_MAP | GOVD_EXPLICIT;
	  goto do_add;

	case OMP_CLAUSE_DEPEND:
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	    {
	      /* Keep the side effects of the first operand, depend on
		 the second.  */
	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			     NULL, is_gimple_val, fb_rvalue);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	    }
	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
	    {
	      remove = true;
	      break;
	    }
	  /* The runtime wants the address of the depend object.  */
	  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	  if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (c)
	      && gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
				NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (!DECL_P (decl))
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
				 NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  /* to/from don't create a sharing class; just notify the
	     outer context that the decl is used.  */
	  goto do_notice;

	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  omp_add_variable (ctx, decl, flags);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* User-defined reduction: gimplify the language-supplied
		 init and merge statements into GIMPLE sequences now,
		 inside this construct's context.  */
	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context (&gctx);
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      /* Likewise gimplify the lastprivate assignment; wrap it in
		 a BIND_EXPR so a single GIMPLE_BIND results.  */
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	do_notice:
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  /* firstprivate/lastprivate/reduction on a worksharing construct
	     must not name a variable that is private in the binding
	     parallel region.  */
	  if (check_non_private
	      && region_type == ORT_WORKSHARE
	      && omp_check_private (ctx, decl))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	  /* Clauses with a single expression operand.  */
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	  /* No operands to gimplify, nothing to record.  */
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  /* Only local pointers need a GOVD_ALIGNED mapping; globals and
	     arrays keep their alignment through other means.  */
	  if (!is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
	    omp_add_variable (ctx, decl, GOVD_ALIGNED);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Now make the new context current for gimplifying the body.  */
  gimplify_omp_ctxp = ctx;
}
6388
/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  /* Splay-tree callback: N maps a decl to its GOVD_* flags; DATA is the
     head of the clause list to prepend synthesized clauses to.  */
  tree *list_p = (tree *) data;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses already exist; locals need no clause at all.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    /* Let the front end turn a would-be shared/private into a debug
       private clause (e.g. for Fortran dummy arguments).  */
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Translate the recorded sharing class into a clause code.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    code = OMP_CLAUSE_MAP;
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global is implicitly shared anyway; only emit a SHARED
	     clause if some enclosing context privatizes it.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    code = OMP_CLAUSE_FIRSTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    /* Aligned alone implies no data sharing clause.  */
    return 0;
  else
    gcc_unreachable ();

  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = *list_p;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_MAP)
    {
      OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
				     ? OMP_CLAUSE_MAP_TO
				     : OMP_CLAUSE_MAP_TOFROM;
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: map the pointed-to storage (through the
	     pointer in DECL_VALUE_EXPR, always *ptr here), and chain a
	     zero-size MAP_POINTER clause for the pointer itself.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
    }
  *list_p = clause;
  /* Give the front end a chance to finalize the synthesized clause.  */
  lang_hooks.decls.omp_finish_clause (clause);

  return 0;
}
6494
/* Post-process the clause list *LIST_P after the construct body has been
   gimplified: drop clauses for variables never actually used, rewrite
   clauses for variable-sized decls, add implicit data-sharing clauses,
   and pop the current gimplify OMP context.  */

static void
gimplify_adjust_omp_clauses (tree *list_p)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Drop the clause entirely if the variable was never used in
	     the region body.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  /* Rewrite the clause into a debug-only private.  */
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		  && ctx->outer_context
		  && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		       && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		  && !is_global_var (decl))
		{
		  /* A linear variable that is copied in or out must be
		     given a sharing class in the enclosing combined
		     parallel, or at least noticed there.  */
		  if (ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
		    {
		      n = splay_tree_lookup (ctx->outer_context->variables,
					     (splay_tree_key) decl);
		      if (n == NULL
			  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
			{
			  int flags = OMP_CLAUSE_LINEAR_NO_COPYIN (c)
				      ? GOVD_LASTPRIVATE : GOVD_SHARED;
			  if (n == NULL)
			    omp_add_variable (ctx->outer_context, decl,
					      flags | GOVD_SEEN);
			  else
			    n->value |= flags | GOVD_SEEN;
			}
		    }
		  else
		    omp_notice_variable (ctx->outer_context, decl, true);
		}
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  /* The pointer will be replaced by the result of
		     __builtin_assume_aligned; that is unsafe when the
		     decl has a non-firstprivate sharing class here, or
		     is shared in any enclosing context.  */
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
	    {
	      /* Variable-sized decl: map the pointed-to storage instead,
		 and chain a zero-size MAP_POINTER clause for the pointer
		 itself (mirrors gimplify_adjust_omp_clauses_1).  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					  OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (nc) = decl;
	      OMP_CLAUSE_SIZE (nc) = size_zero_node;
	      OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
	      OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
	      OMP_CLAUSE_CHAIN (c) = nc;
	      /* Skip over the clause just inserted.  */
	      c = nc;
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* As for MAP above, transfer the pointed-to storage.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_DEPEND:
	  /* Nothing to adjust for these.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
6703
6704 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
6705 gimplification of the body, as well as scanning the body for used
6706 variables. We need to do this scan now, because variable-sized
6707 decls will be decomposed during gimplification. */
6708
6709 static void
6710 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6711 {
6712 tree expr = *expr_p;
6713 gimple g;
6714 gimple_seq body = NULL;
6715 struct gimplify_ctx gctx;
6716
6717 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6718 OMP_PARALLEL_COMBINED (expr)
6719 ? ORT_COMBINED_PARALLEL
6720 : ORT_PARALLEL);
6721
6722 push_gimplify_context (&gctx);
6723
6724 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6725 if (gimple_code (g) == GIMPLE_BIND)
6726 pop_gimplify_context (g);
6727 else
6728 pop_gimplify_context (NULL);
6729
6730 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6731
6732 g = gimple_build_omp_parallel (body,
6733 OMP_PARALLEL_CLAUSES (expr),
6734 NULL_TREE, NULL_TREE);
6735 if (OMP_PARALLEL_COMBINED (expr))
6736 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6737 gimplify_seq_add_stmt (pre_p, g);
6738 *expr_p = NULL_TREE;
6739 }
6740
6741 /* Gimplify the contents of an OMP_TASK statement. This involves
6742 gimplification of the body, as well as scanning the body for used
6743 variables. We need to do this scan now, because variable-sized
6744 decls will be decomposed during gimplification. */
6745
6746 static void
6747 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6748 {
6749 tree expr = *expr_p;
6750 gimple g;
6751 gimple_seq body = NULL;
6752 struct gimplify_ctx gctx;
6753
6754 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6755 find_omp_clause (OMP_TASK_CLAUSES (expr),
6756 OMP_CLAUSE_UNTIED)
6757 ? ORT_UNTIED_TASK : ORT_TASK);
6758
6759 push_gimplify_context (&gctx);
6760
6761 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6762 if (gimple_code (g) == GIMPLE_BIND)
6763 pop_gimplify_context (g);
6764 else
6765 pop_gimplify_context (NULL);
6766
6767 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6768
6769 g = gimple_build_omp_task (body,
6770 OMP_TASK_CLAUSES (expr),
6771 NULL_TREE, NULL_TREE,
6772 NULL_TREE, NULL_TREE, NULL_TREE);
6773 gimplify_seq_add_stmt (pre_p, g);
6774 *expr_p = NULL_TREE;
6775 }
6776
6777 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6778 with non-NULL OMP_FOR_INIT. */
6779
6780 static tree
6781 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6782 {
6783 *walk_subtrees = 0;
6784 switch (TREE_CODE (*tp))
6785 {
6786 case OMP_FOR:
6787 *walk_subtrees = 1;
6788 /* FALLTHRU */
6789 case OMP_SIMD:
6790 if (OMP_FOR_INIT (*tp) != NULL_TREE)
6791 return *tp;
6792 break;
6793 case BIND_EXPR:
6794 case STATEMENT_LIST:
6795 case OMP_PARALLEL:
6796 *walk_subtrees = 1;
6797 break;
6798 default:
6799 break;
6800 }
6801 return NULL_TREE;
6802 }
6803
6804 /* Gimplify the gross structure of an OMP_FOR statement. */
6805
6806 static enum gimplify_status
6807 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6808 {
6809 tree for_stmt, orig_for_stmt, decl, var, t;
6810 enum gimplify_status ret = GS_ALL_DONE;
6811 enum gimplify_status tret;
6812 gimple gfor;
6813 gimple_seq for_body, for_pre_body;
6814 int i;
6815 bool simd;
6816 bitmap has_decl_expr = NULL;
6817
6818 orig_for_stmt = for_stmt = *expr_p;
6819
6820 simd = TREE_CODE (for_stmt) == OMP_SIMD;
6821 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6822 simd ? ORT_SIMD : ORT_WORKSHARE);
6823
6824 /* Handle OMP_FOR_INIT. */
6825 for_pre_body = NULL;
6826 if (simd && OMP_FOR_PRE_BODY (for_stmt))
6827 {
6828 has_decl_expr = BITMAP_ALLOC (NULL);
6829 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6830 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
6831 == VAR_DECL)
6832 {
6833 t = OMP_FOR_PRE_BODY (for_stmt);
6834 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6835 }
6836 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6837 {
6838 tree_stmt_iterator si;
6839 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6840 tsi_next (&si))
6841 {
6842 t = tsi_stmt (si);
6843 if (TREE_CODE (t) == DECL_EXPR
6844 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6845 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6846 }
6847 }
6848 }
6849 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6850 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6851
6852 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6853 {
6854 for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6855 NULL, NULL);
6856 gcc_assert (for_stmt != NULL_TREE);
6857 gimplify_omp_ctxp->combined_loop = true;
6858 }
6859
6860 for_body = NULL;
6861 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6862 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6863 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6864 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6865 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6866 {
6867 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6868 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6869 decl = TREE_OPERAND (t, 0);
6870 gcc_assert (DECL_P (decl));
6871 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6872 || POINTER_TYPE_P (TREE_TYPE (decl)));
6873
6874 /* Make sure the iteration variable is private. */
6875 tree c = NULL_TREE;
6876 if (orig_for_stmt != for_stmt)
6877 /* Do this only on innermost construct for combined ones. */;
6878 else if (simd)
6879 {
6880 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6881 (splay_tree_key)decl);
6882 omp_is_private (gimplify_omp_ctxp, decl, simd);
6883 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6884 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6885 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6886 {
6887 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6888 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6889 if (has_decl_expr
6890 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6891 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6892 OMP_CLAUSE_DECL (c) = decl;
6893 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6894 OMP_FOR_CLAUSES (for_stmt) = c;
6895 omp_add_variable (gimplify_omp_ctxp, decl,
6896 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6897 }
6898 else
6899 {
6900 bool lastprivate
6901 = (!has_decl_expr
6902 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
6903 c = build_omp_clause (input_location,
6904 lastprivate ? OMP_CLAUSE_LASTPRIVATE
6905 : OMP_CLAUSE_PRIVATE);
6906 OMP_CLAUSE_DECL (c) = decl;
6907 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6908 omp_add_variable (gimplify_omp_ctxp, decl,
6909 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
6910 | GOVD_SEEN);
6911 c = NULL_TREE;
6912 }
6913 }
6914 else if (omp_is_private (gimplify_omp_ctxp, decl, simd))
6915 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6916 else
6917 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6918
6919 /* If DECL is not a gimple register, create a temporary variable to act
6920 as an iteration counter. This is valid, since DECL cannot be
6921 modified in the body of the loop. */
6922 if (orig_for_stmt != for_stmt)
6923 var = decl;
6924 else if (!is_gimple_reg (decl))
6925 {
6926 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6927 TREE_OPERAND (t, 0) = var;
6928
6929 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6930
6931 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6932 }
6933 else
6934 var = decl;
6935
6936 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6937 is_gimple_val, fb_rvalue);
6938 ret = MIN (ret, tret);
6939 if (ret == GS_ERROR)
6940 return ret;
6941
6942 /* Handle OMP_FOR_COND. */
6943 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6944 gcc_assert (COMPARISON_CLASS_P (t));
6945 gcc_assert (TREE_OPERAND (t, 0) == decl);
6946
6947 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6948 is_gimple_val, fb_rvalue);
6949 ret = MIN (ret, tret);
6950
6951 /* Handle OMP_FOR_INCR. */
6952 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6953 switch (TREE_CODE (t))
6954 {
6955 case PREINCREMENT_EXPR:
6956 case POSTINCREMENT_EXPR:
6957 if (orig_for_stmt != for_stmt)
6958 break;
6959 t = build_int_cst (TREE_TYPE (decl), 1);
6960 if (c)
6961 OMP_CLAUSE_LINEAR_STEP (c) = t;
6962 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6963 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6964 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6965 break;
6966
6967 case PREDECREMENT_EXPR:
6968 case POSTDECREMENT_EXPR:
6969 if (orig_for_stmt != for_stmt)
6970 break;
6971 t = build_int_cst (TREE_TYPE (decl), -1);
6972 if (c)
6973 OMP_CLAUSE_LINEAR_STEP (c) = t;
6974 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6975 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6976 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6977 break;
6978
6979 case MODIFY_EXPR:
6980 gcc_assert (TREE_OPERAND (t, 0) == decl);
6981 TREE_OPERAND (t, 0) = var;
6982
6983 t = TREE_OPERAND (t, 1);
6984 switch (TREE_CODE (t))
6985 {
6986 case PLUS_EXPR:
6987 if (TREE_OPERAND (t, 1) == decl)
6988 {
6989 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6990 TREE_OPERAND (t, 0) = var;
6991 break;
6992 }
6993
6994 /* Fallthru. */
6995 case MINUS_EXPR:
6996 case POINTER_PLUS_EXPR:
6997 gcc_assert (TREE_OPERAND (t, 0) == decl);
6998 TREE_OPERAND (t, 0) = var;
6999 break;
7000 default:
7001 gcc_unreachable ();
7002 }
7003
7004 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
7005 is_gimple_val, fb_rvalue);
7006 ret = MIN (ret, tret);
7007 if (c)
7008 {
7009 OMP_CLAUSE_LINEAR_STEP (c) = TREE_OPERAND (t, 1);
7010 if (TREE_CODE (t) == MINUS_EXPR)
7011 {
7012 t = TREE_OPERAND (t, 1);
7013 OMP_CLAUSE_LINEAR_STEP (c)
7014 = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
7015 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
7016 &for_pre_body, NULL,
7017 is_gimple_val, fb_rvalue);
7018 ret = MIN (ret, tret);
7019 }
7020 }
7021 break;
7022
7023 default:
7024 gcc_unreachable ();
7025 }
7026
7027 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
7028 && orig_for_stmt == for_stmt)
7029 {
7030 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
7031 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7032 && OMP_CLAUSE_DECL (c) == decl
7033 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
7034 {
7035 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7036 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7037 gcc_assert (TREE_OPERAND (t, 0) == var);
7038 t = TREE_OPERAND (t, 1);
7039 gcc_assert (TREE_CODE (t) == PLUS_EXPR
7040 || TREE_CODE (t) == MINUS_EXPR
7041 || TREE_CODE (t) == POINTER_PLUS_EXPR);
7042 gcc_assert (TREE_OPERAND (t, 0) == var);
7043 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
7044 TREE_OPERAND (t, 1));
7045 gimplify_assign (decl, t,
7046 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7047 }
7048 }
7049 }
7050
7051 BITMAP_FREE (has_decl_expr);
7052
7053 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
7054
7055 if (orig_for_stmt != for_stmt)
7056 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7057 {
7058 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7059 decl = TREE_OPERAND (t, 0);
7060 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
7061 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
7062 TREE_OPERAND (t, 0) = var;
7063 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7064 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
7065 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
7066 }
7067
7068 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt));
7069
7070 int kind;
7071 switch (TREE_CODE (orig_for_stmt))
7072 {
7073 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
7074 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
7075 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
7076 default:
7077 gcc_unreachable ();
7078 }
7079 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
7080 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
7081 for_pre_body);
7082 if (orig_for_stmt != for_stmt)
7083 gimple_omp_for_set_combined_p (gfor, true);
7084 if (gimplify_omp_ctxp
7085 && (gimplify_omp_ctxp->combined_loop
7086 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
7087 && gimplify_omp_ctxp->outer_context
7088 && gimplify_omp_ctxp->outer_context->combined_loop)))
7089 {
7090 gimple_omp_for_set_combined_into_p (gfor, true);
7091 if (gimplify_omp_ctxp->combined_loop)
7092 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
7093 else
7094 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
7095 }
7096
7097 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7098 {
7099 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7100 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
7101 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
7102 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7103 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
7104 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
7105 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7106 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
7107 }
7108
7109 gimplify_seq_add_stmt (pre_p, gfor);
7110 if (ret != GS_ALL_DONE)
7111 return GS_ERROR;
7112 *expr_p = NULL_TREE;
7113 return GS_ALL_DONE;
7114 }
7115
7116 /* Gimplify the gross structure of other OpenMP constructs.
7117 In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
7118 and OMP_TEAMS. */
7119
7120 static void
7121 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
7122 {
7123 tree expr = *expr_p;
7124 gimple stmt;
7125 gimple_seq body = NULL;
7126 enum omp_region_type ort = ORT_WORKSHARE;
7127
7128 switch (TREE_CODE (expr))
7129 {
7130 case OMP_SECTIONS:
7131 case OMP_SINGLE:
7132 break;
7133 case OMP_TARGET:
7134 ort = ORT_TARGET;
7135 break;
7136 case OMP_TARGET_DATA:
7137 ort = ORT_TARGET_DATA;
7138 break;
7139 case OMP_TEAMS:
7140 ort = ORT_TEAMS;
7141 break;
7142 default:
7143 gcc_unreachable ();
7144 }
7145 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
7146 if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
7147 {
7148 struct gimplify_ctx gctx;
7149 push_gimplify_context (&gctx);
7150 gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
7151 if (gimple_code (g) == GIMPLE_BIND)
7152 pop_gimplify_context (g);
7153 else
7154 pop_gimplify_context (NULL);
7155 if (ort == ORT_TARGET_DATA)
7156 {
7157 gimple_seq cleanup = NULL;
7158 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
7159 g = gimple_build_call (fn, 0);
7160 gimple_seq_add_stmt (&cleanup, g);
7161 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7162 body = NULL;
7163 gimple_seq_add_stmt (&body, g);
7164 }
7165 }
7166 else
7167 gimplify_and_add (OMP_BODY (expr), &body);
7168 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
7169
7170 switch (TREE_CODE (expr))
7171 {
7172 case OMP_SECTIONS:
7173 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
7174 break;
7175 case OMP_SINGLE:
7176 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
7177 break;
7178 case OMP_TARGET:
7179 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
7180 OMP_CLAUSES (expr));
7181 break;
7182 case OMP_TARGET_DATA:
7183 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
7184 OMP_CLAUSES (expr));
7185 break;
7186 case OMP_TEAMS:
7187 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
7188 break;
7189 default:
7190 gcc_unreachable ();
7191 }
7192
7193 gimplify_seq_add_stmt (pre_p, stmt);
7194 *expr_p = NULL_TREE;
7195 }
7196
7197 /* Gimplify the gross structure of OpenMP target update construct. */
7198
7199 static void
7200 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
7201 {
7202 tree expr = *expr_p;
7203 gimple stmt;
7204
7205 gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
7206 ORT_WORKSHARE);
7207 gimplify_adjust_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr));
7208 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
7209 OMP_TARGET_UPDATE_CLAUSES (expr));
7210
7211 gimplify_seq_add_stmt (pre_p, stmt);
7212 *expr_p = NULL_TREE;
7213 }
7214
7215 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
7216 stabilized the lhs of the atomic operation as *ADDR. Return true if
7217 EXPR is this stabilized form. */
7218
7219 static bool
7220 goa_lhs_expr_p (tree expr, tree addr)
7221 {
7222 /* Also include casts to other type variants. The C front end is fond
7223 of adding these for e.g. volatile variables. This is like
7224 STRIP_TYPE_NOPS but includes the main variant lookup. */
7225 STRIP_USELESS_TYPE_CONVERSION (expr);
7226
7227 if (TREE_CODE (expr) == INDIRECT_REF)
7228 {
7229 expr = TREE_OPERAND (expr, 0);
7230 while (expr != addr
7231 && (CONVERT_EXPR_P (expr)
7232 || TREE_CODE (expr) == NON_LVALUE_EXPR)
7233 && TREE_CODE (expr) == TREE_CODE (addr)
7234 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
7235 {
7236 expr = TREE_OPERAND (expr, 0);
7237 addr = TREE_OPERAND (addr, 0);
7238 }
7239 if (expr == addr)
7240 return true;
7241 return (TREE_CODE (addr) == ADDR_EXPR
7242 && TREE_CODE (expr) == ADDR_EXPR
7243 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
7244 }
7245 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
7246 return true;
7247 return false;
7248 }
7249
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* If the whole expression is the stabilized lhs, substitute the
     variable holding the loaded value.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* Anything already in gimple val form needs no further work.  */
  if (is_gimple_val (expr))
    return 0;

  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU: binary and comparison nodes also need operand 0
	 processed, which the tcc_unary case below does.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU: the two-operand truth ops share the operand-0
	     handling with TRUTH_NOT_EXPR.  */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* If the lhs did not occur anywhere within EXPR, evaluate EXPR into a
     temporary ahead of the atomic region so that region stays minimal.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
7320
7321 /* Gimplify an OMP_ATOMIC statement. */
7322
7323 static enum gimplify_status
7324 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
7325 {
7326 tree addr = TREE_OPERAND (*expr_p, 0);
7327 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
7328 ? NULL : TREE_OPERAND (*expr_p, 1);
7329 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7330 tree tmp_load;
7331 gimple loadstmt, storestmt;
7332
7333 tmp_load = create_tmp_reg (type, NULL);
7334 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
7335 return GS_ERROR;
7336
7337 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
7338 != GS_ALL_DONE)
7339 return GS_ERROR;
7340
7341 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
7342 gimplify_seq_add_stmt (pre_p, loadstmt);
7343 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
7344 != GS_ALL_DONE)
7345 return GS_ERROR;
7346
7347 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
7348 rhs = tmp_load;
7349 storestmt = gimple_build_omp_atomic_store (rhs);
7350 gimplify_seq_add_stmt (pre_p, storestmt);
7351 if (OMP_ATOMIC_SEQ_CST (*expr_p))
7352 {
7353 gimple_omp_atomic_set_seq_cst (loadstmt);
7354 gimple_omp_atomic_set_seq_cst (storestmt);
7355 }
7356 switch (TREE_CODE (*expr_p))
7357 {
7358 case OMP_ATOMIC_READ:
7359 case OMP_ATOMIC_CAPTURE_OLD:
7360 *expr_p = tmp_load;
7361 gimple_omp_atomic_set_need_value (loadstmt);
7362 break;
7363 case OMP_ATOMIC_CAPTURE_NEW:
7364 *expr_p = rhs;
7365 gimple_omp_atomic_set_need_value (storestmt);
7366 break;
7367 default:
7368 *expr_p = NULL;
7369 break;
7370 }
7371
7372 return GS_ALL_DONE;
7373 }
7374
7375 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
7376 body, and adding some EH bits. */
7377
7378 static enum gimplify_status
7379 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7380 {
7381 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7382 gimple g;
7383 gimple_seq body = NULL;
7384 struct gimplify_ctx gctx;
7385 int subcode = 0;
7386
7387 /* Wrap the transaction body in a BIND_EXPR so we have a context
7388 where to put decls for OpenMP. */
7389 if (TREE_CODE (tbody) != BIND_EXPR)
7390 {
7391 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7392 TREE_SIDE_EFFECTS (bind) = 1;
7393 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7394 TRANSACTION_EXPR_BODY (expr) = bind;
7395 }
7396
7397 push_gimplify_context (&gctx);
7398 temp = voidify_wrapper_expr (*expr_p, NULL);
7399
7400 g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7401 pop_gimplify_context (g);
7402
7403 g = gimple_build_transaction (body, NULL);
7404 if (TRANSACTION_EXPR_OUTER (expr))
7405 subcode = GTMA_IS_OUTER;
7406 else if (TRANSACTION_EXPR_RELAXED (expr))
7407 subcode = GTMA_IS_RELAXED;
7408 gimple_transaction_set_subcode (g, subcode);
7409
7410 gimplify_seq_add_stmt (pre_p, g);
7411
7412 if (temp)
7413 {
7414 *expr_p = temp;
7415 return GS_OK;
7416 }
7417
7418 *expr_p = NULL_TREE;
7419 return GS_ALL_DONE;
7420 }
7421
7422 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
7423 expression produces a value to be used as an operand inside a GIMPLE
7424 statement, the value will be stored back in *EXPR_P. This value will
7425 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7426 an SSA_NAME. The corresponding sequence of GIMPLE statements is
7427 emitted in PRE_P and POST_P.
7428
7429 Additionally, this process may overwrite parts of the input
7430 expression during gimplification. Ideally, it should be
7431 possible to do non-destructive gimplification.
7432
7433 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
7434 the expression needs to evaluate to a value to be used as
7435 an operand in a GIMPLE statement, this value will be stored in
7436 *EXPR_P on exit. This happens when the caller specifies one
7437 of fb_lvalue or fb_rvalue fallback flags.
7438
7439 PRE_P will contain the sequence of GIMPLE statements corresponding
7440 to the evaluation of EXPR and all the side-effects that must
7441 be executed before the main expression. On exit, the last
7442 statement of PRE_P is the core statement being gimplified. For
7443 instance, when gimplifying 'if (++a)' the last statement in
7444 PRE_P will be 'if (t.1)' where t.1 is the result of
7445 pre-incrementing 'a'.
7446
7447 POST_P will contain the sequence of GIMPLE statements corresponding
7448 to the evaluation of all the side-effects that must be executed
7449 after the main expression. If this is NULL, the post
7450 side-effects are stored at the end of PRE_P.
7451
7452 The reason why the output is split in two is to handle post
7453 side-effects explicitly. In some cases, an expression may have
7454 inner and outer post side-effects which need to be emitted in
7455 an order different from the one given by the recursive
7456 traversal. For instance, for the expression (*p--)++ the post
7457 side-effects of '--' must actually occur *after* the post
7458 side-effects of '++'. However, gimplification will first visit
7459 the inner expression, so if a separate POST sequence was not
7460 used, the resulting sequence would be:
7461
7462 1 t.1 = *p
7463 2 p = p - 1
7464 3 t.2 = t.1 + 1
7465 4 *p = t.2
7466
7467 However, the post-decrement operation in line #2 must not be
7468 evaluated until after the store to *p at line #4, so the
7469 correct sequence should be:
7470
7471 1 t.1 = *p
7472 2 t.2 = t.1 + 1
7473 3 *p = t.2
7474 4 p = p - 1
7475
7476 So, by specifying a separate post queue, it is possible
7477 to emit the post side-effects in the correct order.
7478 If POST_P is NULL, an internal queue will be used. Before
7479 returning to the caller, the sequence POST_P is appended to
7480 the main output sequence PRE_P.
7481
7482 GIMPLE_TEST_F points to a function that takes a tree T and
7483 returns nonzero if T is in the GIMPLE form requested by the
7484 caller. The GIMPLE predicates are in gimple.c.
7485
7486 FALLBACK tells the function what sort of a temporary we want if
7487 gimplification cannot produce an expression that complies with
7488 GIMPLE_TEST_F.
7489
7490 fb_none means that no temporary should be generated
7491 fb_rvalue means that an rvalue is OK to generate
7492 fb_lvalue means that an lvalue is OK to generate
7493 fb_either means that either is OK, but an lvalue is preferable.
7494 fb_mayfail means that gimplification may fail (in which case
7495 GS_ERROR will be returned)
7496
7497 The return value is either GS_ERROR or GS_ALL_DONE, since this
7498 function iterates until EXPR is completely gimplified or an error
7499 occurs. */
7500
7501 enum gimplify_status
7502 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7503 bool (*gimple_test_f) (tree), fallback_t fallback)
7504 {
7505 tree tmp;
7506 gimple_seq internal_pre = NULL;
7507 gimple_seq internal_post = NULL;
7508 tree save_expr;
7509 bool is_statement;
7510 location_t saved_location;
7511 enum gimplify_status ret;
7512 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7513
7514 save_expr = *expr_p;
7515 if (save_expr == NULL_TREE)
7516 return GS_ALL_DONE;
7517
7518 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
7519 is_statement = gimple_test_f == is_gimple_stmt;
7520 if (is_statement)
7521 gcc_assert (pre_p);
7522
7523 /* Consistency checks. */
7524 if (gimple_test_f == is_gimple_reg)
7525 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7526 else if (gimple_test_f == is_gimple_val
7527 || gimple_test_f == is_gimple_call_addr
7528 || gimple_test_f == is_gimple_condexpr
7529 || gimple_test_f == is_gimple_mem_rhs
7530 || gimple_test_f == is_gimple_mem_rhs_or_call
7531 || gimple_test_f == is_gimple_reg_rhs
7532 || gimple_test_f == is_gimple_reg_rhs_or_call
7533 || gimple_test_f == is_gimple_asm_val
7534 || gimple_test_f == is_gimple_mem_ref_addr)
7535 gcc_assert (fallback & fb_rvalue);
7536 else if (gimple_test_f == is_gimple_min_lval
7537 || gimple_test_f == is_gimple_lvalue)
7538 gcc_assert (fallback & fb_lvalue);
7539 else if (gimple_test_f == is_gimple_addressable)
7540 gcc_assert (fallback & fb_either);
7541 else if (gimple_test_f == is_gimple_stmt)
7542 gcc_assert (fallback == fb_none);
7543 else
7544 {
7545 /* We should have recognized the GIMPLE_TEST_F predicate to
7546 know what kind of fallback to use in case a temporary is
7547 needed to hold the value or address of *EXPR_P. */
7548 gcc_unreachable ();
7549 }
7550
7551 /* We used to check the predicate here and return immediately if it
7552 succeeds. This is wrong; the design is for gimplification to be
7553 idempotent, and for the predicates to only test for valid forms, not
7554 whether they are fully simplified. */
7555 if (pre_p == NULL)
7556 pre_p = &internal_pre;
7557
7558 if (post_p == NULL)
7559 post_p = &internal_post;
7560
7561 /* Remember the last statements added to PRE_P and POST_P. Every
7562 new statement added by the gimplification helpers needs to be
7563 annotated with location information. To centralize the
7564 responsibility, we remember the last statement that had been
7565 added to both queues before gimplifying *EXPR_P. If
7566 gimplification produces new statements in PRE_P and POST_P, those
7567 statements will be annotated with the same location information
7568 as *EXPR_P. */
7569 pre_last_gsi = gsi_last (*pre_p);
7570 post_last_gsi = gsi_last (*post_p);
7571
7572 saved_location = input_location;
7573 if (save_expr != error_mark_node
7574 && EXPR_HAS_LOCATION (*expr_p))
7575 input_location = EXPR_LOCATION (*expr_p);
7576
7577 /* Loop over the specific gimplifiers until the toplevel node
7578 remains the same. */
7579 do
7580 {
7581 /* Strip away as many useless type conversions as possible
7582 at the toplevel. */
7583 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7584
7585 /* Remember the expr. */
7586 save_expr = *expr_p;
7587
7588 /* Die, die, die, my darling. */
7589 if (save_expr == error_mark_node
7590 || (TREE_TYPE (save_expr)
7591 && TREE_TYPE (save_expr) == error_mark_node))
7592 {
7593 ret = GS_ERROR;
7594 break;
7595 }
7596
7597 /* Do any language-specific gimplification. */
7598 ret = ((enum gimplify_status)
7599 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7600 if (ret == GS_OK)
7601 {
7602 if (*expr_p == NULL_TREE)
7603 break;
7604 if (*expr_p != save_expr)
7605 continue;
7606 }
7607 else if (ret != GS_UNHANDLED)
7608 break;
7609
7610 /* Make sure that all the cases set 'ret' appropriately. */
7611 ret = GS_UNHANDLED;
7612 switch (TREE_CODE (*expr_p))
7613 {
7614 /* First deal with the special cases. */
7615
7616 case POSTINCREMENT_EXPR:
7617 case POSTDECREMENT_EXPR:
7618 case PREINCREMENT_EXPR:
7619 case PREDECREMENT_EXPR:
7620 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7621 fallback != fb_none,
7622 TREE_TYPE (*expr_p));
7623 break;
7624
7625 case ARRAY_REF:
7626 case ARRAY_RANGE_REF:
7627 case REALPART_EXPR:
7628 case IMAGPART_EXPR:
7629 case COMPONENT_REF:
7630 case VIEW_CONVERT_EXPR:
7631 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
7632 fallback ? fallback : fb_rvalue);
7633 break;
7634
7635 case COND_EXPR:
7636 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
7637
7638 /* C99 code may assign to an array in a structure value of a
7639 conditional expression, and this has undefined behavior
7640 only on execution, so create a temporary if an lvalue is
7641 required. */
7642 if (fallback == fb_lvalue)
7643 {
7644 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7645 mark_addressable (*expr_p);
7646 ret = GS_OK;
7647 }
7648 break;
7649
7650 case CALL_EXPR:
7651 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7652
7653 /* C99 code may assign to an array in a structure returned
7654 from a function, and this has undefined behavior only on
7655 execution, so create a temporary if an lvalue is
7656 required. */
7657 if (fallback == fb_lvalue)
7658 {
7659 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7660 mark_addressable (*expr_p);
7661 ret = GS_OK;
7662 }
7663 break;
7664
7665 case TREE_LIST:
7666 gcc_unreachable ();
7667
7668 case COMPOUND_EXPR:
7669 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7670 break;
7671
7672 case COMPOUND_LITERAL_EXPR:
7673 ret = gimplify_compound_literal_expr (expr_p, pre_p,
7674 gimple_test_f, fallback);
7675 break;
7676
7677 case MODIFY_EXPR:
7678 case INIT_EXPR:
7679 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7680 fallback != fb_none);
7681 break;
7682
7683 case TRUTH_ANDIF_EXPR:
7684 case TRUTH_ORIF_EXPR:
7685 {
7686 /* Preserve the original type of the expression and the
7687 source location of the outer expression. */
7688 tree org_type = TREE_TYPE (*expr_p);
7689 *expr_p = gimple_boolify (*expr_p);
7690 *expr_p = build3_loc (input_location, COND_EXPR,
7691 org_type, *expr_p,
7692 fold_convert_loc
7693 (input_location,
7694 org_type, boolean_true_node),
7695 fold_convert_loc
7696 (input_location,
7697 org_type, boolean_false_node));
7698 ret = GS_OK;
7699 break;
7700 }
7701
7702 case TRUTH_NOT_EXPR:
7703 {
7704 tree type = TREE_TYPE (*expr_p);
7705 /* The parsers are careful to generate TRUTH_NOT_EXPR
7706 only with operands that are always zero or one.
7707 We do not fold here but handle the only interesting case
7708 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
7709 *expr_p = gimple_boolify (*expr_p);
7710 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7711 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7712 TREE_TYPE (*expr_p),
7713 TREE_OPERAND (*expr_p, 0));
7714 else
7715 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7716 TREE_TYPE (*expr_p),
7717 TREE_OPERAND (*expr_p, 0),
7718 build_int_cst (TREE_TYPE (*expr_p), 1));
7719 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7720 *expr_p = fold_convert_loc (input_location, type, *expr_p);
7721 ret = GS_OK;
7722 break;
7723 }
7724
7725 case ADDR_EXPR:
7726 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7727 break;
7728
7729 case VA_ARG_EXPR:
7730 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7731 break;
7732
7733 CASE_CONVERT:
7734 if (IS_EMPTY_STMT (*expr_p))
7735 {
7736 ret = GS_ALL_DONE;
7737 break;
7738 }
7739
7740 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7741 || fallback == fb_none)
7742 {
7743 /* Just strip a conversion to void (or in void context) and
7744 try again. */
7745 *expr_p = TREE_OPERAND (*expr_p, 0);
7746 ret = GS_OK;
7747 break;
7748 }
7749
7750 ret = gimplify_conversion (expr_p);
7751 if (ret == GS_ERROR)
7752 break;
7753 if (*expr_p != save_expr)
7754 break;
7755 /* FALLTHRU */
7756
7757 case FIX_TRUNC_EXPR:
7758 /* unary_expr: ... | '(' cast ')' val | ... */
7759 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7760 is_gimple_val, fb_rvalue);
7761 recalculate_side_effects (*expr_p);
7762 break;
7763
7764 case INDIRECT_REF:
7765 {
7766 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7767 bool notrap = TREE_THIS_NOTRAP (*expr_p);
7768 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7769
7770 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7771 if (*expr_p != save_expr)
7772 {
7773 ret = GS_OK;
7774 break;
7775 }
7776
7777 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7778 is_gimple_reg, fb_rvalue);
7779 if (ret == GS_ERROR)
7780 break;
7781
7782 recalculate_side_effects (*expr_p);
7783 *expr_p = fold_build2_loc (input_location, MEM_REF,
7784 TREE_TYPE (*expr_p),
7785 TREE_OPERAND (*expr_p, 0),
7786 build_int_cst (saved_ptr_type, 0));
7787 TREE_THIS_VOLATILE (*expr_p) = volatilep;
7788 TREE_THIS_NOTRAP (*expr_p) = notrap;
7789 ret = GS_OK;
7790 break;
7791 }
7792
7793 /* We arrive here through the various re-gimplifcation paths. */
7794 case MEM_REF:
7795 /* First try re-folding the whole thing. */
7796 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7797 TREE_OPERAND (*expr_p, 0),
7798 TREE_OPERAND (*expr_p, 1));
7799 if (tmp)
7800 {
7801 *expr_p = tmp;
7802 recalculate_side_effects (*expr_p);
7803 ret = GS_OK;
7804 break;
7805 }
7806 /* Avoid re-gimplifying the address operand if it is already
7807 in suitable form. Re-gimplifying would mark the address
7808 operand addressable. Always gimplify when not in SSA form
7809 as we still may have to gimplify decls with value-exprs. */
7810 if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7811 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7812 {
7813 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7814 is_gimple_mem_ref_addr, fb_rvalue);
7815 if (ret == GS_ERROR)
7816 break;
7817 }
7818 recalculate_side_effects (*expr_p);
7819 ret = GS_ALL_DONE;
7820 break;
7821
7822 /* Constants need not be gimplified. */
7823 case INTEGER_CST:
7824 case REAL_CST:
7825 case FIXED_CST:
7826 case STRING_CST:
7827 case COMPLEX_CST:
7828 case VECTOR_CST:
7829 ret = GS_ALL_DONE;
7830 break;
7831
7832 case CONST_DECL:
7833 /* If we require an lvalue, such as for ADDR_EXPR, retain the
7834 CONST_DECL node. Otherwise the decl is replaceable by its
7835 value. */
7836 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
7837 if (fallback & fb_lvalue)
7838 ret = GS_ALL_DONE;
7839 else
7840 {
7841 *expr_p = DECL_INITIAL (*expr_p);
7842 ret = GS_OK;
7843 }
7844 break;
7845
7846 case DECL_EXPR:
7847 ret = gimplify_decl_expr (expr_p, pre_p);
7848 break;
7849
7850 case BIND_EXPR:
7851 ret = gimplify_bind_expr (expr_p, pre_p);
7852 break;
7853
7854 case LOOP_EXPR:
7855 ret = gimplify_loop_expr (expr_p, pre_p);
7856 break;
7857
7858 case SWITCH_EXPR:
7859 ret = gimplify_switch_expr (expr_p, pre_p);
7860 break;
7861
7862 case EXIT_EXPR:
7863 ret = gimplify_exit_expr (expr_p);
7864 break;
7865
7866 case GOTO_EXPR:
7867 /* If the target is not LABEL, then it is a computed jump
7868 and the target needs to be gimplified. */
7869 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7870 {
7871 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7872 NULL, is_gimple_val, fb_rvalue);
7873 if (ret == GS_ERROR)
7874 break;
7875 }
7876 gimplify_seq_add_stmt (pre_p,
7877 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7878 ret = GS_ALL_DONE;
7879 break;
7880
7881 case PREDICT_EXPR:
7882 gimplify_seq_add_stmt (pre_p,
7883 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7884 PREDICT_EXPR_OUTCOME (*expr_p)));
7885 ret = GS_ALL_DONE;
7886 break;
7887
7888 case LABEL_EXPR:
7889 ret = GS_ALL_DONE;
7890 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7891 == current_function_decl);
7892 gimplify_seq_add_stmt (pre_p,
7893 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7894 break;
7895
7896 case CASE_LABEL_EXPR:
7897 ret = gimplify_case_label_expr (expr_p, pre_p);
7898 break;
7899
7900 case RETURN_EXPR:
7901 ret = gimplify_return_expr (*expr_p, pre_p);
7902 break;
7903
7904 case CONSTRUCTOR:
7905 /* Don't reduce this in place; let gimplify_init_constructor work its
7906 magic. Buf if we're just elaborating this for side effects, just
7907 gimplify any element that has side-effects. */
7908 if (fallback == fb_none)
7909 {
7910 unsigned HOST_WIDE_INT ix;
7911 tree val;
7912 tree temp = NULL_TREE;
7913 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7914 if (TREE_SIDE_EFFECTS (val))
7915 append_to_statement_list (val, &temp);
7916
7917 *expr_p = temp;
7918 ret = temp ? GS_OK : GS_ALL_DONE;
7919 }
7920 /* C99 code may assign to an array in a constructed
7921 structure or union, and this has undefined behavior only
7922 on execution, so create a temporary if an lvalue is
7923 required. */
7924 else if (fallback == fb_lvalue)
7925 {
7926 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7927 mark_addressable (*expr_p);
7928 ret = GS_OK;
7929 }
7930 else
7931 ret = GS_ALL_DONE;
7932 break;
7933
7934 /* The following are special cases that are not handled by the
7935 original GIMPLE grammar. */
7936
7937 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7938 eliminated. */
7939 case SAVE_EXPR:
7940 ret = gimplify_save_expr (expr_p, pre_p, post_p);
7941 break;
7942
7943 case BIT_FIELD_REF:
7944 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7945 post_p, is_gimple_lvalue, fb_either);
7946 recalculate_side_effects (*expr_p);
7947 break;
7948
7949 case TARGET_MEM_REF:
7950 {
7951 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7952
7953 if (TMR_BASE (*expr_p))
7954 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7955 post_p, is_gimple_mem_ref_addr, fb_either);
7956 if (TMR_INDEX (*expr_p))
7957 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7958 post_p, is_gimple_val, fb_rvalue);
7959 if (TMR_INDEX2 (*expr_p))
7960 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7961 post_p, is_gimple_val, fb_rvalue);
7962 /* TMR_STEP and TMR_OFFSET are always integer constants. */
7963 ret = MIN (r0, r1);
7964 }
7965 break;
7966
7967 case NON_LVALUE_EXPR:
7968 /* This should have been stripped above. */
7969 gcc_unreachable ();
7970
7971 case ASM_EXPR:
7972 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7973 break;
7974
7975 case TRY_FINALLY_EXPR:
7976 case TRY_CATCH_EXPR:
7977 {
7978 gimple_seq eval, cleanup;
7979 gimple try_;
7980
7981 /* Calls to destructors are generated automatically in FINALLY/CATCH
7982 block. They should have location as UNKNOWN_LOCATION. However,
7983 gimplify_call_expr will reset these call stmts to input_location
7984 if it finds stmt's location is unknown. To prevent resetting for
7985 destructors, we set the input_location to unknown.
7986 Note that this only affects the destructor calls in FINALLY/CATCH
7987 block, and will automatically reset to its original value by the
7988 end of gimplify_expr. */
7989 input_location = UNKNOWN_LOCATION;
7990 eval = cleanup = NULL;
7991 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7992 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7993 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7994 if (gimple_seq_empty_p (cleanup))
7995 {
7996 gimple_seq_add_seq (pre_p, eval);
7997 ret = GS_ALL_DONE;
7998 break;
7999 }
8000 try_ = gimple_build_try (eval, cleanup,
8001 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
8002 ? GIMPLE_TRY_FINALLY
8003 : GIMPLE_TRY_CATCH);
8004 if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
8005 gimple_set_location (try_, saved_location);
8006 else
8007 gimple_set_location (try_, EXPR_LOCATION (save_expr));
8008 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
8009 gimple_try_set_catch_is_cleanup (try_,
8010 TRY_CATCH_IS_CLEANUP (*expr_p));
8011 gimplify_seq_add_stmt (pre_p, try_);
8012 ret = GS_ALL_DONE;
8013 break;
8014 }
8015
8016 case CLEANUP_POINT_EXPR:
8017 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
8018 break;
8019
8020 case TARGET_EXPR:
8021 ret = gimplify_target_expr (expr_p, pre_p, post_p);
8022 break;
8023
8024 case CATCH_EXPR:
8025 {
8026 gimple c;
8027 gimple_seq handler = NULL;
8028 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
8029 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
8030 gimplify_seq_add_stmt (pre_p, c);
8031 ret = GS_ALL_DONE;
8032 break;
8033 }
8034
8035 case EH_FILTER_EXPR:
8036 {
8037 gimple ehf;
8038 gimple_seq failure = NULL;
8039
8040 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
8041 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
8042 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
8043 gimplify_seq_add_stmt (pre_p, ehf);
8044 ret = GS_ALL_DONE;
8045 break;
8046 }
8047
8048 case OBJ_TYPE_REF:
8049 {
8050 enum gimplify_status r0, r1;
8051 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
8052 post_p, is_gimple_val, fb_rvalue);
8053 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
8054 post_p, is_gimple_val, fb_rvalue);
8055 TREE_SIDE_EFFECTS (*expr_p) = 0;
8056 ret = MIN (r0, r1);
8057 }
8058 break;
8059
8060 case LABEL_DECL:
8061 /* We get here when taking the address of a label. We mark
8062 the label as "forced"; meaning it can never be removed and
8063 it is a potential target for any computed goto. */
8064 FORCED_LABEL (*expr_p) = 1;
8065 ret = GS_ALL_DONE;
8066 break;
8067
8068 case STATEMENT_LIST:
8069 ret = gimplify_statement_list (expr_p, pre_p);
8070 break;
8071
8072 case WITH_SIZE_EXPR:
8073 {
8074 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8075 post_p == &internal_post ? NULL : post_p,
8076 gimple_test_f, fallback);
8077 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8078 is_gimple_val, fb_rvalue);
8079 ret = GS_ALL_DONE;
8080 }
8081 break;
8082
8083 case VAR_DECL:
8084 case PARM_DECL:
8085 ret = gimplify_var_or_parm_decl (expr_p);
8086 break;
8087
8088 case RESULT_DECL:
8089 /* When within an OpenMP context, notice uses of variables. */
8090 if (gimplify_omp_ctxp)
8091 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
8092 ret = GS_ALL_DONE;
8093 break;
8094
8095 case SSA_NAME:
8096 /* Allow callbacks into the gimplifier during optimization. */
8097 ret = GS_ALL_DONE;
8098 break;
8099
8100 case OMP_PARALLEL:
8101 gimplify_omp_parallel (expr_p, pre_p);
8102 ret = GS_ALL_DONE;
8103 break;
8104
8105 case OMP_TASK:
8106 gimplify_omp_task (expr_p, pre_p);
8107 ret = GS_ALL_DONE;
8108 break;
8109
8110 case OMP_FOR:
8111 case OMP_SIMD:
8112 case OMP_DISTRIBUTE:
8113 ret = gimplify_omp_for (expr_p, pre_p);
8114 break;
8115
8116 case OMP_SECTIONS:
8117 case OMP_SINGLE:
8118 case OMP_TARGET:
8119 case OMP_TARGET_DATA:
8120 case OMP_TEAMS:
8121 gimplify_omp_workshare (expr_p, pre_p);
8122 ret = GS_ALL_DONE;
8123 break;
8124
8125 case OMP_TARGET_UPDATE:
8126 gimplify_omp_target_update (expr_p, pre_p);
8127 ret = GS_ALL_DONE;
8128 break;
8129
8130 case OMP_SECTION:
8131 case OMP_MASTER:
8132 case OMP_TASKGROUP:
8133 case OMP_ORDERED:
8134 case OMP_CRITICAL:
8135 {
8136 gimple_seq body = NULL;
8137 gimple g;
8138
8139 gimplify_and_add (OMP_BODY (*expr_p), &body);
8140 switch (TREE_CODE (*expr_p))
8141 {
8142 case OMP_SECTION:
8143 g = gimple_build_omp_section (body);
8144 break;
8145 case OMP_MASTER:
8146 g = gimple_build_omp_master (body);
8147 break;
8148 case OMP_TASKGROUP:
8149 {
8150 gimple_seq cleanup = NULL;
8151 tree fn
8152 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
8153 g = gimple_build_call (fn, 0);
8154 gimple_seq_add_stmt (&cleanup, g);
8155 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
8156 body = NULL;
8157 gimple_seq_add_stmt (&body, g);
8158 g = gimple_build_omp_taskgroup (body);
8159 }
8160 break;
8161 case OMP_ORDERED:
8162 g = gimple_build_omp_ordered (body);
8163 break;
8164 case OMP_CRITICAL:
8165 g = gimple_build_omp_critical (body,
8166 OMP_CRITICAL_NAME (*expr_p));
8167 break;
8168 default:
8169 gcc_unreachable ();
8170 }
8171 gimplify_seq_add_stmt (pre_p, g);
8172 ret = GS_ALL_DONE;
8173 break;
8174 }
8175
8176 case OMP_ATOMIC:
8177 case OMP_ATOMIC_READ:
8178 case OMP_ATOMIC_CAPTURE_OLD:
8179 case OMP_ATOMIC_CAPTURE_NEW:
8180 ret = gimplify_omp_atomic (expr_p, pre_p);
8181 break;
8182
8183 case TRANSACTION_EXPR:
8184 ret = gimplify_transaction (expr_p, pre_p);
8185 break;
8186
8187 case TRUTH_AND_EXPR:
8188 case TRUTH_OR_EXPR:
8189 case TRUTH_XOR_EXPR:
8190 {
8191 tree orig_type = TREE_TYPE (*expr_p);
8192 tree new_type, xop0, xop1;
8193 *expr_p = gimple_boolify (*expr_p);
8194 new_type = TREE_TYPE (*expr_p);
8195 if (!useless_type_conversion_p (orig_type, new_type))
8196 {
8197 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
8198 ret = GS_OK;
8199 break;
8200 }
8201
8202 /* Boolified binary truth expressions are semantically equivalent
8203 to bitwise binary expressions. Canonicalize them to the
8204 bitwise variant. */
8205 switch (TREE_CODE (*expr_p))
8206 {
8207 case TRUTH_AND_EXPR:
8208 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
8209 break;
8210 case TRUTH_OR_EXPR:
8211 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
8212 break;
8213 case TRUTH_XOR_EXPR:
8214 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
8215 break;
8216 default:
8217 break;
8218 }
8219 /* Now make sure that operands have compatible type to
8220 expression's new_type. */
8221 xop0 = TREE_OPERAND (*expr_p, 0);
8222 xop1 = TREE_OPERAND (*expr_p, 1);
8223 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
8224 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
8225 new_type,
8226 xop0);
8227 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8228 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8229 new_type,
8230 xop1);
8231 /* Continue classified as tcc_binary. */
8232 goto expr_2;
8233 }
8234
8235 case FMA_EXPR:
8236 case VEC_COND_EXPR:
8237 case VEC_PERM_EXPR:
8238 /* Classified as tcc_expression. */
8239 goto expr_3;
8240
8241 case POINTER_PLUS_EXPR:
8242 {
8243 enum gimplify_status r0, r1;
8244 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8245 post_p, is_gimple_val, fb_rvalue);
8246 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8247 post_p, is_gimple_val, fb_rvalue);
8248 recalculate_side_effects (*expr_p);
8249 ret = MIN (r0, r1);
8250 /* Convert &X + CST to invariant &MEM[&X, CST]. Do this
8251 after gimplifying operands - this is similar to how
8252 it would be folding all gimplified stmts on creation
8253 to have them canonicalized, which is what we eventually
8254 should do anyway. */
8255 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
8256 && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
8257 {
8258 *expr_p = build_fold_addr_expr_with_type_loc
8259 (input_location,
8260 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
8261 TREE_OPERAND (*expr_p, 0),
8262 fold_convert (ptr_type_node,
8263 TREE_OPERAND (*expr_p, 1))),
8264 TREE_TYPE (*expr_p));
8265 ret = MIN (ret, GS_OK);
8266 }
8267 break;
8268 }
8269
8270 default:
8271 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
8272 {
8273 case tcc_comparison:
8274 /* Handle comparison of objects of non scalar mode aggregates
8275 with a call to memcmp. It would be nice to only have to do
8276 this for variable-sized objects, but then we'd have to allow
8277 the same nest of reference nodes we allow for MODIFY_EXPR and
8278 that's too complex.
8279
8280 Compare scalar mode aggregates as scalar mode values. Using
8281 memcmp for them would be very inefficient at best, and is
8282 plain wrong if bitfields are involved. */
8283 {
8284 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
8285
8286 /* Vector comparisons need no boolification. */
8287 if (TREE_CODE (type) == VECTOR_TYPE)
8288 goto expr_2;
8289 else if (!AGGREGATE_TYPE_P (type))
8290 {
8291 tree org_type = TREE_TYPE (*expr_p);
8292 *expr_p = gimple_boolify (*expr_p);
8293 if (!useless_type_conversion_p (org_type,
8294 TREE_TYPE (*expr_p)))
8295 {
8296 *expr_p = fold_convert_loc (input_location,
8297 org_type, *expr_p);
8298 ret = GS_OK;
8299 }
8300 else
8301 goto expr_2;
8302 }
8303 else if (TYPE_MODE (type) != BLKmode)
8304 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8305 else
8306 ret = gimplify_variable_sized_compare (expr_p);
8307
8308 break;
8309 }
8310
8311 /* If *EXPR_P does not need to be special-cased, handle it
8312 according to its class. */
8313 case tcc_unary:
8314 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8315 post_p, is_gimple_val, fb_rvalue);
8316 break;
8317
8318 case tcc_binary:
8319 expr_2:
8320 {
8321 enum gimplify_status r0, r1;
8322
8323 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8324 post_p, is_gimple_val, fb_rvalue);
8325 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8326 post_p, is_gimple_val, fb_rvalue);
8327
8328 ret = MIN (r0, r1);
8329 break;
8330 }
8331
8332 expr_3:
8333 {
8334 enum gimplify_status r0, r1, r2;
8335
8336 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8337 post_p, is_gimple_val, fb_rvalue);
8338 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8339 post_p, is_gimple_val, fb_rvalue);
8340 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8341 post_p, is_gimple_val, fb_rvalue);
8342
8343 ret = MIN (MIN (r0, r1), r2);
8344 break;
8345 }
8346
8347 case tcc_declaration:
8348 case tcc_constant:
8349 ret = GS_ALL_DONE;
8350 goto dont_recalculate;
8351
8352 default:
8353 gcc_unreachable ();
8354 }
8355
8356 recalculate_side_effects (*expr_p);
8357
8358 dont_recalculate:
8359 break;
8360 }
8361
8362 gcc_assert (*expr_p || ret != GS_OK);
8363 }
8364 while (ret == GS_OK);
8365
8366 /* If we encountered an error_mark somewhere nested inside, either
8367 stub out the statement or propagate the error back out. */
8368 if (ret == GS_ERROR)
8369 {
8370 if (is_statement)
8371 *expr_p = NULL;
8372 goto out;
8373 }
8374
8375 /* This was only valid as a return value from the langhook, which
8376 we handled. Make sure it doesn't escape from any other context. */
8377 gcc_assert (ret != GS_UNHANDLED);
8378
8379 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
8380 {
8381 /* We aren't looking for a value, and we don't have a valid
8382 statement. If it doesn't have side-effects, throw it away. */
8383 if (!TREE_SIDE_EFFECTS (*expr_p))
8384 *expr_p = NULL;
8385 else if (!TREE_THIS_VOLATILE (*expr_p))
8386 {
8387 /* This is probably a _REF that contains something nested that
8388 has side effects. Recurse through the operands to find it. */
8389 enum tree_code code = TREE_CODE (*expr_p);
8390
8391 switch (code)
8392 {
8393 case COMPONENT_REF:
8394 case REALPART_EXPR:
8395 case IMAGPART_EXPR:
8396 case VIEW_CONVERT_EXPR:
8397 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8398 gimple_test_f, fallback);
8399 break;
8400
8401 case ARRAY_REF:
8402 case ARRAY_RANGE_REF:
8403 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8404 gimple_test_f, fallback);
8405 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8406 gimple_test_f, fallback);
8407 break;
8408
8409 default:
8410 /* Anything else with side-effects must be converted to
8411 a valid statement before we get here. */
8412 gcc_unreachable ();
8413 }
8414
8415 *expr_p = NULL;
8416 }
8417 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8418 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
8419 {
8420 /* Historically, the compiler has treated a bare reference
8421 to a non-BLKmode volatile lvalue as forcing a load. */
8422 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
8423
8424 /* Normally, we do not want to create a temporary for a
8425 TREE_ADDRESSABLE type because such a type should not be
8426 copied by bitwise-assignment. However, we make an
8427 exception here, as all we are doing here is ensuring that
8428 we read the bytes that make up the type. We use
8429 create_tmp_var_raw because create_tmp_var will abort when
8430 given a TREE_ADDRESSABLE type. */
8431 tree tmp = create_tmp_var_raw (type, "vol");
8432 gimple_add_tmp_var (tmp);
8433 gimplify_assign (tmp, *expr_p, pre_p);
8434 *expr_p = NULL;
8435 }
8436 else
8437 /* We can't do anything useful with a volatile reference to
8438 an incomplete type, so just throw it away. Likewise for
8439 a BLKmode type, since any implicit inner load should
8440 already have been turned into an explicit one by the
8441 gimplification process. */
8442 *expr_p = NULL;
8443 }
8444
8445 /* If we are gimplifying at the statement level, we're done. Tack
8446 everything together and return. */
8447 if (fallback == fb_none || is_statement)
8448 {
8449 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8450 it out for GC to reclaim it. */
8451 *expr_p = NULL_TREE;
8452
8453 if (!gimple_seq_empty_p (internal_pre)
8454 || !gimple_seq_empty_p (internal_post))
8455 {
8456 gimplify_seq_add_seq (&internal_pre, internal_post);
8457 gimplify_seq_add_seq (pre_p, internal_pre);
8458 }
8459
8460 /* The result of gimplifying *EXPR_P is going to be the last few
8461 statements in *PRE_P and *POST_P. Add location information
8462 to all the statements that were added by the gimplification
8463 helpers. */
8464 if (!gimple_seq_empty_p (*pre_p))
8465 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8466
8467 if (!gimple_seq_empty_p (*post_p))
8468 annotate_all_with_location_after (*post_p, post_last_gsi,
8469 input_location);
8470
8471 goto out;
8472 }
8473
8474 #ifdef ENABLE_GIMPLE_CHECKING
8475 if (*expr_p)
8476 {
8477 enum tree_code code = TREE_CODE (*expr_p);
8478 /* These expressions should already be in gimple IR form. */
8479 gcc_assert (code != MODIFY_EXPR
8480 && code != ASM_EXPR
8481 && code != BIND_EXPR
8482 && code != CATCH_EXPR
8483 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
8484 && code != EH_FILTER_EXPR
8485 && code != GOTO_EXPR
8486 && code != LABEL_EXPR
8487 && code != LOOP_EXPR
8488 && code != SWITCH_EXPR
8489 && code != TRY_FINALLY_EXPR
8490 && code != OMP_CRITICAL
8491 && code != OMP_FOR
8492 && code != OMP_MASTER
8493 && code != OMP_TASKGROUP
8494 && code != OMP_ORDERED
8495 && code != OMP_PARALLEL
8496 && code != OMP_SECTIONS
8497 && code != OMP_SECTION
8498 && code != OMP_SINGLE);
8499 }
8500 #endif
8501
8502 /* Otherwise we're gimplifying a subexpression, so the resulting
8503 value is interesting. If it's a valid operand that matches
8504 GIMPLE_TEST_F, we're done. Unless we are handling some
8505 post-effects internally; if that's the case, we need to copy into
8506 a temporary before adding the post-effects to POST_P. */
8507 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
8508 goto out;
8509
8510 /* Otherwise, we need to create a new temporary for the gimplified
8511 expression. */
8512
8513 /* We can't return an lvalue if we have an internal postqueue. The
8514 object the lvalue refers to would (probably) be modified by the
8515 postqueue; we need to copy the value out first, which means an
8516 rvalue. */
8517 if ((fallback & fb_lvalue)
8518 && gimple_seq_empty_p (internal_post)
8519 && is_gimple_addressable (*expr_p))
8520 {
8521 /* An lvalue will do. Take the address of the expression, store it
8522 in a temporary, and replace the expression with an INDIRECT_REF of
8523 that temporary. */
8524 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
8525 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
8526 *expr_p = build_simple_mem_ref (tmp);
8527 }
8528 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
8529 {
8530 /* An rvalue will do. Assign the gimplified expression into a
8531 new temporary TMP and replace the original expression with
8532 TMP. First, make sure that the expression has a type so that
8533 it can be assigned into a temporary. */
8534 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
8535 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
8536 }
8537 else
8538 {
8539 #ifdef ENABLE_GIMPLE_CHECKING
8540 if (!(fallback & fb_mayfail))
8541 {
8542 fprintf (stderr, "gimplification failed:\n");
8543 print_generic_expr (stderr, *expr_p, 0);
8544 debug_tree (*expr_p);
8545 internal_error ("gimplification failed");
8546 }
8547 #endif
8548 gcc_assert (fallback & fb_mayfail);
8549
8550 /* If this is an asm statement, and the user asked for the
8551 impossible, don't die. Fail and let gimplify_asm_expr
8552 issue an error. */
8553 ret = GS_ERROR;
8554 goto out;
8555 }
8556
8557 /* Make sure the temporary matches our predicate. */
8558 gcc_assert ((*gimple_test_f) (*expr_p));
8559
8560 if (!gimple_seq_empty_p (internal_post))
8561 {
8562 annotate_all_with_location (internal_post, input_location);
8563 gimplify_seq_add_seq (pre_p, internal_post);
8564 }
8565
8566 out:
8567 input_location = saved_location;
8568 return ret;
8569 }
8570
8571 /* Look through TYPE for variable-sized objects and gimplify each such
8572 size that we find. Add to LIST_P any statements generated. */
8573
8574 void
8575 gimplify_type_sizes (tree type, gimple_seq *list_p)
8576 {
8577 tree field, t;
8578
8579 if (type == NULL || type == error_mark_node)
8580 return;
8581
8582 /* We first do the main variant, then copy into any other variants. */
8583 type = TYPE_MAIN_VARIANT (type);
8584
8585 /* Avoid infinite recursion. */
8586 if (TYPE_SIZES_GIMPLIFIED (type))
8587 return;
8588
8589 TYPE_SIZES_GIMPLIFIED (type) = 1;
8590
8591 switch (TREE_CODE (type))
8592 {
8593 case INTEGER_TYPE:
8594 case ENUMERAL_TYPE:
8595 case BOOLEAN_TYPE:
8596 case REAL_TYPE:
8597 case FIXED_POINT_TYPE:
8598 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8599 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
8600
8601 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8602 {
8603 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8604 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
8605 }
8606 break;
8607
8608 case ARRAY_TYPE:
8609 /* These types may not have declarations, so handle them here. */
8610 gimplify_type_sizes (TREE_TYPE (type), list_p);
8611 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
8612 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
8613 with assigned stack slots, for -O1+ -g they should be tracked
8614 by VTA. */
8615 if (!(TYPE_NAME (type)
8616 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8617 && DECL_IGNORED_P (TYPE_NAME (type)))
8618 && TYPE_DOMAIN (type)
8619 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8620 {
8621 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8622 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8623 DECL_IGNORED_P (t) = 0;
8624 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8625 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8626 DECL_IGNORED_P (t) = 0;
8627 }
8628 break;
8629
8630 case RECORD_TYPE:
8631 case UNION_TYPE:
8632 case QUAL_UNION_TYPE:
8633 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8634 if (TREE_CODE (field) == FIELD_DECL)
8635 {
8636 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
8637 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8638 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8639 gimplify_type_sizes (TREE_TYPE (field), list_p);
8640 }
8641 break;
8642
8643 case POINTER_TYPE:
8644 case REFERENCE_TYPE:
8645 /* We used to recurse on the pointed-to type here, which turned out to
8646 be incorrect because its definition might refer to variables not
8647 yet initialized at this point if a forward declaration is involved.
8648
8649 It was actually useful for anonymous pointed-to types to ensure
8650 that the sizes evaluation dominates every possible later use of the
8651 values. Restricting to such types here would be safe since there
8652 is no possible forward declaration around, but would introduce an
8653 undesirable middle-end semantic to anonymity. We then defer to
8654 front-ends the responsibility of ensuring that the sizes are
8655 evaluated both early and late enough, e.g. by attaching artificial
8656 type declarations to the tree. */
8657 break;
8658
8659 default:
8660 break;
8661 }
8662
8663 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8664 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
8665
8666 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8667 {
8668 TYPE_SIZE (t) = TYPE_SIZE (type);
8669 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8670 TYPE_SIZES_GIMPLIFIED (t) = 1;
8671 }
8672 }
8673
8674 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8675 a size or position, has had all of its SAVE_EXPRs evaluated.
8676 We add any required statements to *STMT_P. */
8677
8678 void
8679 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
8680 {
8681 tree expr = *expr_p;
8682
8683 /* We don't do anything if the value isn't there, is constant, or contains
8684 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
8685 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
8686 will want to replace it with a new variable, but that will cause problems
8687 if this type is from outside the function. It's OK to have that here. */
8688 if (is_gimple_sizepos (expr))
8689 return;
8690
8691 *expr_p = unshare_expr (expr);
8692
8693 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
8694 }
8695
8696 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
8697 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
8698 is true, also gimplify the parameters. */
8699
gimple
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_bind;
  struct gimplify_ctx gctx;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* A gimplify context must not already be active; we establish the
     outermost one here and pop it at the end.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (&gctx);

  if (flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      /* "omp declare target" functions are gimplified inside a target
	 region context (ORT_TARGET).  */
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* For a nested function (one with a containing-function origin), set up
     the pointer set used to record nonlocal VLAs.  NOTE(review): its
     consumer is outside this chunk — confirm against
     gimplify_var_or_parm_decl.  */
  cgn = cgraph_get_node (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = pointer_set_create ();

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_bind = gimple_seq_first_stmt (seq);
  if (!outer_bind)
    {
      /* An empty body gimplifies to nothing; emit a GIMPLE_NOP so there
	 is at least one statement to wrap below.  */
      outer_bind = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_bind);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_bind) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    ;
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been fully converted; drop it.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Release the nonlocal-VLA tracking set created above, if any.  */
  if (nonlocal_vlas)
    {
      pointer_set_destroy (nonlocal_vlas);
      nonlocal_vlas = NULL;
    }

  /* Tear down the OpenMP context opened for "omp declare target".  */
  if (flag_openmp && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_CHECKING
  if (!seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  /* Restore the location saved on entry.  */
  input_location = saved_location;

  return outer_bind;
}
8806
8807 typedef char *char_p; /* For DEF_VEC_P. */
8808
8809 /* Return whether we should exclude FNDECL from instrumentation. */
8810
8811 static bool
8812 flag_instrument_functions_exclude_p (tree fndecl)
8813 {
8814 vec<char_p> *v;
8815
8816 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8817 if (v && v->length () > 0)
8818 {
8819 const char *name;
8820 int i;
8821 char *s;
8822
8823 name = lang_hooks.decl_printable_name (fndecl, 0);
8824 FOR_EACH_VEC_ELT (*v, i, s)
8825 if (strstr (name, s) != NULL)
8826 return true;
8827 }
8828
8829 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8830 if (v && v->length () > 0)
8831 {
8832 const char *name;
8833 int i;
8834 char *s;
8835
8836 name = DECL_SOURCE_FILE (fndecl);
8837 FOR_EACH_VEC_ELT (*v, i, s)
8838 if (strstr (name, s) != NULL)
8839 return true;
8840 }
8841
8842 return false;
8843 }
8844
8845 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
8846 node for the function we want to gimplify.
8847
8848 Return the sequence of GIMPLE statements corresponding to the body
8849 of FNDECL. */
8850
void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gimple bind;

  /* The function must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  /* Enter the function's cfun, creating one if necessary.  */
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (parm)
          && !needs_to_live_in_memory (parm))
        DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise mark a complex/vector result that need not live in memory.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  /* Gimplify the body (and, with the TRUE argument, the parameters).  */
  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gimple new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gimple call;

      /* Build the FINALLY part: __builtin_return_address (0) captured in
	 TMP_VAR, then __profile_func_exit (this_fn, TMP_VAR).  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry sequence the same way, calling
	 __profile_func_enter before the TRY_FINALLY.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      /* The new outer bind takes over the original bind's BLOCK.  */
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
         the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  /* The GENERIC body is gone; the function is now in (low) GIMPLE form.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  pop_cfun ();
}
8946
/* Some transformations like inlining may invalidate the GIMPLE form
   for operands.  This function traverses all the operands in STMT and
   gimplifies anything that is not a valid gimple operand.  Any new
   GIMPLE statements are inserted before *GSI_P.  */

void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
  size_t i, num_ops;
  tree lhs;
  gimple_seq pre = NULL;	/* Gimplification side effects; inserted
				   before STMT at the end.  */
  gimple post_stmt = NULL;	/* Optional copy-back assignment; inserted
				   after STMT at the end.  */
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);
  /* Produce SSA names directly when the function is already in SSA form.  */
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      /* Both comparison operands must be gimple values.  */
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_SWITCH:
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_ASM:
      {
	size_t i, noutputs = gimple_asm_noutputs (stmt);
	const char *constraint, **oconstraints;
	bool allows_mem, allows_reg, is_inout;

	/* Remember the output constraints; input-constraint parsing
	   needs them to resolve matching constraints like "0".  */
	oconstraints
	  = (const char **) alloca ((noutputs) * sizeof (const char *));
	for (i = 0; i < noutputs; i++)
	  {
	    tree op = gimple_asm_output_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    oconstraints[i] = constraint;
	    parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				     &allows_reg, &is_inout);
	    /* In-out operands must be full-fledged lvalues that can also
	       be read, hence the stricter predicate.  */
	    gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			   is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			   fb_lvalue | fb_mayfail);
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); i++)
	  {
	    tree op = gimple_asm_input_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    /* An addressable operand can't live in a register; force the
	       memory alternative when the constraint permits it.  */
	    if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
	      allows_reg = 0;
	    if (!allows_reg && allows_mem)
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_lvalue, fb_lvalue | fb_mayfail);
	    else
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_asm_val, fb_rvalue);
	  }
      }
      break;
    default:
      /* NOTE: We start gimplifying operands from last to first to
	 make sure that side-effects on the RHS of calls, assignments
	 and ASMs are executed before the LHS.  The ordering is not
	 important for other statements.  */
      num_ops = gimple_num_ops (stmt);
      for (i = num_ops; i > 0; i--)
	{
	  tree op = gimple_op (stmt, i - 1);
	  if (op == NULL_TREE)
	    continue;
	  /* Operand 0 of a call or assignment is the LHS.  */
	  if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
	    gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
	  else if (i == 2
		   && is_gimple_assign (stmt)
		   && num_ops == 2
		   && get_gimple_rhs_class (gimple_expr_code (stmt))
		      == GIMPLE_SINGLE_RHS)
	    /* For a single-RHS assignment, the allowed RHS forms depend
	       on what the LHS is (register vs. memory).  */
	    gimplify_expr (&op, &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  else if (i == 2 && is_gimple_call (stmt))
	    {
	      /* A direct callee FUNCTION_DECL is already valid as-is.  */
	      if (TREE_CODE (op) == FUNCTION_DECL)
		continue;
	      gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
	    }
	  else
	    gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
	  gimple_set_op (stmt, i - 1, op);
	}

      lhs = gimple_get_lhs (stmt);
      /* If the LHS changed it in a way that requires a simple RHS,
	 create temporary.  */
      if (lhs && !is_gimple_reg (lhs))
	{
	  bool need_temp = false;

	  if (is_gimple_assign (stmt)
	      && num_ops == 2
	      && get_gimple_rhs_class (gimple_expr_code (stmt))
		 == GIMPLE_SINGLE_RHS)
	    gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  else if (is_gimple_reg (lhs))
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		{
		  if (is_gimple_call (stmt))
		    {
		      /* Calls with side effects (non-const/pure, or
			 possibly-looping const/pure) need their result
			 stored via a temporary.  */
		      i = gimple_call_flags (stmt);
		      if ((i & ECF_LOOPING_CONST_OR_PURE)
			  || !(i & (ECF_CONST | ECF_PURE)))
			need_temp = true;
		    }
		  if (stmt_can_throw_internal (stmt))
		    need_temp = true;
		}
	    }
	  else
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		need_temp = true;
	      else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
		{
		  if (is_gimple_call (stmt))
		    {
		      tree fndecl = gimple_call_fndecl (stmt);

		      /* No temporary needed when the callee returns its
			 aggregate result in memory (or by invisible
			 reference) anyway.  */
		      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
			  && !(fndecl && DECL_RESULT (fndecl)
			       && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
			need_temp = true;
		    }
		  else
		    need_temp = true;
		}
	    }
	  if (need_temp)
	    {
	      /* Store into a fresh register temporary and copy it back
	         into the original LHS after STMT.  */
	      tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
	      if (gimple_in_ssa_p (cfun))
		temp = make_ssa_name (temp, NULL);
	      gimple_set_lhs (stmt, temp);
	      post_stmt = gimple_build_assign (lhs, temp);
	      if (TREE_CODE (lhs) == SSA_NAME)
		SSA_NAME_DEF_STMT (lhs) = post_stmt;
	    }
	}
      break;
    }

  if (!gimple_seq_empty_p (pre))
    gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
  if (post_stmt)
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

  pop_gimplify_context (NULL);
}
9117
9118 /* Expand EXPR to list of gimple statements STMTS. GIMPLE_TEST_F specifies
9119 the predicate that will hold for the result. If VAR is not NULL, make the
9120 base variable of the final destination be VAR if suitable. */
9121
9122 tree
9123 force_gimple_operand_1 (tree expr, gimple_seq *stmts,
9124 gimple_predicate gimple_test_f, tree var)
9125 {
9126 enum gimplify_status ret;
9127 struct gimplify_ctx gctx;
9128 location_t saved_location;
9129
9130 *stmts = NULL;
9131
9132 /* gimple_test_f might be more strict than is_gimple_val, make
9133 sure we pass both. Just checking gimple_test_f doesn't work
9134 because most gimple predicates do not work recursively. */
9135 if (is_gimple_val (expr)
9136 && (*gimple_test_f) (expr))
9137 return expr;
9138
9139 push_gimplify_context (&gctx);
9140 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
9141 gimplify_ctxp->allow_rhs_cond_expr = true;
9142 saved_location = input_location;
9143 input_location = UNKNOWN_LOCATION;
9144
9145 if (var)
9146 {
9147 if (gimplify_ctxp->into_ssa
9148 && is_gimple_reg (var))
9149 var = make_ssa_name (var, NULL);
9150 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
9151 }
9152
9153 if (TREE_CODE (expr) != MODIFY_EXPR
9154 && TREE_TYPE (expr) == void_type_node)
9155 {
9156 gimplify_and_add (expr, stmts);
9157 expr = NULL_TREE;
9158 }
9159 else
9160 {
9161 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
9162 gcc_assert (ret != GS_ERROR);
9163 }
9164
9165 input_location = saved_location;
9166 pop_gimplify_context (NULL);
9167
9168 return expr;
9169 }
9170
9171 /* Expand EXPR to list of gimple statements STMTS. If SIMPLE is true,
9172 force the result to be either ssa_name or an invariant, otherwise
9173 just force it to be a rhs expression. If VAR is not NULL, make the
9174 base variable of the final destination be VAR if suitable. */
9175
9176 tree
9177 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
9178 {
9179 return force_gimple_operand_1 (expr, stmts,
9180 simple ? is_gimple_val : is_gimple_reg_rhs,
9181 var);
9182 }
9183
9184 /* Invoke force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
9185 and VAR. If some statements are produced, emits them at GSI.
9186 If BEFORE is true. the statements are appended before GSI, otherwise
9187 they are appended after it. M specifies the way GSI moves after
9188 insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values). */
9189
9190 tree
9191 force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
9192 gimple_predicate gimple_test_f,
9193 tree var, bool before,
9194 enum gsi_iterator_update m)
9195 {
9196 gimple_seq stmts;
9197
9198 expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);
9199
9200 if (!gimple_seq_empty_p (stmts))
9201 {
9202 if (before)
9203 gsi_insert_seq_before (gsi, stmts, m);
9204 else
9205 gsi_insert_seq_after (gsi, stmts, m);
9206 }
9207
9208 return expr;
9209 }
9210
9211 /* Invoke force_gimple_operand_1 for EXPR with parameter VAR.
9212 If SIMPLE is true, force the result to be either ssa_name or an invariant,
9213 otherwise just force it to be a rhs expression. If some statements are
9214 produced, emits them at GSI. If BEFORE is true, the statements are
9215 appended before GSI, otherwise they are appended after it. M specifies
9216 the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
9217 are the usual values). */
9218
9219 tree
9220 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
9221 bool simple_p, tree var, bool before,
9222 enum gsi_iterator_update m)
9223 {
9224 return force_gimple_operand_gsi_1 (gsi, expr,
9225 simple_p
9226 ? is_gimple_val : is_gimple_reg_rhs,
9227 var, before, m);
9228 }
9229
9230 #ifndef PAD_VARARGS_DOWN
9231 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
9232 #endif
9233
9234 /* Build an indirect-ref expression over the given TREE, which represents a
9235 piece of a va_arg() expansion. */
9236 tree
9237 build_va_arg_indirect_ref (tree addr)
9238 {
9239 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
9240
9241 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
9242 mf_mark (addr);
9243
9244 return addr;
9245 }
9246
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  VALIST
   is the va_list expression, TYPE the type being fetched; gimplified
   side effects go to PRE_P/POST_P.  Returns an INDIRECT_REF through the
   computed argument address.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by invisible reference: fetch the pointer instead
     and dereference it at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary, i.e. round
	 the pointer up to the required boundary.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward: when the rounded size exceeds the
	 type size, read from the upper (padded-to) end of the slot.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  Note this stores back into the original
     VALIST, not the temporary.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments, ADDR points to a pointer to the value;
     dereference once more to get at the value itself.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
9343
9344 #include "gt-gimplify.h"