/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-pretty-print.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "cgraph.h"
#include "timevar.h"
#include "hashtab.h"
#include "flags.h"
#include "function.h"
#include "output.h"
#include "ggc.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "target.h"
#include "pointer-set.h"
#include "splay-tree.h"
#include "vec.h"
#include "gimple.h"
#include "tree-pass.h"

#include "langhooks-def.h"   /* FIXME: for lhd_set_decl_assembler_name.  */
#include "expr.h"            /* FIXME: for can_move_by_pieces
                                and STACK_CHECK_MAX_VAR_SIZE.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_DEBUG_PRIVATE = 256,
  GOVD_PRIVATE_OUTER_REF = 512,
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
                           | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
};


enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  struct pointer_set_t *privatized_types;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;


/* Formal (expression) temporary table handling: Multiple occurrences of
   the same scalar expression are evaluated into the same temporary.  */

typedef struct gimple_temp_hash_elt
{
  tree val;   /* Key */
  tree temp;  /* Value */
} elt_t;

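/* For illustration: when the gimplifier asks for a formal temporary for the
   same scalar expression twice, say for both occurrences of "a + b" in
   "x = (a + b) * (a + b)", the second lookup finds the entry recorded for
   the first one and reuses its temporary, so the resulting GIMPLE is roughly

     t1 = a + b;
     x = t1 * t1;

   rather than two independent evaluations (see lookup_tmp_var below).  */
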
/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);

/* Mark X addressable.  Unlike the langhook we expect X to be in gimple
   form and we don't do any syntax checking.  */
void
mark_addressable (tree x)
{
  while (handled_component_p (x))
    x = TREE_OPERAND (x, 0);
  if (TREE_CODE (x) == MEM_REF
      && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
    x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
  if (TREE_CODE (x) != VAR_DECL
      && TREE_CODE (x) != PARM_DECL
      && TREE_CODE (x) != RESULT_DECL)
    return;
  TREE_ADDRESSABLE (x) = 1;
}

/* Return a hash value for a formal temporary table entry.  */

static hashval_t
gimple_tree_hash (const void *p)
{
  tree t = ((const elt_t *) p)->val;
  return iterative_hash_expr (t, 0);
}

/* Compare two formal temporary table entries.  */

static int
gimple_tree_eq (const void *p1, const void *p2)
{
  tree t1 = ((const elt_t *) p1)->val;
  tree t2 = ((const elt_t *) p2)->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return 0;

  if (!operand_equal_p (t1, t2, 0))
    return 0;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterminate, and we fail bootstrap comparison.  */
  gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));

  return 1;
}

/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_stmt, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);

  gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (struct gimplify_ctx *c)
{
  memset (c, '\0', sizeof (*c));
  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c && (c->bind_expr_stack == NULL
                    || VEC_empty (gimple, c->bind_expr_stack)));
  VEC_free (gimple, heap, c->bind_expr_stack);
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  if (c->temp_htab)
    htab_delete (c->temp_htab);
}

static void
gimple_push_bind_expr (gimple gimple_bind)
{
  if (gimplify_ctxp->bind_expr_stack == NULL)
    gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
  VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
}

static void
gimple_pop_bind_expr (void)
{
  VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
}

gimple
gimple_current_bind_expr (void)
{
  return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
}

/* Return the stack GIMPLE_BINDs created during gimplification.  */

VEC(gimple, heap) *
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Returns true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = pointer_set_create ();
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  pointer_set_destroy (c->privatized_types);
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to five characters.  (Java uses ".class".)  */

static inline void
remove_suffix (char *name, int len)
{
  int i;

  for (i = 2; i < 8 && len > i; i++)
    {
      if (name[len - i] == '.')
        {
          name[len - i] = '\0';
          break;
        }
    }
}

/* Create a new temporary name with PREFIX.  Returns an identifier.  */

static GTY(()) unsigned int tmp_var_id_num;

tree
create_tmp_var_name (const char *prefix)
{
  char *tmp_name;

  if (prefix)
    {
      char *preftmp = ASTRDUP (prefix);

      remove_suffix (preftmp, strlen (preftmp));
      prefix = preftmp;
    }

  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
  return get_identifier (tmp_name);
}


/* Create a new temporary variable declaration of type TYPE.
   Does NOT push it into the current binding.  */

tree
create_tmp_var_raw (tree type, const char *prefix)
{
  tree tmp_var;
  tree new_type;

  /* Make the type of the variable writable.  */
  new_type = build_type_variant (type, 0, 0);
  TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);

  tmp_var = build_decl (input_location,
                        VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
                        type);

  /* The variable was declared by the compiler.  */
  DECL_ARTIFICIAL (tmp_var) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (tmp_var) = 1;

  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  DECL_EXTERNAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}

/* Create a new temporary variable declaration of type TYPE.  DOES push the
   variable into the current binding.  Further, assume that this is called
   only from gimplification or optimization, at which point the creation of
   certain types are bugs.  */

tree
create_tmp_var (tree type, const char *prefix)
{
  tree tmp_var;

  /* We don't allow types that are addressable (meaning we can't make copies),
     or incomplete.  We also used to reject every variable size objects here,
     but now support those for which a constant upper bound can be obtained.
     The processing for variable sizes is performed in gimple_add_tmp_var,
     point at which it really matters and possibly reached via paths not going
     through this function, e.g. after direct calls to create_tmp_var_raw.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  tmp_var = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var (tmp_var);
  return tmp_var;
}

/* Create a new temporary variable declaration of type TYPE by calling
   create_tmp_var and if TYPE is a vector or a complex number, mark the new
   temporary as gimple register.  */

tree
create_tmp_reg (tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var (type, prefix);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}

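/* For example, a temporary obtained from create_tmp_reg for a COMPLEX_TYPE
   has DECL_GIMPLE_REG_P set, which lets the middle end treat the whole
   complex value as a scalar gimple register (and so rename it into SSA form)
   instead of forcing it into memory.  */
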
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  return create_tmp_var (TREE_TYPE (val), get_name (val));
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      void **slot;

      elt.val = val;
      if (gimplify_ctxp->temp_htab == NULL)
        gimplify_ctxp->temp_htab
          = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
      slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
      if (*slot == NULL)
        {
          elt_p = XNEW (elt_t);
          elt_p->val = val;
          elt_p->temp = ret = create_tmp_from_val (val);
          *slot = (void *) elt_p;
        }
      else
        {
          elt_p = (elt_t *) *slot;
          ret = elt_p->temp;
        }
    }

  return ret;
}


/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
          || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t) || is_gimple_lvalue (t)
            || TREE_CODE (t) == CALL_EXPR);
}

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
                 fb_rvalue);

  t = lookup_tmp_var (val, is_formal);

  if (is_formal
      && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
          || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (t) = 1;

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  if (EXPR_HAS_LOCATION (val))
    SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
  else
    SET_EXPR_LOCATION (mod, input_location);

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  /* If we're gimplifying into ssa, gimplify_modify_expr will have
     given our temporary an SSA name.  Find and return it.  */
  if (gimplify_ctxp->into_ssa)
    {
      gimple last = gimple_seq_last_stmt (*pre_p);
      t = gimple_get_lhs (last);
    }

  return t;
}

/* Returns a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}

/* Returns a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}

5123ad09
EB
637/* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
638 true, generate debug info for them; otherwise don't. */
6de9cd9a
DN
639
640void
726a989a 641declare_vars (tree vars, gimple scope, bool debug_info)
6de9cd9a
DN
642{
643 tree last = vars;
644 if (last)
645 {
5123ad09 646 tree temps, block;
6de9cd9a 647
726a989a 648 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
6de9cd9a
DN
649
650 temps = nreverse (last);
5123ad09 651
524d9a45 652 block = gimple_bind_block (scope);
726a989a 653 gcc_assert (!block || TREE_CODE (block) == BLOCK);
5123ad09
EB
654 if (!block || !debug_info)
655 {
910ad8de 656 DECL_CHAIN (last) = gimple_bind_vars (scope);
726a989a 657 gimple_bind_set_vars (scope, temps);
5123ad09
EB
658 }
659 else
660 {
661 /* We need to attach the nodes both to the BIND_EXPR and to its
662 associated BLOCK for debugging purposes. The key point here
663 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
664 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
665 if (BLOCK_VARS (block))
666 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
667 else
668 {
726a989a
RB
669 gimple_bind_set_vars (scope,
670 chainon (gimple_bind_vars (scope), temps));
5123ad09
EB
671 BLOCK_VARS (block) = temps;
672 }
673 }
6de9cd9a
DN
674 }
675}
676
a441447f
OH
677/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
678 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
679 no such upper bound can be obtained. */
680
681static void
682force_constant_size (tree var)
683{
684 /* The only attempt we make is by querying the maximum size of objects
685 of the variable's type. */
686
687 HOST_WIDE_INT max_size;
688
689 gcc_assert (TREE_CODE (var) == VAR_DECL);
690
691 max_size = max_int_size_in_bytes (TREE_TYPE (var));
692
693 gcc_assert (max_size >= 0);
694
695 DECL_SIZE_UNIT (var)
696 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
697 DECL_SIZE (var)
698 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
699}
700
6de9cd9a
DN
701void
702gimple_add_tmp_var (tree tmp)
703{
910ad8de 704 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
6de9cd9a 705
a441447f
OH
706 /* Later processing assumes that the object size is constant, which might
707 not be true at this point. Force the use of a constant upper bound in
708 this case. */
709 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
710 force_constant_size (tmp);
711
6de9cd9a 712 DECL_CONTEXT (tmp) = current_function_decl;
48eb4e53 713 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
6de9cd9a
DN
714
715 if (gimplify_ctxp)
716 {
910ad8de 717 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
6de9cd9a 718 gimplify_ctxp->temps = tmp;
953ff289
DN
719
720 /* Mark temporaries local within the nearest enclosing parallel. */
721 if (gimplify_omp_ctxp)
722 {
723 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
a68ab351 724 while (ctx && ctx->region_type == ORT_WORKSHARE)
953ff289
DN
725 ctx = ctx->outer_context;
726 if (ctx)
727 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
728 }
6de9cd9a
DN
729 }
730 else if (cfun)
731 record_vars (tmp);
732 else
726a989a
RB
733 {
734 gimple_seq body_seq;
735
736 /* This case is for nested functions. We need to expose the locals
737 they create. */
738 body_seq = gimple_body (current_function_decl);
739 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
740 }
741}
742
743/* Determines whether to assign a location to the statement GS. */
744
745static bool
746should_carry_location_p (gimple gs)
747{
748 /* Don't emit a line note for a label. We particularly don't want to
749 emit one for the break label, since it doesn't actually correspond
750 to the beginning of the loop/switch. */
751 if (gimple_code (gs) == GIMPLE_LABEL)
752 return false;
753
754 return true;
6de9cd9a
DN
755}
756
6de9cd9a 757
726a989a
RB
758/* Return true if a location should not be emitted for this statement
759 by annotate_one_with_location. */
760
761static inline bool
762gimple_do_not_emit_location_p (gimple g)
763{
764 return gimple_plf (g, GF_PLF_1);
765}
766
767/* Mark statement G so a location will not be emitted by
768 annotate_one_with_location. */
769
770static inline void
771gimple_set_do_not_emit_location (gimple g)
772{
773 /* The PLF flags are initialized to 0 when a new tuple is created,
774 so no need to initialize it anywhere. */
775 gimple_set_plf (g, GF_PLF_1, true);
776}
777
5e278028 778/* Set the location for gimple statement GS to LOCATION. */
726a989a
RB
779
780static void
781annotate_one_with_location (gimple gs, location_t location)
782{
b8698a0f 783 if (!gimple_has_location (gs)
726a989a
RB
784 && !gimple_do_not_emit_location_p (gs)
785 && should_carry_location_p (gs))
786 gimple_set_location (gs, location);
787}
788
726a989a
RB
789
790/* Set LOCATION for all the statements after iterator GSI in sequence
791 SEQ. If GSI is pointing to the end of the sequence, start with the
792 first statement in SEQ. */
793
794static void
795annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
796 location_t location)
797{
798 if (gsi_end_p (gsi))
799 gsi = gsi_start (seq);
800 else
801 gsi_next (&gsi);
802
803 for (; !gsi_end_p (gsi); gsi_next (&gsi))
804 annotate_one_with_location (gsi_stmt (gsi), location);
805}
806
807
5e278028 808/* Set the location for all the statements in a sequence STMT_P to LOCATION. */
726a989a
RB
809
810void
811annotate_all_with_location (gimple_seq stmt_p, location_t location)
812{
813 gimple_stmt_iterator i;
814
815 if (gimple_seq_empty_p (stmt_p))
816 return;
817
818 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
819 {
820 gimple gs = gsi_stmt (i);
821 annotate_one_with_location (gs, location);
822 }
7c34ced1 823}
616f1431
EB
824\f
825/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
826 nodes that are referenced more than once in GENERIC functions. This is
827 necessary because gimplification (translation into GIMPLE) is performed
828 by modifying tree nodes in-place, so gimplication of a shared node in a
829 first context could generate an invalid GIMPLE form in a second context.
830
831 This is achieved with a simple mark/copy/unmark algorithm that walks the
832 GENERIC representation top-down, marks nodes with TREE_VISITED the first
833 time it encounters them, duplicates them if they already have TREE_VISITED
834 set, and finally removes the TREE_VISITED marks it has set.
835
836 The algorithm works only at the function level, i.e. it generates a GENERIC
837 representation of a function with no nodes shared within the function when
838 passed a GENERIC function (except for nodes that are allowed to be shared).
839
840 At the global level, it is also necessary to unshare tree nodes that are
841 referenced in more than one function, for the same aforementioned reason.
842 This requires some cooperation from the front-end. There are 2 strategies:
843
844 1. Manual unsharing. The front-end needs to call unshare_expr on every
845 expression that might end up being shared across functions.
846
847 2. Deep unsharing. This is an extension of regular unsharing. Instead
848 of calling unshare_expr on expressions that might be shared across
849 functions, the front-end pre-marks them with TREE_VISITED. This will
850 ensure that they are unshared on the first reference within functions
851 when the regular unsharing algorithm runs. The counterpart is that
852 this algorithm must look deeper than for manual unsharing, which is
853 specified by LANG_HOOKS_DEEP_UNSHARING.
854
855 If there are only few specific cases of node sharing across functions, it is
856 probably easier for a front-end to unshare the expressions manually. On the
857 contrary, if the expressions generated at the global level are as widespread
858 as expressions generated within functions, deep unsharing is very likely the
859 way to go. */
860
861/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
6de9cd9a
DN
862 These nodes model computations that should only be done once. If we
863 were to unshare something like SAVE_EXPR(i++), the gimplification
864 process would create wrong code. */
865
866static tree
867mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
868{
616f1431
EB
869 tree t = *tp;
870 enum tree_code code = TREE_CODE (t);
871
6687b740
EB
872 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
873 copy their subtrees if we can make sure to do it only once. */
874 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
616f1431
EB
875 {
876 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
877 ;
878 else
879 *walk_subtrees = 0;
880 }
881
882 /* Stop at types, decls, constants like copy_tree_r. */
883 else if (TREE_CODE_CLASS (code) == tcc_type
884 || TREE_CODE_CLASS (code) == tcc_declaration
885 || TREE_CODE_CLASS (code) == tcc_constant
886 /* We can't do anything sensible with a BLOCK used as an
887 expression, but we also can't just die when we see it
888 because of non-expression uses. So we avert our eyes
889 and cross our fingers. Silly Java. */
890 || code == BLOCK)
6de9cd9a 891 *walk_subtrees = 0;
616f1431
EB
892
893 /* Cope with the statement expression extension. */
894 else if (code == STATEMENT_LIST)
895 ;
896
897 /* Leave the bulk of the work to copy_tree_r itself. */
6de9cd9a 898 else
6687b740 899 copy_tree_r (tp, walk_subtrees, NULL);
6de9cd9a
DN
900
901 return NULL_TREE;
902}
903
6de9cd9a
DN
904/* Callback for walk_tree to unshare most of the shared trees rooted at
905 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
616f1431 906 then *TP is deep copied by calling mostly_copy_tree_r. */
6de9cd9a
DN
907
908static tree
616f1431 909copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
6de9cd9a 910{
f0638e1d
RH
911 tree t = *tp;
912 enum tree_code code = TREE_CODE (t);
913
44de5aeb
RK
914 /* Skip types, decls, and constants. But we do want to look at their
915 types and the bounds of types. Mark them as visited so we properly
916 unmark their subtrees on the unmark pass. If we've already seen them,
917 don't look down further. */
6615c446
JO
918 if (TREE_CODE_CLASS (code) == tcc_type
919 || TREE_CODE_CLASS (code) == tcc_declaration
920 || TREE_CODE_CLASS (code) == tcc_constant)
44de5aeb
RK
921 {
922 if (TREE_VISITED (t))
923 *walk_subtrees = 0;
924 else
925 TREE_VISITED (t) = 1;
926 }
f0638e1d 927
6de9cd9a
DN
928 /* If this node has been visited already, unshare it and don't look
929 any deeper. */
f0638e1d 930 else if (TREE_VISITED (t))
6de9cd9a 931 {
616f1431 932 walk_tree (tp, mostly_copy_tree_r, data, NULL);
6de9cd9a
DN
933 *walk_subtrees = 0;
934 }
f0638e1d 935
616f1431 936 /* Otherwise, mark the node as visited and keep looking. */
6de9cd9a 937 else
77c9db77 938 TREE_VISITED (t) = 1;
f0638e1d 939
6de9cd9a
DN
940 return NULL_TREE;
941}
942
616f1431 943/* Unshare most of the shared trees rooted at *TP. */
6de9cd9a 944
616f1431
EB
945static inline void
946copy_if_shared (tree *tp)
947{
948 /* If the language requires deep unsharing, we need a pointer set to make
949 sure we don't repeatedly unshare subtrees of unshareable nodes. */
950 struct pointer_set_t *visited
951 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
952 walk_tree (tp, copy_if_shared_r, visited, NULL);
953 if (visited)
954 pointer_set_destroy (visited);
6de9cd9a
DN
955}
956
48eb4e53
RK
957/* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
958 bodies of any nested functions if we are unsharing the entire body of
959 FNDECL. */
44de5aeb
RK
960
961static void
962unshare_body (tree *body_p, tree fndecl)
963{
964 struct cgraph_node *cgn = cgraph_node (fndecl);
965
616f1431
EB
966 copy_if_shared (body_p);
967
48eb4e53
RK
968 if (body_p == &DECL_SAVED_TREE (fndecl))
969 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
970 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
44de5aeb
RK
971}
972
616f1431
EB
973/* Callback for walk_tree to unmark the visited trees rooted at *TP.
974 Subtrees are walked until the first unvisited node is encountered. */
975
976static tree
977unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
978{
979 tree t = *tp;
980
981 /* If this node has been visited, unmark it and keep looking. */
982 if (TREE_VISITED (t))
983 TREE_VISITED (t) = 0;
984
985 /* Otherwise, don't look any deeper. */
986 else
987 *walk_subtrees = 0;
988
989 return NULL_TREE;
990}
991
992/* Unmark the visited trees rooted at *TP. */
993
994static inline void
995unmark_visited (tree *tp)
996{
997 walk_tree (tp, unmark_visited_r, NULL, NULL);
998}
999
44de5aeb
RK
1000/* Likewise, but mark all trees as not visited. */
1001
1002static void
1003unvisit_body (tree *body_p, tree fndecl)
1004{
1005 struct cgraph_node *cgn = cgraph_node (fndecl);
1006
616f1431
EB
1007 unmark_visited (body_p);
1008
48eb4e53
RK
1009 if (body_p == &DECL_SAVED_TREE (fndecl))
1010 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1011 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
44de5aeb
RK
1012}
1013
6de9cd9a
DN
1014/* Unconditionally make an unshared copy of EXPR. This is used when using
1015 stored expressions which span multiple functions, such as BINFO_VTABLE,
1016 as the normal unsharing process can't tell that they're shared. */
1017
1018tree
1019unshare_expr (tree expr)
1020{
1021 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1022 return expr;
1023}
6de9cd9a
DN
1024\f
1025/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1026 contain statements and have a value. Assign its value to a temporary
1027 and give it void_type_node. Returns the temporary, or NULL_TREE if
1028 WRAPPER was already void. */
1029
1030tree
325c3691 1031voidify_wrapper_expr (tree wrapper, tree temp)
6de9cd9a 1032{
4832214a
JM
1033 tree type = TREE_TYPE (wrapper);
1034 if (type && !VOID_TYPE_P (type))
6de9cd9a 1035 {
c6c7698d 1036 tree *p;
6de9cd9a 1037
c6c7698d
JM
1038 /* Set p to point to the body of the wrapper. Loop until we find
1039 something that isn't a wrapper. */
1040 for (p = &wrapper; p && *p; )
d3147f64 1041 {
c6c7698d 1042 switch (TREE_CODE (*p))
6de9cd9a 1043 {
c6c7698d
JM
1044 case BIND_EXPR:
1045 TREE_SIDE_EFFECTS (*p) = 1;
1046 TREE_TYPE (*p) = void_type_node;
1047 /* For a BIND_EXPR, the body is operand 1. */
1048 p = &BIND_EXPR_BODY (*p);
1049 break;
1050
1051 case CLEANUP_POINT_EXPR:
1052 case TRY_FINALLY_EXPR:
1053 case TRY_CATCH_EXPR:
6de9cd9a
DN
1054 TREE_SIDE_EFFECTS (*p) = 1;
1055 TREE_TYPE (*p) = void_type_node;
c6c7698d
JM
1056 p = &TREE_OPERAND (*p, 0);
1057 break;
1058
1059 case STATEMENT_LIST:
1060 {
1061 tree_stmt_iterator i = tsi_last (*p);
1062 TREE_SIDE_EFFECTS (*p) = 1;
1063 TREE_TYPE (*p) = void_type_node;
1064 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1065 }
1066 break;
1067
1068 case COMPOUND_EXPR:
1069 /* Advance to the last statement. Set all container types to void. */
1070 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1071 {
1072 TREE_SIDE_EFFECTS (*p) = 1;
1073 TREE_TYPE (*p) = void_type_node;
1074 }
1075 break;
1076
1077 default:
1078 goto out;
6de9cd9a
DN
1079 }
1080 }
1081
c6c7698d 1082 out:
325c3691 1083 if (p == NULL || IS_EMPTY_STMT (*p))
c6c7698d
JM
1084 temp = NULL_TREE;
1085 else if (temp)
6de9cd9a 1086 {
c6c7698d
JM
1087 /* The wrapper is on the RHS of an assignment that we're pushing
1088 down. */
1089 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1090 || TREE_CODE (temp) == MODIFY_EXPR);
726a989a 1091 TREE_OPERAND (temp, 1) = *p;
c6c7698d 1092 *p = temp;
6de9cd9a
DN
1093 }
1094 else
1095 {
c6c7698d
JM
1096 temp = create_tmp_var (type, "retval");
1097 *p = build2 (INIT_EXPR, type, temp, *p);
6de9cd9a
DN
1098 }
1099
6de9cd9a
DN
1100 return temp;
1101 }
1102
1103 return NULL_TREE;
1104}
1105
1106/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1ea7e6ad 1107 a temporary through which they communicate. */
6de9cd9a
DN
1108
1109static void
726a989a 1110build_stack_save_restore (gimple *save, gimple *restore)
6de9cd9a 1111{
726a989a 1112 tree tmp_var;
6de9cd9a 1113
726a989a 1114 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
6de9cd9a 1115 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
726a989a 1116 gimple_call_set_lhs (*save, tmp_var);
6de9cd9a 1117
726a989a
RB
1118 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1119 1, tmp_var);
6de9cd9a
DN
1120}
1121
1122/* Gimplify a BIND_EXPR. Just voidify and recurse. */
1123
1124static enum gimplify_status
726a989a 1125gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1126{
1127 tree bind_expr = *expr_p;
6de9cd9a
DN
1128 bool old_save_stack = gimplify_ctxp->save_stack;
1129 tree t;
726a989a
RB
1130 gimple gimple_bind;
1131 gimple_seq body;
6de9cd9a 1132
c6c7698d 1133 tree temp = voidify_wrapper_expr (bind_expr, NULL);
325c3691 1134
6de9cd9a 1135 /* Mark variables seen in this bind expr. */
910ad8de 1136 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
e41d82f5 1137 {
820cc88f 1138 if (TREE_CODE (t) == VAR_DECL)
8cb86b65
JJ
1139 {
1140 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1141
1142 /* Mark variable as local. */
1143 if (ctx && !is_global_var (t)
1144 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1145 || splay_tree_lookup (ctx->variables,
1146 (splay_tree_key) t) == NULL))
1147 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1148
1149 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
fc3103e7
JJ
1150
1151 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1152 cfun->has_local_explicit_reg_vars = true;
8cb86b65 1153 }
e41d82f5
RH
1154
1155 /* Preliminarily mark non-addressed complex variables as eligible
1156 for promotion to gimple registers. We'll transform their uses
bd2e63a1
RG
1157 as we find them. */
1158 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1159 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
e41d82f5
RH
1160 && !TREE_THIS_VOLATILE (t)
1161 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1162 && !needs_to_live_in_memory (t))
0890b981 1163 DECL_GIMPLE_REG_P (t) = 1;
e41d82f5 1164 }
6de9cd9a 1165
726a989a
RB
1166 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1167 BIND_EXPR_BLOCK (bind_expr));
1168 gimple_push_bind_expr (gimple_bind);
1169
6de9cd9a
DN
1170 gimplify_ctxp->save_stack = false;
1171
726a989a
RB
1172 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1173 body = NULL;
1174 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1175 gimple_bind_set_body (gimple_bind, body);
6de9cd9a
DN
1176
1177 if (gimplify_ctxp->save_stack)
1178 {
726a989a
RB
1179 gimple stack_save, stack_restore, gs;
1180 gimple_seq cleanup, new_body;
6de9cd9a
DN
1181
1182 /* Save stack on entry and restore it on exit. Add a try_finally
1183 block to achieve this. Note that mudflap depends on the
1184 format of the emitted code: see mx_register_decls(). */
1185 build_stack_save_restore (&stack_save, &stack_restore);
1186
726a989a
RB
1187 cleanup = new_body = NULL;
1188 gimplify_seq_add_stmt (&cleanup, stack_restore);
1189 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1190 GIMPLE_TRY_FINALLY);
6de9cd9a 1191
726a989a
RB
1192 gimplify_seq_add_stmt (&new_body, stack_save);
1193 gimplify_seq_add_stmt (&new_body, gs);
1194 gimple_bind_set_body (gimple_bind, new_body);
6de9cd9a
DN
1195 }
1196
1197 gimplify_ctxp->save_stack = old_save_stack;
1198 gimple_pop_bind_expr ();
1199
726a989a
RB
1200 gimplify_seq_add_stmt (pre_p, gimple_bind);
1201
6de9cd9a
DN
1202 if (temp)
1203 {
1204 *expr_p = temp;
6de9cd9a
DN
1205 return GS_OK;
1206 }
726a989a
RB
1207
1208 *expr_p = NULL_TREE;
1209 return GS_ALL_DONE;
6de9cd9a
DN
1210}
1211
1212/* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1213 GIMPLE value, it is assigned to a new temporary and the statement is
1214 re-written to return the temporary.
1215
726a989a 1216 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
1217 STMT should be stored. */
1218
1219static enum gimplify_status
726a989a 1220gimplify_return_expr (tree stmt, gimple_seq *pre_p)
6de9cd9a 1221{
726a989a 1222 gimple ret;
6de9cd9a 1223 tree ret_expr = TREE_OPERAND (stmt, 0);
71877985 1224 tree result_decl, result;
6de9cd9a 1225
726a989a
RB
1226 if (ret_expr == error_mark_node)
1227 return GS_ERROR;
1228
1229 if (!ret_expr
1230 || TREE_CODE (ret_expr) == RESULT_DECL
55e99d52 1231 || ret_expr == error_mark_node)
726a989a
RB
1232 {
1233 gimple ret = gimple_build_return (ret_expr);
1234 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1235 gimplify_seq_add_stmt (pre_p, ret);
1236 return GS_ALL_DONE;
1237 }
6de9cd9a 1238
6de9cd9a 1239 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
71877985 1240 result_decl = NULL_TREE;
6de9cd9a
DN
1241 else
1242 {
726a989a
RB
1243 result_decl = TREE_OPERAND (ret_expr, 0);
1244
1245 /* See through a return by reference. */
cc77ae10 1246 if (TREE_CODE (result_decl) == INDIRECT_REF)
cc77ae10 1247 result_decl = TREE_OPERAND (result_decl, 0);
282899df
NS
1248
1249 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1250 || TREE_CODE (ret_expr) == INIT_EXPR)
1251 && TREE_CODE (result_decl) == RESULT_DECL);
6de9cd9a
DN
1252 }
1253
71877985
RH
1254 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1255 Recall that aggregate_value_p is FALSE for any aggregate type that is
1256 returned in registers. If we're returning values in registers, then
1257 we don't want to extend the lifetime of the RESULT_DECL, particularly
d3147f64 1258 across another call. In addition, for those aggregates for which
535a42b1 1259 hard_function_value generates a PARALLEL, we'll die during normal
71877985
RH
1260 expansion of structure assignments; there's special code in expand_return
1261 to handle this case that does not exist in expand_expr. */
ca361dec
EB
1262 if (!result_decl)
1263 result = NULL_TREE;
1264 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1265 {
1266 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1267 {
1268 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1269 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1270 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1271 should be effectively allocated by the caller, i.e. all calls to
1272 this function must be subject to the Return Slot Optimization. */
1273 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1274 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1275 }
1276 result = result_decl;
1277 }
71877985
RH
1278 else if (gimplify_ctxp->return_temp)
1279 result = gimplify_ctxp->return_temp;
1280 else
1281 {
acd63801 1282 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
ff98621c
RH
1283
1284 /* ??? With complex control flow (usually involving abnormal edges),
1285 we can wind up warning about an uninitialized value for this. Due
1286 to how this variable is constructed and initialized, this is never
1287 true. Give up and never warn. */
1288 TREE_NO_WARNING (result) = 1;
1289
71877985
RH
1290 gimplify_ctxp->return_temp = result;
1291 }
1292
726a989a 1293 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
71877985
RH
1294 Then gimplify the whole thing. */
1295 if (result != result_decl)
726a989a 1296 TREE_OPERAND (ret_expr, 0) = result;
fff34d35
RK
1297
1298 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
6de9cd9a 1299
726a989a
RB
1300 ret = gimple_build_return (result);
1301 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1302 gimplify_seq_add_stmt (pre_p, ret);
6de9cd9a 1303
6de9cd9a
DN
1304 return GS_ALL_DONE;
1305}
1306
786025ea 1307static void
726a989a 1308gimplify_vla_decl (tree decl, gimple_seq *seq_p)
786025ea
JJ
1309{
1310 /* This is a variable-sized decl. Simplify its size and mark it
1311 for deferred expansion. Note that mudflap depends on the format
1312 of the emitted code: see mx_register_decls(). */
1313 tree t, addr, ptr_type;
1314
726a989a
RB
1315 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1316 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
786025ea
JJ
1317
1318 /* All occurrences of this decl in final gimplified code will be
1319 replaced by indirection. Setting DECL_VALUE_EXPR does two
1320 things: First, it lets the rest of the gimplifier know what
1321 replacement to use. Second, it lets the debug info know
1322 where to find the value. */
1323 ptr_type = build_pointer_type (TREE_TYPE (decl));
1324 addr = create_tmp_var (ptr_type, get_name (decl));
1325 DECL_IGNORED_P (addr) = 0;
1326 t = build_fold_indirect_ref (addr);
1327 SET_DECL_VALUE_EXPR (decl, t);
1328 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1329
1330 t = built_in_decls[BUILT_IN_ALLOCA];
1331 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
d3c12306
EB
1332 /* The call has been built for a variable-sized object. */
1333 ALLOCA_FOR_VAR_P (t) = 1;
786025ea 1334 t = fold_convert (ptr_type, t);
726a989a 1335 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
786025ea 1336
726a989a 1337 gimplify_and_add (t, seq_p);
786025ea
JJ
1338
1339 /* Indicate that we need to restore the stack level when the
1340 enclosing BIND_EXPR is exited. */
1341 gimplify_ctxp->save_stack = true;
1342}
1343
726a989a 1344
350fae66
RK
1345/* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1346 and initialization explicit. */
1347
1348static enum gimplify_status
726a989a 1349gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
350fae66
RK
1350{
1351 tree stmt = *stmt_p;
1352 tree decl = DECL_EXPR_DECL (stmt);
1353
1354 *stmt_p = NULL_TREE;
1355
1356 if (TREE_TYPE (decl) == error_mark_node)
1357 return GS_ERROR;
1358
8e0a600b
JJ
1359 if ((TREE_CODE (decl) == TYPE_DECL
1360 || TREE_CODE (decl) == VAR_DECL)
1361 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
726a989a 1362 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
350fae66 1363
8e0a600b 1364 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
350fae66
RK
1365 {
1366 tree init = DECL_INITIAL (decl);
1367
b38f3813
EB
1368 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1369 || (!TREE_STATIC (decl)
1370 && flag_stack_check == GENERIC_STACK_CHECK
1371 && compare_tree_int (DECL_SIZE_UNIT (decl),
1372 STACK_CHECK_MAX_VAR_SIZE) > 0))
726a989a 1373 gimplify_vla_decl (decl, seq_p);
350fae66
RK
1374
1375 if (init && init != error_mark_node)
1376 {
1377 if (!TREE_STATIC (decl))
1378 {
1379 DECL_INITIAL (decl) = NULL_TREE;
dae7ec87 1380 init = build2 (INIT_EXPR, void_type_node, decl, init);
726a989a
RB
1381 gimplify_and_add (init, seq_p);
1382 ggc_free (init);
350fae66
RK
1383 }
1384 else
1385 /* We must still examine initializers for static variables
1386 as they may contain a label address. */
1387 walk_tree (&init, force_labels_r, NULL, NULL);
1388 }
1389
e92fb501
MM
1390 /* Some front ends do not explicitly declare all anonymous
1391 artificial variables. We compensate here by declaring the
1392 variables, though it would be better if the front ends would
1393 explicitly declare them. */
1394 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1395 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
350fae66
RK
1396 gimple_add_tmp_var (decl);
1397 }
1398
1399 return GS_ALL_DONE;
1400}
1401
6de9cd9a
DN
1402/* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1403 and replacing the LOOP_EXPR with goto, but if the loop contains an
1404 EXIT_EXPR, we need to append a label for it to jump to. */
1405
1406static enum gimplify_status
726a989a 1407gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1408{
1409 tree saved_label = gimplify_ctxp->exit_label;
c2255bc4 1410 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a 1411
726a989a 1412 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
6de9cd9a
DN
1413
1414 gimplify_ctxp->exit_label = NULL_TREE;
1415
fff34d35 1416 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
6de9cd9a 1417
726a989a
RB
1418 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1419
6de9cd9a 1420 if (gimplify_ctxp->exit_label)
726a989a
RB
1421 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1422
1423 gimplify_ctxp->exit_label = saved_label;
1424
1425 *expr_p = NULL;
1426 return GS_ALL_DONE;
1427}
1428
1429/* Gimplifies a statement list onto a sequence. These may be created either
1430 by an enlightened front-end, or by shortcut_cond_expr. */
1431
1432static enum gimplify_status
1433gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1434{
1435 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1436
1437 tree_stmt_iterator i = tsi_start (*expr_p);
1438
1439 while (!tsi_end_p (i))
6de9cd9a 1440 {
726a989a
RB
1441 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1442 tsi_delink (&i);
6de9cd9a 1443 }
6de9cd9a 1444
726a989a
RB
1445 if (temp)
1446 {
1447 *expr_p = temp;
1448 return GS_OK;
1449 }
6de9cd9a
DN
1450
1451 return GS_ALL_DONE;
1452}
1453
f667741c
SB
1454/* Compare two case labels. Because the front end should already have
1455 made sure that case ranges do not overlap, it is enough to only compare
1456 the CASE_LOW values of each case label. */
1457
1458static int
1459compare_case_labels (const void *p1, const void *p2)
1460{
741ac903
KG
1461 const_tree const case1 = *(const_tree const*)p1;
1462 const_tree const case2 = *(const_tree const*)p2;
f667741c 1463
726a989a
RB
1464 /* The 'default' case label always goes first. */
1465 if (!CASE_LOW (case1))
1466 return -1;
1467 else if (!CASE_LOW (case2))
1468 return 1;
1469 else
1470 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
f667741c
SB
1471}
1472
726a989a 1473
165b54c3 1474/* Sort the case labels in LABEL_VEC in place in ascending order. */
0f1f6967
SB
1475
1476void
726a989a 1477sort_case_labels (VEC(tree,heap)* label_vec)
0f1f6967 1478{
726a989a
RB
1479 size_t len = VEC_length (tree, label_vec);
1480 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1481 compare_case_labels);
0f1f6967
SB
1482}
1483
726a989a 1484
6de9cd9a
DN
1485/* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1486 branch to. */
1487
1488static enum gimplify_status
726a989a 1489gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
1490{
1491 tree switch_expr = *expr_p;
726a989a 1492 gimple_seq switch_body_seq = NULL;
6de9cd9a
DN
1493 enum gimplify_status ret;
1494
726a989a
RB
1495 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1496 fb_rvalue);
1497 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1498 return ret;
6de9cd9a
DN
1499
1500 if (SWITCH_BODY (switch_expr))
1501 {
726a989a
RB
1502 VEC (tree,heap) *labels;
1503 VEC (tree,heap) *saved_labels;
1504 tree default_case = NULL_TREE;
6de9cd9a 1505 size_t i, len;
726a989a 1506 gimple gimple_switch;
b8698a0f 1507
6de9cd9a
DN
1508 /* If someone can be bothered to fill in the labels, they can
1509 be bothered to null out the body too. */
282899df 1510 gcc_assert (!SWITCH_LABELS (switch_expr));
6de9cd9a 1511
b8698a0f 1512 /* save old labels, get new ones from body, then restore the old
726a989a 1513 labels. Save all the things from the switch body to append after. */
6de9cd9a 1514 saved_labels = gimplify_ctxp->case_labels;
84c76d40 1515 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
6de9cd9a 1516
726a989a 1517 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
6de9cd9a
DN
1518 labels = gimplify_ctxp->case_labels;
1519 gimplify_ctxp->case_labels = saved_labels;
b8698a0f 1520
3feaea00
EB
1521 i = 0;
1522 while (i < VEC_length (tree, labels))
6de9cd9a 1523 {
3feaea00
EB
1524 tree elt = VEC_index (tree, labels, i);
1525 tree low = CASE_LOW (elt);
1526 bool remove_element = FALSE;
1527
1528 if (low)
1529 {
1530 /* Discard empty ranges. */
1531 tree high = CASE_HIGH (elt);
288bd0d7 1532 if (high && tree_int_cst_lt (high, low))
3feaea00
EB
1533 remove_element = TRUE;
1534 }
1535 else
6de9cd9a 1536 {
f667741c 1537 /* The default case must be the last label in the list. */
3feaea00
EB
1538 gcc_assert (!default_case);
1539 default_case = elt;
1540 remove_element = TRUE;
6de9cd9a 1541 }
3feaea00
EB
1542
1543 if (remove_element)
1544 VEC_ordered_remove (tree, labels, i);
1545 else
1546 i++;
6de9cd9a 1547 }
3feaea00 1548 len = i;
6de9cd9a 1549
32f579f6
JJ
1550 if (!VEC_empty (tree, labels))
1551 sort_case_labels (labels);
1552
726a989a 1553 if (!default_case)
6de9cd9a 1554 {
32f579f6 1555 tree type = TREE_TYPE (switch_expr);
726a989a 1556
f667741c 1557 /* If the switch has no default label, add one, so that we jump
32f579f6
JJ
1558 around the switch body. If the labels already cover the whole
1559 range of type, add the default label pointing to one of the
1560 existing labels. */
1561 if (type == void_type_node)
1562 type = TREE_TYPE (SWITCH_COND (switch_expr));
1563 if (len
1564 && INTEGRAL_TYPE_P (type)
1565 && TYPE_MIN_VALUE (type)
1566 && TYPE_MAX_VALUE (type)
1567 && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1568 TYPE_MIN_VALUE (type)))
1569 {
1570 tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1571 if (!high)
1572 high = CASE_LOW (VEC_index (tree, labels, len - 1));
1573 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
1574 {
1575 for (i = 1; i < len; i++)
1576 {
1577 high = CASE_LOW (VEC_index (tree, labels, i));
1578 low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1579 if (!low)
1580 low = CASE_LOW (VEC_index (tree, labels, i - 1));
1581 if ((TREE_INT_CST_LOW (low) + 1
1582 != TREE_INT_CST_LOW (high))
1583 || (TREE_INT_CST_HIGH (low)
1584 + (TREE_INT_CST_LOW (high) == 0)
1585 != TREE_INT_CST_HIGH (high)))
1586 break;
1587 }
1588 if (i == len)
1589 default_case = build3 (CASE_LABEL_EXPR, void_type_node,
1590 NULL_TREE, NULL_TREE,
1591 CASE_LABEL (VEC_index (tree,
1592 labels, 0)));
1593 }
1594 }
6de9cd9a 1595
32f579f6
JJ
1596 if (!default_case)
1597 {
1598 gimple new_default;
1599
c2255bc4
AH
1600 default_case
1601 = build3 (CASE_LABEL_EXPR, void_type_node,
1602 NULL_TREE, NULL_TREE,
1603 create_artificial_label (UNKNOWN_LOCATION));
32f579f6
JJ
1604 new_default = gimple_build_label (CASE_LABEL (default_case));
1605 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1606 }
1607 }
f667741c 1608
b8698a0f 1609 gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
726a989a
RB
1610 default_case, labels);
1611 gimplify_seq_add_stmt (pre_p, gimple_switch);
1612 gimplify_seq_add_seq (pre_p, switch_body_seq);
1613 VEC_free(tree, heap, labels);
6de9cd9a 1614 }
282899df
NS
1615 else
1616 gcc_assert (SWITCH_LABELS (switch_expr));
6de9cd9a 1617
726a989a 1618 return GS_ALL_DONE;
6de9cd9a
DN
1619}
1620
726a989a 1621
6de9cd9a 1622static enum gimplify_status
726a989a 1623gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 1624{
953ff289 1625 struct gimplify_ctx *ctxp;
726a989a 1626 gimple gimple_label;
953ff289
DN
1627
1628 /* Invalid OpenMP programs can play Duff's Device type games with
1629 #pragma omp parallel. At least in the C front end, we don't
1630 detect such invalid branches until after gimplification. */
1631 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1632 if (ctxp->case_labels)
1633 break;
282899df 1634
726a989a
RB
1635 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1636 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1637 gimplify_seq_add_stmt (pre_p, gimple_label);
1638
6de9cd9a
DN
1639 return GS_ALL_DONE;
1640}
1641
6de9cd9a
DN
1642/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1643 if necessary. */
1644
1645tree
1646build_and_jump (tree *label_p)
1647{
1648 if (label_p == NULL)
1649 /* If there's nowhere to jump, just fall through. */
65355d53 1650 return NULL_TREE;
6de9cd9a
DN
1651
1652 if (*label_p == NULL_TREE)
1653 {
c2255bc4 1654 tree label = create_artificial_label (UNKNOWN_LOCATION);
6de9cd9a
DN
1655 *label_p = label;
1656 }
1657
1658 return build1 (GOTO_EXPR, void_type_node, *label_p);
1659}
1660
1661/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1662 This also involves building a label to jump to and communicating it to
1663 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1664
1665static enum gimplify_status
1666gimplify_exit_expr (tree *expr_p)
1667{
1668 tree cond = TREE_OPERAND (*expr_p, 0);
1669 tree expr;
1670
1671 expr = build_and_jump (&gimplify_ctxp->exit_label);
b4257cfc 1672 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
6de9cd9a
DN
1673 *expr_p = expr;
1674
1675 return GS_OK;
1676}
1677
1678/* A helper function to be called via walk_tree. Mark all labels under *TP
1679 as being forced. To be called for DECL_INITIAL of static variables. */
1680
1681tree
1682force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1683{
1684 if (TYPE_P (*tp))
1685 *walk_subtrees = 0;
1686 if (TREE_CODE (*tp) == LABEL_DECL)
1687 FORCED_LABEL (*tp) = 1;
1688
1689 return NULL_TREE;
1690}
1691
26d44ae2
RH
1692/* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1693 different from its canonical type, wrap the whole thing inside a
1694 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1695 type.
6de9cd9a 1696
26d44ae2
RH
1697 The canonical type of a COMPONENT_REF is the type of the field being
1698 referenced--unless the field is a bit-field which can be read directly
1699 in a smaller mode, in which case the canonical type is the
1700 sign-appropriate type corresponding to that mode. */
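/* For instance, a read of a bit-field declared 'unsigned int f : 8' may have
   been narrowed to an 8-bit access, in which case the canonical type used
   below is the corresponding unsigned 8-bit type rather than 'unsigned int',
   with the enclosing NOP_EXPR preserving the expression's original type.
   (This example is illustrative; the exact mode chosen depends on the
   target.)  */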
6de9cd9a 1701
26d44ae2
RH
1702static void
1703canonicalize_component_ref (tree *expr_p)
6de9cd9a 1704{
26d44ae2
RH
1705 tree expr = *expr_p;
1706 tree type;
6de9cd9a 1707
282899df 1708 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
6de9cd9a 1709
26d44ae2
RH
1710 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1711 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1712 else
1713 type = TREE_TYPE (TREE_OPERAND (expr, 1));
6de9cd9a 1714
b26c6d55
RG
1715 /* One could argue that all the stuff below is not necessary for
1716 the non-bitfield case and declare it a FE error if type
1717 adjustment would be needed. */
26d44ae2 1718 if (TREE_TYPE (expr) != type)
6de9cd9a 1719 {
b26c6d55 1720#ifdef ENABLE_TYPES_CHECKING
26d44ae2 1721 tree old_type = TREE_TYPE (expr);
b26c6d55
RG
1722#endif
1723 int type_quals;
1724
1725 /* We need to preserve qualifiers and propagate them from
1726 operand 0. */
1727 type_quals = TYPE_QUALS (type)
1728 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1729 if (TYPE_QUALS (type) != type_quals)
1730 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
6de9cd9a 1731
26d44ae2
RH
1732 /* Set the type of the COMPONENT_REF to the underlying type. */
1733 TREE_TYPE (expr) = type;
6de9cd9a 1734
b26c6d55
RG
1735#ifdef ENABLE_TYPES_CHECKING
1736 /* It is now a FE error, if the conversion from the canonical
1737 type to the original expression type is not useless. */
1738 gcc_assert (useless_type_conversion_p (old_type, type));
1739#endif
26d44ae2
RH
1740 }
1741}
6de9cd9a 1742
26d44ae2 1743/* If a NOP conversion is changing a pointer to array of foo to a pointer
d3147f64 1744 to foo, embed that change in the ADDR_EXPR by converting
26d44ae2
RH
1745 T array[U];
1746 (T *)&array
1747 ==>
1748 &array[L]
1749     where L is the lower bound.  For simplicity, only do this for a constant
04d86531
RG
1750 lower bound.
1751 The constraint is that the type of &array[L] is trivially convertible
1752 to T *. */
6de9cd9a 1753
26d44ae2
RH
1754static void
1755canonicalize_addr_expr (tree *expr_p)
1756{
1757 tree expr = *expr_p;
26d44ae2 1758 tree addr_expr = TREE_OPERAND (expr, 0);
04d86531 1759 tree datype, ddatype, pddatype;
6de9cd9a 1760
04d86531
RG
1761 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1762 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1763 || TREE_CODE (addr_expr) != ADDR_EXPR)
26d44ae2 1764 return;
6de9cd9a 1765
26d44ae2 1766 /* The addr_expr type should be a pointer to an array. */
04d86531 1767 datype = TREE_TYPE (TREE_TYPE (addr_expr));
26d44ae2
RH
1768 if (TREE_CODE (datype) != ARRAY_TYPE)
1769 return;
6de9cd9a 1770
04d86531
RG
1771 /* The pointer to element type shall be trivially convertible to
1772 the expression pointer type. */
26d44ae2 1773 ddatype = TREE_TYPE (datype);
04d86531 1774 pddatype = build_pointer_type (ddatype);
e5fdcd8c
RG
1775 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1776 pddatype))
26d44ae2 1777 return;
6de9cd9a 1778
26d44ae2 1779 /* The lower bound and element sizes must be constant. */
04d86531
RG
1780 if (!TYPE_SIZE_UNIT (ddatype)
1781 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
26d44ae2
RH
1782 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1783 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1784 return;
6de9cd9a 1785
26d44ae2 1786 /* All checks succeeded. Build a new node to merge the cast. */
04d86531 1787 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
26d44ae2 1788 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
5852948c 1789 NULL_TREE, NULL_TREE);
04d86531 1790 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
e5fdcd8c
RG
1791
1792 /* We can have stripped a required restrict qualifier above. */
1793 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1794 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
26d44ae2 1795}
6de9cd9a 1796
26d44ae2
RH
1797/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1798 underneath as appropriate. */
6de9cd9a 1799
26d44ae2
RH
1800static enum gimplify_status
1801gimplify_conversion (tree *expr_p)
d3147f64 1802{
fe9821b8 1803 tree tem;
db3927fb 1804 location_t loc = EXPR_LOCATION (*expr_p);
1043771b 1805 gcc_assert (CONVERT_EXPR_P (*expr_p));
c2255bc4 1806
0710ccff
NS
1807 /* Then strip away all but the outermost conversion. */
1808 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1809
1810 /* And remove the outermost conversion if it's useless. */
1811 if (tree_ssa_useless_type_conversion (*expr_p))
1812 *expr_p = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1813
fe9821b8
JH
1814  /* Attempt to avoid a NOP_EXPR by producing a reference to a subtype.
1815     For example, this folds (subclass *)&A into &A->subclass, avoiding
1816     the need for a separate statement.  */
99f536cc 1817 if (CONVERT_EXPR_P (*expr_p)
fe9821b8
JH
1818 && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1819 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
99f536cc 1820 && (tem = maybe_fold_offset_to_address
c2255bc4
AH
1821 (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
1822 integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
99f536cc 1823 *expr_p = tem;
fe9821b8 1824
26d44ae2
RH
1825 /* If we still have a conversion at the toplevel,
1826 then canonicalize some constructs. */
1043771b 1827 if (CONVERT_EXPR_P (*expr_p))
26d44ae2
RH
1828 {
1829 tree sub = TREE_OPERAND (*expr_p, 0);
6de9cd9a 1830
26d44ae2
RH
1831 /* If a NOP conversion is changing the type of a COMPONENT_REF
1832 expression, then canonicalize its type now in order to expose more
1833 redundant conversions. */
1834 if (TREE_CODE (sub) == COMPONENT_REF)
1835 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
6de9cd9a 1836
26d44ae2
RH
1837 /* If a NOP conversion is changing a pointer to array of foo
1838 to a pointer to foo, embed that change in the ADDR_EXPR. */
1839 else if (TREE_CODE (sub) == ADDR_EXPR)
1840 canonicalize_addr_expr (expr_p);
1841 }
6de9cd9a 1842
8b17cc05
RG
1843 /* If we have a conversion to a non-register type force the
1844 use of a VIEW_CONVERT_EXPR instead. */
4f934809 1845 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
db3927fb 1846 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
4f934809 1847 TREE_OPERAND (*expr_p, 0));
8b17cc05 1848
6de9cd9a
DN
1849 return GS_OK;
1850}
1851
77f2a970
JJ
1852/* Nonlocal VLAs seen in the current function. */
1853static struct pointer_set_t *nonlocal_vlas;
1854
b8698a0f 1855/* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
a9f7c570
RH
1856 DECL_VALUE_EXPR, and it's worth re-examining things. */
1857
1858static enum gimplify_status
1859gimplify_var_or_parm_decl (tree *expr_p)
1860{
1861 tree decl = *expr_p;
1862
1863 /* ??? If this is a local variable, and it has not been seen in any
1864 outer BIND_EXPR, then it's probably the result of a duplicate
1865 declaration, for which we've already issued an error. It would
1866 be really nice if the front end wouldn't leak these at all.
1867 Currently the only known culprit is C++ destructors, as seen
1868 in g++.old-deja/g++.jason/binding.C. */
1869 if (TREE_CODE (decl) == VAR_DECL
1870 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1871 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1872 && decl_function_context (decl) == current_function_decl)
1873 {
1da2ed5f 1874 gcc_assert (seen_error ());
a9f7c570
RH
1875 return GS_ERROR;
1876 }
1877
953ff289
DN
1878 /* When within an OpenMP context, notice uses of variables. */
1879 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1880 return GS_ALL_DONE;
1881
a9f7c570
RH
1882 /* If the decl is an alias for another expression, substitute it now. */
1883 if (DECL_HAS_VALUE_EXPR_P (decl))
1884 {
77f2a970
JJ
1885 tree value_expr = DECL_VALUE_EXPR (decl);
1886
1887 /* For referenced nonlocal VLAs add a decl for debugging purposes
1888 to the current function. */
1889 if (TREE_CODE (decl) == VAR_DECL
1890 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1891 && nonlocal_vlas != NULL
1892 && TREE_CODE (value_expr) == INDIRECT_REF
1893 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1894 && decl_function_context (decl) != current_function_decl)
1895 {
1896 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1897 while (ctx && ctx->region_type == ORT_WORKSHARE)
1898 ctx = ctx->outer_context;
1899 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1900 {
1901 tree copy = copy_node (decl), block;
1902
1903 lang_hooks.dup_lang_specific_decl (copy);
2eb79bbb 1904 SET_DECL_RTL (copy, 0);
77f2a970
JJ
1905 TREE_USED (copy) = 1;
1906 block = DECL_INITIAL (current_function_decl);
910ad8de 1907 DECL_CHAIN (copy) = BLOCK_VARS (block);
77f2a970
JJ
1908 BLOCK_VARS (block) = copy;
1909 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1910 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1911 }
1912 }
1913
1914 *expr_p = unshare_expr (value_expr);
a9f7c570
RH
1915 return GS_OK;
1916 }
1917
1918 return GS_ALL_DONE;
1919}
1920
1921
6de9cd9a 1922/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
726a989a 1923 node *EXPR_P.
6de9cd9a
DN
1924
1925 compound_lval
1926 : min_lval '[' val ']'
1927 | min_lval '.' ID
1928 | compound_lval '[' val ']'
1929 | compound_lval '.' ID
1930
1931 This is not part of the original SIMPLE definition, which separates
1932 array and member references, but it seems reasonable to handle them
1933 together. Also, this way we don't run into problems with union
1934 aliasing; gcc requires that for accesses through a union to alias, the
1935 union reference must be explicit, which was not always the case when we
1936 were splitting up array and member refs.
1937
726a989a 1938 PRE_P points to the sequence where side effects that must happen before
6de9cd9a
DN
1939 *EXPR_P should be stored.
1940
726a989a 1941 POST_P points to the sequence where side effects that must happen after
6de9cd9a
DN
1942 *EXPR_P should be stored. */
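/* As a sketch (not an exhaustive description), for a reference such as
     a[i].f
   any variable array low bound or element size is first gimplified into the
   normally-implicit operands 2 and 3 of the ARRAY_REF, then the base 'a' is
   gimplified, and finally the index 'i', with side effects emitted to PRE_P
   and POST_P as appropriate.  */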
1943
1944static enum gimplify_status
726a989a
RB
1945gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1946 fallback_t fallback)
6de9cd9a
DN
1947{
1948 tree *p;
ec234842 1949 VEC(tree,heap) *stack;
941f78d1 1950 enum gimplify_status ret = GS_ALL_DONE, tret;
af72267c 1951 int i;
db3927fb 1952 location_t loc = EXPR_LOCATION (*expr_p);
941f78d1 1953 tree expr = *expr_p;
6de9cd9a 1954
6de9cd9a 1955 /* Create a stack of the subexpressions so later we can walk them in
ec234842
KH
1956 order from inner to outer. */
1957 stack = VEC_alloc (tree, heap, 10);
6de9cd9a 1958
afe84921 1959 /* We can handle anything that get_inner_reference can deal with. */
6a720599
JM
1960 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1961 {
a9f7c570 1962 restart:
6a720599
JM
1963 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1964 if (TREE_CODE (*p) == INDIRECT_REF)
db3927fb 1965 *p = fold_indirect_ref_loc (loc, *p);
a9f7c570
RH
1966
1967 if (handled_component_p (*p))
1968 ;
1969 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1970 additional COMPONENT_REFs. */
1971 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1972 && gimplify_var_or_parm_decl (p) == GS_OK)
1973 goto restart;
1974 else
6a720599 1975 break;
b8698a0f 1976
ec234842 1977 VEC_safe_push (tree, heap, stack, *p);
6a720599 1978 }
6de9cd9a 1979
ec234842 1980 gcc_assert (VEC_length (tree, stack));
9e51aaf5 1981
44de5aeb
RK
1982 /* Now STACK is a stack of pointers to all the refs we've walked through
1983 and P points to the innermost expression.
6de9cd9a 1984
af72267c
RK
1985     Java requires that we elaborate nodes in source order.  That
1986 means we must gimplify the inner expression followed by each of
1987 the indices, in order. But we can't gimplify the inner
1988 expression until we deal with any variable bounds, sizes, or
1989 positions in order to deal with PLACEHOLDER_EXPRs.
1990
1991 So we do this in three steps. First we deal with the annotations
1992 for any variables in the components, then we gimplify the base,
1993 then we gimplify any indices, from left to right. */
ec234842 1994 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
6de9cd9a 1995 {
ec234842 1996 tree t = VEC_index (tree, stack, i);
44de5aeb
RK
1997
1998 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6de9cd9a 1999 {
44de5aeb
RK
2000 /* Gimplify the low bound and element type size and put them into
2001 the ARRAY_REF. If these values are set, they have already been
2002 gimplified. */
726a989a 2003 if (TREE_OPERAND (t, 2) == NULL_TREE)
44de5aeb 2004 {
a7cc468a
RH
2005 tree low = unshare_expr (array_ref_low_bound (t));
2006 if (!is_gimple_min_invariant (low))
44de5aeb 2007 {
726a989a
RB
2008 TREE_OPERAND (t, 2) = low;
2009 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2010 post_p, is_gimple_reg,
726a989a 2011 fb_rvalue);
44de5aeb
RK
2012 ret = MIN (ret, tret);
2013 }
2014 }
2015
2016 if (!TREE_OPERAND (t, 3))
2017 {
2018 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2019 tree elmt_size = unshare_expr (array_ref_element_size (t));
a4e9ffe5 2020 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
44de5aeb
RK
2021
2022 /* Divide the element size by the alignment of the element
2023 type (above). */
db3927fb 2024 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
44de5aeb 2025
a7cc468a 2026 if (!is_gimple_min_invariant (elmt_size))
44de5aeb 2027 {
726a989a
RB
2028 TREE_OPERAND (t, 3) = elmt_size;
2029 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
ba4d8f9d 2030 post_p, is_gimple_reg,
726a989a 2031 fb_rvalue);
44de5aeb
RK
2032 ret = MIN (ret, tret);
2033 }
6de9cd9a
DN
2034 }
2035 }
44de5aeb
RK
2036 else if (TREE_CODE (t) == COMPONENT_REF)
2037 {
2038 /* Set the field offset into T and gimplify it. */
2039 if (!TREE_OPERAND (t, 2))
2040 {
2041 tree offset = unshare_expr (component_ref_field_offset (t));
2042 tree field = TREE_OPERAND (t, 1);
2043 tree factor
2044 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2045
2046 /* Divide the offset by its alignment. */
db3927fb 2047 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
44de5aeb 2048
a7cc468a 2049 if (!is_gimple_min_invariant (offset))
44de5aeb 2050 {
726a989a
RB
2051 TREE_OPERAND (t, 2) = offset;
2052 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
ba4d8f9d 2053 post_p, is_gimple_reg,
726a989a 2054 fb_rvalue);
44de5aeb
RK
2055 ret = MIN (ret, tret);
2056 }
2057 }
2058 }
af72267c
RK
2059 }
2060
a9f7c570
RH
2061 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2062 so as to match the min_lval predicate. Failure to do so may result
2063 in the creation of large aggregate temporaries. */
2064 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2065 fallback | fb_lvalue);
af72267c
RK
2066 ret = MIN (ret, tret);
2067
48eb4e53
RK
2068 /* And finally, the indices and operands to BIT_FIELD_REF. During this
2069 loop we also remove any useless conversions. */
ec234842 2070 for (; VEC_length (tree, stack) > 0; )
af72267c 2071 {
ec234842 2072 tree t = VEC_pop (tree, stack);
af72267c
RK
2073
2074 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2075 {
ba4d8f9d 2076 /* Gimplify the dimension. */
af72267c
RK
2077 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2078 {
2079 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
ba4d8f9d 2080 is_gimple_val, fb_rvalue);
af72267c
RK
2081 ret = MIN (ret, tret);
2082 }
2083 }
44de5aeb
RK
2084 else if (TREE_CODE (t) == BIT_FIELD_REF)
2085 {
2086 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2087 is_gimple_val, fb_rvalue);
2088 ret = MIN (ret, tret);
2089 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2090 is_gimple_val, fb_rvalue);
2091 ret = MIN (ret, tret);
2092 }
48eb4e53
RK
2093
2094 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2095
726a989a
RB
2096 /* The innermost expression P may have originally had
2097 TREE_SIDE_EFFECTS set which would have caused all the outer
2098 expressions in *EXPR_P leading to P to also have had
2099 TREE_SIDE_EFFECTS set. */
6de9cd9a 2100 recalculate_side_effects (t);
6de9cd9a
DN
2101 }
2102
2103 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
90051e16 2104 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
6de9cd9a
DN
2105 {
2106 canonicalize_component_ref (expr_p);
6de9cd9a
DN
2107 }
2108
ec234842 2109 VEC_free (tree, heap, stack);
07724022 2110
941f78d1
JM
2111 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2112
6de9cd9a
DN
2113 return ret;
2114}
2115
206048bd
VR
2116/* Gimplify the self-modifying expression pointed to by EXPR_P
2117 (++, --, +=, -=).
6de9cd9a
DN
2118
2119 PRE_P points to the list where side effects that must happen before
2120 *EXPR_P should be stored.
2121
2122 POST_P points to the list where side effects that must happen after
2123 *EXPR_P should be stored.
2124
2125 WANT_VALUE is nonzero iff we want to use the value of this expression
2126 in another expression. */
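/* As a rough illustration, with WANT_VALUE set a postfix 'a++' becomes
     a.1 = a;
   with the update
     a = a.1 + 1;
   queued on the post queue and a.1 used as the value of the expression,
   whereas prefix (or unused postfix) forms reduce to 'a = a + 1;'.  Pointer
   increments are rewritten with POINTER_PLUS_EXPR and a sizetype offset.
   (The temporary name a.1 is only illustrative.)  */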
2127
2128static enum gimplify_status
726a989a 2129gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
44de5aeb 2130 bool want_value)
6de9cd9a
DN
2131{
2132 enum tree_code code;
726a989a
RB
2133 tree lhs, lvalue, rhs, t1;
2134 gimple_seq post = NULL, *orig_post_p = post_p;
6de9cd9a
DN
2135 bool postfix;
2136 enum tree_code arith_code;
2137 enum gimplify_status ret;
db3927fb 2138 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
2139
2140 code = TREE_CODE (*expr_p);
2141
282899df
NS
2142 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2143 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
6de9cd9a
DN
2144
2145 /* Prefix or postfix? */
2146 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2147 /* Faster to treat as prefix if result is not used. */
2148 postfix = want_value;
2149 else
2150 postfix = false;
2151
82181741
JJ
2152 /* For postfix, make sure the inner expression's post side effects
2153 are executed after side effects from this expression. */
2154 if (postfix)
2155 post_p = &post;
2156
6de9cd9a
DN
2157 /* Add or subtract? */
2158 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2159 arith_code = PLUS_EXPR;
2160 else
2161 arith_code = MINUS_EXPR;
2162
2163 /* Gimplify the LHS into a GIMPLE lvalue. */
2164 lvalue = TREE_OPERAND (*expr_p, 0);
2165 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2166 if (ret == GS_ERROR)
2167 return ret;
2168
2169 /* Extract the operands to the arithmetic operation. */
2170 lhs = lvalue;
2171 rhs = TREE_OPERAND (*expr_p, 1);
2172
2173  /* For a postfix operator, we evaluate the LHS to an rvalue and then use
ba4d8f9d
RG
2174 that as the result value and in the postqueue operation. We also
2175 make sure to make lvalue a minimal lval, see
2176 gcc.c-torture/execute/20040313-1.c for an example where this matters. */
6de9cd9a
DN
2177 if (postfix)
2178 {
ba4d8f9d
RG
2179 if (!is_gimple_min_lval (lvalue))
2180 {
2181 mark_addressable (lvalue);
db3927fb 2182 lvalue = build_fold_addr_expr_loc (input_location, lvalue);
ba4d8f9d 2183 gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
db3927fb 2184 lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
ba4d8f9d 2185 }
6de9cd9a
DN
2186 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2187 if (ret == GS_ERROR)
2188 return ret;
2189 }
2190
5be014d5
AP
2191 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2192 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2193 {
db3927fb 2194 rhs = fold_convert_loc (loc, sizetype, rhs);
5be014d5 2195 if (arith_code == MINUS_EXPR)
db3927fb 2196 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
5be014d5
AP
2197 arith_code = POINTER_PLUS_EXPR;
2198 }
2199
b4257cfc 2200 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
6de9cd9a
DN
2201
2202 if (postfix)
2203 {
726a989a
RB
2204 gimplify_assign (lvalue, t1, orig_post_p);
2205 gimplify_seq_add_seq (orig_post_p, post);
6de9cd9a
DN
2206 *expr_p = lhs;
2207 return GS_ALL_DONE;
2208 }
2209 else
2210 {
726a989a 2211 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
6de9cd9a
DN
2212 return GS_OK;
2213 }
2214}
2215
726a989a 2216
d25cee4d
RH
2217/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
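/* For example, an object of variably sized type used as a call argument or
   as an assignment source is wrapped, roughly, as
     WITH_SIZE_EXPR <object, size>
   where SIZE is the unshared TYPE_SIZE_UNIT with any PLACEHOLDER_EXPRs
   substituted, so that later lowering (such as the memcpy path below) can
   see the runtime size.  */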
2218
2219static void
2220maybe_with_size_expr (tree *expr_p)
2221{
61025d1b
RK
2222 tree expr = *expr_p;
2223 tree type = TREE_TYPE (expr);
2224 tree size;
d25cee4d 2225
61025d1b
RK
2226 /* If we've already wrapped this or the type is error_mark_node, we can't do
2227 anything. */
2228 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2229 || type == error_mark_node)
d25cee4d
RH
2230 return;
2231
61025d1b 2232 /* If the size isn't known or is a constant, we have nothing to do. */
d25cee4d 2233 size = TYPE_SIZE_UNIT (type);
61025d1b
RK
2234 if (!size || TREE_CODE (size) == INTEGER_CST)
2235 return;
2236
2237 /* Otherwise, make a WITH_SIZE_EXPR. */
2238 size = unshare_expr (size);
2239 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2240 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
d25cee4d
RH
2241}
2242
726a989a
RB
2243
2244/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
1282697f
AH
2245 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2246 the CALL_EXPR. */
e4f78bd4
JM
2247
2248static enum gimplify_status
1282697f 2249gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
e4f78bd4
JM
2250{
2251 bool (*test) (tree);
2252 fallback_t fb;
2253
2254 /* In general, we allow lvalues for function arguments to avoid
2255 extra overhead of copying large aggregates out of even larger
2256 aggregates into temporaries only to copy the temporaries to
2257 the argument list. Make optimizers happy by pulling out to
2258 temporaries those types that fit in registers. */
726a989a 2259 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
e4f78bd4
JM
2260 test = is_gimple_val, fb = fb_rvalue;
2261 else
2262 test = is_gimple_lvalue, fb = fb_either;
2263
d25cee4d 2264 /* If this is a variable sized type, we must remember the size. */
726a989a 2265 maybe_with_size_expr (arg_p);
d25cee4d 2266
c2255bc4 2267 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
1282697f
AH
2268 /* Make sure arguments have the same location as the function call
2269 itself. */
2270 protected_set_expr_location (*arg_p, call_location);
2271
e4f78bd4
JM
2272 /* There is a sequence point before a function call. Side effects in
2273 the argument list must occur before the actual call. So, when
2274 gimplifying arguments, force gimplify_expr to use an internal
2275 post queue which is then appended to the end of PRE_P. */
726a989a 2276 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
e4f78bd4
JM
2277}
2278
726a989a
RB
2279
2280/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
90051e16 2281 WANT_VALUE is true if the result of the call is desired. */
6de9cd9a
DN
2282
2283static enum gimplify_status
726a989a 2284gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a 2285{
726a989a 2286 tree fndecl, parms, p;
6de9cd9a 2287 enum gimplify_status ret;
5039610b 2288 int i, nargs;
726a989a
RB
2289 gimple call;
2290 bool builtin_va_start_p = FALSE;
db3927fb 2291 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a 2292
282899df 2293 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
6de9cd9a 2294
d3147f64 2295 /* For reliable diagnostics during inlining, it is necessary that
6de9cd9a 2296 every call_expr be annotated with file and line. */
a281759f
PB
2297 if (! EXPR_HAS_LOCATION (*expr_p))
2298 SET_EXPR_LOCATION (*expr_p, input_location);
6de9cd9a
DN
2299
2300 /* This may be a call to a builtin function.
2301
2302 Builtin function calls may be transformed into different
2303 (and more efficient) builtin function calls under certain
2304 circumstances. Unfortunately, gimplification can muck things
2305 up enough that the builtin expanders are not aware that certain
2306 transformations are still valid.
2307
2308 So we attempt transformation/gimplification of the call before
2309 we gimplify the CALL_EXPR. At this time we do not manage to
2310 transform all calls in the same manner as the expanders do, but
2311 we do transform most of them. */
726a989a
RB
2312 fndecl = get_callee_fndecl (*expr_p);
2313 if (fndecl && DECL_BUILT_IN (fndecl))
6de9cd9a 2314 {
db3927fb 2315 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
6de9cd9a 2316
82d6e6fc 2317 if (new_tree && new_tree != *expr_p)
6de9cd9a
DN
2318 {
2319 /* There was a transformation of this call which computes the
2320 same value, but in a more efficient way. Return and try
2321 again. */
82d6e6fc 2322 *expr_p = new_tree;
6de9cd9a
DN
2323 return GS_OK;
2324 }
e4f78bd4 2325
726a989a
RB
2326 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2327 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2efcfa4e 2328 {
726a989a 2329 builtin_va_start_p = TRUE;
5039610b 2330 if (call_expr_nargs (*expr_p) < 2)
2efcfa4e
AP
2331 {
2332 error ("too few arguments to function %<va_start%>");
c2255bc4 2333 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2334 return GS_OK;
2335 }
b8698a0f 2336
5039610b 2337 if (fold_builtin_next_arg (*expr_p, true))
2efcfa4e 2338 {
c2255bc4 2339 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2efcfa4e
AP
2340 return GS_OK;
2341 }
2efcfa4e 2342 }
6de9cd9a
DN
2343 }
2344
2345 /* There is a sequence point before the call, so any side effects in
2346 the calling expression must occur before the actual call. Force
2347 gimplify_expr to use an internal post queue. */
5039610b 2348 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
0f59171d 2349 is_gimple_call_addr, fb_rvalue);
6de9cd9a 2350
5039610b
SL
2351 nargs = call_expr_nargs (*expr_p);
2352
e36711f3 2353 /* Get argument types for verification. */
726a989a 2354 fndecl = get_callee_fndecl (*expr_p);
e36711f3 2355 parms = NULL_TREE;
726a989a
RB
2356 if (fndecl)
2357 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
e36711f3
RG
2358 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2359 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2360
726a989a 2361 if (fndecl && DECL_ARGUMENTS (fndecl))
f9487002 2362 p = DECL_ARGUMENTS (fndecl);
004e2fa7 2363 else if (parms)
f9487002 2364 p = parms;
6ef5231b 2365 else
498e51ca 2366 p = NULL_TREE;
f9487002
JJ
2367 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2368 ;
6ef5231b
JJ
2369
2370 /* If the last argument is __builtin_va_arg_pack () and it is not
2371 passed as a named argument, decrease the number of CALL_EXPR
2372 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
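  /* For instance, in an always_inline variadic wrapper a call such as
       f (fmt, __builtin_va_arg_pack ());
     loses the trailing __builtin_va_arg_pack () argument here and gets
     CALL_EXPR_VA_ARG_PACK set, so that the inliner can later substitute the
     caller's remaining arguments.  ('f' and 'fmt' are only illustrative.)  */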
2373 if (!p
2374 && i < nargs
2375 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2376 {
2377 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2378 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2379
2380 if (last_arg_fndecl
2381 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2382 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2383 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2384 {
2385 tree call = *expr_p;
2386
2387 --nargs;
db3927fb
AH
2388 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2389 CALL_EXPR_FN (call),
2390 nargs, CALL_EXPR_ARGP (call));
726a989a
RB
2391
2392 /* Copy all CALL_EXPR flags, location and block, except
6ef5231b
JJ
2393 CALL_EXPR_VA_ARG_PACK flag. */
2394 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2395 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2396 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2397 = CALL_EXPR_RETURN_SLOT_OPT (call);
2398 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
726a989a 2399 CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
5e278028 2400 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
6ef5231b 2401 TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
726a989a 2402
6ef5231b
JJ
2403 /* Set CALL_EXPR_VA_ARG_PACK. */
2404 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2405 }
2406 }
e36711f3
RG
2407
2408 /* Finally, gimplify the function arguments. */
726a989a 2409 if (nargs > 0)
6de9cd9a 2410 {
726a989a
RB
2411 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2412 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2413 PUSH_ARGS_REVERSED ? i-- : i++)
2414 {
2415 enum gimplify_status t;
6de9cd9a 2416
726a989a
RB
2417 /* Avoid gimplifying the second argument to va_start, which needs to
2418 be the plain PARM_DECL. */
2419 if ((i != 1) || !builtin_va_start_p)
2420 {
1282697f
AH
2421 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2422 EXPR_LOCATION (*expr_p));
6de9cd9a 2423
726a989a
RB
2424 if (t == GS_ERROR)
2425 ret = GS_ERROR;
2426 }
2427 }
6de9cd9a 2428 }
6de9cd9a 2429
33922890
RG
2430 /* Verify the function result. */
2431 if (want_value && fndecl
2432 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl))))
2433 {
2434 error_at (loc, "using result of function returning %<void%>");
2435 ret = GS_ERROR;
2436 }
2437
6de9cd9a 2438 /* Try this again in case gimplification exposed something. */
6f538523 2439 if (ret != GS_ERROR)
6de9cd9a 2440 {
db3927fb 2441 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
6f538523 2442
82d6e6fc 2443 if (new_tree && new_tree != *expr_p)
5039610b
SL
2444 {
2445 /* There was a transformation of this call which computes the
2446 same value, but in a more efficient way. Return and try
2447 again. */
82d6e6fc 2448 *expr_p = new_tree;
5039610b 2449 return GS_OK;
6de9cd9a
DN
2450 }
2451 }
726a989a
RB
2452 else
2453 {
df8fa700 2454 *expr_p = error_mark_node;
726a989a
RB
2455 return GS_ERROR;
2456 }
6de9cd9a
DN
2457
2458 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2459 decl. This allows us to eliminate redundant or useless
2460 calls to "const" functions. */
becfd6e5
KZ
2461 if (TREE_CODE (*expr_p) == CALL_EXPR)
2462 {
2463 int flags = call_expr_flags (*expr_p);
2464 if (flags & (ECF_CONST | ECF_PURE)
2465 /* An infinite loop is considered a side effect. */
2466 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2467 TREE_SIDE_EFFECTS (*expr_p) = 0;
2468 }
726a989a
RB
2469
2470 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2471 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2472 form and delegate the creation of a GIMPLE_CALL to
2473 gimplify_modify_expr. This is always possible because when
2474 WANT_VALUE is true, the caller wants the result of this call into
2475 a temporary, which means that we will emit an INIT_EXPR in
2476 internal_get_tmp_var which will then be handled by
2477 gimplify_modify_expr. */
2478 if (!want_value)
2479 {
2480 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2481 have to do is replicate it as a GIMPLE_CALL tuple. */
2482 call = gimple_build_call_from_tree (*expr_p);
2483 gimplify_seq_add_stmt (pre_p, call);
2484 *expr_p = NULL_TREE;
2485 }
2486
6de9cd9a
DN
2487 return ret;
2488}
2489
2490/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2491 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2492
2493 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2494 condition is true or false, respectively. If null, we should generate
2495 our own to skip over the evaluation of this specific expression.
2496
ca80e52b
EB
2497 LOCUS is the source location of the COND_EXPR.
2498
6de9cd9a
DN
2499 This function is the tree equivalent of do_jump.
2500
2501 shortcut_cond_r should only be called by shortcut_cond_expr. */
2502
2503static tree
ca80e52b
EB
2504shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2505 location_t locus)
6de9cd9a
DN
2506{
2507 tree local_label = NULL_TREE;
2508 tree t, expr = NULL;
2509
2510 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2511 retain the shortcut semantics. Just insert the gotos here;
2512 shortcut_cond_expr will append the real blocks later. */
2513 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2514 {
ca80e52b
EB
2515 location_t new_locus;
2516
6de9cd9a
DN
2517 /* Turn if (a && b) into
2518
2519 if (a); else goto no;
2520 if (b) goto yes; else goto no;
2521 (no:) */
2522
2523 if (false_label_p == NULL)
2524 false_label_p = &local_label;
2525
ca80e52b
EB
2526 /* Keep the original source location on the first 'if'. */
2527 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
6de9cd9a
DN
2528 append_to_statement_list (t, &expr);
2529
ca80e52b
EB
2530 /* Set the source location of the && on the second 'if'. */
2531 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2532 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2533 new_locus);
6de9cd9a
DN
2534 append_to_statement_list (t, &expr);
2535 }
2536 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2537 {
ca80e52b
EB
2538 location_t new_locus;
2539
6de9cd9a
DN
2540 /* Turn if (a || b) into
2541
2542 if (a) goto yes;
2543 if (b) goto yes; else goto no;
2544 (yes:) */
2545
2546 if (true_label_p == NULL)
2547 true_label_p = &local_label;
2548
ca80e52b
EB
2549 /* Keep the original source location on the first 'if'. */
2550 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
6de9cd9a
DN
2551 append_to_statement_list (t, &expr);
2552
ca80e52b
EB
2553 /* Set the source location of the || on the second 'if'. */
2554 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2555 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2556 new_locus);
6de9cd9a
DN
2557 append_to_statement_list (t, &expr);
2558 }
2559 else if (TREE_CODE (pred) == COND_EXPR)
2560 {
ca80e52b
EB
2561 location_t new_locus;
2562
6de9cd9a
DN
2563 /* As long as we're messing with gotos, turn if (a ? b : c) into
2564 if (a)
2565 if (b) goto yes; else goto no;
2566 else
2567 if (c) goto yes; else goto no; */
ca80e52b
EB
2568
2569 /* Keep the original source location on the first 'if'. Set the source
2570 location of the ? on the second 'if'. */
2571 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
b4257cfc
RG
2572 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2573 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
ca80e52b 2574 false_label_p, locus),
b4257cfc 2575 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
ca80e52b 2576 false_label_p, new_locus));
6de9cd9a
DN
2577 }
2578 else
2579 {
b4257cfc
RG
2580 expr = build3 (COND_EXPR, void_type_node, pred,
2581 build_and_jump (true_label_p),
2582 build_and_jump (false_label_p));
ca80e52b 2583 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2584 }
2585
2586 if (local_label)
2587 {
2588 t = build1 (LABEL_EXPR, void_type_node, local_label);
2589 append_to_statement_list (t, &expr);
2590 }
2591
2592 return expr;
2593}
2594
726a989a
RB
2595/* Given a conditional expression EXPR with short-circuit boolean
2596 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2597   predicate apart into the equivalent sequence of conditionals.  */
2598
6de9cd9a
DN
2599static tree
2600shortcut_cond_expr (tree expr)
2601{
2602 tree pred = TREE_OPERAND (expr, 0);
2603 tree then_ = TREE_OPERAND (expr, 1);
2604 tree else_ = TREE_OPERAND (expr, 2);
2605 tree true_label, false_label, end_label, t;
2606 tree *true_label_p;
2607 tree *false_label_p;
089efaa4 2608 bool emit_end, emit_false, jump_over_else;
65355d53
RH
2609 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2610 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a
DN
2611
2612 /* First do simple transformations. */
65355d53 2613 if (!else_se)
6de9cd9a 2614 {
ca80e52b
EB
2615 /* If there is no 'else', turn
2616 if (a && b) then c
2617 into
2618 if (a) if (b) then c. */
6de9cd9a
DN
2619 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2620 {
ca80e52b
EB
2621 /* Keep the original source location on the first 'if'. */
2622 location_t locus = EXPR_HAS_LOCATION (expr)
2623 ? EXPR_LOCATION (expr) : input_location;
6de9cd9a 2624 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2625 /* Set the source location of the && on the second 'if'. */
2626 if (EXPR_HAS_LOCATION (pred))
2627 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2628 then_ = shortcut_cond_expr (expr);
4356a1bf 2629 then_se = then_ && TREE_SIDE_EFFECTS (then_);
6de9cd9a 2630 pred = TREE_OPERAND (pred, 0);
b4257cfc 2631 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
ca80e52b 2632 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2633 }
2634 }
726a989a 2635
65355d53 2636 if (!then_se)
6de9cd9a
DN
2637 {
2638 /* If there is no 'then', turn
2639 if (a || b); else d
2640 into
2641 if (a); else if (b); else d. */
2642 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2643 {
ca80e52b
EB
2644 /* Keep the original source location on the first 'if'. */
2645 location_t locus = EXPR_HAS_LOCATION (expr)
2646 ? EXPR_LOCATION (expr) : input_location;
6de9cd9a 2647 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
ca80e52b
EB
2648 /* Set the source location of the || on the second 'if'. */
2649 if (EXPR_HAS_LOCATION (pred))
2650 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
6de9cd9a 2651 else_ = shortcut_cond_expr (expr);
4356a1bf 2652 else_se = else_ && TREE_SIDE_EFFECTS (else_);
6de9cd9a 2653 pred = TREE_OPERAND (pred, 0);
b4257cfc 2654 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
ca80e52b 2655 SET_EXPR_LOCATION (expr, locus);
6de9cd9a
DN
2656 }
2657 }
2658
2659 /* If we're done, great. */
2660 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2661 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2662 return expr;
2663
2664 /* Otherwise we need to mess with gotos. Change
2665 if (a) c; else d;
2666 to
2667 if (a); else goto no;
2668 c; goto end;
2669 no: d; end:
2670 and recursively gimplify the condition. */
2671
2672 true_label = false_label = end_label = NULL_TREE;
2673
2674 /* If our arms just jump somewhere, hijack those labels so we don't
2675 generate jumps to jumps. */
2676
65355d53
RH
2677 if (then_
2678 && TREE_CODE (then_) == GOTO_EXPR
6de9cd9a
DN
2679 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2680 {
2681 true_label = GOTO_DESTINATION (then_);
65355d53
RH
2682 then_ = NULL;
2683 then_se = false;
6de9cd9a
DN
2684 }
2685
65355d53
RH
2686 if (else_
2687 && TREE_CODE (else_) == GOTO_EXPR
6de9cd9a
DN
2688 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2689 {
2690 false_label = GOTO_DESTINATION (else_);
65355d53
RH
2691 else_ = NULL;
2692 else_se = false;
6de9cd9a
DN
2693 }
2694
9cf737f8 2695 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
6de9cd9a
DN
2696 if (true_label)
2697 true_label_p = &true_label;
2698 else
2699 true_label_p = NULL;
2700
2701 /* The 'else' branch also needs a label if it contains interesting code. */
65355d53 2702 if (false_label || else_se)
6de9cd9a
DN
2703 false_label_p = &false_label;
2704 else
2705 false_label_p = NULL;
2706
2707 /* If there was nothing else in our arms, just forward the label(s). */
65355d53 2708 if (!then_se && !else_se)
ca80e52b
EB
2709 return shortcut_cond_r (pred, true_label_p, false_label_p,
2710 EXPR_HAS_LOCATION (expr)
2711 ? EXPR_LOCATION (expr) : input_location);
6de9cd9a
DN
2712
2713 /* If our last subexpression already has a terminal label, reuse it. */
65355d53 2714 if (else_se)
ca80e52b 2715 t = expr_last (else_);
65355d53 2716 else if (then_se)
ca80e52b 2717 t = expr_last (then_);
65355d53 2718 else
ca80e52b
EB
2719 t = NULL;
2720 if (t && TREE_CODE (t) == LABEL_EXPR)
2721 end_label = LABEL_EXPR_LABEL (t);
6de9cd9a
DN
2722
2723 /* If we don't care about jumping to the 'else' branch, jump to the end
2724 if the condition is false. */
2725 if (!false_label_p)
2726 false_label_p = &end_label;
2727
2728 /* We only want to emit these labels if we aren't hijacking them. */
2729 emit_end = (end_label == NULL_TREE);
2730 emit_false = (false_label == NULL_TREE);
2731
089efaa4
ILT
2732 /* We only emit the jump over the else clause if we have to--if the
2733 then clause may fall through. Otherwise we can wind up with a
2734 useless jump and a useless label at the end of gimplified code,
2735 which will cause us to think that this conditional as a whole
2736 falls through even if it doesn't. If we then inline a function
2737 which ends with such a condition, that can cause us to issue an
2738 inappropriate warning about control reaching the end of a
2739 non-void function. */
2740 jump_over_else = block_may_fallthru (then_);
2741
ca80e52b
EB
2742 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2743 EXPR_HAS_LOCATION (expr)
2744 ? EXPR_LOCATION (expr) : input_location);
6de9cd9a
DN
2745
2746 expr = NULL;
2747 append_to_statement_list (pred, &expr);
2748
2749 append_to_statement_list (then_, &expr);
65355d53 2750 if (else_se)
6de9cd9a 2751 {
089efaa4
ILT
2752 if (jump_over_else)
2753 {
ca80e52b 2754 tree last = expr_last (expr);
089efaa4 2755 t = build_and_jump (&end_label);
ca80e52b
EB
2756 if (EXPR_HAS_LOCATION (last))
2757 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
089efaa4
ILT
2758 append_to_statement_list (t, &expr);
2759 }
6de9cd9a
DN
2760 if (emit_false)
2761 {
2762 t = build1 (LABEL_EXPR, void_type_node, false_label);
2763 append_to_statement_list (t, &expr);
2764 }
2765 append_to_statement_list (else_, &expr);
2766 }
2767 if (emit_end && end_label)
2768 {
2769 t = build1 (LABEL_EXPR, void_type_node, end_label);
2770 append_to_statement_list (t, &expr);
2771 }
2772
2773 return expr;
2774}
2775
2776/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
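/* For example, a front-end truth expression such as a TRUTH_AND_EXPR that was
   given type 'int' has its type changed to boolean_type_node (and its
   operands boolified in turn), comparisons simply get the boolean type, and
   anything else is wrapped in a conversion to boolean_type_node.  */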
2777
50674e96 2778tree
6de9cd9a
DN
2779gimple_boolify (tree expr)
2780{
2781 tree type = TREE_TYPE (expr);
db3927fb 2782 location_t loc = EXPR_LOCATION (expr);
6de9cd9a 2783
554cf330
JJ
2784 if (TREE_CODE (expr) == NE_EXPR
2785 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2786 && integer_zerop (TREE_OPERAND (expr, 1)))
2787 {
2788 tree call = TREE_OPERAND (expr, 0);
2789 tree fn = get_callee_fndecl (call);
2790
d53c73e0
JJ
2791 /* For __builtin_expect ((long) (x), y) recurse into x as well
2792 if x is truth_value_p. */
554cf330
JJ
2793 if (fn
2794 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2795 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2796 && call_expr_nargs (call) == 2)
2797 {
2798 tree arg = CALL_EXPR_ARG (call, 0);
2799 if (arg)
2800 {
2801 if (TREE_CODE (arg) == NOP_EXPR
2802 && TREE_TYPE (arg) == TREE_TYPE (call))
2803 arg = TREE_OPERAND (arg, 0);
d53c73e0
JJ
2804 if (truth_value_p (TREE_CODE (arg)))
2805 {
2806 arg = gimple_boolify (arg);
2807 CALL_EXPR_ARG (call, 0)
2808 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2809 }
554cf330
JJ
2810 }
2811 }
2812 }
2813
6de9cd9a
DN
2814 if (TREE_CODE (type) == BOOLEAN_TYPE)
2815 return expr;
2816
6de9cd9a
DN
2817 switch (TREE_CODE (expr))
2818 {
2819 case TRUTH_AND_EXPR:
2820 case TRUTH_OR_EXPR:
2821 case TRUTH_XOR_EXPR:
2822 case TRUTH_ANDIF_EXPR:
2823 case TRUTH_ORIF_EXPR:
2824 /* Also boolify the arguments of truth exprs. */
2825 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2826 /* FALLTHRU */
2827
2828 case TRUTH_NOT_EXPR:
2829 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2830 /* FALLTHRU */
2831
2832 case EQ_EXPR: case NE_EXPR:
2833 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2834 /* These expressions always produce boolean results. */
2835 TREE_TYPE (expr) = boolean_type_node;
2836 return expr;
d3147f64 2837
6de9cd9a
DN
2838 default:
2839 /* Other expressions that get here must have boolean values, but
2840 might need to be converted to the appropriate mode. */
db3927fb 2841 return fold_convert_loc (loc, boolean_type_node, expr);
6de9cd9a
DN
2842 }
2843}
2844
aea74440
JJ
2845/* Given a conditional expression *EXPR_P without side effects, gimplify
2846 its operands. New statements are inserted to PRE_P. */
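/* For example, for a side-effect-free rvalue use such as
     x = p ? a : b;
   the condition and both arms are gimplified to values here and the
   COND_EXPR itself is kept as an rvalue, instead of being lowered to
   branches and a temporary as gimplify_cond_expr would otherwise do.  */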
2847
2848static enum gimplify_status
726a989a 2849gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
aea74440
JJ
2850{
2851 tree expr = *expr_p, cond;
2852 enum gimplify_status ret, tret;
2853 enum tree_code code;
2854
2855 cond = gimple_boolify (COND_EXPR_COND (expr));
2856
2857 /* We need to handle && and || specially, as their gimplification
2858     creates pure cond_exprs, which would otherwise lead to an infinite cycle.  */
2859 code = TREE_CODE (cond);
2860 if (code == TRUTH_ANDIF_EXPR)
2861 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2862 else if (code == TRUTH_ORIF_EXPR)
2863 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
726a989a 2864 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
aea74440
JJ
2865 COND_EXPR_COND (*expr_p) = cond;
2866
2867 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2868 is_gimple_val, fb_rvalue);
2869 ret = MIN (ret, tret);
2870 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2871 is_gimple_val, fb_rvalue);
2872
2873 return MIN (ret, tret);
2874}
2875
2876/* Returns true if evaluating EXPR could trap.
2877 EXPR is GENERIC, while tree_could_trap_p can be called
2878 only on GIMPLE. */
2879
2880static bool
2881generic_expr_could_trap_p (tree expr)
2882{
2883 unsigned i, n;
2884
2885 if (!expr || is_gimple_val (expr))
2886 return false;
2887
2888 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2889 return true;
2890
2891 n = TREE_OPERAND_LENGTH (expr);
2892 for (i = 0; i < n; i++)
2893 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2894 return true;
2895
2896 return false;
2897}
2898
206048bd 2899/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
6de9cd9a
DN
2900 into
2901
2902 if (p) if (p)
2903 t1 = a; a;
2904 else or else
2905 t1 = b; b;
2906 t1;
2907
2908 The second form is used when *EXPR_P is of type void.
2909
2910 PRE_P points to the list where side effects that must happen before
dae7ec87 2911 *EXPR_P should be stored. */
6de9cd9a
DN
2912
2913static enum gimplify_status
726a989a 2914gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
6de9cd9a
DN
2915{
2916 tree expr = *expr_p;
06ec59e6
EB
2917 tree type = TREE_TYPE (expr);
2918 location_t loc = EXPR_LOCATION (expr);
2919 tree tmp, arm1, arm2;
6de9cd9a 2920 enum gimplify_status ret;
726a989a
RB
2921 tree label_true, label_false, label_cont;
2922 bool have_then_clause_p, have_else_clause_p;
2923 gimple gimple_cond;
2924 enum tree_code pred_code;
2925 gimple_seq seq = NULL;
26d44ae2
RH
2926
2927 /* If this COND_EXPR has a value, copy the values into a temporary within
2928 the arms. */
06ec59e6 2929 if (!VOID_TYPE_P (type))
26d44ae2 2930 {
06ec59e6 2931 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
aff98faf
AO
2932 tree result;
2933
06ec59e6
EB
2934 /* If either an rvalue is ok or we do not require an lvalue, create the
2935 temporary. But we cannot do that if the type is addressable. */
2936 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
c3e203cf 2937 && !TREE_ADDRESSABLE (type))
aff98faf 2938 {
aea74440
JJ
2939 if (gimplify_ctxp->allow_rhs_cond_expr
2940 /* If either branch has side effects or could trap, it can't be
2941 evaluated unconditionally. */
06ec59e6
EB
2942 && !TREE_SIDE_EFFECTS (then_)
2943 && !generic_expr_could_trap_p (then_)
2944 && !TREE_SIDE_EFFECTS (else_)
2945 && !generic_expr_could_trap_p (else_))
aea74440
JJ
2946 return gimplify_pure_cond_expr (expr_p, pre_p);
2947
06ec59e6
EB
2948 tmp = create_tmp_var (type, "iftmp");
2949 result = tmp;
aff98faf 2950 }
06ec59e6
EB
2951
2952 /* Otherwise, only create and copy references to the values. */
26d44ae2
RH
2953 else
2954 {
06ec59e6 2955 type = build_pointer_type (type);
aff98faf 2956
06ec59e6
EB
2957 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2958 then_ = build_fold_addr_expr_loc (loc, then_);
aff98faf 2959
06ec59e6
EB
2960 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2961 else_ = build_fold_addr_expr_loc (loc, else_);
2962
2963 expr
2964 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
aea74440 2965
726a989a 2966 tmp = create_tmp_var (type, "iftmp");
70f34814 2967 result = build_simple_mem_ref_loc (loc, tmp);
26d44ae2
RH
2968 }
2969
06ec59e6
EB
2970 /* Build the new then clause, `tmp = then_;'. But don't build the
2971 assignment if the value is void; in C++ it can be if it's a throw. */
2972 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2973 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
26d44ae2 2974
06ec59e6
EB
2975 /* Similarly, build the new else clause, `tmp = else_;'. */
2976 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2977 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
26d44ae2
RH
2978
2979 TREE_TYPE (expr) = void_type_node;
2980 recalculate_side_effects (expr);
2981
d91ba7b0 2982 /* Move the COND_EXPR to the prequeue. */
726a989a 2983 gimplify_stmt (&expr, pre_p);
26d44ae2 2984
aff98faf 2985 *expr_p = result;
726a989a 2986 return GS_ALL_DONE;
26d44ae2
RH
2987 }
2988
2989 /* Make sure the condition has BOOLEAN_TYPE. */
2990 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2991
2992 /* Break apart && and || conditions. */
2993 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2994 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2995 {
2996 expr = shortcut_cond_expr (expr);
2997
2998 if (expr != *expr_p)
2999 {
3000 *expr_p = expr;
3001
3002 /* We can't rely on gimplify_expr to re-gimplify the expanded
3003 form properly, as cleanups might cause the target labels to be
3004 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
3005 set up a conditional context. */
3006 gimple_push_condition ();
726a989a 3007 gimplify_stmt (expr_p, &seq);
26d44ae2 3008 gimple_pop_condition (pre_p);
726a989a 3009 gimple_seq_add_seq (pre_p, seq);
26d44ae2
RH
3010
3011 return GS_ALL_DONE;
3012 }
3013 }
3014
3015 /* Now do the normal gimplification. */
26d44ae2 3016
726a989a
RB
3017 /* Gimplify condition. */
3018 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3019 fb_rvalue);
26d44ae2 3020 if (ret == GS_ERROR)
726a989a
RB
3021 return GS_ERROR;
3022 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3023
3024 gimple_push_condition ();
26d44ae2 3025
726a989a
RB
3026 have_then_clause_p = have_else_clause_p = false;
3027 if (TREE_OPERAND (expr, 1) != NULL
3028 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3029 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3030 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3031 == current_function_decl)
3032 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3033 have different locations, otherwise we end up with incorrect
3034 location information on the branches. */
3035 && (optimize
3036 || !EXPR_HAS_LOCATION (expr)
3037 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3038 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3039 {
3040 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3041 have_then_clause_p = true;
26d44ae2
RH
3042 }
3043 else
c2255bc4 3044 label_true = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3045 if (TREE_OPERAND (expr, 2) != NULL
3046 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3047 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3048 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3049 == current_function_decl)
3050 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3051 have different locations, otherwise we end up with incorrect
3052 location information on the branches. */
3053 && (optimize
3054 || !EXPR_HAS_LOCATION (expr)
3055 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3056 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3057 {
3058 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3059 have_else_clause_p = true;
3060 }
3061 else
c2255bc4 3062 label_false = create_artificial_label (UNKNOWN_LOCATION);
26d44ae2 3063
726a989a
RB
3064 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3065 &arm2);
26d44ae2 3066
726a989a
RB
3067 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3068 label_false);
26d44ae2 3069
726a989a
RB
3070 gimplify_seq_add_stmt (&seq, gimple_cond);
3071 label_cont = NULL_TREE;
3072 if (!have_then_clause_p)
3073 {
3074 /* For if (...) {} else { code; } put label_true after
3075 the else block. */
3076 if (TREE_OPERAND (expr, 1) == NULL_TREE
3077 && !have_else_clause_p
3078 && TREE_OPERAND (expr, 2) != NULL_TREE)
3079 label_cont = label_true;
3080 else
3081 {
3082 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3083 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3084 /* For if (...) { code; } else {} or
3085 if (...) { code; } else goto label; or
3086 if (...) { code; return; } else { ... }
3087 label_cont isn't needed. */
3088 if (!have_else_clause_p
3089 && TREE_OPERAND (expr, 2) != NULL_TREE
3090 && gimple_seq_may_fallthru (seq))
3091 {
3092 gimple g;
c2255bc4 3093 label_cont = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3094
3095 g = gimple_build_goto (label_cont);
3096
3097 /* GIMPLE_COND's are very low level; they have embedded
3098 gotos. This particular embedded goto should not be marked
3099 with the location of the original COND_EXPR, as it would
3100 correspond to the COND_EXPR's condition, not the ELSE or the
3101 THEN arms. To avoid marking it with the wrong location, flag
3102 it as "no location". */
3103 gimple_set_do_not_emit_location (g);
3104
3105 gimplify_seq_add_stmt (&seq, g);
3106 }
3107 }
3108 }
3109 if (!have_else_clause_p)
3110 {
3111 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3112 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3113 }
3114 if (label_cont)
3115 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3116
3117 gimple_pop_condition (pre_p);
3118 gimple_seq_add_seq (pre_p, seq);
3119
3120 if (ret == GS_ERROR)
3121 ; /* Do nothing. */
3122 else if (have_then_clause_p || have_else_clause_p)
3123 ret = GS_ALL_DONE;
3124 else
3125 {
3126 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3127 expr = TREE_OPERAND (expr, 0);
3128 gimplify_stmt (&expr, pre_p);
3129 }
3130
3131 *expr_p = NULL;
3132 return ret;
3133}
3134
f76d6e6f
EB
3135/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3136 to be marked addressable.
3137
3138 We cannot rely on such an expression being directly markable if a temporary
3139 has been created by the gimplification. In this case, we create another
3140 temporary and initialize it with a copy, which will become a store after we
3141 mark it addressable. This can happen if the front-end passed us something
3142 that it could not mark addressable yet, like a Fortran pass-by-reference
3143 parameter (int) floatvar. */
3144
3145static void
3146prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3147{
3148 while (handled_component_p (*expr_p))
3149 expr_p = &TREE_OPERAND (*expr_p, 0);
3150 if (is_gimple_reg (*expr_p))
3151 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3152}
3153
726a989a
RB
3154/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3155 a call to __builtin_memcpy. */
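/* Roughly, an assignment
     to = from;
   between objects whose size is only known at run time (captured earlier by
   a WITH_SIZE_EXPR) becomes
     __builtin_memcpy (&to, &from, size);
   and, when the value of the assignment is needed, the pointer returned by
   memcpy is dereferenced to provide it.  */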
3156
3157static enum gimplify_status
3158gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3159 gimple_seq *seq_p)
26d44ae2 3160{
5039610b 3161 tree t, to, to_ptr, from, from_ptr;
726a989a 3162 gimple gs;
db3927fb 3163 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3164
726a989a
RB
3165 to = TREE_OPERAND (*expr_p, 0);
3166 from = TREE_OPERAND (*expr_p, 1);
26d44ae2 3167
f76d6e6f
EB
3168 /* Mark the RHS addressable. Beware that it may not be possible to do so
3169 directly if a temporary has been created by the gimplification. */
3170 prepare_gimple_addressable (&from, seq_p);
3171
628c189e 3172 mark_addressable (from);
db3927fb
AH
3173 from_ptr = build_fold_addr_expr_loc (loc, from);
3174 gimplify_arg (&from_ptr, seq_p, loc);
26d44ae2 3175
628c189e 3176 mark_addressable (to);
db3927fb
AH
3177 to_ptr = build_fold_addr_expr_loc (loc, to);
3178 gimplify_arg (&to_ptr, seq_p, loc);
726a989a 3179
26d44ae2 3180 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
726a989a
RB
3181
3182 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
26d44ae2
RH
3183
3184 if (want_value)
3185 {
726a989a
RB
3186 /* tmp = memcpy() */
3187 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3188 gimple_call_set_lhs (gs, t);
3189 gimplify_seq_add_stmt (seq_p, gs);
3190
70f34814 3191 *expr_p = build_simple_mem_ref (t);
726a989a 3192 return GS_ALL_DONE;
26d44ae2
RH
3193 }
3194
726a989a
RB
3195 gimplify_seq_add_stmt (seq_p, gs);
3196 *expr_p = NULL;
3197 return GS_ALL_DONE;
26d44ae2
RH
3198}
3199
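/* Rough illustration (hypothetical example, assuming a type whose size is
   only known at run time): an assignment between two such lvalues,

     *to = *from;      // RHS wrapped in a WITH_SIZE_EXPR carrying SIZE

   is rewritten by the function above into approximately

     __builtin_memcpy (&*to, &*from, SIZE);

   and, when the value is wanted, the call's result is stored in a temporary
   pointer which is then dereferenced via a MEM_REF so the expression still
   yields the destination object. */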
3200/* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3201 a call to __builtin_memset. In this case we know that the RHS is
3202 a CONSTRUCTOR with an empty element list. */
3203
3204static enum gimplify_status
726a989a
RB
3205gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3206 gimple_seq *seq_p)
26d44ae2 3207{
1a13360e 3208 tree t, from, to, to_ptr;
726a989a 3209 gimple gs;
db3927fb 3210 location_t loc = EXPR_LOCATION (*expr_p);
26d44ae2 3211
1a13360e
OH
3212 /* Assert our assumptions, to abort instead of producing wrong code
3213 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3214 not be immediately exposed. */
b8698a0f 3215 from = TREE_OPERAND (*expr_p, 1);
1a13360e
OH
3216 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3217 from = TREE_OPERAND (from, 0);
3218
3219 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3220 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3221
3222 /* Now proceed. */
726a989a 3223 to = TREE_OPERAND (*expr_p, 0);
26d44ae2 3224
db3927fb
AH
3225 to_ptr = build_fold_addr_expr_loc (loc, to);
3226 gimplify_arg (&to_ptr, seq_p, loc);
26d44ae2 3227 t = implicit_built_in_decls[BUILT_IN_MEMSET];
726a989a
RB
3228
3229 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
26d44ae2
RH
3230
3231 if (want_value)
3232 {
726a989a
RB
3233 /* tmp = memset() */
3234 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3235 gimple_call_set_lhs (gs, t);
3236 gimplify_seq_add_stmt (seq_p, gs);
3237
3238 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3239 return GS_ALL_DONE;
26d44ae2
RH
3240 }
3241
726a989a
RB
3242 gimplify_seq_add_stmt (seq_p, gs);
3243 *expr_p = NULL;
3244 return GS_ALL_DONE;
26d44ae2
RH
3245}
3246
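/* Rough illustration (hypothetical): clearing a variable-sized object whose
   initializer is an empty CONSTRUCTOR, e.g.

     *p = (struct S) {};   // S variable-sized, RHS an empty CONSTRUCTOR

   becomes approximately

     __builtin_memset (&*p, 0, SIZE);

   where SIZE is the WITH_SIZE_EXPR operand supplied by the caller. */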
57d1dd87
RH
3247/* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3248 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3249 assignment. Returns non-null if we detect a potential overlap. */
3250
3251struct gimplify_init_ctor_preeval_data
3252{
3253 /* The base decl of the lhs object. May be NULL, in which case we
3254 have to assume the lhs is indirect. */
3255 tree lhs_base_decl;
3256
3257 /* The alias set of the lhs object. */
4862826d 3258 alias_set_type lhs_alias_set;
57d1dd87
RH
3259};
3260
3261static tree
3262gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3263{
3264 struct gimplify_init_ctor_preeval_data *data
3265 = (struct gimplify_init_ctor_preeval_data *) xdata;
3266 tree t = *tp;
3267
3268 /* If we find the base object, obviously we have overlap. */
3269 if (data->lhs_base_decl == t)
3270 return t;
3271
3272 /* If the constructor component is indirect, determine if we have a
3273 potential overlap with the lhs. The only bits of information we
3274 have to go on at this point are addressability and alias sets. */
70f34814
RG
3275 if ((INDIRECT_REF_P (t)
3276 || TREE_CODE (t) == MEM_REF)
57d1dd87
RH
3277 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3278 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3279 return t;
3280
df10ee2a 3281 /* If the constructor component is a call, determine if it can hide a
3282 potential overlap with the lhs through an INDIRECT_REF like above.
3283 ??? Ugh - this is completely broken. In fact this whole analysis
3284 doesn't look conservative. */
df10ee2a
EB
3285 if (TREE_CODE (t) == CALL_EXPR)
3286 {
3287 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3288
3289 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3290 if (POINTER_TYPE_P (TREE_VALUE (type))
3291 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3292 && alias_sets_conflict_p (data->lhs_alias_set,
3293 get_alias_set
3294 (TREE_TYPE (TREE_VALUE (type)))))
3295 return t;
3296 }
3297
6615c446 3298 if (IS_TYPE_OR_DECL_P (t))
57d1dd87
RH
3299 *walk_subtrees = 0;
3300 return NULL;
3301}
3302
726a989a 3303/* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
57d1dd87
RH
3304 force values that overlap with the lhs (as described by *DATA)
3305 into temporaries. */
3306
3307static void
726a989a 3308gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
57d1dd87
RH
3309 struct gimplify_init_ctor_preeval_data *data)
3310{
3311 enum gimplify_status one;
3312
51eed280
PB
3313 /* If the value is constant, then there's nothing to pre-evaluate. */
3314 if (TREE_CONSTANT (*expr_p))
3315 {
3316 /* Ensure it does not have side effects, it might contain a reference to
3317 the object we're initializing. */
3318 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3319 return;
3320 }
57d1dd87
RH
3321
3322 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3323 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3324 return;
3325
3326 /* Recurse for nested constructors. */
3327 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3328 {
4038c495
GB
3329 unsigned HOST_WIDE_INT ix;
3330 constructor_elt *ce;
3331 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3332
ac47786e 3333 FOR_EACH_VEC_ELT (constructor_elt, v, ix, ce)
4038c495 3334 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
726a989a 3335
57d1dd87
RH
3336 return;
3337 }
3338
0461b801
EB
3339 /* If this is a variable sized type, we must remember the size. */
3340 maybe_with_size_expr (expr_p);
57d1dd87
RH
3341
3342 /* Gimplify the constructor element to something appropriate for the rhs
726a989a 3343 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
d3147f64 3344 the gimplifier will consider this a store to memory. Doing this
3345 gimplification now means that we won't have to deal with complicated
3346 language-specific trees, nor trees like SAVE_EXPR that can induce
b01d837f 3347 exponential search behavior. */
57d1dd87
RH
3348 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3349 if (one == GS_ERROR)
3350 {
3351 *expr_p = NULL;
3352 return;
3353 }
3354
3355 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3356 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3357 always be true for all scalars, since is_gimple_mem_rhs insists on a
3358 temporary variable for them. */
3359 if (DECL_P (*expr_p))
3360 return;
3361
3362 /* If this is of variable size, we have no choice but to assume it doesn't
3363 overlap since we can't make a temporary for it. */
4c923c28 3364 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
57d1dd87
RH
3365 return;
3366
3367 /* Otherwise, we must search for overlap ... */
3368 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3369 return;
3370
3371 /* ... and if found, force the value into a temporary. */
3372 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3373}
3374
6fa91b48
SB
3375/* A subroutine of gimplify_init_ctor_eval. Create a loop for
3376 a RANGE_EXPR in a CONSTRUCTOR for an array.
3377
3378 var = lower;
3379 loop_entry:
3380 object[var] = value;
3381 if (var == upper)
3382 goto loop_exit;
3383 var = var + 1;
3384 goto loop_entry;
3385 loop_exit:
3386
3387 We increment var _after_ the loop exit check because we might otherwise
3388 fail if upper == TYPE_MAX_VALUE (type for upper).
3389
3390 Note that we never have to deal with SAVE_EXPRs here, because this has
3391 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3392
4038c495 3393static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
726a989a 3394 gimple_seq *, bool);
6fa91b48
SB
3395
3396static void
3397gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3398 tree value, tree array_elt_type,
726a989a 3399 gimple_seq *pre_p, bool cleared)
6fa91b48 3400{
726a989a 3401 tree loop_entry_label, loop_exit_label, fall_thru_label;
b56b9fe3 3402 tree var, var_type, cref, tmp;
6fa91b48 3403
c2255bc4
AH
3404 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3405 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3406 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
6fa91b48
SB
3407
3408 /* Create and initialize the index variable. */
3409 var_type = TREE_TYPE (upper);
3410 var = create_tmp_var (var_type, NULL);
726a989a 3411 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
6fa91b48
SB
3412
3413 /* Add the loop entry label. */
726a989a 3414 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
6fa91b48
SB
3415
3416 /* Build the reference. */
3417 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3418 var, NULL_TREE, NULL_TREE);
3419
3420 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3421 the store. Otherwise just assign value to the reference. */
3422
3423 if (TREE_CODE (value) == CONSTRUCTOR)
3424 /* NB we might have to call ourself recursively through
3425 gimplify_init_ctor_eval if the value is a constructor. */
3426 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3427 pre_p, cleared);
3428 else
726a989a 3429 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
6fa91b48
SB
3430
3431 /* We exit the loop when the index var is equal to the upper bound. */
726a989a
RB
3432 gimplify_seq_add_stmt (pre_p,
3433 gimple_build_cond (EQ_EXPR, var, upper,
3434 loop_exit_label, fall_thru_label));
3435
3436 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
6fa91b48
SB
3437
3438 /* Otherwise, increment the index var... */
b56b9fe3
RS
3439 tmp = build2 (PLUS_EXPR, var_type, var,
3440 fold_convert (var_type, integer_one_node));
726a989a 3441 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
6fa91b48
SB
3442
3443 /* ...and jump back to the loop entry. */
726a989a 3444 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
6fa91b48
SB
3445
3446 /* Add the loop exit label. */
726a989a 3447 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
6fa91b48
SB
3448}
3449
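/* Example (illustrative only): for a GNU C range designator such as

     int a[8] = { [2 ... 5] = 7 };

   the RANGE_EXPR [2, 5] is expanded by the function above into roughly

     var = 2;
   loop_entry:
     a[var] = 7;
     if (var == 5) goto loop_exit;
     var = var + 1;
     goto loop_entry;
   loop_exit:;
*/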
292a398f 3450/* Return true if FDECL is accessing a field that is zero sized. */
b8698a0f 3451
292a398f 3452static bool
22ea9ec0 3453zero_sized_field_decl (const_tree fdecl)
292a398f 3454{
b8698a0f 3455 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
292a398f
DB
3456 && integer_zerop (DECL_SIZE (fdecl)))
3457 return true;
3458 return false;
3459}
3460
d06526b7 3461/* Return true if TYPE is zero sized. */
b8698a0f 3462
d06526b7 3463static bool
22ea9ec0 3464zero_sized_type (const_tree type)
d06526b7
AP
3465{
3466 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3467 && integer_zerop (TYPE_SIZE (type)))
3468 return true;
3469 return false;
3470}
3471
57d1dd87
RH
3472/* A subroutine of gimplify_init_constructor. Generate individual
3473 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4038c495 3474 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
57d1dd87
RH
3475 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3476 zeroed first. */
3477
3478static void
4038c495 3479gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
726a989a 3480 gimple_seq *pre_p, bool cleared)
57d1dd87
RH
3481{
3482 tree array_elt_type = NULL;
4038c495
GB
3483 unsigned HOST_WIDE_INT ix;
3484 tree purpose, value;
57d1dd87
RH
3485
3486 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3487 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3488
4038c495 3489 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
57d1dd87 3490 {
726a989a 3491 tree cref;
57d1dd87
RH
3492
3493 /* NULL values are created above for gimplification errors. */
3494 if (value == NULL)
3495 continue;
3496
3497 if (cleared && initializer_zerop (value))
3498 continue;
3499
6fa91b48
SB
3500 /* ??? Here's to hoping the front end fills in all of the indices,
3501 so we don't have to figure out what's missing ourselves. */
3502 gcc_assert (purpose);
3503
816fa80a
OH
3504 /* Skip zero-sized fields, unless value has side-effects. This can
3505 happen with calls to functions returning a zero-sized type, which
3506 we shouldn't discard. As a number of downstream passes don't
3507 expect sets of zero-sized fields, we rely on the gimplification of
3508 the MODIFY_EXPR we make below to drop the assignment statement. */
3509 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
292a398f
DB
3510 continue;
3511
6fa91b48
SB
3512 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3513 whole range. */
3514 if (TREE_CODE (purpose) == RANGE_EXPR)
57d1dd87 3515 {
6fa91b48
SB
3516 tree lower = TREE_OPERAND (purpose, 0);
3517 tree upper = TREE_OPERAND (purpose, 1);
3518
3519 /* If the lower bound is equal to upper, just treat it as if
3520 upper was the index. */
3521 if (simple_cst_equal (lower, upper))
3522 purpose = upper;
3523 else
3524 {
3525 gimplify_init_ctor_eval_range (object, lower, upper, value,
3526 array_elt_type, pre_p, cleared);
3527 continue;
3528 }
3529 }
57d1dd87 3530
6fa91b48
SB
3531 if (array_elt_type)
3532 {
1a1640db
RG
3533 /* Do not use bitsizetype for ARRAY_REF indices. */
3534 if (TYPE_DOMAIN (TREE_TYPE (object)))
3535 purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3536 purpose);
b4257cfc
RG
3537 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3538 purpose, NULL_TREE, NULL_TREE);
57d1dd87
RH
3539 }
3540 else
cf0efa6a
ILT
3541 {
3542 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
b4257cfc
RG
3543 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3544 unshare_expr (object), purpose, NULL_TREE);
cf0efa6a 3545 }
57d1dd87 3546
cf0efa6a
ILT
3547 if (TREE_CODE (value) == CONSTRUCTOR
3548 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
57d1dd87
RH
3549 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3550 pre_p, cleared);
3551 else
3552 {
726a989a 3553 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
57d1dd87 3554 gimplify_and_add (init, pre_p);
726a989a 3555 ggc_free (init);
57d1dd87
RH
3556 }
3557 }
3558}
3559
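/* Illustrative sketch (hypothetical example): given

     struct P { int x, y; } p = { .x = 1, .y = f () };

   and assuming no block clear was emitted, the loop above produces the
   per-element assignments

     p.x = 1;
     p.y = f ();

   building a COMPONENT_REF for each FIELD_DECL purpose, an ARRAY_REF for
   array indices, and the range loop above for RANGE_EXPR purposes. */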
726a989a
RB
3560
3561/* Returns the appropriate RHS predicate for this LHS. */
3562
3563gimple_predicate
3564rhs_predicate_for (tree lhs)
3565{
ba4d8f9d
RG
3566 if (is_gimple_reg (lhs))
3567 return is_gimple_reg_rhs_or_call;
726a989a 3568 else
ba4d8f9d 3569 return is_gimple_mem_rhs_or_call;
726a989a
RB
3570}
3571
2ec5deb5
PB
3572/* Gimplify a C99 compound literal expression. This just means adding
3573 the DECL_EXPR before the current statement and using its anonymous
3574 decl instead. */
3575
3576static enum gimplify_status
3577gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3578{
3579 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3580 tree decl = DECL_EXPR_DECL (decl_s);
3581 /* Mark the decl as addressable if the compound literal
3582 expression is addressable now, otherwise it is marked too late
3583 after we gimplify the initialization expression. */
3584 if (TREE_ADDRESSABLE (*expr_p))
3585 TREE_ADDRESSABLE (decl) = 1;
3586
3587 /* Preliminarily mark non-addressed complex variables as eligible
3588 for promotion to gimple registers. We'll transform their uses
3589 as we find them. */
3590 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3591 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3592 && !TREE_THIS_VOLATILE (decl)
3593 && !needs_to_live_in_memory (decl))
3594 DECL_GIMPLE_REG_P (decl) = 1;
3595
3596 /* This decl isn't mentioned in the enclosing block, so add it to the
3597 list of temps. FIXME it seems a bit of a kludge to say that
3598 anonymous artificial vars aren't pushed, but everything else is. */
3599 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3600 gimple_add_tmp_var (decl);
3601
3602 gimplify_and_add (decl_s, pre_p);
3603 *expr_p = decl;
3604 return GS_OK;
3605}
3606
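/* Illustrative example (the temporary name is made up): the C99 compound
   literal in

     int *p = &(int){ 42 };

   is handled by emitting the DECL_EXPR for the anonymous object first,
   giving roughly

     int D.2345;
     D.2345 = 42;
     p = &D.2345;

   i.e. *EXPR_P is simply replaced by the anonymous decl. */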
3607/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3608 return a new CONSTRUCTOR if something changed. */
3609
3610static tree
3611optimize_compound_literals_in_ctor (tree orig_ctor)
3612{
3613 tree ctor = orig_ctor;
3614 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3615 unsigned int idx, num = VEC_length (constructor_elt, elts);
3616
3617 for (idx = 0; idx < num; idx++)
3618 {
3619 tree value = VEC_index (constructor_elt, elts, idx)->value;
3620 tree newval = value;
3621 if (TREE_CODE (value) == CONSTRUCTOR)
3622 newval = optimize_compound_literals_in_ctor (value);
3623 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3624 {
3625 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3626 tree decl = DECL_EXPR_DECL (decl_s);
3627 tree init = DECL_INITIAL (decl);
3628
3629 if (!TREE_ADDRESSABLE (value)
3630 && !TREE_ADDRESSABLE (decl)
3631 && init)
3632 newval = optimize_compound_literals_in_ctor (init);
3633 }
3634 if (newval == value)
3635 continue;
3636
3637 if (ctor == orig_ctor)
3638 {
3639 ctor = copy_node (orig_ctor);
3640 CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3641 elts = CONSTRUCTOR_ELTS (ctor);
3642 }
3643 VEC_index (constructor_elt, elts, idx)->value = newval;
3644 }
3645 return ctor;
3646}
3647
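/* Illustrative example (hypothetical): a constructor such as

     struct T t = { (struct S){ 1, 2 }, 3 };

   is rewritten, when the compound literal's address is never taken, into
   the equivalent

     struct T t = { { 1, 2 }, 3 };

   by substituting the literal's DECL_INITIAL for the COMPOUND_LITERAL_EXPR,
   recursing into nested constructors. */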
3648
726a989a 3649
26d44ae2
RH
3650/* A subroutine of gimplify_modify_expr. Break out elements of a
3651 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3652
3653 Note that we still need to clear any elements that don't have explicit
3654 initializers, so if not all elements are initialized we keep the
ffed8a01
AH
3655 original MODIFY_EXPR, we just remove all of the constructor elements.
3656
3657 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3658 GS_ERROR if we would have to create a temporary when gimplifying
3659 this constructor. Otherwise, return GS_OK.
3660
3661 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
26d44ae2
RH
3662
3663static enum gimplify_status
726a989a
RB
3664gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3665 bool want_value, bool notify_temp_creation)
26d44ae2 3666{
f5a1f0d0 3667 tree object, ctor, type;
26d44ae2 3668 enum gimplify_status ret;
4038c495 3669 VEC(constructor_elt,gc) *elts;
26d44ae2 3670
f5a1f0d0 3671 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
26d44ae2 3672
ffed8a01
AH
3673 if (!notify_temp_creation)
3674 {
726a989a 3675 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
ffed8a01
AH
3676 is_gimple_lvalue, fb_lvalue);
3677 if (ret == GS_ERROR)
3678 return ret;
3679 }
57d1dd87 3680
726a989a 3681 object = TREE_OPERAND (*expr_p, 0);
f5a1f0d0
PB
3682 ctor = TREE_OPERAND (*expr_p, 1) =
3683 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3684 type = TREE_TYPE (ctor);
3685 elts = CONSTRUCTOR_ELTS (ctor);
26d44ae2 3686 ret = GS_ALL_DONE;
726a989a 3687
26d44ae2
RH
3688 switch (TREE_CODE (type))
3689 {
3690 case RECORD_TYPE:
3691 case UNION_TYPE:
3692 case QUAL_UNION_TYPE:
3693 case ARRAY_TYPE:
3694 {
57d1dd87 3695 struct gimplify_init_ctor_preeval_data preeval_data;
6fa91b48 3696 HOST_WIDE_INT num_type_elements, num_ctor_elements;
fe24d485 3697 HOST_WIDE_INT num_nonzero_elements;
8afd015a 3698 bool cleared, valid_const_initializer;
26d44ae2
RH
3699
3700 /* Aggregate types must lower constructors to initialization of
3701 individual elements. The exception is that a CONSTRUCTOR node
3702 with no elements indicates zero-initialization of the whole. */
4038c495 3703 if (VEC_empty (constructor_elt, elts))
ffed8a01
AH
3704 {
3705 if (notify_temp_creation)
3706 return GS_OK;
3707 break;
3708 }
b8698a0f 3709
fe24d485
OH
3710 /* Fetch information about the constructor to direct later processing.
3711 We might want to make static versions of it in various cases, and
 3712 can only do so if it is known to be a valid constant initializer. */
3713 valid_const_initializer
3714 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3715 &num_ctor_elements, &cleared);
26d44ae2
RH
3716
3717 /* If a const aggregate variable is being initialized, then it
 3718 should never be a loss to promote the variable to be static. */
fe24d485 3719 if (valid_const_initializer
6f642f98 3720 && num_nonzero_elements > 1
26d44ae2 3721 && TREE_READONLY (object)
d0ea0759
SE
3722 && TREE_CODE (object) == VAR_DECL
3723 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
26d44ae2 3724 {
ffed8a01
AH
3725 if (notify_temp_creation)
3726 return GS_ERROR;
26d44ae2
RH
3727 DECL_INITIAL (object) = ctor;
3728 TREE_STATIC (object) = 1;
3729 if (!DECL_NAME (object))
3730 DECL_NAME (object) = create_tmp_var_name ("C");
3731 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3732
3733 /* ??? C++ doesn't automatically append a .<number> to the
 3734 assembler name, and even when it does, it looks at FE private
3735 data structures to figure out what that number should be,
3736 which are not set for this variable. I suppose this is
3737 important for local statics for inline functions, which aren't
3738 "local" in the object file sense. So in order to get a unique
3739 TU-local symbol, we must invoke the lhd version now. */
3740 lhd_set_decl_assembler_name (object);
3741
3742 *expr_p = NULL_TREE;
3743 break;
3744 }
3745
cce70747
JC
3746 /* If there are "lots" of initialized elements, even discounting
3747 those that are not address constants (and thus *must* be
3748 computed at runtime), then partition the constructor into
3749 constant and non-constant parts. Block copy the constant
3750 parts in, then generate code for the non-constant parts. */
3751 /* TODO. There's code in cp/typeck.c to do this. */
3752
73ed17ff 3753 num_type_elements = count_type_elements (type, true);
cce70747 3754
73ed17ff
JJ
3755 /* If count_type_elements could not determine number of type elements
3756 for a constant-sized object, assume clearing is needed.
3757 Don't do this for variable-sized objects, as store_constructor
3758 will ignore the clearing of variable-sized objects. */
3759 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3760 cleared = true;
cce70747 3761 /* If there are "lots" of zeros, then block clear the object first. */
e04ad03d
JH
3762 else if (num_type_elements - num_nonzero_elements
3763 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
8afd015a 3764 && num_nonzero_elements < num_type_elements/4)
cce70747 3765 cleared = true;
cce70747
JC
3766 /* ??? This bit ought not be needed. For any element not present
3767 in the initializer, we should simply set them to zero. Except
3768 we'd need to *find* the elements that are not present, and that
3769 requires trickery to avoid quadratic compile-time behavior in
3770 large cases or excessive memory use in small cases. */
3771 else if (num_ctor_elements < num_type_elements)
3772 cleared = true;
3773
26d44ae2
RH
3774 /* If there are "lots" of initialized elements, and all of them
3775 are valid address constants, then the entire initializer can
cce70747
JC
3776 be dropped to memory, and then memcpy'd out. Don't do this
3777 for sparse arrays, though, as it's more efficient to follow
3778 the standard CONSTRUCTOR behavior of memset followed by
8afd015a
JM
3779 individual element initialization. Also don't do this for small
3780 all-zero initializers (which aren't big enough to merit
3781 clearing), and don't try to make bitwise copies of
3782 TREE_ADDRESSABLE types. */
3783 if (valid_const_initializer
3784 && !(cleared || num_nonzero_elements == 0)
c69c7be1 3785 && !TREE_ADDRESSABLE (type))
26d44ae2
RH
3786 {
3787 HOST_WIDE_INT size = int_size_in_bytes (type);
3788 unsigned int align;
3789
3790 /* ??? We can still get unbounded array types, at least
3791 from the C++ front end. This seems wrong, but attempt
3792 to work around it for now. */
3793 if (size < 0)
3794 {
3795 size = int_size_in_bytes (TREE_TYPE (object));
3796 if (size >= 0)
3797 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3798 }
3799
3800 /* Find the maximum alignment we can assume for the object. */
3801 /* ??? Make use of DECL_OFFSET_ALIGN. */
3802 if (DECL_P (object))
3803 align = DECL_ALIGN (object);
3804 else
3805 align = TYPE_ALIGN (type);
3806
329ad380
JJ
3807 if (size > 0
3808 && num_nonzero_elements > 1
3809 && !can_move_by_pieces (size, align))
26d44ae2 3810 {
ffed8a01
AH
3811 if (notify_temp_creation)
3812 return GS_ERROR;
3813
46314d3e
EB
3814 walk_tree (&ctor, force_labels_r, NULL, NULL);
3815 ctor = tree_output_constant_def (ctor);
3816 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3817 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3818 TREE_OPERAND (*expr_p, 1) = ctor;
57d1dd87
RH
3819
3820 /* This is no longer an assignment of a CONSTRUCTOR, but
3821 we still may have processing to do on the LHS. So
3822 pretend we didn't do anything here to let that happen. */
3823 return GS_UNHANDLED;
26d44ae2
RH
3824 }
3825 }
3826
558af7ca
EB
 3827 /* If the target is volatile and we have nonzero elements and more than
 3828 one field to assign, initialize the target from a temporary. */
61c7cbf8
RG
3829 if (TREE_THIS_VOLATILE (object)
3830 && !TREE_ADDRESSABLE (type)
558af7ca
EB
3831 && num_nonzero_elements > 0
3832 && VEC_length (constructor_elt, elts) > 1)
61c7cbf8
RG
3833 {
3834 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3835 TREE_OPERAND (*expr_p, 0) = temp;
3836 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3837 *expr_p,
3838 build2 (MODIFY_EXPR, void_type_node,
3839 object, temp));
3840 return GS_OK;
3841 }
3842
ffed8a01
AH
3843 if (notify_temp_creation)
3844 return GS_OK;
3845
675c873b
EB
3846 /* If there are nonzero elements and if needed, pre-evaluate to capture
3847 elements overlapping with the lhs into temporaries. We must do this
3848 before clearing to fetch the values before they are zeroed-out. */
3849 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
85d89e76
OH
3850 {
3851 preeval_data.lhs_base_decl = get_base_address (object);
3852 if (!DECL_P (preeval_data.lhs_base_decl))
3853 preeval_data.lhs_base_decl = NULL;
3854 preeval_data.lhs_alias_set = get_alias_set (object);
3855
726a989a 3856 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3857 pre_p, post_p, &preeval_data);
3858 }
3859
26d44ae2
RH
3860 if (cleared)
3861 {
3862 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3863 Note that we still have to gimplify, in order to handle the
57d1dd87 3864 case of variable sized types. Avoid shared tree structures. */
4038c495 3865 CONSTRUCTOR_ELTS (ctor) = NULL;
726a989a 3866 TREE_SIDE_EFFECTS (ctor) = 0;
57d1dd87 3867 object = unshare_expr (object);
726a989a 3868 gimplify_stmt (expr_p, pre_p);
26d44ae2
RH
3869 }
3870
6fa91b48
SB
3871 /* If we have not block cleared the object, or if there are nonzero
3872 elements in the constructor, add assignments to the individual
3873 scalar fields of the object. */
3874 if (!cleared || num_nonzero_elements > 0)
85d89e76 3875 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
26d44ae2
RH
3876
3877 *expr_p = NULL_TREE;
3878 }
3879 break;
3880
3881 case COMPLEX_TYPE:
3882 {
3883 tree r, i;
3884
ffed8a01
AH
3885 if (notify_temp_creation)
3886 return GS_OK;
3887
26d44ae2 3888 /* Extract the real and imaginary parts out of the ctor. */
4038c495
GB
3889 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3890 r = VEC_index (constructor_elt, elts, 0)->value;
3891 i = VEC_index (constructor_elt, elts, 1)->value;
26d44ae2
RH
3892 if (r == NULL || i == NULL)
3893 {
b6f65e3c 3894 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
26d44ae2
RH
3895 if (r == NULL)
3896 r = zero;
3897 if (i == NULL)
3898 i = zero;
3899 }
3900
3901 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3902 represent creation of a complex value. */
3903 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3904 {
3905 ctor = build_complex (type, r, i);
3906 TREE_OPERAND (*expr_p, 1) = ctor;
3907 }
3908 else
3909 {
b4257cfc 3910 ctor = build2 (COMPLEX_EXPR, type, r, i);
26d44ae2 3911 TREE_OPERAND (*expr_p, 1) = ctor;
726a989a
RB
3912 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3913 pre_p,
3914 post_p,
17ad5b5e
RH
3915 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3916 fb_rvalue);
26d44ae2
RH
3917 }
3918 }
3919 break;
506e2710 3920
26d44ae2 3921 case VECTOR_TYPE:
4038c495
GB
3922 {
3923 unsigned HOST_WIDE_INT ix;
3924 constructor_elt *ce;
e89be13b 3925
ffed8a01
AH
3926 if (notify_temp_creation)
3927 return GS_OK;
3928
4038c495
GB
3929 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3930 if (TREE_CONSTANT (ctor))
3931 {
3932 bool constant_p = true;
3933 tree value;
3934
3935 /* Even when ctor is constant, it might contain non-*_CST
9f1da821
RS
3936 elements, such as addresses or trapping values like
3937 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3938 in VECTOR_CST nodes. */
4038c495
GB
3939 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3940 if (!CONSTANT_CLASS_P (value))
3941 {
3942 constant_p = false;
3943 break;
3944 }
e89be13b 3945
4038c495
GB
3946 if (constant_p)
3947 {
3948 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3949 break;
3950 }
84816907 3951
9f1da821 3952 /* Don't reduce an initializer constant even if we can't
84816907
JM
3953 make a VECTOR_CST. It won't do anything for us, and it'll
3954 prevent us from representing it as a single constant. */
9f1da821
RS
3955 if (initializer_constant_valid_p (ctor, type))
3956 break;
3957
3958 TREE_CONSTANT (ctor) = 0;
4038c495 3959 }
e89be13b 3960
4038c495
GB
3961 /* Vector types use CONSTRUCTOR all the way through gimple
3962 compilation as a general initializer. */
ac47786e 3963 FOR_EACH_VEC_ELT (constructor_elt, elts, ix, ce)
4038c495
GB
3964 {
3965 enum gimplify_status tret;
726a989a
RB
3966 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3967 fb_rvalue);
4038c495
GB
3968 if (tret == GS_ERROR)
3969 ret = GS_ERROR;
3970 }
726a989a
RB
3971 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3972 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4038c495 3973 }
26d44ae2 3974 break;
6de9cd9a 3975
26d44ae2
RH
3976 default:
3977 /* So how did we get a CONSTRUCTOR for a scalar type? */
282899df 3978 gcc_unreachable ();
26d44ae2 3979 }
6de9cd9a 3980
26d44ae2
RH
3981 if (ret == GS_ERROR)
3982 return GS_ERROR;
3983 else if (want_value)
3984 {
26d44ae2
RH
3985 *expr_p = object;
3986 return GS_OK;
6de9cd9a 3987 }
26d44ae2 3988 else
726a989a
RB
3989 {
3990 /* If we have gimplified both sides of the initializer but have
3991 not emitted an assignment, do so now. */
3992 if (*expr_p)
3993 {
3994 tree lhs = TREE_OPERAND (*expr_p, 0);
3995 tree rhs = TREE_OPERAND (*expr_p, 1);
3996 gimple init = gimple_build_assign (lhs, rhs);
3997 gimplify_seq_add_stmt (pre_p, init);
3998 *expr_p = NULL;
3999 }
4000
4001 return GS_ALL_DONE;
4002 }
26d44ae2 4003}
6de9cd9a 4004
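/* Illustrative summary of the aggregate case above (hypothetical example):

     struct A { int v[100]; } a = { .v[3] = 1 };

   has many zero elements, so the object is typically block-cleared first
   and only the nonzero element is stored, roughly

     a = {};          // empty-CONSTRUCTOR store; the expander may use memset
     a.v[3] = 1;

   whereas a sufficiently large constant aggregate that cannot be moved by
   pieces may instead be promoted to a static constant and block-copied. */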
30d2e943
RG
4005/* Given a pointer value OP0, return a simplified version of an
4006 indirection through OP0, or NULL_TREE if no simplification is
de4af523
JJ
4007 possible. Note that the resulting type may be different from
4008 the type pointed to in the sense that it is still compatible
4009 from the langhooks point of view. */
30d2e943 4010
de4af523
JJ
4011tree
4012gimple_fold_indirect_ref (tree t)
30d2e943 4013{
70f34814 4014 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
30d2e943
RG
4015 tree sub = t;
4016 tree subtype;
4017
cb6b911a 4018 STRIP_NOPS (sub);
30d2e943
RG
4019 subtype = TREE_TYPE (sub);
4020 if (!POINTER_TYPE_P (subtype))
4021 return NULL_TREE;
4022
4023 if (TREE_CODE (sub) == ADDR_EXPR)
4024 {
4025 tree op = TREE_OPERAND (sub, 0);
4026 tree optype = TREE_TYPE (op);
4027 /* *&p => p */
f4088621 4028 if (useless_type_conversion_p (type, optype))
30d2e943 4029 return op;
de4af523 4030
30d2e943 4031 /* *(foo *)&fooarray => fooarray[0] */
de4af523 4032 if (TREE_CODE (optype) == ARRAY_TYPE
cb6b911a 4033 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
de4af523 4034 && useless_type_conversion_p (type, TREE_TYPE (optype)))
30d2e943
RG
4035 {
4036 tree type_domain = TYPE_DOMAIN (optype);
4037 tree min_val = size_zero_node;
4038 if (type_domain && TYPE_MIN_VALUE (type_domain))
4039 min_val = TYPE_MIN_VALUE (type_domain);
cb6b911a
RG
4040 if (TREE_CODE (min_val) == INTEGER_CST)
4041 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
30d2e943 4042 }
cb6b911a
RG
4043 /* *(foo *)&complexfoo => __real__ complexfoo */
4044 else if (TREE_CODE (optype) == COMPLEX_TYPE
4045 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4046 return fold_build1 (REALPART_EXPR, type, op);
4047 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
4048 else if (TREE_CODE (optype) == VECTOR_TYPE
4049 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4050 {
4051 tree part_width = TYPE_SIZE (type);
4052 tree index = bitsize_int (0);
4053 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4054 }
4055 }
4056
70f34814 4057 /* *(p + CST) -> ... */
cb6b911a
RG
4058 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4059 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4060 {
70f34814
RG
4061 tree addr = TREE_OPERAND (sub, 0);
4062 tree off = TREE_OPERAND (sub, 1);
4063 tree addrtype;
cb6b911a 4064
70f34814
RG
4065 STRIP_NOPS (addr);
4066 addrtype = TREE_TYPE (addr);
4067
4068 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
4069 if (TREE_CODE (addr) == ADDR_EXPR
4070 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
4071 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
cb6b911a 4072 {
70f34814
RG
4073 HOST_WIDE_INT offset = tree_low_cst (off, 0);
4074 tree part_width = TYPE_SIZE (type);
4075 unsigned HOST_WIDE_INT part_widthi
4076 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4077 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4078 tree index = bitsize_int (indexi);
4079 if (offset / part_widthi
4080 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
4081 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
4082 part_width, index);
cb6b911a 4083 }
70f34814
RG
4084
4085 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
4086 if (TREE_CODE (addr) == ADDR_EXPR
4087 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
4088 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
4089 {
4090 tree size = TYPE_SIZE_UNIT (type);
4091 if (tree_int_cst_equal (size, off))
4092 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
4093 }
4094
4095 /* *(p + CST) -> MEM_REF <p, CST>. */
4096 if (TREE_CODE (addr) != ADDR_EXPR
4097 || DECL_P (TREE_OPERAND (addr, 0)))
4098 return fold_build2 (MEM_REF, type,
4099 addr,
4100 build_int_cst_wide (ptype,
4101 TREE_INT_CST_LOW (off),
4102 TREE_INT_CST_HIGH (off)));
30d2e943
RG
4103 }
4104
4105 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4106 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
cb6b911a 4107 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
f4088621 4108 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
30d2e943
RG
4109 {
4110 tree type_domain;
4111 tree min_val = size_zero_node;
c2953725 4112 tree osub = sub;
de4af523 4113 sub = gimple_fold_indirect_ref (sub);
30d2e943 4114 if (! sub)
c2953725 4115 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
30d2e943
RG
4116 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4117 if (type_domain && TYPE_MIN_VALUE (type_domain))
4118 min_val = TYPE_MIN_VALUE (type_domain);
cb6b911a
RG
4119 if (TREE_CODE (min_val) == INTEGER_CST)
4120 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
30d2e943
RG
4121 }
4122
4123 return NULL_TREE;
4124}
4125
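/* Worked example (illustrative, assuming 32-bit int and 8-bit bytes): with

     v4si x;                        // vector of four ints, hypothetical typedef

   the dereference *(int *)((char *) &x + 4), i.e. an INDIRECT_REF of
   POINTER_PLUS_EXPR <&x, 4>, matches the constant-offset case above and
   folds to

     BIT_FIELD_REF <x, 32, 32>

   selecting the second vector element without going through memory. */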
de4af523
JJ
4126/* Given a pointer value OP0, return a simplified version of an
4127 indirection through OP0, or NULL_TREE if no simplification is
4128 possible. This may only be applied to a rhs of an expression.
4129 Note that the resulting type may be different from the type pointed
4130 to in the sense that it is still compatible from the langhooks
4131 point of view. */
4132
4133static tree
4134gimple_fold_indirect_ref_rhs (tree t)
4135{
4136 return gimple_fold_indirect_ref (t);
4137}
4138
4caa08da
AH
4139/* Subroutine of gimplify_modify_expr to do simplifications of
4140 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
4141 something changes. */
6de9cd9a 4142
26d44ae2 4143static enum gimplify_status
726a989a
RB
4144gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4145 gimple_seq *pre_p, gimple_seq *post_p,
4146 bool want_value)
26d44ae2 4147{
6d729f28
JM
4148 enum gimplify_status ret = GS_UNHANDLED;
4149 bool changed;
6de9cd9a 4150
6d729f28
JM
4151 do
4152 {
4153 changed = false;
4154 switch (TREE_CODE (*from_p))
4155 {
4156 case VAR_DECL:
4157 /* If we're assigning from a read-only variable initialized with
4158 a constructor, do the direct assignment from the constructor,
4159 but only if neither source nor target are volatile since this
4160 latter assignment might end up being done on a per-field basis. */
4161 if (DECL_INITIAL (*from_p)
4162 && TREE_READONLY (*from_p)
4163 && !TREE_THIS_VOLATILE (*from_p)
4164 && !TREE_THIS_VOLATILE (*to_p)
4165 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4166 {
4167 tree old_from = *from_p;
4168 enum gimplify_status subret;
4169
4170 /* Move the constructor into the RHS. */
4171 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4172
4173 /* Let's see if gimplify_init_constructor will need to put
4174 it in memory. */
4175 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4176 false, true);
4177 if (subret == GS_ERROR)
4178 {
4179 /* If so, revert the change. */
4180 *from_p = old_from;
4181 }
4182 else
4183 {
4184 ret = GS_OK;
4185 changed = true;
4186 }
4187 }
4188 break;
4189 case INDIRECT_REF:
4caa08da 4190 {
6d729f28 4191 /* If we have code like
ffed8a01 4192
6d729f28 4193 *(const A*)(A*)&x
ffed8a01 4194
6d729f28
JM
4195 where the type of "x" is a (possibly cv-qualified variant
4196 of "A"), treat the entire expression as identical to "x".
4197 This kind of code arises in C++ when an object is bound
4198 to a const reference, and if "x" is a TARGET_EXPR we want
4199 to take advantage of the optimization below. */
4200 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4201 if (t)
ffed8a01 4202 {
6d729f28
JM
4203 *from_p = t;
4204 ret = GS_OK;
4205 changed = true;
ffed8a01 4206 }
6d729f28
JM
4207 break;
4208 }
4209
4210 case TARGET_EXPR:
4211 {
4212 /* If we are initializing something from a TARGET_EXPR, strip the
4213 TARGET_EXPR and initialize it directly, if possible. This can't
4214 be done if the initializer is void, since that implies that the
4215 temporary is set in some non-trivial way.
4216
4217 ??? What about code that pulls out the temp and uses it
4218 elsewhere? I think that such code never uses the TARGET_EXPR as
4219 an initializer. If I'm wrong, we'll die because the temp won't
4220 have any RTL. In that case, I guess we'll need to replace
4221 references somehow. */
4222 tree init = TARGET_EXPR_INITIAL (*from_p);
4223
4224 if (init
4225 && !VOID_TYPE_P (TREE_TYPE (init)))
ffed8a01 4226 {
6d729f28 4227 *from_p = init;
ffed8a01 4228 ret = GS_OK;
6d729f28 4229 changed = true;
ffed8a01 4230 }
4caa08da 4231 }
6d729f28 4232 break;
f98625f6 4233
6d729f28
JM
4234 case COMPOUND_EXPR:
4235 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4236 caught. */
4237 gimplify_compound_expr (from_p, pre_p, true);
4238 ret = GS_OK;
4239 changed = true;
4240 break;
f98625f6 4241
6d729f28 4242 case CONSTRUCTOR:
ce3beba3
JM
4243 /* If we already made some changes, let the front end have a
4244 crack at this before we break it down. */
4245 if (ret != GS_UNHANDLED)
4246 break;
6d729f28
JM
4247 /* If we're initializing from a CONSTRUCTOR, break this into
4248 individual MODIFY_EXPRs. */
4249 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4250 false);
4251
4252 case COND_EXPR:
4253 /* If we're assigning to a non-register type, push the assignment
4254 down into the branches. This is mandatory for ADDRESSABLE types,
4255 since we cannot generate temporaries for such, but it saves a
4256 copy in other cases as well. */
4257 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
f98625f6 4258 {
6d729f28
JM
4259 /* This code should mirror the code in gimplify_cond_expr. */
4260 enum tree_code code = TREE_CODE (*expr_p);
4261 tree cond = *from_p;
4262 tree result = *to_p;
4263
4264 ret = gimplify_expr (&result, pre_p, post_p,
4265 is_gimple_lvalue, fb_lvalue);
4266 if (ret != GS_ERROR)
4267 ret = GS_OK;
4268
4269 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4270 TREE_OPERAND (cond, 1)
4271 = build2 (code, void_type_node, result,
4272 TREE_OPERAND (cond, 1));
4273 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4274 TREE_OPERAND (cond, 2)
4275 = build2 (code, void_type_node, unshare_expr (result),
4276 TREE_OPERAND (cond, 2));
4277
4278 TREE_TYPE (cond) = void_type_node;
4279 recalculate_side_effects (cond);
4280
4281 if (want_value)
4282 {
4283 gimplify_and_add (cond, pre_p);
4284 *expr_p = unshare_expr (result);
4285 }
4286 else
4287 *expr_p = cond;
4288 return ret;
f98625f6 4289 }
f98625f6 4290 break;
f98625f6 4291
6d729f28
JM
4292 case CALL_EXPR:
4293 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4294 return slot so that we don't generate a temporary. */
4295 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4296 && aggregate_value_p (*from_p, *from_p))
26d44ae2 4297 {
6d729f28
JM
4298 bool use_target;
4299
4300 if (!(rhs_predicate_for (*to_p))(*from_p))
4301 /* If we need a temporary, *to_p isn't accurate. */
4302 use_target = false;
4303 else if (TREE_CODE (*to_p) == RESULT_DECL
4304 && DECL_NAME (*to_p) == NULL_TREE
4305 && needs_to_live_in_memory (*to_p))
4306 /* It's OK to use the return slot directly unless it's an NRV. */
4307 use_target = true;
4308 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4309 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4310 /* Don't force regs into memory. */
4311 use_target = false;
4312 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4313 /* It's OK to use the target directly if it's being
4314 initialized. */
4315 use_target = true;
4316 else if (!is_gimple_non_addressable (*to_p))
4317 /* Don't use the original target if it's already addressable;
4318 if its address escapes, and the called function uses the
4319 NRV optimization, a conforming program could see *to_p
4320 change before the called function returns; see c++/19317.
4321 When optimizing, the return_slot pass marks more functions
4322 as safe after we have escape info. */
4323 use_target = false;
4324 else
4325 use_target = true;
4326
4327 if (use_target)
4328 {
4329 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4330 mark_addressable (*to_p);
4331 }
26d44ae2 4332 }
6d729f28 4333 break;
6de9cd9a 4334
6d729f28
JM
4335 case WITH_SIZE_EXPR:
4336 /* Likewise for calls that return an aggregate of non-constant size,
4337 since we would not be able to generate a temporary at all. */
4338 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4339 {
4340 *from_p = TREE_OPERAND (*from_p, 0);
ebad5233
JM
4341 /* We don't change ret in this case because the
4342 WITH_SIZE_EXPR might have been added in
4343 gimplify_modify_expr, so returning GS_OK would lead to an
4344 infinite loop. */
6d729f28
JM
4345 changed = true;
4346 }
4347 break;
6de9cd9a 4348
6d729f28
JM
4349 /* If we're initializing from a container, push the initialization
4350 inside it. */
4351 case CLEANUP_POINT_EXPR:
4352 case BIND_EXPR:
4353 case STATEMENT_LIST:
26d44ae2 4354 {
6d729f28
JM
4355 tree wrap = *from_p;
4356 tree t;
dae7ec87 4357
6d729f28
JM
4358 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4359 fb_lvalue);
dae7ec87
JM
4360 if (ret != GS_ERROR)
4361 ret = GS_OK;
4362
6d729f28
JM
4363 t = voidify_wrapper_expr (wrap, *expr_p);
4364 gcc_assert (t == *expr_p);
dae7ec87
JM
4365
4366 if (want_value)
4367 {
6d729f28
JM
4368 gimplify_and_add (wrap, pre_p);
4369 *expr_p = unshare_expr (*to_p);
dae7ec87
JM
4370 }
4371 else
6d729f28
JM
4372 *expr_p = wrap;
4373 return GS_OK;
26d44ae2 4374 }
6de9cd9a 4375
6d729f28 4376 case COMPOUND_LITERAL_EXPR:
fa47911c 4377 {
6d729f28
JM
4378 tree complit = TREE_OPERAND (*expr_p, 1);
4379 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4380 tree decl = DECL_EXPR_DECL (decl_s);
4381 tree init = DECL_INITIAL (decl);
4382
4383 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4384 into struct T x = { 0, 1, 2 } if the address of the
4385 compound literal has never been taken. */
4386 if (!TREE_ADDRESSABLE (complit)
4387 && !TREE_ADDRESSABLE (decl)
4388 && init)
fa47911c 4389 {
6d729f28
JM
4390 *expr_p = copy_node (*expr_p);
4391 TREE_OPERAND (*expr_p, 1) = init;
4392 return GS_OK;
fa47911c
JM
4393 }
4394 }
4395
6d729f28
JM
4396 default:
4397 break;
2ec5deb5 4398 }
6d729f28
JM
4399 }
4400 while (changed);
6de9cd9a 4401
6de9cd9a
DN
4402 return ret;
4403}
4404
07beea0d 4405
d9c2d296
AP
4406/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4407 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
7b7e6ecd
EB
4408 DECL_GIMPLE_REG_P set.
4409
4410 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4411 other, unmodified part of the complex object just before the total store.
4412 As a consequence, if the object is still uninitialized, an undefined value
4413 will be loaded into a register, which may result in a spurious exception
4414 if the register is floating-point and the value happens to be a signaling
4415 NaN for example. Then the fully-fledged complex operations lowering pass
4416 followed by a DCE pass are necessary in order to fix things up. */
d9c2d296
AP
4417
4418static enum gimplify_status
726a989a
RB
4419gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4420 bool want_value)
d9c2d296
AP
4421{
4422 enum tree_code code, ocode;
4423 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4424
726a989a
RB
4425 lhs = TREE_OPERAND (*expr_p, 0);
4426 rhs = TREE_OPERAND (*expr_p, 1);
d9c2d296
AP
4427 code = TREE_CODE (lhs);
4428 lhs = TREE_OPERAND (lhs, 0);
4429
4430 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4431 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4432 other = get_formal_tmp_var (other, pre_p);
4433
4434 realpart = code == REALPART_EXPR ? rhs : other;
4435 imagpart = code == REALPART_EXPR ? other : rhs;
4436
4437 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4438 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4439 else
4440 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4441
726a989a
RB
4442 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4443 *expr_p = (want_value) ? rhs : NULL_TREE;
d9c2d296
AP
4444
4445 return GS_ALL_DONE;
4446}
4447
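/* Illustrative example: for a complex variable c with DECL_GIMPLE_REG_P set,
   the partial store

     __real__ c = x;

   is promoted by the function above to the total store

     c = COMPLEX_EXPR <x, __imag__ c>;

   loading the unmodified imaginary part into a temporary just before the
   assignment, with the caveats described in the function comment. */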
726a989a 4448
206048bd 4449/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
6de9cd9a
DN
4450
4451 modify_expr
4452 : varname '=' rhs
4453 | '*' ID '=' rhs
4454
4455 PRE_P points to the list where side effects that must happen before
4456 *EXPR_P should be stored.
4457
4458 POST_P points to the list where side effects that must happen after
4459 *EXPR_P should be stored.
4460
4461 WANT_VALUE is nonzero iff we want to use the value of this expression
4462 in another expression. */
4463
4464static enum gimplify_status
726a989a
RB
4465gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4466 bool want_value)
6de9cd9a 4467{
726a989a
RB
4468 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4469 tree *to_p = &TREE_OPERAND (*expr_p, 0);
44de5aeb 4470 enum gimplify_status ret = GS_UNHANDLED;
726a989a 4471 gimple assign;
db3927fb 4472 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a 4473
282899df
NS
4474 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4475 || TREE_CODE (*expr_p) == INIT_EXPR);
6de9cd9a 4476
1b24a790
RG
4477 /* Insert pointer conversions required by the middle-end that are not
 4478 required by the frontend. This fixes middle-end type checking for
 4479 cases such as gcc.dg/redecl-6.c. */
daad0278 4480 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
1b24a790
RG
4481 {
4482 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4483 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
db3927fb 4484 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
1b24a790
RG
4485 }
4486
83d7e8f0
JM
4487 /* See if any simplifications can be done based on what the RHS is. */
4488 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4489 want_value);
4490 if (ret != GS_UNHANDLED)
4491 return ret;
4492
4493 /* For zero sized types only gimplify the left hand side and right hand
4494 side as statements and throw away the assignment. Do this after
4495 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4496 types properly. */
753b34d7 4497 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
412f8986 4498 {
726a989a
RB
4499 gimplify_stmt (from_p, pre_p);
4500 gimplify_stmt (to_p, pre_p);
412f8986
AP
4501 *expr_p = NULL_TREE;
4502 return GS_ALL_DONE;
4503 }
6de9cd9a 4504
d25cee4d
RH
4505 /* If the value being copied is of variable width, compute the length
4506 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4507 before gimplifying any of the operands so that we can resolve any
4508 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4509 the size of the expression to be copied, not of the destination, so
726a989a 4510 that is what we must do here. */
d25cee4d 4511 maybe_with_size_expr (from_p);
6de9cd9a 4512
44de5aeb
RK
4513 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4514 if (ret == GS_ERROR)
4515 return ret;
6de9cd9a 4516
726a989a
RB
4517 /* As a special case, we have to temporarily allow for assignments
4518 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4519 a toplevel statement, when gimplifying the GENERIC expression
4520 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4521 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4522
4523 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4524 prevent gimplify_expr from trying to create a new temporary for
4525 foo's LHS, we tell it that it should only gimplify until it
4526 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4527 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4528 and all we need to do here is set 'a' to be its LHS. */
4529 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4530 fb_rvalue);
6de9cd9a
DN
4531 if (ret == GS_ERROR)
4532 return ret;
4533
44de5aeb
RK
4534 /* Now see if the above changed *from_p to something we handle specially. */
4535 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4536 want_value);
6de9cd9a
DN
4537 if (ret != GS_UNHANDLED)
4538 return ret;
4539
d25cee4d
RH
 4540 /* If we've got a variable-sized assignment between two lvalues (i.e. one
 4541 that does not involve a call), then we can make things a bit more straightforward
4542 by converting the assignment to memcpy or memset. */
4543 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4544 {
4545 tree from = TREE_OPERAND (*from_p, 0);
4546 tree size = TREE_OPERAND (*from_p, 1);
4547
4548 if (TREE_CODE (from) == CONSTRUCTOR)
726a989a
RB
4549 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4550
e847cc68 4551 if (is_gimple_addressable (from))
d25cee4d
RH
4552 {
4553 *from_p = from;
726a989a
RB
4554 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4555 pre_p);
d25cee4d
RH
4556 }
4557 }
4558
e41d82f5
RH
4559 /* Transform partial stores to non-addressable complex variables into
4560 total stores. This allows us to use real instead of virtual operands
4561 for these variables, which improves optimization. */
4562 if ((TREE_CODE (*to_p) == REALPART_EXPR
4563 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4564 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4565 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4566
f173837a
EB
4567 /* Try to alleviate the effects of the gimplification creating artificial
4568 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4569 if (!gimplify_ctxp->into_ssa
f2896bc9 4570 && TREE_CODE (*from_p) == VAR_DECL
726a989a
RB
4571 && DECL_IGNORED_P (*from_p)
4572 && DECL_P (*to_p)
4573 && !DECL_IGNORED_P (*to_p))
f173837a
EB
4574 {
4575 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4576 DECL_NAME (*from_p)
4577 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4578 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4579 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
726a989a
RB
4580 }
4581
8f0fe813
NS
4582 if (want_value && TREE_THIS_VOLATILE (*to_p))
4583 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4584
726a989a
RB
4585 if (TREE_CODE (*from_p) == CALL_EXPR)
4586 {
4587 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4588 instead of a GIMPLE_ASSIGN. */
4589 assign = gimple_build_call_from_tree (*from_p);
5de8da9b
AO
4590 if (!gimple_call_noreturn_p (assign))
4591 gimple_call_set_lhs (assign, *to_p);
f173837a 4592 }
726a989a 4593 else
c2255bc4
AH
4594 {
4595 assign = gimple_build_assign (*to_p, *from_p);
4596 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4597 }
f173837a 4598
726a989a
RB
4599 gimplify_seq_add_stmt (pre_p, assign);
4600
4601 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6de9cd9a 4602 {
726a989a
RB
4603 /* If we've somehow already got an SSA_NAME on the LHS, then
4604 we've probably modified it twice. Not good. */
4605 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4606 *to_p = make_ssa_name (*to_p, assign);
4607 gimple_set_lhs (assign, *to_p);
4608 }
07beea0d 4609
726a989a
RB
4610 if (want_value)
4611 {
8f0fe813 4612 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
17ad5b5e 4613 return GS_OK;
6de9cd9a 4614 }
726a989a
RB
4615 else
4616 *expr_p = NULL;
6de9cd9a 4617
17ad5b5e 4618 return GS_ALL_DONE;
6de9cd9a
DN
4619}
4620
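/* Illustrative sketch of the CALL_EXPR special case above: the GENERIC
   assignment (hypothetical)

     a = foo (b);

   does not become a GIMPLE_ASSIGN with a call on its RHS; instead the single
   tuple

     GIMPLE_CALL <foo, b> with lhs a

   is emitted, except that no lhs is set when foo is a noreturn call. */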
44de5aeb
RK
4621/* Gimplify a comparison between two variable-sized objects. Do this
4622 with a call to BUILT_IN_MEMCMP. */
4623
4624static enum gimplify_status
4625gimplify_variable_sized_compare (tree *expr_p)
4626{
4627 tree op0 = TREE_OPERAND (*expr_p, 0);
4628 tree op1 = TREE_OPERAND (*expr_p, 1);
5039610b 4629 tree t, arg, dest, src;
db3927fb 4630 location_t loc = EXPR_LOCATION (*expr_p);
5039610b
SL
4631
4632 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4633 arg = unshare_expr (arg);
4634 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
db3927fb
AH
4635 src = build_fold_addr_expr_loc (loc, op1);
4636 dest = build_fold_addr_expr_loc (loc, op0);
44de5aeb 4637 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
db3927fb 4638 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
44de5aeb 4639 *expr_p
b4257cfc 4640 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
44de5aeb
RK
4641
4642 return GS_OK;
4643}
4644
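/* Illustrative example: an equality between two objects of variable-sized
   type (as can arise from non-C front ends), say op0 == op1, is rewritten
   into roughly

     __builtin_memcmp (&op0, &op1, SIZE) == 0

   where SIZE is TYPE_SIZE_UNIT of op0's type with any PLACEHOLDER_EXPRs
   substituted against op0. */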
61c25908
OH
4645/* Gimplify a comparison between two aggregate objects of integral scalar
4646 mode as a comparison between the bitwise equivalent scalar values. */
4647
4648static enum gimplify_status
4649gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4650{
db3927fb 4651 location_t loc = EXPR_LOCATION (*expr_p);
61c25908
OH
4652 tree op0 = TREE_OPERAND (*expr_p, 0);
4653 tree op1 = TREE_OPERAND (*expr_p, 1);
4654
4655 tree type = TREE_TYPE (op0);
4656 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4657
db3927fb
AH
4658 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4659 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
61c25908
OH
4660
4661 *expr_p
db3927fb 4662 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
61c25908
OH
4663
4664 return GS_OK;
4665}
4666
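/* Illustrative example: if two aggregates have an integral scalar mode,
   say a 4-byte struct with SImode, the comparison is lowered to a comparison
   of the bit patterns, roughly

     VIEW_CONVERT_EXPR<int>(op0) == VIEW_CONVERT_EXPR<int>(op1)

   using whatever scalar type the language hook returns for that mode. */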
6de9cd9a
DN
4667/* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4668 points to the expression to gimplify.
4669
4670 Expressions of the form 'a && b' are gimplified to:
4671
4672 a && b ? true : false
4673
ca80e52b
EB
4674 LOCUS is the source location to be put on the generated COND_EXPR.
4675 gimplify_cond_expr will do the rest. */
6de9cd9a
DN
4676
4677static enum gimplify_status
ca80e52b 4678gimplify_boolean_expr (tree *expr_p, location_t locus)
6de9cd9a
DN
4679{
4680 /* Preserve the original type of the expression. */
4681 tree type = TREE_TYPE (*expr_p);
4682
b4257cfc 4683 *expr_p = build3 (COND_EXPR, type, *expr_p,
db3927fb
AH
4684 fold_convert_loc (locus, type, boolean_true_node),
4685 fold_convert_loc (locus, type, boolean_false_node));
6de9cd9a 4686
ca80e52b
EB
4687 SET_EXPR_LOCATION (*expr_p, locus);
4688
6de9cd9a
DN
4689 return GS_OK;
4690}
4691
4692/* Gimplifies an expression sequence. This function gimplifies each
4693 expression and re-writes the original expression with the last
4694 expression of the sequence in GIMPLE form.
4695
4696 PRE_P points to the list where the side effects for all the
4697 expressions in the sequence will be emitted.
d3147f64 4698
6de9cd9a 4699 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
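/* For example, gimplifying "x = (a = 1, b = 2, a + b)" emits the side
   effects

     a = 1;
     b = 2;

   into PRE_P and, because the value of the sequence is used, leaves
   *EXPR_P pointing at the final expression "a + b", roughly speaking.  */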
6de9cd9a
DN
4700
4701static enum gimplify_status
726a989a 4702gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6de9cd9a
DN
4703{
4704 tree t = *expr_p;
4705
4706 do
4707 {
4708 tree *sub_p = &TREE_OPERAND (t, 0);
4709
4710 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4711 gimplify_compound_expr (sub_p, pre_p, false);
4712 else
726a989a 4713 gimplify_stmt (sub_p, pre_p);
6de9cd9a
DN
4714
4715 t = TREE_OPERAND (t, 1);
4716 }
4717 while (TREE_CODE (t) == COMPOUND_EXPR);
4718
4719 *expr_p = t;
4720 if (want_value)
4721 return GS_OK;
4722 else
4723 {
726a989a 4724 gimplify_stmt (expr_p, pre_p);
6de9cd9a
DN
4725 return GS_ALL_DONE;
4726 }
4727}
4728
6de9cd9a 4729
726a989a
RB
4730/* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4731 gimplify. After gimplification, EXPR_P will point to a new temporary
4732 that holds the original value of the SAVE_EXPR node.
6de9cd9a 4733
726a989a
RB
4734 PRE_P points to the list where side effects that must happen before
4735 *EXPR_P should be stored. */
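/* E.g. a SAVE_EXPR <f ()> shared between several uses is, on the first
   visit, roughly turned into "tmp = f ();" in PRE_P and marked resolved;
   later visits simply reuse TMP.  */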
6de9cd9a
DN
4736
4737static enum gimplify_status
726a989a 4738gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
4739{
4740 enum gimplify_status ret = GS_ALL_DONE;
4741 tree val;
4742
282899df 4743 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6de9cd9a
DN
4744 val = TREE_OPERAND (*expr_p, 0);
4745
7f5e6307
RH
4746 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4747 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
17ad5b5e 4748 {
7f5e6307
RH
4749 /* The operand may be a void-valued expression such as SAVE_EXPRs
4750 generated by the Java frontend for class initialization. It is
4751 being executed only for its side-effects. */
4752 if (TREE_TYPE (val) == void_type_node)
4753 {
4754 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4755 is_gimple_stmt, fb_none);
7f5e6307
RH
4756 val = NULL;
4757 }
4758 else
4759 val = get_initialized_tmp_var (val, pre_p, post_p);
4760
4761 TREE_OPERAND (*expr_p, 0) = val;
4762 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
17ad5b5e 4763 }
6de9cd9a 4764
7f5e6307
RH
4765 *expr_p = val;
4766
6de9cd9a
DN
4767 return ret;
4768}
4769
206048bd 4770/* Re-write the ADDR_EXPR node pointed to by EXPR_P
6de9cd9a
DN
4771
4772 unary_expr
4773 : ...
4774 | '&' varname
4775 ...
4776
4777 PRE_P points to the list where side effects that must happen before
4778 *EXPR_P should be stored.
4779
4780 POST_P points to the list where side effects that must happen after
4781 *EXPR_P should be stored. */
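/* Two of the cases handled below are worth illustrating.  Taking the
   address of a dereference, "&*p", simply yields "p" (with a conversion
   added if the pointer types differ only in qualification), and taking
   the address of a VIEW_CONVERT_EXPR is, roughly, turned inside out:

     &VIEW_CONVERT_EXPR<T>(x)   becomes   (T *) &x

   In the general case the operand is merely made addressable and the
   ADDR_EXPR is rebuilt in canonical form.  */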
4782
4783static enum gimplify_status
726a989a 4784gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
4785{
4786 tree expr = *expr_p;
4787 tree op0 = TREE_OPERAND (expr, 0);
4788 enum gimplify_status ret;
db3927fb 4789 location_t loc = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
4790
4791 switch (TREE_CODE (op0))
4792 {
4793 case INDIRECT_REF:
7ccf35ed 4794 case MISALIGNED_INDIRECT_REF:
67f23620 4795 do_indirect_ref:
6de9cd9a
DN
4796 /* Check if we are dealing with an expression of the form '&*ptr'.
4797 While the front end folds away '&*ptr' into 'ptr', these
4798 expressions may be generated internally by the compiler (e.g.,
4799 builtins like __builtin_va_end). */
67f23620
RH
4800 /* Caution: the silent array decomposition semantics we allow for
4801 ADDR_EXPR means we can't always discard the pair. */
c87ac7e8
AO
4802 /* Gimplification of the ADDR_EXPR operand may drop
4803 cv-qualification conversions, so make sure we add them if
4804 needed. */
67f23620
RH
4805 {
4806 tree op00 = TREE_OPERAND (op0, 0);
4807 tree t_expr = TREE_TYPE (expr);
4808 tree t_op00 = TREE_TYPE (op00);
4809
f4088621 4810 if (!useless_type_conversion_p (t_expr, t_op00))
db3927fb 4811 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
67f23620
RH
4812 *expr_p = op00;
4813 ret = GS_OK;
4814 }
6de9cd9a
DN
4815 break;
4816
44de5aeb
RK
4817 case VIEW_CONVERT_EXPR:
4818 /* Take the address of our operand and then convert it to the type of
af72267c
RK
4819 this ADDR_EXPR.
4820
4821	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
4822 all clear. The impact of this transformation is even less clear. */
91804752
EB
4823
4824 /* If the operand is a useless conversion, look through it. Doing so
4825 guarantees that the ADDR_EXPR and its operand will remain of the
4826 same type. */
4827 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
317c0092 4828 op0 = TREE_OPERAND (op0, 0);
91804752 4829
db3927fb
AH
4830 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4831 build_fold_addr_expr_loc (loc,
4832 TREE_OPERAND (op0, 0)));
44de5aeb 4833 ret = GS_OK;
6de9cd9a
DN
4834 break;
4835
4836 default:
4837 /* We use fb_either here because the C frontend sometimes takes
5201931e
JM
4838 the address of a call that returns a struct; see
4839 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4840 the implied temporary explicit. */
936d04b6 4841
f76d6e6f 4842 /* Make the operand addressable. */
6de9cd9a 4843 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
e847cc68 4844 is_gimple_addressable, fb_either);
8b17cc05
RG
4845 if (ret == GS_ERROR)
4846 break;
67f23620 4847
f76d6e6f
EB
4848 /* Then mark it. Beware that it may not be possible to do so directly
4849 if a temporary has been created by the gimplification. */
4850 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
9e51aaf5 4851
8b17cc05 4852 op0 = TREE_OPERAND (expr, 0);
6de9cd9a 4853
8b17cc05
RG
4854 /* For various reasons, the gimplification of the expression
4855 may have made a new INDIRECT_REF. */
4856 if (TREE_CODE (op0) == INDIRECT_REF)
4857 goto do_indirect_ref;
4858
6b8b9e42
RG
4859 mark_addressable (TREE_OPERAND (expr, 0));
4860
4861 /* The FEs may end up building ADDR_EXPRs early on a decl with
4862 an incomplete type. Re-build ADDR_EXPRs in canonical form
4863 here. */
4864 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4865 *expr_p = build_fold_addr_expr (op0);
4866
8b17cc05 4867 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6b8b9e42
RG
4868 recompute_tree_invariant_for_addr_expr (*expr_p);
4869
4870 /* If we re-built the ADDR_EXPR add a conversion to the original type
4871 if required. */
4872 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4873 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
8b17cc05 4874
6de9cd9a
DN
4875 break;
4876 }
4877
6de9cd9a
DN
4878 return ret;
4879}
4880
4881/* Gimplify the operands of an ASM_EXPR.  Input operands should be gimple
4882   values; output operands should be gimple lvalues.  */
4883
4884static enum gimplify_status
726a989a 4885gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a 4886{
726a989a
RB
4887 tree expr;
4888 int noutputs;
4889 const char **oconstraints;
6de9cd9a
DN
4890 int i;
4891 tree link;
4892 const char *constraint;
4893 bool allows_mem, allows_reg, is_inout;
4894 enum gimplify_status ret, tret;
726a989a
RB
4895 gimple stmt;
4896 VEC(tree, gc) *inputs;
4897 VEC(tree, gc) *outputs;
4898 VEC(tree, gc) *clobbers;
1c384bf1 4899 VEC(tree, gc) *labels;
726a989a 4900 tree link_next;
b8698a0f 4901
726a989a
RB
4902 expr = *expr_p;
4903 noutputs = list_length (ASM_OUTPUTS (expr));
4904 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4905
1c384bf1 4906 inputs = outputs = clobbers = labels = NULL;
6de9cd9a 4907
6de9cd9a 4908 ret = GS_ALL_DONE;
726a989a
RB
4909 link_next = NULL_TREE;
4910 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6de9cd9a 4911 {
2c68ba8e 4912 bool ok;
726a989a
RB
4913 size_t constraint_len;
4914
4915 link_next = TREE_CHAIN (link);
4916
4917 oconstraints[i]
4918 = constraint
6de9cd9a 4919 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6db081f1
AP
4920 constraint_len = strlen (constraint);
4921 if (constraint_len == 0)
4922 continue;
6de9cd9a 4923
2c68ba8e
LB
4924 ok = parse_output_constraint (&constraint, i, 0, 0,
4925 &allows_mem, &allows_reg, &is_inout);
4926 if (!ok)
4927 {
4928 ret = GS_ERROR;
4929 is_inout = false;
4930 }
6de9cd9a
DN
4931
4932 if (!allows_reg && allows_mem)
936d04b6 4933 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
4934
4935 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4936 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4937 fb_lvalue | fb_mayfail);
4938 if (tret == GS_ERROR)
4939 {
4940 error ("invalid lvalue in asm output %d", i);
4941 ret = tret;
4942 }
4943
726a989a
RB
4944 VEC_safe_push (tree, gc, outputs, link);
4945 TREE_CHAIN (link) = NULL_TREE;
4946
6de9cd9a
DN
4947 if (is_inout)
4948 {
4949 /* An input/output operand. To give the optimizers more
4950 flexibility, split it into separate input and output
4951 operands. */
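	      /* E.g. an "+r" (x) operand is, roughly, rewritten as the
		 output "=r" (x) plus a matching input "0" (x) that refers
		 back to it by operand number.  */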
4952 tree input;
4953 char buf[10];
6de9cd9a
DN
4954
4955 /* Turn the in/out constraint into an output constraint. */
4956 char *p = xstrdup (constraint);
4957 p[0] = '=';
4958 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6de9cd9a
DN
4959
4960 /* And add a matching input constraint. */
4961 if (allows_reg)
4962 {
4963 sprintf (buf, "%d", i);
372d72d9
JJ
4964
4965 /* If there are multiple alternatives in the constraint,
4966 handle each of them individually. Those that allow register
4967 will be replaced with operand number, the others will stay
4968 unchanged. */
4969 if (strchr (p, ',') != NULL)
4970 {
4971 size_t len = 0, buflen = strlen (buf);
4972 char *beg, *end, *str, *dst;
4973
4974 for (beg = p + 1;;)
4975 {
4976 end = strchr (beg, ',');
4977 if (end == NULL)
4978 end = strchr (beg, '\0');
4979 if ((size_t) (end - beg) < buflen)
4980 len += buflen + 1;
4981 else
4982 len += end - beg + 1;
4983 if (*end)
4984 beg = end + 1;
4985 else
4986 break;
4987 }
4988
858904db 4989 str = (char *) alloca (len);
372d72d9
JJ
4990 for (beg = p + 1, dst = str;;)
4991 {
4992 const char *tem;
4993 bool mem_p, reg_p, inout_p;
4994
4995 end = strchr (beg, ',');
4996 if (end)
4997 *end = '\0';
4998 beg[-1] = '=';
4999 tem = beg - 1;
5000 parse_output_constraint (&tem, i, 0, 0,
5001 &mem_p, &reg_p, &inout_p);
5002 if (dst != str)
5003 *dst++ = ',';
5004 if (reg_p)
5005 {
5006 memcpy (dst, buf, buflen);
5007 dst += buflen;
5008 }
5009 else
5010 {
5011 if (end)
5012 len = end - beg;
5013 else
5014 len = strlen (beg);
5015 memcpy (dst, beg, len);
5016 dst += len;
5017 }
5018 if (end)
5019 beg = end + 1;
5020 else
5021 break;
5022 }
5023 *dst = '\0';
5024 input = build_string (dst - str, str);
5025 }
5026 else
5027 input = build_string (strlen (buf), buf);
6de9cd9a
DN
5028 }
5029 else
5030 input = build_string (constraint_len - 1, constraint + 1);
372d72d9
JJ
5031
5032 free (p);
5033
6de9cd9a
DN
5034 input = build_tree_list (build_tree_list (NULL_TREE, input),
5035 unshare_expr (TREE_VALUE (link)));
5036 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5037 }
5038 }
5039
726a989a
RB
5040 link_next = NULL_TREE;
5041 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6de9cd9a 5042 {
726a989a
RB
5043 link_next = TREE_CHAIN (link);
5044 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6de9cd9a
DN
5045 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5046 oconstraints, &allows_mem, &allows_reg);
5047
f497c16c
JJ
5048 /* If we can't make copies, we can only accept memory. */
5049 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5050 {
5051 if (allows_mem)
5052 allows_reg = 0;
5053 else
5054 {
5055 error ("impossible constraint in %<asm%>");
5056 error ("non-memory input %d must stay in memory", i);
5057 return GS_ERROR;
5058 }
5059 }
5060
6de9cd9a
DN
5061 /* If the operand is a memory input, it should be an lvalue. */
5062 if (!allows_reg && allows_mem)
5063 {
6de9cd9a
DN
5064 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5065 is_gimple_lvalue, fb_lvalue | fb_mayfail);
936d04b6 5066 mark_addressable (TREE_VALUE (link));
6de9cd9a
DN
5067 if (tret == GS_ERROR)
5068 {
6a3799eb
AH
5069 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5070 input_location = EXPR_LOCATION (TREE_VALUE (link));
6de9cd9a
DN
5071 error ("memory input %d is not directly addressable", i);
5072 ret = tret;
5073 }
5074 }
5075 else
5076 {
5077 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
e670d9e4 5078 is_gimple_asm_val, fb_rvalue);
6de9cd9a
DN
5079 if (tret == GS_ERROR)
5080 ret = tret;
5081 }
726a989a
RB
5082
5083 TREE_CHAIN (link) = NULL_TREE;
5084 VEC_safe_push (tree, gc, inputs, link);
6de9cd9a 5085 }
b8698a0f 5086
726a989a 5087 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
1c384bf1
RH
5088 VEC_safe_push (tree, gc, clobbers, link);
5089
5090 for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
5091 VEC_safe_push (tree, gc, labels, link);
726a989a 5092
a406865a
RG
5093 /* Do not add ASMs with errors to the gimple IL stream. */
5094 if (ret != GS_ERROR)
5095 {
5096 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
1c384bf1 5097 inputs, outputs, clobbers, labels);
726a989a 5098
a406865a
RG
5099 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5100 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5101
5102 gimplify_seq_add_stmt (pre_p, stmt);
5103 }
6de9cd9a
DN
5104
5105 return ret;
5106}
5107
5108/* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
726a989a 5109 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6de9cd9a
DN
5110 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5111 return to this function.
5112
5113 FIXME should we complexify the prequeue handling instead? Or use flags
5114 for all the cleanups and let the optimizer tighten them up? The current
5115 code seems pretty fragile; it will break on a cleanup within any
5116 non-conditional nesting. But any such nesting would be broken, anyway;
5117 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5118 and continues out of it. We can do that at the RTL level, though, so
5119 having an optimizer to tighten up try/finally regions would be a Good
5120 Thing. */
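/* A sketch of the mechanics: while gimplifying the body, each cleanup is
   first emitted as a GIMPLE_WITH_CLEANUP_EXPR marker.  The loop below then
   rewrites

     <wce: cleanup C>  stmt1;  stmt2;  ...

   into, approximately,

     try { stmt1; stmt2; ... } finally { C; }

   (or a TRY_CATCH when the cleanup is EH-only), with any further markers
   nested inside the newly created try body.  */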
5121
5122static enum gimplify_status
726a989a 5123gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a 5124{
726a989a
RB
5125 gimple_stmt_iterator iter;
5126 gimple_seq body_sequence = NULL;
6de9cd9a 5127
325c3691 5128 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6de9cd9a
DN
5129
5130 /* We only care about the number of conditions between the innermost
df77f454
JM
5131 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
5132 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6de9cd9a 5133 int old_conds = gimplify_ctxp->conditions;
726a989a 5134 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6de9cd9a 5135 gimplify_ctxp->conditions = 0;
726a989a 5136 gimplify_ctxp->conditional_cleanups = NULL;
6de9cd9a 5137
726a989a 5138 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6de9cd9a
DN
5139
5140 gimplify_ctxp->conditions = old_conds;
df77f454 5141 gimplify_ctxp->conditional_cleanups = old_cleanups;
6de9cd9a 5142
726a989a 5143 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6de9cd9a 5144 {
726a989a 5145 gimple wce = gsi_stmt (iter);
6de9cd9a 5146
726a989a 5147 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6de9cd9a 5148 {
726a989a 5149 if (gsi_one_before_end_p (iter))
6de9cd9a 5150 {
726a989a
RB
5151 /* Note that gsi_insert_seq_before and gsi_remove do not
5152 scan operands, unlike some other sequence mutators. */
ae0595b0
RG
5153 if (!gimple_wce_cleanup_eh_only (wce))
5154 gsi_insert_seq_before_without_update (&iter,
5155 gimple_wce_cleanup (wce),
5156 GSI_SAME_STMT);
726a989a 5157 gsi_remove (&iter, true);
6de9cd9a
DN
5158 break;
5159 }
5160 else
5161 {
82d6e6fc 5162 gimple gtry;
726a989a
RB
5163 gimple_seq seq;
5164 enum gimple_try_flags kind;
40aac948 5165
726a989a
RB
5166 if (gimple_wce_cleanup_eh_only (wce))
5167 kind = GIMPLE_TRY_CATCH;
40aac948 5168 else
726a989a
RB
5169 kind = GIMPLE_TRY_FINALLY;
5170 seq = gsi_split_seq_after (iter);
5171
82d6e6fc 5172 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
726a989a
RB
5173 /* Do not use gsi_replace here, as it may scan operands.
5174 We want to do a simple structural modification only. */
82d6e6fc 5175 *gsi_stmt_ptr (&iter) = gtry;
726a989a 5176 iter = gsi_start (seq);
6de9cd9a
DN
5177 }
5178 }
5179 else
726a989a 5180 gsi_next (&iter);
6de9cd9a
DN
5181 }
5182
726a989a 5183 gimplify_seq_add_seq (pre_p, body_sequence);
6de9cd9a
DN
5184 if (temp)
5185 {
5186 *expr_p = temp;
6de9cd9a
DN
5187 return GS_OK;
5188 }
5189 else
5190 {
726a989a 5191 *expr_p = NULL;
6de9cd9a
DN
5192 return GS_ALL_DONE;
5193 }
5194}
5195
5196/* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
726a989a
RB
5197 is the cleanup action required. EH_ONLY is true if the cleanup should
5198 only be executed if an exception is thrown, not on normal exit. */
6de9cd9a
DN
5199
5200static void
726a989a 5201gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
6de9cd9a 5202{
726a989a
RB
5203 gimple wce;
5204 gimple_seq cleanup_stmts = NULL;
6de9cd9a
DN
5205
5206   /* Errors can result in improperly nested cleanups, which causes
726a989a 5207	 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
1da2ed5f 5208 if (seen_error ())
6de9cd9a
DN
5209 return;
5210
5211 if (gimple_conditional_context ())
5212 {
5213 /* If we're in a conditional context, this is more complex. We only
5214 want to run the cleanup if we actually ran the initialization that
5215 necessitates it, but we want to run it after the end of the
5216 conditional context. So we wrap the try/finally around the
5217 condition and use a flag to determine whether or not to actually
5218 run the destructor. Thus
5219
5220 test ? f(A()) : 0
5221
5222 becomes (approximately)
5223
5224 flag = 0;
5225 try {
5226 if (test) { A::A(temp); flag = 1; val = f(temp); }
5227 else { val = 0; }
5228 } finally {
5229 if (flag) A::~A(temp);
5230 }
5231 val
5232 */
6de9cd9a 5233 tree flag = create_tmp_var (boolean_type_node, "cleanup");
726a989a
RB
5234 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5235 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5236
b4257cfc 5237 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
726a989a
RB
5238 gimplify_stmt (&cleanup, &cleanup_stmts);
5239 wce = gimple_build_wce (cleanup_stmts);
5240
5241 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5242 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5243 gimplify_seq_add_stmt (pre_p, ftrue);
6de9cd9a
DN
5244
5245 /* Because of this manipulation, and the EH edges that jump
5246 threading cannot redirect, the temporary (VAR) will appear
5247 to be used uninitialized. Don't warn. */
5248 TREE_NO_WARNING (var) = 1;
5249 }
5250 else
5251 {
726a989a
RB
5252 gimplify_stmt (&cleanup, &cleanup_stmts);
5253 wce = gimple_build_wce (cleanup_stmts);
5254 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5255 gimplify_seq_add_stmt (pre_p, wce);
6de9cd9a 5256 }
6de9cd9a
DN
5257}
5258
5259/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
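/* Roughly, the TARGET_EXPR's temporary slot is registered (or set up with
   gimplify_vla_decl when it is variable-sized), the initializer is
   gimplified as

     slot = <initializer>;

   any cleanup recorded on the TARGET_EXPR is pushed for the enclosing
   cleanup point, and *EXPR_P is replaced by the slot itself.  */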
5260
5261static enum gimplify_status
726a989a 5262gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6de9cd9a
DN
5263{
5264 tree targ = *expr_p;
5265 tree temp = TARGET_EXPR_SLOT (targ);
5266 tree init = TARGET_EXPR_INITIAL (targ);
5267 enum gimplify_status ret;
5268
5269 if (init)
5270 {
3a5b9284 5271 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
786025ea
JJ
5272 to the temps list. Handle also variable length TARGET_EXPRs. */
5273 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5274 {
5275 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5276 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5277 gimplify_vla_decl (temp, pre_p);
5278 }
5279 else
5280 gimple_add_tmp_var (temp);
6de9cd9a 5281
3a5b9284
RH
5282 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5283 expression is supposed to initialize the slot. */
5284 if (VOID_TYPE_P (TREE_TYPE (init)))
5285 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5286 else
325c3691 5287 {
726a989a
RB
5288 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5289 init = init_expr;
5290 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5291 init = NULL;
5292 ggc_free (init_expr);
325c3691 5293 }
3a5b9284 5294 if (ret == GS_ERROR)
abc67de1
SM
5295 {
5296 /* PR c++/28266 Make sure this is expanded only once. */
5297 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5298 return GS_ERROR;
5299 }
726a989a
RB
5300 if (init)
5301 gimplify_and_add (init, pre_p);
6de9cd9a
DN
5302
5303 /* If needed, push the cleanup for the temp. */
5304 if (TARGET_EXPR_CLEANUP (targ))
726a989a
RB
5305 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5306 CLEANUP_EH_ONLY (targ), pre_p);
6de9cd9a
DN
5307
5308 /* Only expand this once. */
5309 TREE_OPERAND (targ, 3) = init;
5310 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5311 }
282899df 5312 else
6de9cd9a 5313 /* We should have expanded this before. */
282899df 5314 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6de9cd9a
DN
5315
5316 *expr_p = temp;
5317 return GS_OK;
5318}
5319
5320/* Gimplification of expression trees. */
5321
726a989a
RB
5322/* Gimplify an expression which appears at statement context. The
5323 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5324 NULL, a new sequence is allocated.
6de9cd9a 5325
726a989a
RB
5326 Return true if we actually added a statement to the queue. */
5327
5328bool
5329gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6de9cd9a 5330{
726a989a 5331 gimple_seq_node last;
6de9cd9a 5332
726a989a
RB
5333 if (!*seq_p)
5334 *seq_p = gimple_seq_alloc ();
6de9cd9a 5335
726a989a
RB
5336 last = gimple_seq_last (*seq_p);
5337 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5338 return last != gimple_seq_last (*seq_p);
6de9cd9a
DN
5339}
5340
953ff289
DN
5341
5342/* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
5343 to CTX. If entries already exist, force them to be some flavor of private.
5344 If there is no enclosing parallel, do nothing. */
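/* Roughly speaking, this is how the size expressions of a variable-length
   array used inside "#pragma omp parallel" become firstprivate: each
   enclosing parallel context either downgrades an existing GOVD_SHARED
   entry for the size variable to GOVD_FIRSTPRIVATE or adds a fresh
   GOVD_FIRSTPRIVATE entry, so every thread gets its own copy of the
   value.  */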
5345
5346void
5347omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5348{
5349 splay_tree_node n;
5350
5351 if (decl == NULL || !DECL_P (decl))
5352 return;
5353
5354 do
5355 {
5356 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5357 if (n != NULL)
5358 {
5359 if (n->value & GOVD_SHARED)
5360 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5361 else
5362 return;
5363 }
a68ab351 5364 else if (ctx->region_type != ORT_WORKSHARE)
953ff289
DN
5365 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5366
5367 ctx = ctx->outer_context;
5368 }
5369 while (ctx);
5370}
5371
5372/* Similarly for each of the type sizes of TYPE. */
5373
5374static void
5375omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5376{
5377 if (type == NULL || type == error_mark_node)
5378 return;
5379 type = TYPE_MAIN_VARIANT (type);
5380
5381 if (pointer_set_insert (ctx->privatized_types, type))
5382 return;
5383
5384 switch (TREE_CODE (type))
5385 {
5386 case INTEGER_TYPE:
5387 case ENUMERAL_TYPE:
5388 case BOOLEAN_TYPE:
953ff289 5389 case REAL_TYPE:
325217ed 5390 case FIXED_POINT_TYPE:
953ff289
DN
5391 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5392 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5393 break;
5394
5395 case ARRAY_TYPE:
5396 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5397 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5398 break;
5399
5400 case RECORD_TYPE:
5401 case UNION_TYPE:
5402 case QUAL_UNION_TYPE:
5403 {
5404 tree field;
910ad8de 5405 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
953ff289
DN
5406 if (TREE_CODE (field) == FIELD_DECL)
5407 {
5408 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5409 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5410 }
5411 }
5412 break;
5413
5414 case POINTER_TYPE:
5415 case REFERENCE_TYPE:
5416 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5417 break;
5418
5419 default:
5420 break;
5421 }
5422
5423 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5424 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5425 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5426}
5427
5428/* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5429
5430static void
5431omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5432{
5433 splay_tree_node n;
5434 unsigned int nflags;
5435 tree t;
5436
5437 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5438 return;
5439
5440 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5441 there are constructors involved somewhere. */
5442 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5443 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5444 flags |= GOVD_SEEN;
5445
5446 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5447 if (n != NULL)
5448 {
5449 /* We shouldn't be re-adding the decl with the same data
5450 sharing class. */
5451 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5452 /* The only combination of data sharing classes we should see is
5453 FIRSTPRIVATE and LASTPRIVATE. */
5454 nflags = n->value | flags;
5455 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5456 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5457 n->value = nflags;
5458 return;
5459 }
5460
5461 /* When adding a variable-sized variable, we have to handle all sorts
b8698a0f 5462 of additional bits of data: the pointer replacement variable, and
953ff289 5463 the parameters of the type. */
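  /* E.g. for a VLA declared as "char buf[n]", DECL_VALUE_EXPR (buf) is an
     INDIRECT_REF of an artificial pointer variable; it is that pointer,
     together with the gimplified size expressions, that gets entered in
     the context below, roughly speaking.  */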
4c923c28 5464 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
953ff289
DN
5465 {
5466 /* Add the pointer replacement variable as PRIVATE if the variable
5467 replacement is private, else FIRSTPRIVATE since we'll need the
5468 address of the original variable either for SHARED, or for the
5469 copy into or out of the context. */
5470 if (!(flags & GOVD_LOCAL))
5471 {
5472 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5473 nflags |= flags & GOVD_SEEN;
5474 t = DECL_VALUE_EXPR (decl);
5475 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5476 t = TREE_OPERAND (t, 0);
5477 gcc_assert (DECL_P (t));
5478 omp_add_variable (ctx, t, nflags);
5479 }
5480
5481 /* Add all of the variable and type parameters (which should have
5482 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5483 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5484 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5485 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5486
5487 /* The variable-sized variable itself is never SHARED, only some form
5488 of PRIVATE. The sharing would take place via the pointer variable
5489 which we remapped above. */
5490 if (flags & GOVD_SHARED)
5491 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5492 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5493
b8698a0f 5494 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
953ff289
DN
5495 alloca statement we generate for the variable, so make sure it
5496 is available. This isn't automatically needed for the SHARED
4288fea2
JJ
5497 case, since we won't be allocating local storage then.
5498 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5499 in this case omp_notice_variable will be called later
5500 on when it is gimplified. */
5501 else if (! (flags & GOVD_LOCAL))
953ff289
DN
5502 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5503 }
5504 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5505 {
5506 gcc_assert ((flags & GOVD_LOCAL) == 0);
5507 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5508
5509 /* Similar to the direct variable sized case above, we'll need the
5510 size of references being privatized. */
5511 if ((flags & GOVD_SHARED) == 0)
5512 {
5513 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4c923c28 5514 if (TREE_CODE (t) != INTEGER_CST)
953ff289
DN
5515 omp_notice_variable (ctx, t, true);
5516 }
5517 }
5518
5519 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5520}
5521
f22f4340
JJ
5522/* Notice a threadprivate variable DECL used in OpenMP context CTX.
5523 This just prints out diagnostics about threadprivate variable uses
5524 in untied tasks. If DECL2 is non-NULL, prevent this warning
5525 on that variable. */
5526
5527static bool
5528omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5529 tree decl2)
5530{
5531 splay_tree_node n;
5532
5533 if (ctx->region_type != ORT_UNTIED_TASK)
5534 return false;
5535 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5536 if (n == NULL)
5537 {
5538 error ("threadprivate variable %qE used in untied task", DECL_NAME (decl));
5539 error_at (ctx->location, "enclosing task");
5540 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5541 }
5542 if (decl2)
5543 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5544 return false;
5545}
5546
953ff289
DN
5547/* Record the fact that DECL was used within the OpenMP context CTX.
5548 IN_CODE is true when real code uses DECL, and false when we should
5549 merely emit default(none) errors. Return true if DECL is going to
5550 be remapped and thus DECL shouldn't be gimplified into its
5551 DECL_VALUE_EXPR (if any). */
5552
5553static bool
5554omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5555{
5556 splay_tree_node n;
5557 unsigned flags = in_code ? GOVD_SEEN : 0;
5558 bool ret = false, shared;
5559
5560 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5561 return false;
5562
5563 /* Threadprivate variables are predetermined. */
5564 if (is_global_var (decl))
5565 {
5566 if (DECL_THREAD_LOCAL_P (decl))
f22f4340 5567 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
953ff289
DN
5568
5569 if (DECL_HAS_VALUE_EXPR_P (decl))
5570 {
5571 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5572
5573 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
f22f4340 5574 return omp_notice_threadprivate_variable (ctx, decl, value);
953ff289
DN
5575 }
5576 }
5577
5578 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5579 if (n == NULL)
5580 {
5581 enum omp_clause_default_kind default_kind, kind;
a68ab351 5582 struct gimplify_omp_ctx *octx;
953ff289 5583
a68ab351 5584 if (ctx->region_type == ORT_WORKSHARE)
953ff289
DN
5585 goto do_outer;
5586
5587 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5588 remapped firstprivate instead of shared. To some extent this is
5589 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5590 default_kind = ctx->default_kind;
5591 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5592 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5593 default_kind = kind;
5594
5595 switch (default_kind)
5596 {
5597 case OMP_CLAUSE_DEFAULT_NONE:
4f1e4960 5598 error ("%qE not specified in enclosing parallel",
79943d19 5599 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
f22f4340
JJ
5600 if ((ctx->region_type & ORT_TASK) != 0)
5601 error_at (ctx->location, "enclosing task");
5602 else
5603 error_at (ctx->location, "enclosing parallel");
953ff289
DN
5604 /* FALLTHRU */
5605 case OMP_CLAUSE_DEFAULT_SHARED:
5606 flags |= GOVD_SHARED;
5607 break;
5608 case OMP_CLAUSE_DEFAULT_PRIVATE:
5609 flags |= GOVD_PRIVATE;
5610 break;
a68ab351
JJ
5611 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5612 flags |= GOVD_FIRSTPRIVATE;
5613 break;
5614 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5615 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
f22f4340 5616 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
a68ab351
JJ
5617 if (ctx->outer_context)
5618 omp_notice_variable (ctx->outer_context, decl, in_code);
5619 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5620 {
5621 splay_tree_node n2;
5622
5623 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5624 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5625 {
5626 flags |= GOVD_FIRSTPRIVATE;
5627 break;
5628 }
5629 if ((octx->region_type & ORT_PARALLEL) != 0)
5630 break;
5631 }
5632 if (flags & GOVD_FIRSTPRIVATE)
5633 break;
5634 if (octx == NULL
5635 && (TREE_CODE (decl) == PARM_DECL
5636 || (!is_global_var (decl)
5637 && DECL_CONTEXT (decl) == current_function_decl)))
5638 {
5639 flags |= GOVD_FIRSTPRIVATE;
5640 break;
5641 }
5642 flags |= GOVD_SHARED;
5643 break;
953ff289
DN
5644 default:
5645 gcc_unreachable ();
5646 }
5647
a68ab351
JJ
5648 if ((flags & GOVD_PRIVATE)
5649 && lang_hooks.decls.omp_private_outer_ref (decl))
5650 flags |= GOVD_PRIVATE_OUTER_REF;
5651
953ff289
DN
5652 omp_add_variable (ctx, decl, flags);
5653
5654 shared = (flags & GOVD_SHARED) != 0;
5655 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5656 goto do_outer;
5657 }
5658
3ad6b266
JJ
5659 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5660 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5661 && DECL_SIZE (decl)
5662 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5663 {
5664 splay_tree_node n2;
5665 tree t = DECL_VALUE_EXPR (decl);
5666 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5667 t = TREE_OPERAND (t, 0);
5668 gcc_assert (DECL_P (t));
5669 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5670 n2->value |= GOVD_SEEN;
5671 }
5672
953ff289
DN
5673 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5674 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5675
5676 /* If nothing changed, there's nothing left to do. */
5677 if ((n->value & flags) == flags)
5678 return ret;
5679 flags |= n->value;
5680 n->value = flags;
5681
5682 do_outer:
5683 /* If the variable is private in the current context, then we don't
5684 need to propagate anything to an outer context. */
a68ab351 5685 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
953ff289
DN
5686 return ret;
5687 if (ctx->outer_context
5688 && omp_notice_variable (ctx->outer_context, decl, in_code))
5689 return true;
5690 return ret;
5691}
5692
5693/* Verify that DECL is private within CTX. If there's specific information
5694 to the contrary in the innermost scope, generate an error. */
5695
5696static bool
5697omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5698{
5699 splay_tree_node n;
5700
5701 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5702 if (n != NULL)
5703 {
5704 if (n->value & GOVD_SHARED)
5705 {
5706 if (ctx == gimplify_omp_ctxp)
f6a5ffbf 5707 {
4f1e4960
JM
5708 error ("iteration variable %qE should be private",
5709 DECL_NAME (decl));
f6a5ffbf
JJ
5710 n->value = GOVD_PRIVATE;
5711 return true;
5712 }
5713 else
5714 return false;
953ff289 5715 }
761041be
JJ
5716 else if ((n->value & GOVD_EXPLICIT) != 0
5717 && (ctx == gimplify_omp_ctxp
a68ab351 5718 || (ctx->region_type == ORT_COMBINED_PARALLEL
761041be
JJ
5719 && gimplify_omp_ctxp->outer_context == ctx)))
5720 {
5721 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4f1e4960
JM
5722 error ("iteration variable %qE should not be firstprivate",
5723 DECL_NAME (decl));
761041be 5724 else if ((n->value & GOVD_REDUCTION) != 0)
4f1e4960
JM
5725 error ("iteration variable %qE should not be reduction",
5726 DECL_NAME (decl));
761041be 5727 }
ca2b1311
JJ
5728 return (ctx == gimplify_omp_ctxp
5729 || (ctx->region_type == ORT_COMBINED_PARALLEL
5730 && gimplify_omp_ctxp->outer_context == ctx));
953ff289
DN
5731 }
5732
a68ab351 5733 if (ctx->region_type != ORT_WORKSHARE)
953ff289 5734 return false;
f6a5ffbf
JJ
5735 else if (ctx->outer_context)
5736 return omp_is_private (ctx->outer_context, decl);
ca2b1311 5737 return false;
953ff289
DN
5738}
5739
07b7aade
JJ
5740/* Return true if DECL is private within a parallel region
5741   that binds to the current construct's context or appears in that
5742   parallel region's REDUCTION clause.  */
5743
5744static bool
5745omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5746{
5747 splay_tree_node n;
5748
5749 do
5750 {
5751 ctx = ctx->outer_context;
5752 if (ctx == NULL)
5753 return !(is_global_var (decl)
5754 /* References might be private, but might be shared too. */
5755 || lang_hooks.decls.omp_privatize_by_reference (decl));
5756
5757 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5758 if (n != NULL)
5759 return (n->value & GOVD_SHARED) == 0;
5760 }
a68ab351 5761 while (ctx->region_type == ORT_WORKSHARE);
07b7aade
JJ
5762 return false;
5763}
5764
953ff289
DN
5765/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5766   omp context and into previously existing ones.  */
5767
5768static void
726a989a 5769gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
a68ab351 5770 enum omp_region_type region_type)
953ff289
DN
5771{
5772 struct gimplify_omp_ctx *ctx, *outer_ctx;
d406b663 5773 struct gimplify_ctx gctx;
953ff289
DN
5774 tree c;
5775
a68ab351 5776 ctx = new_omp_context (region_type);
953ff289
DN
5777 outer_ctx = ctx->outer_context;
5778
5779 while ((c = *list_p) != NULL)
5780 {
953ff289
DN
5781 bool remove = false;
5782 bool notice_outer = true;
07b7aade 5783 const char *check_non_private = NULL;
953ff289
DN
5784 unsigned int flags;
5785 tree decl;
5786
aaf46ef9 5787 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
5788 {
5789 case OMP_CLAUSE_PRIVATE:
5790 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
a68ab351
JJ
5791 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5792 {
5793 flags |= GOVD_PRIVATE_OUTER_REF;
5794 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5795 }
5796 else
5797 notice_outer = false;
953ff289
DN
5798 goto do_add;
5799 case OMP_CLAUSE_SHARED:
5800 flags = GOVD_SHARED | GOVD_EXPLICIT;
5801 goto do_add;
5802 case OMP_CLAUSE_FIRSTPRIVATE:
5803 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
07b7aade 5804 check_non_private = "firstprivate";
953ff289
DN
5805 goto do_add;
5806 case OMP_CLAUSE_LASTPRIVATE:
5807 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 5808 check_non_private = "lastprivate";
953ff289
DN
5809 goto do_add;
5810 case OMP_CLAUSE_REDUCTION:
5811 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
07b7aade 5812 check_non_private = "reduction";
953ff289
DN
5813 goto do_add;
5814
5815 do_add:
5816 decl = OMP_CLAUSE_DECL (c);
5817 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5818 {
5819 remove = true;
5820 break;
5821 }
5822 omp_add_variable (ctx, decl, flags);
693d710f 5823 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
5824 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5825 {
5826 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
693d710f 5827 GOVD_LOCAL | GOVD_SEEN);
953ff289 5828 gimplify_omp_ctxp = ctx;
d406b663 5829 push_gimplify_context (&gctx);
726a989a
RB
5830
5831 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5832 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5833
5834 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5835 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5836 pop_gimplify_context
5837 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
d406b663 5838 push_gimplify_context (&gctx);
726a989a
RB
5839 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5840 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
b8698a0f 5841 pop_gimplify_context
726a989a
RB
5842 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5843 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5844 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5845
953ff289
DN
5846 gimplify_omp_ctxp = outer_ctx;
5847 }
a68ab351
JJ
5848 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5849 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5850 {
5851 gimplify_omp_ctxp = ctx;
d406b663 5852 push_gimplify_context (&gctx);
a68ab351
JJ
5853 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5854 {
5855 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5856 NULL, NULL);
5857 TREE_SIDE_EFFECTS (bind) = 1;
5858 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5859 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5860 }
726a989a
RB
5861 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5862 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5863 pop_gimplify_context
5864 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5865 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5866
a68ab351
JJ
5867 gimplify_omp_ctxp = outer_ctx;
5868 }
953ff289
DN
5869 if (notice_outer)
5870 goto do_notice;
5871 break;
5872
5873 case OMP_CLAUSE_COPYIN:
5874 case OMP_CLAUSE_COPYPRIVATE:
5875 decl = OMP_CLAUSE_DECL (c);
5876 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5877 {
5878 remove = true;
5879 break;
5880 }
5881 do_notice:
5882 if (outer_ctx)
5883 omp_notice_variable (outer_ctx, decl, true);
07b7aade 5884 if (check_non_private
a68ab351 5885 && region_type == ORT_WORKSHARE
07b7aade
JJ
5886 && omp_check_private (ctx, decl))
5887 {
4f1e4960
JM
5888 error ("%s variable %qE is private in outer context",
5889 check_non_private, DECL_NAME (decl));
07b7aade
JJ
5890 remove = true;
5891 }
953ff289
DN
5892 break;
5893
953ff289 5894 case OMP_CLAUSE_IF:
d568d1a8
RS
5895 OMP_CLAUSE_OPERAND (c, 0)
5896 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5897 /* Fall through. */
5898
5899 case OMP_CLAUSE_SCHEDULE:
953ff289 5900 case OMP_CLAUSE_NUM_THREADS:
726a989a
RB
5901 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5902 is_gimple_val, fb_rvalue) == GS_ERROR)
5903 remove = true;
953ff289
DN
5904 break;
5905
5906 case OMP_CLAUSE_NOWAIT:
5907 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
5908 case OMP_CLAUSE_UNTIED:
5909 case OMP_CLAUSE_COLLAPSE:
953ff289
DN
5910 break;
5911
5912 case OMP_CLAUSE_DEFAULT:
5913 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5914 break;
5915
5916 default:
5917 gcc_unreachable ();
5918 }
5919
5920 if (remove)
5921 *list_p = OMP_CLAUSE_CHAIN (c);
5922 else
5923 list_p = &OMP_CLAUSE_CHAIN (c);
5924 }
5925
5926 gimplify_omp_ctxp = ctx;
5927}
5928
5929/* For all variables that were not actually used within the context,
5930 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
5931
5932static int
5933gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5934{
5935 tree *list_p = (tree *) data;
5936 tree decl = (tree) n->key;
5937 unsigned flags = n->value;
aaf46ef9 5938 enum omp_clause_code code;
953ff289
DN
5939 tree clause;
5940 bool private_debug;
5941
5942 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5943 return 0;
5944 if ((flags & GOVD_SEEN) == 0)
5945 return 0;
5946 if (flags & GOVD_DEBUG_PRIVATE)
5947 {
5948 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5949 private_debug = true;
5950 }
5951 else
5952 private_debug
5953 = lang_hooks.decls.omp_private_debug_clause (decl,
5954 !!(flags & GOVD_SHARED));
5955 if (private_debug)
5956 code = OMP_CLAUSE_PRIVATE;
5957 else if (flags & GOVD_SHARED)
5958 {
5959 if (is_global_var (decl))
64964499
JJ
5960 {
5961 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5962 while (ctx != NULL)
5963 {
5964 splay_tree_node on
5965 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5966 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5967 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5968 break;
5969 ctx = ctx->outer_context;
5970 }
5971 if (ctx == NULL)
5972 return 0;
5973 }
953ff289
DN
5974 code = OMP_CLAUSE_SHARED;
5975 }
5976 else if (flags & GOVD_PRIVATE)
5977 code = OMP_CLAUSE_PRIVATE;
5978 else if (flags & GOVD_FIRSTPRIVATE)
5979 code = OMP_CLAUSE_FIRSTPRIVATE;
5980 else
5981 gcc_unreachable ();
5982
c2255bc4 5983 clause = build_omp_clause (input_location, code);
aaf46ef9 5984 OMP_CLAUSE_DECL (clause) = decl;
953ff289
DN
5985 OMP_CLAUSE_CHAIN (clause) = *list_p;
5986 if (private_debug)
5987 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
a68ab351
JJ
5988 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5989 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
953ff289 5990 *list_p = clause;
a68ab351 5991 lang_hooks.decls.omp_finish_clause (clause);
953ff289
DN
5992
5993 return 0;
5994}
5995
5996static void
5997gimplify_adjust_omp_clauses (tree *list_p)
5998{
5999 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6000 tree c, decl;
6001
6002 while ((c = *list_p) != NULL)
6003 {
6004 splay_tree_node n;
6005 bool remove = false;
6006
aaf46ef9 6007 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
6008 {
6009 case OMP_CLAUSE_PRIVATE:
6010 case OMP_CLAUSE_SHARED:
6011 case OMP_CLAUSE_FIRSTPRIVATE:
6012 decl = OMP_CLAUSE_DECL (c);
6013 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6014 remove = !(n->value & GOVD_SEEN);
6015 if (! remove)
6016 {
aaf46ef9 6017 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
953ff289
DN
6018 if ((n->value & GOVD_DEBUG_PRIVATE)
6019 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6020 {
6021 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6022 || ((n->value & GOVD_DATA_SHARE_CLASS)
6023 == GOVD_PRIVATE));
aaf46ef9 6024 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
953ff289
DN
6025 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6026 }
6027 }
6028 break;
6029
6030 case OMP_CLAUSE_LASTPRIVATE:
6031 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6032 accurately reflect the presence of a FIRSTPRIVATE clause. */
6033 decl = OMP_CLAUSE_DECL (c);
6034 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6035 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6036 = (n->value & GOVD_FIRSTPRIVATE) != 0;
6037 break;
b8698a0f 6038
953ff289
DN
6039 case OMP_CLAUSE_REDUCTION:
6040 case OMP_CLAUSE_COPYIN:
6041 case OMP_CLAUSE_COPYPRIVATE:
6042 case OMP_CLAUSE_IF:
6043 case OMP_CLAUSE_NUM_THREADS:
6044 case OMP_CLAUSE_SCHEDULE:
6045 case OMP_CLAUSE_NOWAIT:
6046 case OMP_CLAUSE_ORDERED:
6047 case OMP_CLAUSE_DEFAULT:
a68ab351
JJ
6048 case OMP_CLAUSE_UNTIED:
6049 case OMP_CLAUSE_COLLAPSE:
953ff289
DN
6050 break;
6051
6052 default:
6053 gcc_unreachable ();
6054 }
6055
6056 if (remove)
6057 *list_p = OMP_CLAUSE_CHAIN (c);
6058 else
6059 list_p = &OMP_CLAUSE_CHAIN (c);
6060 }
6061
6062 /* Add in any implicit data sharing. */
6063 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
b8698a0f 6064
953ff289
DN
6065 gimplify_omp_ctxp = ctx->outer_context;
6066 delete_omp_context (ctx);
6067}
6068
6069/* Gimplify the contents of an OMP_PARALLEL statement. This involves
6070 gimplification of the body, as well as scanning the body for used
6071 variables. We need to do this scan now, because variable-sized
6072 decls will be decomposed during gimplification. */
6073
726a989a
RB
6074static void
6075gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
6076{
6077 tree expr = *expr_p;
726a989a
RB
6078 gimple g;
6079 gimple_seq body = NULL;
d406b663 6080 struct gimplify_ctx gctx;
953ff289 6081
a68ab351
JJ
6082 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6083 OMP_PARALLEL_COMBINED (expr)
6084 ? ORT_COMBINED_PARALLEL
6085 : ORT_PARALLEL);
953ff289 6086
d406b663 6087 push_gimplify_context (&gctx);
953ff289 6088
726a989a
RB
6089 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6090 if (gimple_code (g) == GIMPLE_BIND)
6091 pop_gimplify_context (g);
50674e96 6092 else
726a989a 6093 pop_gimplify_context (NULL);
953ff289
DN
6094
6095 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6096
726a989a
RB
6097 g = gimple_build_omp_parallel (body,
6098 OMP_PARALLEL_CLAUSES (expr),
6099 NULL_TREE, NULL_TREE);
6100 if (OMP_PARALLEL_COMBINED (expr))
6101 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6102 gimplify_seq_add_stmt (pre_p, g);
6103 *expr_p = NULL_TREE;
953ff289
DN
6104}
6105
a68ab351
JJ
6106/* Gimplify the contents of an OMP_TASK statement. This involves
6107 gimplification of the body, as well as scanning the body for used
6108 variables. We need to do this scan now, because variable-sized
6109 decls will be decomposed during gimplification. */
953ff289 6110
726a989a
RB
6111static void
6112gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
953ff289 6113{
a68ab351 6114 tree expr = *expr_p;
726a989a
RB
6115 gimple g;
6116 gimple_seq body = NULL;
d406b663 6117 struct gimplify_ctx gctx;
953ff289 6118
f22f4340
JJ
6119 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6120 find_omp_clause (OMP_TASK_CLAUSES (expr),
6121 OMP_CLAUSE_UNTIED)
6122 ? ORT_UNTIED_TASK : ORT_TASK);
953ff289 6123
d406b663 6124 push_gimplify_context (&gctx);
953ff289 6125
726a989a
RB
6126 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6127 if (gimple_code (g) == GIMPLE_BIND)
6128 pop_gimplify_context (g);
953ff289 6129 else
726a989a 6130 pop_gimplify_context (NULL);
953ff289 6131
a68ab351 6132 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
917948d3 6133
726a989a
RB
6134 g = gimple_build_omp_task (body,
6135 OMP_TASK_CLAUSES (expr),
6136 NULL_TREE, NULL_TREE,
6137 NULL_TREE, NULL_TREE, NULL_TREE);
6138 gimplify_seq_add_stmt (pre_p, g);
6139 *expr_p = NULL_TREE;
a68ab351
JJ
6140}
6141
6142/* Gimplify the gross structure of an OMP_FOR statement. */
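/* A sketch of what happens below: each collapsed loop's iteration variable
   is made private (or merely noticed if it already is), a fresh temporary
   stands in for it when the declared variable is not a gimple register,
   the init, condition and increment operands are gimplified into
   FOR_PRE_BODY, and an increment such as "i++" is normalized, roughly, to
   the MODIFY_EXPR

     i = i + 1

   before the GIMPLE_OMP_FOR statement is built.  */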
6143
6144static enum gimplify_status
726a989a 6145gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
a68ab351 6146{
726a989a 6147 tree for_stmt, decl, var, t;
32e8bb8e
ILT
6148 enum gimplify_status ret = GS_ALL_DONE;
6149 enum gimplify_status tret;
726a989a
RB
6150 gimple gfor;
6151 gimple_seq for_body, for_pre_body;
a68ab351
JJ
6152 int i;
6153
6154 for_stmt = *expr_p;
6155
6156 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6157 ORT_WORKSHARE);
917948d3 6158
726a989a
RB
6159 /* Handle OMP_FOR_INIT. */
6160 for_pre_body = NULL;
6161 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6162 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
a68ab351 6163
726a989a 6164 for_body = gimple_seq_alloc ();
a68ab351
JJ
6165 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6166 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6167 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6168 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6169 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6170 {
6171 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
726a989a
RB
6172 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6173 decl = TREE_OPERAND (t, 0);
a68ab351
JJ
6174 gcc_assert (DECL_P (decl));
6175 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6176 || POINTER_TYPE_P (TREE_TYPE (decl)));
6177
6178 /* Make sure the iteration variable is private. */
6179 if (omp_is_private (gimplify_omp_ctxp, decl))
6180 omp_notice_variable (gimplify_omp_ctxp, decl, true);
6181 else
6182 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6183
6184 /* If DECL is not a gimple register, create a temporary variable to act
6185 as an iteration counter. This is valid, since DECL cannot be
6186 modified in the body of the loop. */
6187 if (!is_gimple_reg (decl))
6188 {
6189 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
726a989a 6190 TREE_OPERAND (t, 0) = var;
b8698a0f 6191
726a989a 6192 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
953ff289 6193
a68ab351
JJ
6194 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6195 }
6196 else
6197 var = decl;
07beea0d 6198
32e8bb8e 6199 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6200 is_gimple_val, fb_rvalue);
32e8bb8e 6201 ret = MIN (ret, tret);
726a989a
RB
6202 if (ret == GS_ERROR)
6203 return ret;
953ff289 6204
726a989a 6205 /* Handle OMP_FOR_COND. */
a68ab351
JJ
6206 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6207 gcc_assert (COMPARISON_CLASS_P (t));
726a989a 6208 gcc_assert (TREE_OPERAND (t, 0) == decl);
b56b9fe3 6209
32e8bb8e 6210 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6211 is_gimple_val, fb_rvalue);
32e8bb8e 6212 ret = MIN (ret, tret);
917948d3 6213
726a989a 6214 /* Handle OMP_FOR_INCR. */
a68ab351 6215 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
953ff289
DN
6216 switch (TREE_CODE (t))
6217 {
a68ab351
JJ
6218 case PREINCREMENT_EXPR:
6219 case POSTINCREMENT_EXPR:
6220 t = build_int_cst (TREE_TYPE (decl), 1);
6221 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 6222 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
6223 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6224 break;
6225
6226 case PREDECREMENT_EXPR:
6227 case POSTDECREMENT_EXPR:
6228 t = build_int_cst (TREE_TYPE (decl), -1);
6229 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
726a989a 6230 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
a68ab351
JJ
6231 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6232 break;
6233
726a989a
RB
6234 case MODIFY_EXPR:
6235 gcc_assert (TREE_OPERAND (t, 0) == decl);
6236 TREE_OPERAND (t, 0) = var;
a68ab351 6237
726a989a 6238 t = TREE_OPERAND (t, 1);
a68ab351 6239 switch (TREE_CODE (t))
953ff289 6240 {
a68ab351
JJ
6241 case PLUS_EXPR:
6242 if (TREE_OPERAND (t, 1) == decl)
6243 {
6244 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6245 TREE_OPERAND (t, 0) = var;
6246 break;
6247 }
6248
6249 /* Fallthru. */
6250 case MINUS_EXPR:
6251 case POINTER_PLUS_EXPR:
6252 gcc_assert (TREE_OPERAND (t, 0) == decl);
917948d3 6253 TREE_OPERAND (t, 0) = var;
953ff289 6254 break;
a68ab351
JJ
6255 default:
6256 gcc_unreachable ();
953ff289 6257 }
917948d3 6258
32e8bb8e 6259 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
726a989a 6260 is_gimple_val, fb_rvalue);
32e8bb8e 6261 ret = MIN (ret, tret);
953ff289 6262 break;
a68ab351 6263
953ff289
DN
6264 default:
6265 gcc_unreachable ();
6266 }
6267
a68ab351
JJ
6268 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6269 {
6270 tree c;
6271 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
726a989a
RB
6272 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6273 && OMP_CLAUSE_DECL (c) == decl
6274 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6275 {
6276 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6277 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6278 gcc_assert (TREE_OPERAND (t, 0) == var);
6279 t = TREE_OPERAND (t, 1);
6280 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6281 || TREE_CODE (t) == MINUS_EXPR
6282 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6283 gcc_assert (TREE_OPERAND (t, 0) == var);
6284 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6285 TREE_OPERAND (t, 1));
6286 gimplify_assign (decl, t,
6287 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
a68ab351
JJ
6288 }
6289 }
953ff289
DN
6290 }
6291
726a989a
RB
6292 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6293
953ff289
DN
6294 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6295
726a989a
RB
6296 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6297 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6298 for_pre_body);
6299
6300 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6301 {
6302 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6303 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6304 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6305 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6306 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6307 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6308 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6309 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6310 }
6311
6312 gimplify_seq_add_stmt (pre_p, gfor);
953ff289
DN
6313 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
6314}
6315
6316/* Gimplify the gross structure of other OpenMP worksharing constructs.
6317 In particular, OMP_SECTIONS and OMP_SINGLE. */
6318
726a989a
RB
6319static void
6320gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
953ff289 6321{
726a989a
RB
6322 tree expr = *expr_p;
6323 gimple stmt;
6324 gimple_seq body = NULL;
953ff289 6325
726a989a
RB
6326 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6327 gimplify_and_add (OMP_BODY (expr), &body);
6328 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
953ff289 6329
726a989a
RB
6330 if (TREE_CODE (expr) == OMP_SECTIONS)
6331 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6332 else if (TREE_CODE (expr) == OMP_SINGLE)
6333 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6334 else
6335 gcc_unreachable ();
6336
6337 gimplify_seq_add_stmt (pre_p, stmt);
953ff289
DN
6338}
6339
6340/* A subroutine of gimplify_omp_atomic. The front end is supposed to have
b8698a0f 6341 stabilized the lhs of the atomic operation as *ADDR. Return true if
953ff289
DN
6342 EXPR is this stabilized form. */
6343
6344static bool
a509ebb5 6345goa_lhs_expr_p (tree expr, tree addr)
953ff289
DN
6346{
6347 /* Also include casts to other type variants. The C front end is fond
b8698a0f 6348 of adding these for e.g. volatile variables. This is like
953ff289 6349 STRIP_TYPE_NOPS but includes the main variant lookup. */
9600efe1 6350 STRIP_USELESS_TYPE_CONVERSION (expr);
953ff289 6351
78e47463
JJ
6352 if (TREE_CODE (expr) == INDIRECT_REF)
6353 {
6354 expr = TREE_OPERAND (expr, 0);
6355 while (expr != addr
1043771b 6356 && (CONVERT_EXPR_P (expr)
78e47463
JJ
6357 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6358 && TREE_CODE (expr) == TREE_CODE (addr)
9600efe1 6359 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
78e47463
JJ
6360 {
6361 expr = TREE_OPERAND (expr, 0);
6362 addr = TREE_OPERAND (addr, 0);
6363 }
251923f5
JJ
6364 if (expr == addr)
6365 return true;
71458b8a
JJ
6366 return (TREE_CODE (addr) == ADDR_EXPR
6367 && TREE_CODE (expr) == ADDR_EXPR
251923f5 6368 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
78e47463 6369 }
953ff289
DN
6370 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6371 return true;
6372 return false;
6373}
6374
a509ebb5 6375/* Walk *EXPR_P and replace
c0220ea4 6376 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
953ff289
DN
6377 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
6378 a subexpression, 0 if it did not, or -1 if an error was encountered. */
6379
6380static int
726a989a
RB
6381goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6382 tree lhs_var)
953ff289
DN
6383{
6384 tree expr = *expr_p;
6385 int saw_lhs;
6386
6387 if (goa_lhs_expr_p (expr, lhs_addr))
6388 {
6389 *expr_p = lhs_var;
6390 return 1;
6391 }
6392 if (is_gimple_val (expr))
6393 return 0;
b8698a0f 6394
953ff289
DN
6395 saw_lhs = 0;
6396 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6397 {
6398 case tcc_binary:
067dd3c9 6399 case tcc_comparison:
726a989a
RB
6400 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6401 lhs_var);
953ff289 6402 case tcc_unary:
726a989a
RB
6403 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6404 lhs_var);
953ff289 6405 break;
067dd3c9
JJ
6406 case tcc_expression:
6407 switch (TREE_CODE (expr))
6408 {
6409 case TRUTH_ANDIF_EXPR:
6410 case TRUTH_ORIF_EXPR:
f2b11865
JJ
6411 case TRUTH_AND_EXPR:
6412 case TRUTH_OR_EXPR:
6413 case TRUTH_XOR_EXPR:
067dd3c9
JJ
6414 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6415 lhs_addr, lhs_var);
f2b11865 6416 case TRUTH_NOT_EXPR:
067dd3c9
JJ
6417 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6418 lhs_addr, lhs_var);
6419 break;
6420 default:
6421 break;
6422 }
6423 break;
953ff289
DN
6424 default:
6425 break;
6426 }
6427
6428 if (saw_lhs == 0)
6429 {
6430 enum gimplify_status gs;
6431 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6432 if (gs != GS_ALL_DONE)
6433 saw_lhs = -1;
6434 }
6435
6436 return saw_lhs;
6437}
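/* Illustration only -- the temporaries and variable names below are
   invented, not taken from a real dump.  With LHS_ADDR == &x and
   LHS_VAR == tmp_load, stabilizing the expression *(&x) + y * z
   rewrites it to

       tmp_load + D.1234

   with the statement D.1234 = y * z emitted into *PRE_P (the subtree
   not involving the lhs is evaluated into a temporary), and returns 1
   because the lhs appeared as a subexpression.  */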
6438
726a989a 6439
953ff289
DN
6440/* Gimplify an OMP_ATOMIC statement. */
6441
6442static enum gimplify_status
726a989a 6443gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
953ff289
DN
6444{
6445 tree addr = TREE_OPERAND (*expr_p, 0);
6446 tree rhs = TREE_OPERAND (*expr_p, 1);
6447 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
726a989a 6448 tree tmp_load;
953ff289 6449
acd63801 6450 tmp_load = create_tmp_reg (type, NULL);
a509ebb5
RL
6451 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6452 return GS_ERROR;
953ff289 6453
a509ebb5
RL
6454 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6455 != GS_ALL_DONE)
6456 return GS_ERROR;
953ff289 6457
726a989a 6458 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
a509ebb5
RL
6459 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6460 != GS_ALL_DONE)
6461 return GS_ERROR;
726a989a
RB
6462 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6463 *expr_p = NULL;
a509ebb5
RL
6464
6465 return GS_ALL_DONE;
953ff289 6466}
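/* A sketch of the overall effect (the temporary names below are made
   up, not a literal dump): for

       #pragma omp atomic
       x = x + 1;

   the first operand of the OMP_ATOMIC is &x and the second still
   refers to the old value of x.  The code above then emits roughly

       tmp.1 = GIMPLE_OMP_ATOMIC_LOAD <&x>;
       tmp.2 = tmp.1 + 1;
       GIMPLE_OMP_ATOMIC_STORE <tmp.2>;

   with goa_stabilize_expr having replaced the reference to x in the
   right-hand side by the temporary produced by the load.  */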
6de9cd9a 6467
6de9cd9a 6468
726a989a
RB
6469/* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the
6470 expression produces a value to be used as an operand inside a GIMPLE
6471 statement, the value will be stored back in *EXPR_P. This value will
6472 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6473 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6474 emitted in PRE_P and POST_P.
6475
6476 Additionally, this process may overwrite parts of the input
6477 expression during gimplification. Ideally, it should be
6478 possible to do non-destructive gimplification.
6479
6480 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6481 the expression needs to evaluate to a value to be used as
6482 an operand in a GIMPLE statement, this value will be stored in
6483 *EXPR_P on exit. This happens when the caller specifies one
6484 of fb_lvalue or fb_rvalue fallback flags.
6485
6486 PRE_P will contain the sequence of GIMPLE statements corresponding
6487 to the evaluation of EXPR and all the side-effects that must
6488 be executed before the main expression. On exit, the last
6489 statement of PRE_P is the core statement being gimplified. For
6490 instance, when gimplifying 'if (++a)' the last statement in
6491 PRE_P will be 'if (t.1)' where t.1 is the result of
6492 pre-incrementing 'a'.
6493
6494 POST_P will contain the sequence of GIMPLE statements corresponding
6495 to the evaluation of all the side-effects that must be executed
6496 after the main expression. If this is NULL, the post
6497 side-effects are stored at the end of PRE_P.
6498
6499 The reason why the output is split in two is to handle post
6500 side-effects explicitly. In some cases, an expression may have
6501 inner and outer post side-effects which need to be emitted in
6502 an order different from the one given by the recursive
6503 traversal. For instance, for the expression (*p--)++ the post
6504 side-effects of '--' must actually occur *after* the post
6505 side-effects of '++'. However, gimplification will first visit
6506 the inner expression, so if a separate POST sequence was not
6507 used, the resulting sequence would be:
6508
6509 1 t.1 = *p
6510 2 p = p - 1
6511 3 t.2 = t.1 + 1
6512 4 *p = t.2
6513
6514 However, the post-decrement operation in line #2 must not be
6515 evaluated until after the store to *p at line #4, so the
6516 correct sequence should be:
6517
6518 1 t.1 = *p
6519 2 t.2 = t.1 + 1
6520 3 *p = t.2
6521 4 p = p - 1
6522
6523 So, by specifying a separate post queue, it is possible
6524 to emit the post side-effects in the correct order.
6525 If POST_P is NULL, an internal queue will be used. Before
6526 returning to the caller, the sequence POST_P is appended to
6527 the main output sequence PRE_P.
6528
6529 GIMPLE_TEST_F points to a function that takes a tree T and
6530 returns nonzero if T is in the GIMPLE form requested by the
12947319 6531 caller. The GIMPLE predicates are in gimple.c.
726a989a
RB
6532
6533 FALLBACK tells the function what sort of a temporary we want if
6534 gimplification cannot produce an expression that complies with
6535 GIMPLE_TEST_F.
6536
6537 fb_none means that no temporary should be generated
6538 fb_rvalue means that an rvalue is OK to generate
6539 fb_lvalue means that an lvalue is OK to generate
6540 fb_either means that either is OK, but an lvalue is preferable.
6541 fb_mayfail means that gimplification may fail (in which case
6542 GS_ERROR will be returned)
6543
6544 The return value is either GS_ERROR or GS_ALL_DONE, since this
6545 function iterates until EXPR is completely gimplified or an error
6546 occurs. */
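/* A minimal usage sketch (hypothetical caller, not code from this file):
   a gimplifier helper that needs operand 0 of *EXPR_P as a GIMPLE value
   would typically write

       enum gimplify_status gs
         = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                          is_gimple_val, fb_rvalue);
       if (gs == GS_ERROR)
         return GS_ERROR;

   after which that operand is a constant, decl or SSA_NAME and any side
   effects needed to compute it have been emitted into *PRE_P and *POST_P.  */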
6de9cd9a
DN
6547
6548enum gimplify_status
726a989a
RB
6549gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6550 bool (*gimple_test_f) (tree), fallback_t fallback)
6de9cd9a
DN
6551{
6552 tree tmp;
726a989a
RB
6553 gimple_seq internal_pre = NULL;
6554 gimple_seq internal_post = NULL;
6de9cd9a 6555 tree save_expr;
726a989a 6556 bool is_statement;
6de9cd9a
DN
6557 location_t saved_location;
6558 enum gimplify_status ret;
726a989a 6559 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6de9cd9a
DN
6560
6561 save_expr = *expr_p;
6562 if (save_expr == NULL_TREE)
6563 return GS_ALL_DONE;
6564
726a989a
RB
6565 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6566 is_statement = gimple_test_f == is_gimple_stmt;
6567 if (is_statement)
6568 gcc_assert (pre_p);
6569
6570 /* Consistency checks. */
6571 if (gimple_test_f == is_gimple_reg)
6572 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6573 else if (gimple_test_f == is_gimple_val
726a989a
RB
6574 || gimple_test_f == is_gimple_call_addr
6575 || gimple_test_f == is_gimple_condexpr
6576 || gimple_test_f == is_gimple_mem_rhs
ba4d8f9d 6577 || gimple_test_f == is_gimple_mem_rhs_or_call
726a989a 6578 || gimple_test_f == is_gimple_reg_rhs
ba4d8f9d 6579 || gimple_test_f == is_gimple_reg_rhs_or_call
70f34814
RG
6580 || gimple_test_f == is_gimple_asm_val
6581 || gimple_test_f == is_gimple_mem_ref_addr)
726a989a
RB
6582 gcc_assert (fallback & fb_rvalue);
6583 else if (gimple_test_f == is_gimple_min_lval
6584 || gimple_test_f == is_gimple_lvalue)
6585 gcc_assert (fallback & fb_lvalue);
6586 else if (gimple_test_f == is_gimple_addressable)
6587 gcc_assert (fallback & fb_either);
6588 else if (gimple_test_f == is_gimple_stmt)
6589 gcc_assert (fallback == fb_none);
6590 else
6591 {
6592 /* We should have recognized the GIMPLE_TEST_F predicate to
6593 know what kind of fallback to use in case a temporary is
6594 needed to hold the value or address of *EXPR_P. */
6595 gcc_unreachable ();
6596 }
6597
6de9cd9a
DN
6598 /* We used to check the predicate here and return immediately if it
6599 succeeds. This is wrong; the design is for gimplification to be
6600 idempotent, and for the predicates to only test for valid forms, not
6601 whether they are fully simplified. */
6de9cd9a
DN
6602 if (pre_p == NULL)
6603 pre_p = &internal_pre;
726a989a 6604
6de9cd9a
DN
6605 if (post_p == NULL)
6606 post_p = &internal_post;
6607
726a989a
RB
6608 /* Remember the last statements added to PRE_P and POST_P. Every
6609 new statement added by the gimplification helpers needs to be
6610 annotated with location information. To centralize the
6611 responsibility, we remember the last statement that had been
6612 added to both queues before gimplifying *EXPR_P. If
6613 gimplification produces new statements in PRE_P and POST_P, those
6614 statements will be annotated with the same location information
6615 as *EXPR_P. */
6616 pre_last_gsi = gsi_last (*pre_p);
6617 post_last_gsi = gsi_last (*post_p);
6618
6de9cd9a 6619 saved_location = input_location;
a281759f
PB
6620 if (save_expr != error_mark_node
6621 && EXPR_HAS_LOCATION (*expr_p))
6622 input_location = EXPR_LOCATION (*expr_p);
6de9cd9a
DN
6623
6624 /* Loop over the specific gimplifiers until the toplevel node
6625 remains the same. */
6626 do
6627 {
73d6ddef
RK
6628 /* Strip away as many useless type conversions as possible
6629 at the toplevel. */
6630 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6de9cd9a
DN
6631
6632 /* Remember the expr. */
6633 save_expr = *expr_p;
6634
6635 /* Die, die, die, my darling. */
6636 if (save_expr == error_mark_node
726a989a 6637 || (TREE_TYPE (save_expr)
65355d53 6638 && TREE_TYPE (save_expr) == error_mark_node))
6de9cd9a
DN
6639 {
6640 ret = GS_ERROR;
6641 break;
6642 }
6643
6644 /* Do any language-specific gimplification. */
32e8bb8e
ILT
6645 ret = ((enum gimplify_status)
6646 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6de9cd9a
DN
6647 if (ret == GS_OK)
6648 {
6649 if (*expr_p == NULL_TREE)
6650 break;
6651 if (*expr_p != save_expr)
6652 continue;
6653 }
6654 else if (ret != GS_UNHANDLED)
6655 break;
6656
941f78d1
JM
6657 /* Make sure that all the cases set 'ret' appropriately. */
6658 ret = GS_UNHANDLED;
6de9cd9a
DN
6659 switch (TREE_CODE (*expr_p))
6660 {
6661 /* First deal with the special cases. */
6662
6663 case POSTINCREMENT_EXPR:
6664 case POSTDECREMENT_EXPR:
6665 case PREINCREMENT_EXPR:
6666 case PREDECREMENT_EXPR:
6667 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6668 fallback != fb_none);
6669 break;
6670
6671 case ARRAY_REF:
44de5aeb
RK
6672 case ARRAY_RANGE_REF:
6673 case REALPART_EXPR:
6674 case IMAGPART_EXPR:
6de9cd9a 6675 case COMPONENT_REF:
9e51aaf5 6676 case VIEW_CONVERT_EXPR:
6de9cd9a 6677 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
90051e16 6678 fallback ? fallback : fb_rvalue);
6de9cd9a
DN
6679 break;
6680
6681 case COND_EXPR:
dae7ec87 6682 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
726a989a 6683
0223e4f5
JM
6684 /* C99 code may assign to an array in a structure value of a
6685 conditional expression, and this has undefined behavior
6686 only on execution, so create a temporary if an lvalue is
6687 required. */
6688 if (fallback == fb_lvalue)
6689 {
6690 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 6691 mark_addressable (*expr_p);
941f78d1 6692 ret = GS_OK;
0223e4f5 6693 }
6de9cd9a
DN
6694 break;
6695
6696 case CALL_EXPR:
90051e16 6697 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
726a989a 6698
0223e4f5
JM
6699 /* C99 code may assign to an array in a structure returned
6700 from a function, and this has undefined behavior only on
6701 execution, so create a temporary if an lvalue is
6702 required. */
6703 if (fallback == fb_lvalue)
6704 {
6705 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 6706 mark_addressable (*expr_p);
941f78d1 6707 ret = GS_OK;
0223e4f5 6708 }
6de9cd9a
DN
6709 break;
6710
6711 case TREE_LIST:
282899df 6712 gcc_unreachable ();
6de9cd9a
DN
6713
6714 case COMPOUND_EXPR:
6715 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6716 break;
6717
2ec5deb5
PB
6718 case COMPOUND_LITERAL_EXPR:
6719 ret = gimplify_compound_literal_expr (expr_p, pre_p);
6720 break;
6721
6de9cd9a
DN
6722 case MODIFY_EXPR:
6723 case INIT_EXPR:
ebad5233
JM
6724 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6725 fallback != fb_none);
6de9cd9a
DN
6726 break;
6727
6728 case TRUTH_ANDIF_EXPR:
6729 case TRUTH_ORIF_EXPR:
ca80e52b
EB
6730 /* Pass the source location of the outer expression. */
6731 ret = gimplify_boolean_expr (expr_p, saved_location);
6de9cd9a
DN
6732 break;
6733
6734 case TRUTH_NOT_EXPR:
67339062
JJ
6735 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6736 {
6737 tree type = TREE_TYPE (*expr_p);
6738 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6739 ret = GS_OK;
6740 break;
6741 }
6742
6de9cd9a
DN
6743 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6744 is_gimple_val, fb_rvalue);
6745 recalculate_side_effects (*expr_p);
6746 break;
6747
6748 case ADDR_EXPR:
6749 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6750 break;
6751
6752 case VA_ARG_EXPR:
cd3ce9b4 6753 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6de9cd9a
DN
6754 break;
6755
1043771b 6756 CASE_CONVERT:
6de9cd9a
DN
6757 if (IS_EMPTY_STMT (*expr_p))
6758 {
6759 ret = GS_ALL_DONE;
6760 break;
6761 }
6762
6763 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6764 || fallback == fb_none)
6765 {
6766 /* Just strip a conversion to void (or in void context) and
6767 try again. */
6768 *expr_p = TREE_OPERAND (*expr_p, 0);
941f78d1 6769 ret = GS_OK;
6de9cd9a
DN
6770 break;
6771 }
6772
6773 ret = gimplify_conversion (expr_p);
6774 if (ret == GS_ERROR)
6775 break;
6776 if (*expr_p != save_expr)
6777 break;
6778 /* FALLTHRU */
6779
6780 case FIX_TRUNC_EXPR:
6de9cd9a
DN
6781 /* unary_expr: ... | '(' cast ')' val | ... */
6782 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6783 is_gimple_val, fb_rvalue);
6784 recalculate_side_effects (*expr_p);
6785 break;
6786
70f34814
RG
6787 case MISALIGNED_INDIRECT_REF:
6788 /* We can only reach this through re-gimplification from
6789 tree optimizers. */
6790 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6791 is_gimple_reg, fb_rvalue);
6792 recalculate_side_effects (*expr_p);
6793 break;
6794
6a720599 6795 case INDIRECT_REF:
70f34814
RG
6796 {
6797 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
6798 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
6799
6800 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
6801 if (*expr_p != save_expr)
6802 {
6803 ret = GS_OK;
6804 break;
6805 }
6806
6807 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6808 is_gimple_reg, fb_rvalue);
6809 recalculate_side_effects (*expr_p);
6810
6811 *expr_p = fold_build2_loc (input_location, MEM_REF,
6812 TREE_TYPE (*expr_p),
6813 TREE_OPERAND (*expr_p, 0),
6814 build_int_cst (saved_ptr_type, 0));
6815 TREE_THIS_VOLATILE (*expr_p) = volatilep;
6816 ret = GS_OK;
6817 break;
6818 }
6819
6820 /* We arrive here through the various re-gimplification paths. */
6821 case MEM_REF:
6822 /* First try re-folding the whole thing. */
6823 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
6824 TREE_OPERAND (*expr_p, 0),
6825 TREE_OPERAND (*expr_p, 1));
6826 if (tmp)
941f78d1 6827 {
70f34814
RG
6828 *expr_p = tmp;
6829 recalculate_side_effects (*expr_p);
941f78d1
JM
6830 ret = GS_OK;
6831 break;
6832 }
6de9cd9a 6833 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
70f34814 6834 is_gimple_mem_ref_addr, fb_rvalue);
6de9cd9a 6835 recalculate_side_effects (*expr_p);
70f34814 6836 ret = GS_ALL_DONE;
6de9cd9a
DN
6837 break;
6838
6839 /* Constants need not be gimplified. */
6840 case INTEGER_CST:
6841 case REAL_CST:
325217ed 6842 case FIXED_CST:
6de9cd9a
DN
6843 case STRING_CST:
6844 case COMPLEX_CST:
6845 case VECTOR_CST:
6846 ret = GS_ALL_DONE;
6847 break;
6848
6849 case CONST_DECL:
0534fa56 6850 /* If we require an lvalue, such as for ADDR_EXPR, retain the
2a7e31df 6851 CONST_DECL node. Otherwise the decl is replaceable by its
0534fa56
RH
6852 value. */
6853 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6854 if (fallback & fb_lvalue)
6855 ret = GS_ALL_DONE;
6856 else
941f78d1
JM
6857 {
6858 *expr_p = DECL_INITIAL (*expr_p);
6859 ret = GS_OK;
6860 }
6de9cd9a
DN
6861 break;
6862
350fae66 6863 case DECL_EXPR:
726a989a 6864 ret = gimplify_decl_expr (expr_p, pre_p);
350fae66
RK
6865 break;
6866
6de9cd9a 6867 case BIND_EXPR:
c6c7698d 6868 ret = gimplify_bind_expr (expr_p, pre_p);
6de9cd9a
DN
6869 break;
6870
6871 case LOOP_EXPR:
6872 ret = gimplify_loop_expr (expr_p, pre_p);
6873 break;
6874
6875 case SWITCH_EXPR:
6876 ret = gimplify_switch_expr (expr_p, pre_p);
6877 break;
6878
6de9cd9a
DN
6879 case EXIT_EXPR:
6880 ret = gimplify_exit_expr (expr_p);
6881 break;
6882
6883 case GOTO_EXPR:
6884 /* If the target is not LABEL, then it is a computed jump
6885 and the target needs to be gimplified. */
6886 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8c50b495
JJ
6887 {
6888 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6889 NULL, is_gimple_val, fb_rvalue);
6890 if (ret == GS_ERROR)
6891 break;
6892 }
726a989a
RB
6893 gimplify_seq_add_stmt (pre_p,
6894 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
941f78d1 6895 ret = GS_ALL_DONE;
6de9cd9a
DN
6896 break;
6897
2e28e797 6898 case PREDICT_EXPR:
726a989a
RB
6899 gimplify_seq_add_stmt (pre_p,
6900 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6901 PREDICT_EXPR_OUTCOME (*expr_p)));
6902 ret = GS_ALL_DONE;
6903 break;
2e28e797 6904
6de9cd9a
DN
6905 case LABEL_EXPR:
6906 ret = GS_ALL_DONE;
282899df
NS
6907 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6908 == current_function_decl);
726a989a
RB
6909 gimplify_seq_add_stmt (pre_p,
6910 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6de9cd9a
DN
6911 break;
6912
6913 case CASE_LABEL_EXPR:
726a989a 6914 ret = gimplify_case_label_expr (expr_p, pre_p);
6de9cd9a
DN
6915 break;
6916
6917 case RETURN_EXPR:
6918 ret = gimplify_return_expr (*expr_p, pre_p);
6919 break;
6920
6921 case CONSTRUCTOR:
48eb4e53
RK
6922 /* Don't reduce this in place; let gimplify_init_constructor work its
6923 magic. But if we're just elaborating this for side effects, just
6924 gimplify any element that has side-effects. */
6925 if (fallback == fb_none)
6926 {
4038c495 6927 unsigned HOST_WIDE_INT ix;
ac47786e 6928 tree val;
08330ec2 6929 tree temp = NULL_TREE;
ac47786e
NF
6930 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
6931 if (TREE_SIDE_EFFECTS (val))
6932 append_to_statement_list (val, &temp);
48eb4e53 6933
08330ec2 6934 *expr_p = temp;
941f78d1 6935 ret = temp ? GS_OK : GS_ALL_DONE;
48eb4e53 6936 }
ca0b7d18
AP
6937 /* C99 code may assign to an array in a constructed
6938 structure or union, and this has undefined behavior only
6939 on execution, so create a temporary if an lvalue is
6940 required. */
6941 else if (fallback == fb_lvalue)
6942 {
6943 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
936d04b6 6944 mark_addressable (*expr_p);
941f78d1 6945 ret = GS_OK;
ca0b7d18 6946 }
08330ec2
AP
6947 else
6948 ret = GS_ALL_DONE;
6de9cd9a
DN
6949 break;
6950
6951 /* The following are special cases that are not handled by the
6952 original GIMPLE grammar. */
6953
6954 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6955 eliminated. */
6956 case SAVE_EXPR:
6957 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6958 break;
6959
6960 case BIT_FIELD_REF:
6961 {
6962 enum gimplify_status r0, r1, r2;
6963
726a989a
RB
6964 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6965 post_p, is_gimple_lvalue, fb_either);
6966 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6967 post_p, is_gimple_val, fb_rvalue);
6968 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6969 post_p, is_gimple_val, fb_rvalue);
6de9cd9a
DN
6970 recalculate_side_effects (*expr_p);
6971
6972 ret = MIN (r0, MIN (r1, r2));
6973 }
6974 break;
6975
150e3929
RG
6976 case TARGET_MEM_REF:
6977 {
6978 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6979
6980 if (TMR_SYMBOL (*expr_p))
23a534a1
RG
6981 /* We can't gimplify the symbol part. Assert it is
6982 already gimple instead.
6983 ??? This isn't exactly the same as ADDR_EXPR
6984 plus is_gimple_mem_ref_addr (), see fixed_address_object_p. */
6985 gcc_assert (TREE_CODE (TMR_SYMBOL (*expr_p)) == ADDR_EXPR
6986 && (TREE_CODE (TREE_OPERAND (TMR_SYMBOL (*expr_p), 0))
6987 == VAR_DECL));
6988 if (TMR_BASE (*expr_p))
150e3929
RG
6989 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6990 post_p, is_gimple_val, fb_either);
6991 if (TMR_INDEX (*expr_p))
6992 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6993 post_p, is_gimple_val, fb_rvalue);
6994 /* TMR_STEP and TMR_OFFSET are always integer constants. */
6995 ret = MIN (r0, r1);
6996 }
6997 break;
6998
6de9cd9a
DN
6999 case NON_LVALUE_EXPR:
7000 /* This should have been stripped above. */
282899df 7001 gcc_unreachable ();
6de9cd9a
DN
7002
7003 case ASM_EXPR:
7004 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7005 break;
7006
7007 case TRY_FINALLY_EXPR:
7008 case TRY_CATCH_EXPR:
726a989a
RB
7009 {
7010 gimple_seq eval, cleanup;
7011 gimple try_;
7012
7013 eval = cleanup = NULL;
7014 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7015 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
cc8b343d
JJ
7016 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
7017 if (gimple_seq_empty_p (cleanup))
7018 {
7019 gimple_seq_add_seq (pre_p, eval);
7020 ret = GS_ALL_DONE;
7021 break;
7022 }
726a989a
RB
7023 try_ = gimple_build_try (eval, cleanup,
7024 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7025 ? GIMPLE_TRY_FINALLY
7026 : GIMPLE_TRY_CATCH);
7027 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7028 gimple_try_set_catch_is_cleanup (try_,
7029 TRY_CATCH_IS_CLEANUP (*expr_p));
7030 gimplify_seq_add_stmt (pre_p, try_);
7031 ret = GS_ALL_DONE;
7032 break;
7033 }
6de9cd9a
DN
7034
7035 case CLEANUP_POINT_EXPR:
7036 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7037 break;
7038
7039 case TARGET_EXPR:
7040 ret = gimplify_target_expr (expr_p, pre_p, post_p);
7041 break;
7042
7043 case CATCH_EXPR:
726a989a
RB
7044 {
7045 gimple c;
7046 gimple_seq handler = NULL;
7047 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7048 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7049 gimplify_seq_add_stmt (pre_p, c);
7050 ret = GS_ALL_DONE;
7051 break;
7052 }
6de9cd9a
DN
7053
7054 case EH_FILTER_EXPR:
726a989a
RB
7055 {
7056 gimple ehf;
7057 gimple_seq failure = NULL;
7058
7059 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7060 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
d665b6e5 7061 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
726a989a
RB
7062 gimplify_seq_add_stmt (pre_p, ehf);
7063 ret = GS_ALL_DONE;
7064 break;
7065 }
6de9cd9a 7066
0f59171d
RH
7067 case OBJ_TYPE_REF:
7068 {
7069 enum gimplify_status r0, r1;
726a989a
RB
7070 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7071 post_p, is_gimple_val, fb_rvalue);
7072 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7073 post_p, is_gimple_val, fb_rvalue);
0f3a057a 7074 TREE_SIDE_EFFECTS (*expr_p) = 0;
0f59171d
RH
7075 ret = MIN (r0, r1);
7076 }
6de9cd9a
DN
7077 break;
7078
6de9cd9a
DN
7079 case LABEL_DECL:
7080 /* We get here when taking the address of a label. We mark
7081 the label as "forced", meaning it can never be removed and
7082 it is a potential target for any computed goto. */
7083 FORCED_LABEL (*expr_p) = 1;
7084 ret = GS_ALL_DONE;
7085 break;
7086
7087 case STATEMENT_LIST:
c6c7698d 7088 ret = gimplify_statement_list (expr_p, pre_p);
6de9cd9a
DN
7089 break;
7090
d25cee4d
RH
7091 case WITH_SIZE_EXPR:
7092 {
70e2829d
KH
7093 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7094 post_p == &internal_post ? NULL : post_p,
7095 gimple_test_f, fallback);
7096 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7097 is_gimple_val, fb_rvalue);
941f78d1 7098 ret = GS_ALL_DONE;
d25cee4d
RH
7099 }
7100 break;
7101
6de9cd9a 7102 case VAR_DECL:
4744afba 7103 case PARM_DECL:
a9f7c570 7104 ret = gimplify_var_or_parm_decl (expr_p);
6de9cd9a
DN
7105 break;
7106
077b0dfb
JJ
7107 case RESULT_DECL:
7108 /* When within an OpenMP context, notice uses of variables. */
7109 if (gimplify_omp_ctxp)
7110 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7111 ret = GS_ALL_DONE;
7112 break;
7113
71956db3
RH
7114 case SSA_NAME:
7115 /* Allow callbacks into the gimplifier during optimization. */
7116 ret = GS_ALL_DONE;
7117 break;
7118
953ff289 7119 case OMP_PARALLEL:
726a989a
RB
7120 gimplify_omp_parallel (expr_p, pre_p);
7121 ret = GS_ALL_DONE;
953ff289
DN
7122 break;
7123
a68ab351 7124 case OMP_TASK:
726a989a
RB
7125 gimplify_omp_task (expr_p, pre_p);
7126 ret = GS_ALL_DONE;
a68ab351
JJ
7127 break;
7128
953ff289
DN
7129 case OMP_FOR:
7130 ret = gimplify_omp_for (expr_p, pre_p);
7131 break;
7132
7133 case OMP_SECTIONS:
7134 case OMP_SINGLE:
726a989a
RB
7135 gimplify_omp_workshare (expr_p, pre_p);
7136 ret = GS_ALL_DONE;
953ff289
DN
7137 break;
7138
7139 case OMP_SECTION:
7140 case OMP_MASTER:
7141 case OMP_ORDERED:
7142 case OMP_CRITICAL:
726a989a
RB
7143 {
7144 gimple_seq body = NULL;
7145 gimple g;
7146
7147 gimplify_and_add (OMP_BODY (*expr_p), &body);
7148 switch (TREE_CODE (*expr_p))
7149 {
7150 case OMP_SECTION:
7151 g = gimple_build_omp_section (body);
7152 break;
7153 case OMP_MASTER:
7154 g = gimple_build_omp_master (body);
7155 break;
7156 case OMP_ORDERED:
7157 g = gimple_build_omp_ordered (body);
7158 break;
7159 case OMP_CRITICAL:
7160 g = gimple_build_omp_critical (body,
7161 OMP_CRITICAL_NAME (*expr_p));
7162 break;
7163 default:
7164 gcc_unreachable ();
7165 }
7166 gimplify_seq_add_stmt (pre_p, g);
7167 ret = GS_ALL_DONE;
7168 break;
7169 }
953ff289
DN
7170
7171 case OMP_ATOMIC:
7172 ret = gimplify_omp_atomic (expr_p, pre_p);
7173 break;
7174
5be014d5 7175 case POINTER_PLUS_EXPR:
fe9821b8
JH
7176 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
7177 The latter is a gimple immediate, saving the need for an extra
7178 statement. */
5be014d5 7179 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
99f536cc 7180 && (tmp = maybe_fold_offset_to_address
c2255bc4
AH
7181 (EXPR_LOCATION (*expr_p),
7182 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
7183 TREE_TYPE (*expr_p))))
99f536cc
RG
7184 {
7185 *expr_p = tmp;
941f78d1 7186 ret = GS_OK;
99f536cc
RG
7187 break;
7188 }
ac5a28a6 7189 /* Convert (void *)&a + 4 into (void *)&a[1]. */
5be014d5 7190 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
ac5a28a6 7191 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
2cb7995f
JH
7192 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
7193 0),0)))
99f536cc 7194 && (tmp = maybe_fold_offset_to_address
c2255bc4
AH
7195 (EXPR_LOCATION (*expr_p),
7196 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
7197 TREE_OPERAND (*expr_p, 1),
7198 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
7199 0)))))
ac5a28a6 7200 {
ac5a28a6 7201 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
941f78d1 7202 ret = GS_OK;
ac5a28a6 7203 break;
fe9821b8
JH
7204 }
7205 /* FALLTHRU */
726a989a 7206
6de9cd9a 7207 default:
282899df 7208 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6de9cd9a 7209 {
6615c446 7210 case tcc_comparison:
61c25908
OH
7211 /* Handle comparison of non-scalar-mode aggregate objects
7212 with a call to memcmp. It would be nice to only have to do
7213 this for variable-sized objects, but then we'd have to allow
7214 the same nest of reference nodes we allow for MODIFY_EXPR and
7215 that's too complex.
7216
7217 Compare scalar mode aggregates as scalar mode values. Using
7218 memcmp for them would be very inefficient at best, and is
7219 plain wrong if bitfields are involved. */
726a989a
RB
7220 {
7221 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
61c25908 7222
726a989a
RB
7223 if (!AGGREGATE_TYPE_P (type))
7224 goto expr_2;
7225 else if (TYPE_MODE (type) != BLKmode)
7226 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7227 else
7228 ret = gimplify_variable_sized_compare (expr_p);
61c25908 7229
726a989a 7230 break;
61c25908 7231 }
d3147f64 7232
282899df
NS
7233 /* If *EXPR_P does not need to be special-cased, handle it
7234 according to its class. */
6615c446 7235 case tcc_unary:
282899df
NS
7236 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7237 post_p, is_gimple_val, fb_rvalue);
7238 break;
6de9cd9a 7239
6615c446 7240 case tcc_binary:
282899df
NS
7241 expr_2:
7242 {
7243 enum gimplify_status r0, r1;
d3147f64 7244
282899df 7245 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
726a989a 7246 post_p, is_gimple_val, fb_rvalue);
282899df
NS
7247 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7248 post_p, is_gimple_val, fb_rvalue);
d3147f64 7249
282899df
NS
7250 ret = MIN (r0, r1);
7251 break;
7252 }
d3147f64 7253
6615c446
JO
7254 case tcc_declaration:
7255 case tcc_constant:
6de9cd9a 7256 ret = GS_ALL_DONE;
282899df 7257 goto dont_recalculate;
d3147f64 7258
282899df
NS
7259 default:
7260 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
7261 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
7262 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
7263 goto expr_2;
6de9cd9a 7264 }
6de9cd9a
DN
7265
7266 recalculate_side_effects (*expr_p);
726a989a 7267
282899df 7268 dont_recalculate:
6de9cd9a
DN
7269 break;
7270 }
d3147f64 7271
941f78d1 7272 gcc_assert (*expr_p || ret != GS_OK);
6de9cd9a
DN
7273 }
7274 while (ret == GS_OK);
7275
7276 /* If we encountered an error_mark somewhere nested inside, either
7277 stub out the statement or propagate the error back out. */
7278 if (ret == GS_ERROR)
7279 {
7280 if (is_statement)
65355d53 7281 *expr_p = NULL;
6de9cd9a
DN
7282 goto out;
7283 }
7284
6de9cd9a
DN
7285 /* This was only valid as a return value from the langhook, which
7286 we handled. Make sure it doesn't escape from any other context. */
282899df 7287 gcc_assert (ret != GS_UNHANDLED);
6de9cd9a 7288
65355d53 7289 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6de9cd9a
DN
7290 {
7291 /* We aren't looking for a value, and we don't have a valid
7292 statement. If it doesn't have side-effects, throw it away. */
7293 if (!TREE_SIDE_EFFECTS (*expr_p))
65355d53 7294 *expr_p = NULL;
6de9cd9a 7295 else if (!TREE_THIS_VOLATILE (*expr_p))
44de5aeb
RK
7296 {
7297 /* This is probably a _REF that contains something nested that
7298 has side effects. Recurse through the operands to find it. */
7299 enum tree_code code = TREE_CODE (*expr_p);
7300
282899df 7301 switch (code)
44de5aeb 7302 {
282899df 7303 case COMPONENT_REF:
02a5eac4
EB
7304 case REALPART_EXPR:
7305 case IMAGPART_EXPR:
7306 case VIEW_CONVERT_EXPR:
282899df
NS
7307 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7308 gimple_test_f, fallback);
7309 break;
7310
a9e64c63
EB
7311 case ARRAY_REF:
7312 case ARRAY_RANGE_REF:
44de5aeb
RK
7313 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7314 gimple_test_f, fallback);
7315 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
282899df
NS
7316 gimple_test_f, fallback);
7317 break;
7318
7319 default:
7320 /* Anything else with side-effects must be converted to
a9e64c63 7321 a valid statement before we get here. */
282899df 7322 gcc_unreachable ();
44de5aeb 7323 }
44de5aeb 7324
65355d53 7325 *expr_p = NULL;
44de5aeb 7326 }
a9e64c63
EB
7327 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7328 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6de9cd9a 7329 {
a9e64c63
EB
7330 /* Historically, the compiler has treated a bare reference
7331 to a non-BLKmode volatile lvalue as forcing a load. */
af62f6f9 7332 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
726a989a 7333
c22b1771 7334 /* Normally, we do not want to create a temporary for a
a38578e1
MM
7335 TREE_ADDRESSABLE type because such a type should not be
7336 copied by bitwise-assignment. However, we make an
7337 exception here, as all we are doing here is ensuring that
7338 we read the bytes that make up the type. We use
7339 create_tmp_var_raw because create_tmp_var will abort when
57b51d4d 7340 given a TREE_ADDRESSABLE type. */
a38578e1
MM
7341 tree tmp = create_tmp_var_raw (type, "vol");
7342 gimple_add_tmp_var (tmp);
726a989a
RB
7343 gimplify_assign (tmp, *expr_p, pre_p);
7344 *expr_p = NULL;
6de9cd9a
DN
7345 }
7346 else
7347 /* We can't do anything useful with a volatile reference to
a9e64c63
EB
7348 an incomplete type, so just throw it away. Likewise for
7349 a BLKmode type, since any implicit inner load should
7350 already have been turned into an explicit one by the
7351 gimplification process. */
65355d53 7352 *expr_p = NULL;
6de9cd9a
DN
7353 }
7354
7355 /* If we are gimplifying at the statement level, we're done. Tack
726a989a 7356 everything together and return. */
325c3691 7357 if (fallback == fb_none || is_statement)
6de9cd9a 7358 {
726a989a
RB
7359 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7360 it out for GC to reclaim it. */
7361 *expr_p = NULL_TREE;
7362
7363 if (!gimple_seq_empty_p (internal_pre)
7364 || !gimple_seq_empty_p (internal_post))
be00f578 7365 {
726a989a
RB
7366 gimplify_seq_add_seq (&internal_pre, internal_post);
7367 gimplify_seq_add_seq (pre_p, internal_pre);
be00f578 7368 }
726a989a
RB
7369
7370 /* The result of gimplifying *EXPR_P is going to be the last few
7371 statements in *PRE_P and *POST_P. Add location information
7372 to all the statements that were added by the gimplification
7373 helpers. */
7374 if (!gimple_seq_empty_p (*pre_p))
7375 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7376
7377 if (!gimple_seq_empty_p (*post_p))
7378 annotate_all_with_location_after (*post_p, post_last_gsi,
7379 input_location);
7380
6de9cd9a
DN
7381 goto out;
7382 }
7383
726a989a
RB
7384#ifdef ENABLE_GIMPLE_CHECKING
7385 if (*expr_p)
7386 {
7387 enum tree_code code = TREE_CODE (*expr_p);
7388 /* These expressions should already be in gimple IR form. */
7389 gcc_assert (code != MODIFY_EXPR
7390 && code != ASM_EXPR
7391 && code != BIND_EXPR
7392 && code != CATCH_EXPR
6fc4fb06 7393 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
726a989a
RB
7394 && code != EH_FILTER_EXPR
7395 && code != GOTO_EXPR
7396 && code != LABEL_EXPR
7397 && code != LOOP_EXPR
726a989a
RB
7398 && code != SWITCH_EXPR
7399 && code != TRY_FINALLY_EXPR
7400 && code != OMP_CRITICAL
7401 && code != OMP_FOR
7402 && code != OMP_MASTER
7403 && code != OMP_ORDERED
7404 && code != OMP_PARALLEL
7405 && code != OMP_SECTIONS
7406 && code != OMP_SECTION
7407 && code != OMP_SINGLE);
7408 }
7409#endif
6de9cd9a 7410
726a989a
RB
7411 /* Otherwise we're gimplifying a subexpression, so the resulting
7412 value is interesting. If it's a valid operand that matches
7413 GIMPLE_TEST_F, we're done. Unless we are handling some
7414 post-effects internally; if that's the case, we need to copy into
7415 a temporary before adding the post-effects to POST_P. */
7416 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
6de9cd9a
DN
7417 goto out;
7418
7419 /* Otherwise, we need to create a new temporary for the gimplified
7420 expression. */
7421
7422 /* We can't return an lvalue if we have an internal postqueue. The
7423 object the lvalue refers to would (probably) be modified by the
7424 postqueue; we need to copy the value out first, which means an
7425 rvalue. */
726a989a
RB
7426 if ((fallback & fb_lvalue)
7427 && gimple_seq_empty_p (internal_post)
e847cc68 7428 && is_gimple_addressable (*expr_p))
6de9cd9a
DN
7429 {
7430 /* An lvalue will do. Take the address of the expression, store it
7431 in a temporary, and replace the expression with an INDIRECT_REF of
7432 that temporary. */
db3927fb 7433 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
6de9cd9a 7434 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7f5ad6d7 7435 *expr_p = build_simple_mem_ref (tmp);
6de9cd9a 7436 }
ba4d8f9d 7437 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
6de9cd9a 7438 {
726a989a
RB
7439 /* An rvalue will do. Assign the gimplified expression into a
7440 new temporary TMP and replace the original expression with
7441 TMP. First, make sure that the expression has a type so that
7442 it can be assigned into a temporary. */
282899df 7443 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
6de9cd9a 7444
726a989a 7445 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
6de9cd9a
DN
7446 /* The postqueue might change the value of the expression between
7447 the initialization and use of the temporary, so we can't use a
7448 formal temp. FIXME do we care? */
c685de4a
RG
7449 {
7450 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7451 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7452 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7453 DECL_GIMPLE_REG_P (*expr_p) = 1;
7454 }
6de9cd9a
DN
7455 else
7456 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7457 }
282899df 7458 else
6de9cd9a 7459 {
726a989a 7460#ifdef ENABLE_GIMPLE_CHECKING
282899df
NS
7461 if (!(fallback & fb_mayfail))
7462 {
7463 fprintf (stderr, "gimplification failed:\n");
7464 print_generic_expr (stderr, *expr_p, 0);
7465 debug_tree (*expr_p);
7466 internal_error ("gimplification failed");
7467 }
7468#endif
7469 gcc_assert (fallback & fb_mayfail);
726a989a 7470
282899df 7471 /* If this is an asm statement, and the user asked for the
535a42b1 7472 impossible, don't die. Fail and let gimplify_asm_expr
282899df 7473 issue an error. */
6de9cd9a
DN
7474 ret = GS_ERROR;
7475 goto out;
7476 }
6de9cd9a 7477
6de9cd9a 7478 /* Make sure the temporary matches our predicate. */
282899df 7479 gcc_assert ((*gimple_test_f) (*expr_p));
6de9cd9a 7480
726a989a 7481 if (!gimple_seq_empty_p (internal_post))
6de9cd9a 7482 {
726a989a
RB
7483 annotate_all_with_location (internal_post, input_location);
7484 gimplify_seq_add_seq (pre_p, internal_post);
6de9cd9a
DN
7485 }
7486
7487 out:
7488 input_location = saved_location;
7489 return ret;
7490}
7491
44de5aeb 7492/* Look through TYPE for variable-sized objects and gimplify each such
65355d53 7493 size that we find. Add to LIST_P any statements generated. */
44de5aeb 7494
65355d53 7495void
726a989a 7496gimplify_type_sizes (tree type, gimple_seq *list_p)
44de5aeb 7497{
ad50bc8d
RH
7498 tree field, t;
7499
19dbbf36 7500 if (type == NULL || type == error_mark_node)
8e0a600b 7501 return;
ad50bc8d 7502
6c6cfbfd 7503 /* We first do the main variant, then copy into any other variants. */
ad50bc8d 7504 type = TYPE_MAIN_VARIANT (type);
44de5aeb 7505
8e0a600b 7506 /* Avoid infinite recursion. */
19dbbf36 7507 if (TYPE_SIZES_GIMPLIFIED (type))
8e0a600b
JJ
7508 return;
7509
7510 TYPE_SIZES_GIMPLIFIED (type) = 1;
7511
44de5aeb
RK
7512 switch (TREE_CODE (type))
7513 {
44de5aeb
RK
7514 case INTEGER_TYPE:
7515 case ENUMERAL_TYPE:
7516 case BOOLEAN_TYPE:
44de5aeb 7517 case REAL_TYPE:
325217ed 7518 case FIXED_POINT_TYPE:
65355d53
RH
7519 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7520 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
ad50bc8d
RH
7521
7522 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7523 {
7524 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7525 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
ad50bc8d 7526 }
44de5aeb
RK
7527 break;
7528
7529 case ARRAY_TYPE:
ad50bc8d 7530 /* These types may not have declarations, so handle them here. */
8e0a600b
JJ
7531 gimplify_type_sizes (TREE_TYPE (type), list_p);
7532 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
2e957792
JJ
7533 /* Ensure VLA bounds aren't removed; for -O0 they should be variables
7534 with assigned stack slots, and for -O1+ -g they should be tracked
7535 by VTA. */
7536 if (TYPE_DOMAIN (type)
802e9f8e
JJ
7537 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7538 {
7539 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7540 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7541 DECL_IGNORED_P (t) = 0;
7542 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7543 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7544 DECL_IGNORED_P (t) = 0;
7545 }
44de5aeb
RK
7546 break;
7547
7548 case RECORD_TYPE:
7549 case UNION_TYPE:
7550 case QUAL_UNION_TYPE:
910ad8de 7551 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
44de5aeb 7552 if (TREE_CODE (field) == FIELD_DECL)
8e0a600b
JJ
7553 {
7554 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
9a9ba8d9
JJ
7555 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7556 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8e0a600b
JJ
7557 gimplify_type_sizes (TREE_TYPE (field), list_p);
7558 }
7559 break;
7560
7561 case POINTER_TYPE:
7562 case REFERENCE_TYPE:
706c4bb7
OH
7563 /* We used to recurse on the pointed-to type here, which turned out to
7564 be incorrect because its definition might refer to variables not
7565 yet initialized at this point if a forward declaration is involved.
7566
7567 It was actually useful for anonymous pointed-to types to ensure
7568 that the sizes evaluation dominates every possible later use of the
7569 values. Restricting to such types here would be safe since there
f63645be
KH
7570 is no possible forward declaration around, but would introduce an
7571 undesirable middle-end semantic to anonymity. We then defer to
7572 front-ends the responsibility of ensuring that the sizes are
7573 evaluated both early and late enough, e.g. by attaching artificial
706c4bb7 7574 type declarations to the tree. */
44de5aeb
RK
7575 break;
7576
7577 default:
7578 break;
7579 }
7580
65355d53
RH
7581 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7582 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
44de5aeb 7583
ad50bc8d 7584 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
b4830636 7585 {
ad50bc8d
RH
7586 TYPE_SIZE (t) = TYPE_SIZE (type);
7587 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7588 TYPE_SIZES_GIMPLIFIED (t) = 1;
b4830636 7589 }
b4830636
RH
7590}
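/* Sketch of the effect (the temporaries and exact form below are
   invented, not a literal dump): for a C99 variable-length array such as

       void f (int n) { int a[n]; ... }

   the ARRAY_TYPE of 'a' carries size and bound expressions built from
   'n'.  Walking the type here evaluates them once, e.g.

       D.2000 = n + -1;
       D.2001 = (sizetype) n * 4;

   appending the statements to *LIST_P, so every later use of the type's
   sizes refers to plain GIMPLE values.  */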
7591
7592/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7593 a size or position, has had all of its SAVE_EXPRs evaluated.
726a989a 7594 We add any required statements to *STMT_P. */
44de5aeb
RK
7595
7596void
726a989a 7597gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
44de5aeb 7598{
a9c5ddf9
RH
7599 tree type, expr = *expr_p;
7600
44de5aeb 7601 /* We don't do anything if the value isn't there, is constant, or contains
1e748a2b 7602 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
aabcd309 7603 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
1e748a2b
RK
7604 will want to replace it with a new variable, but that will cause problems
7605 if this type is from outside the function. It's OK to have that here. */
a9c5ddf9
RH
7606 if (expr == NULL_TREE || TREE_CONSTANT (expr)
7607 || TREE_CODE (expr) == VAR_DECL
7608 || CONTAINS_PLACEHOLDER_P (expr))
44de5aeb
RK
7609 return;
7610
a9c5ddf9
RH
7611 type = TREE_TYPE (expr);
7612 *expr_p = unshare_expr (expr);
7613
ad50bc8d 7614 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
a9c5ddf9
RH
7615 expr = *expr_p;
7616
7617 /* Verify that we've an exact type match with the original expression.
7618 In particular, we do not wish to drop a "sizetype" in favour of a
7619 type of similar dimensions. We don't want to pollute the generic
7620 type-stripping code with this knowledge because it doesn't matter
7621 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
7622 and friends retain their "sizetype-ness". */
7fd6694b
RH
7623 if (TREE_TYPE (expr) != type
7624 && TREE_CODE (type) == INTEGER_TYPE
7625 && TYPE_IS_SIZETYPE (type))
a9c5ddf9
RH
7626 {
7627 tree tmp;
726a989a 7628 gimple stmt;
a9c5ddf9
RH
7629
7630 *expr_p = create_tmp_var (type, NULL);
7631 tmp = build1 (NOP_EXPR, type, expr);
726a989a 7632 stmt = gimplify_assign (*expr_p, tmp, stmt_p);
a9c5ddf9 7633 if (EXPR_HAS_LOCATION (expr))
5e278028 7634 gimple_set_location (stmt, EXPR_LOCATION (expr));
a9c5ddf9 7635 else
726a989a 7636 gimple_set_location (stmt, input_location);
a9c5ddf9 7637 }
44de5aeb 7638}
6de9cd9a 7639
6de9cd9a 7640
726a989a
RB
7641/* Gimplify the body of statements pointed to by BODY_P and return a
7642 GIMPLE_BIND containing the sequence of GIMPLE statements
7643 corresponding to BODY_P. FNDECL is the function decl containing
7644 *BODY_P. */
7645
7646gimple
4744afba 7647gimplify_body (tree *body_p, tree fndecl, bool do_parms)
6de9cd9a
DN
7648{
7649 location_t saved_location = input_location;
726a989a
RB
7650 gimple_seq parm_stmts, seq;
7651 gimple outer_bind;
d406b663 7652 struct gimplify_ctx gctx;
6de9cd9a
DN
7653
7654 timevar_push (TV_TREE_GIMPLIFY);
953ff289 7655
f66d6761
SB
7656 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7657 gimplification. */
7658 default_rtl_profile ();
7659
953ff289 7660 gcc_assert (gimplify_ctxp == NULL);
d406b663 7661 push_gimplify_context (&gctx);
6de9cd9a 7662
44de5aeb
RK
7663 /* Unshare most shared trees in the body and in that of any nested functions.
7664 It would seem we don't have to do this for nested functions because
7665 they are supposed to be output and then the outer function gimplified
7666 first, but the g++ front end doesn't always do it that way. */
7667 unshare_body (body_p, fndecl);
7668 unvisit_body (body_p, fndecl);
6de9cd9a 7669
77f2a970
JJ
7670 if (cgraph_node (fndecl)->origin)
7671 nonlocal_vlas = pointer_set_create ();
7672
fa10beec 7673 /* Make sure input_location isn't set to something weird. */
6de9cd9a
DN
7674 input_location = DECL_SOURCE_LOCATION (fndecl);
7675
4744afba
RH
7676 /* Resolve callee-copies. This has to be done before processing
7677 the body so that DECL_VALUE_EXPR gets processed correctly. */
726a989a 7678 parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
4744afba 7679
6de9cd9a 7680 /* Gimplify the function's body. */
726a989a
RB
7681 seq = NULL;
7682 gimplify_stmt (body_p, &seq);
7683 outer_bind = gimple_seq_first_stmt (seq);
7684 if (!outer_bind)
6de9cd9a 7685 {
726a989a
RB
7686 outer_bind = gimple_build_nop ();
7687 gimplify_seq_add_stmt (&seq, outer_bind);
6de9cd9a 7688 }
44de5aeb 7689
726a989a
RB
7690 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
7691 not the case, wrap everything in a GIMPLE_BIND to make it so. */
7692 if (gimple_code (outer_bind) == GIMPLE_BIND
7693 && gimple_seq_first (seq) == gimple_seq_last (seq))
7694 ;
7695 else
7696 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7697
7698 *body_p = NULL_TREE;
4744afba
RH
7699
7700 /* If we had callee-copies statements, insert them at the beginning
f0c10f0f 7701 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
726a989a 7702 if (!gimple_seq_empty_p (parm_stmts))
4744afba 7703 {
f0c10f0f
RG
7704 tree parm;
7705
726a989a
RB
7706 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7707 gimple_bind_set_body (outer_bind, parm_stmts);
f0c10f0f
RG
7708
7709 for (parm = DECL_ARGUMENTS (current_function_decl);
910ad8de 7710 parm; parm = DECL_CHAIN (parm))
f0c10f0f
RG
7711 if (DECL_HAS_VALUE_EXPR_P (parm))
7712 {
7713 DECL_HAS_VALUE_EXPR_P (parm) = 0;
7714 DECL_IGNORED_P (parm) = 0;
7715 }
4744afba
RH
7716 }
7717
77f2a970
JJ
7718 if (nonlocal_vlas)
7719 {
7720 pointer_set_destroy (nonlocal_vlas);
7721 nonlocal_vlas = NULL;
7722 }
7723
726a989a 7724 pop_gimplify_context (outer_bind);
953ff289 7725 gcc_assert (gimplify_ctxp == NULL);
6de9cd9a 7726
7e98624c 7727#ifdef ENABLE_TYPES_CHECKING
1da2ed5f 7728 if (!seen_error ())
726a989a 7729 verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
6de9cd9a
DN
7730#endif
7731
7732 timevar_pop (TV_TREE_GIMPLIFY);
7733 input_location = saved_location;
726a989a
RB
7734
7735 return outer_bind;
6de9cd9a
DN
7736}
7737
7738/* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
726a989a 7739 node for the function we want to gimplify.
b8698a0f 7740
726a989a
RB
7741 The resulting sequence of GIMPLE statements is stored as the GIMPLE
7742 body of FNDECL; the function itself returns nothing. */
6de9cd9a
DN
7743
7744void
7745gimplify_function_tree (tree fndecl)
7746{
e41d82f5 7747 tree oldfn, parm, ret;
726a989a
RB
7748 gimple_seq seq;
7749 gimple bind;
6de9cd9a 7750
a406865a
RG
7751 gcc_assert (!gimple_body (fndecl));
7752
6de9cd9a
DN
7753 oldfn = current_function_decl;
7754 current_function_decl = fndecl;
db2960f4
SL
7755 if (DECL_STRUCT_FUNCTION (fndecl))
7756 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7757 else
7758 push_struct_function (fndecl);
6de9cd9a 7759
910ad8de 7760 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
e41d82f5
RH
7761 {
7762 /* Preliminarily mark non-addressed complex variables as eligible
7763 for promotion to gimple registers. We'll transform their uses
7764 as we find them. */
0890b981
AP
7765 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7766 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
e41d82f5
RH
7767 && !TREE_THIS_VOLATILE (parm)
7768 && !needs_to_live_in_memory (parm))
0890b981 7769 DECL_GIMPLE_REG_P (parm) = 1;
e41d82f5
RH
7770 }
7771
7772 ret = DECL_RESULT (fndecl);
0890b981 7773 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7b7e6ecd 7774 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
e41d82f5 7775 && !needs_to_live_in_memory (ret))
0890b981 7776 DECL_GIMPLE_REG_P (ret) = 1;
e41d82f5 7777
726a989a
RB
7778 bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7779
7780 /* The tree body of the function is no longer needed, replace it
7781 with the new GIMPLE body. */
7782 seq = gimple_seq_alloc ();
7783 gimple_seq_add_stmt (&seq, bind);
7784 gimple_set_body (fndecl, seq);
6de9cd9a
DN
7785
7786 /* If we're instrumenting function entry/exit, then prepend the call to
7787 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
7788 catch the exit hook. */
7789 /* ??? Add some way to ignore exceptions for this TFE. */
7790 if (flag_instrument_function_entry_exit
8d5a7d1f
ILT
7791 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7792 && !flag_instrument_functions_exclude_p (fndecl))
6de9cd9a 7793 {
726a989a
RB
7794 tree x;
7795 gimple new_bind;
7796 gimple tf;
7797 gimple_seq cleanup = NULL, body = NULL;
6de9cd9a 7798
6de9cd9a 7799 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
726a989a
RB
7800 gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7801 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
6de9cd9a 7802
6de9cd9a 7803 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
726a989a
RB
7804 gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7805 gimplify_seq_add_stmt (&body, tf);
32001f69 7806 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
726a989a
RB
7807 /* Clear the block for BIND, since it is no longer directly inside
7808 the function, but within a try block. */
32001f69 7809 gimple_bind_set_block (bind, NULL);
6de9cd9a 7810
726a989a
RB
7811 /* Replace the current function body with the body
7812 wrapped in the try/finally TF. */
7813 seq = gimple_seq_alloc ();
7814 gimple_seq_add_stmt (&seq, new_bind);
7815 gimple_set_body (fndecl, seq);
6de9cd9a
DN
7816 }
7817
726a989a 7818 DECL_SAVED_TREE (fndecl) = NULL_TREE;
a406865a 7819 cfun->curr_properties = PROP_gimple_any;
726a989a 7820
6de9cd9a 7821 current_function_decl = oldfn;
db2960f4 7822 pop_cfun ();
6de9cd9a 7823}
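/* Shape of the result when -finstrument-functions is active (a sketch
   only, not a literal dump):

       GIMPLE_BIND
         call to the entry hook
         GIMPLE_TRY_FINALLY
           eval:    <the original outer GIMPLE_BIND of the body>
           cleanup: call to the exit hook

   i.e. the original bind is moved inside the try block and the exit
   hook runs as its cleanup on every path out of the function.  */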
726a989a
RB
7824
7825
7826/* Some transformations like inlining may invalidate the GIMPLE form
7827 for operands. This function traverses all the operands in STMT and
7828 gimplifies anything that is not a valid gimple operand. Any new
7829 GIMPLE statements are inserted before *GSI_P. */
7830
7831void
7832gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
7833{
7834 size_t i, num_ops;
7835 tree orig_lhs = NULL_TREE, lhs, t;
7836 gimple_seq pre = NULL;
7837 gimple post_stmt = NULL;
7838 struct gimplify_ctx gctx;
7839
7840 push_gimplify_context (&gctx);
7841 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7842
7843 switch (gimple_code (stmt))
7844 {
7845 case GIMPLE_COND:
7846 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
7847 is_gimple_val, fb_rvalue);
7848 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
7849 is_gimple_val, fb_rvalue);
7850 break;
e8789588
JJ
7851 case GIMPLE_SWITCH:
7852 gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
7853 is_gimple_val, fb_rvalue);
7854 break;
726a989a
RB
7855 case GIMPLE_OMP_ATOMIC_LOAD:
7856 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
7857 is_gimple_val, fb_rvalue);
7858 break;
7859 case GIMPLE_ASM:
7860 {
7861 size_t i, noutputs = gimple_asm_noutputs (stmt);
7862 const char *constraint, **oconstraints;
7863 bool allows_mem, allows_reg, is_inout;
7864
7865 oconstraints
7866 = (const char **) alloca ((noutputs) * sizeof (const char *));
7867 for (i = 0; i < noutputs; i++)
7868 {
7869 tree op = gimple_asm_output_op (stmt, i);
7870 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7871 oconstraints[i] = constraint;
7872 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
7873 &allows_reg, &is_inout);
7874 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7875 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7876 fb_lvalue | fb_mayfail);
7877 }
7878 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
7879 {
7880 tree op = gimple_asm_input_op (stmt, i);
7881 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7882 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7883 oconstraints, &allows_mem, &allows_reg);
7884 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
7885 allows_reg = 0;
7886 if (!allows_reg && allows_mem)
7887 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7888 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7889 else
7890 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7891 is_gimple_asm_val, fb_rvalue);
7892 }
7893 }
7894 break;
7895 default:
7896 /* NOTE: We start gimplifying operands from last to first to
7897 make sure that side-effects on the RHS of calls, assignments
7898 and ASMs are executed before the LHS. The ordering is not
7899 important for other statements. */
7900 num_ops = gimple_num_ops (stmt);
7901 orig_lhs = gimple_get_lhs (stmt);
7902 for (i = num_ops; i > 0; i--)
7903 {
7904 tree op = gimple_op (stmt, i - 1);
7905 if (op == NULL_TREE)
7906 continue;
7907 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
7908 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
7909 else if (i == 2
7910 && is_gimple_assign (stmt)
7911 && num_ops == 2
7912 && get_gimple_rhs_class (gimple_expr_code (stmt))
7913 == GIMPLE_SINGLE_RHS)
7914 gimplify_expr (&op, &pre, NULL,
7915 rhs_predicate_for (gimple_assign_lhs (stmt)),
7916 fb_rvalue);
7917 else if (i == 2 && is_gimple_call (stmt))
7918 {
7919 if (TREE_CODE (op) == FUNCTION_DECL)
7920 continue;
7921 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
7922 }
7923 else
7924 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
7925 gimple_set_op (stmt, i - 1, op);
7926 }
7927
7928 lhs = gimple_get_lhs (stmt);
bdec4dc7
RG
7929 /* If the LHS changed in a way that requires a simple RHS,
7930 create a temporary. */
ba4d8f9d 7931 if (lhs && !is_gimple_reg (lhs))
        {
          bool need_temp = false;

          if (is_gimple_assign (stmt)
              && num_ops == 2
              && get_gimple_rhs_class (gimple_expr_code (stmt))
                 == GIMPLE_SINGLE_RHS)
            gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
                           rhs_predicate_for (gimple_assign_lhs (stmt)),
                           fb_rvalue);
          else if (is_gimple_reg (lhs))
            {
              if (is_gimple_reg_type (TREE_TYPE (lhs)))
                {
                  if (is_gimple_call (stmt))
                    {
                      i = gimple_call_flags (stmt);
                      if ((i & ECF_LOOPING_CONST_OR_PURE)
                          || !(i & (ECF_CONST | ECF_PURE)))
                        need_temp = true;
                    }
                  if (stmt_can_throw_internal (stmt))
                    need_temp = true;
                }
            }
          else
            {
              if (is_gimple_reg_type (TREE_TYPE (lhs)))
                need_temp = true;
              else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
                {
                  if (is_gimple_call (stmt))
                    {
                      tree fndecl = gimple_call_fndecl (stmt);

                      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
                          && !(fndecl && DECL_RESULT (fndecl)
                               && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
                        need_temp = true;
                    }
                  else
                    need_temp = true;
                }
            }
          if (need_temp)
            {
              tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);

              if (TREE_CODE (orig_lhs) == SSA_NAME)
                orig_lhs = SSA_NAME_VAR (orig_lhs);

              if (gimple_in_ssa_p (cfun))
                temp = make_ssa_name (temp, NULL);
              gimple_set_lhs (stmt, temp);
              post_stmt = gimple_build_assign (lhs, temp);
              if (TREE_CODE (lhs) == SSA_NAME)
                SSA_NAME_DEF_STMT (lhs) = post_stmt;
            }
        }
      break;
    }

  if (gimple_referenced_vars (cfun))
    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
      add_referenced_var (t);

  if (!gimple_seq_empty_p (pre))
    {
      if (gimple_in_ssa_p (cfun))
        {
          gimple_stmt_iterator i;

          for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
            mark_symbols_for_renaming (gsi_stmt (i));
        }
      gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
    }
  if (post_stmt)
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

  pop_gimplify_context (NULL);
}
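
/* For illustration only (an assumed caller, not code from this pass): a
   transformation that replaces an operand of an existing statement with a
   possibly non-GIMPLE expression could use gimple_regimplify_operands to
   restore the GIMPLE invariants, with any helper statements landing before
   the statement itself.  STMT and NEW_RHS are hypothetical here:

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     gimple_assign_set_rhs1 (stmt, new_rhs);
     gimple_regimplify_operands (stmt, &gsi);
     update_stmt (stmt);
*/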


/* Expands EXPR into a list of GIMPLE statements in STMTS.  If SIMPLE is
   true, force the result to be either an SSA_NAME or an invariant;
   otherwise just force it to be a GIMPLE rhs expression.  If VAR is not
   NULL, make the base variable of the final destination be VAR if
   suitable.  */

tree
force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
{
  tree t;
  enum gimplify_status ret;
  gimple_predicate gimple_test_f;
  struct gimplify_ctx gctx;

  *stmts = NULL;

  if (is_gimple_val (expr))
    return expr;

  gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
  gimplify_ctxp->allow_rhs_cond_expr = true;

  if (var)
    expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);

  if (TREE_CODE (expr) != MODIFY_EXPR
      && TREE_TYPE (expr) == void_type_node)
    {
      gimplify_and_add (expr, stmts);
      expr = NULL_TREE;
    }
  else
    {
      ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
      gcc_assert (ret != GS_ERROR);
    }

  if (gimple_referenced_vars (cfun))
    for (t = gimplify_ctxp->temps; t ; t = DECL_CHAIN (t))
      add_referenced_var (t);

  pop_gimplify_context (NULL);

  return expr;
}
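
/* For illustration only (an assumed caller, not code from this pass):
   force_gimple_operand turns an arbitrary tree expression into a GIMPLE
   value plus a side sequence of statements that the caller must emit
   itself.  BOUND and E are hypothetical:

     gimple_seq stmts = NULL;
     tree val = force_gimple_operand (bound, &stmts, true, NULL_TREE);
     if (!gimple_seq_empty_p (stmts))
       gsi_insert_seq_on_edge_immediate (e, stmts);

   Passing SIMPLE == true asks for an SSA_NAME or an invariant; passing a
   non-NULL VAR asks that VAR be used as the base of the destination where
   suitable.  */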

/* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR.
   If some statements are produced, emits them at GSI.  If BEFORE is true,
   the statements are inserted before GSI, otherwise they are inserted
   after it.  M specifies the way GSI moves after insertion (GSI_SAME_STMT
   or GSI_CONTINUE_LINKING are the usual values).  */

tree
force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
                          bool simple_p, tree var, bool before,
                          enum gsi_iterator_update m)
{
  gimple_seq stmts;

  expr = force_gimple_operand (expr, &stmts, simple_p, var);

  if (!gimple_seq_empty_p (stmts))
    {
      if (gimple_in_ssa_p (cfun))
        {
          gimple_stmt_iterator i;

          for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
            mark_symbols_for_renaming (gsi_stmt (i));
        }

      if (before)
        gsi_insert_seq_before (gsi, stmts, m);
      else
        gsi_insert_seq_after (gsi, stmts, m);
    }

  return expr;
}
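
/* For illustration only (an assumed caller, not code from this pass):
   loop manipulation code typically materializes a freshly built expression
   right before the statement it is about to modify.  COND_STMT, TYPE, BASE
   and STEP are hypothetical:

     gimple_stmt_iterator gsi = gsi_for_stmt (cond_stmt);
     tree bound = fold_build2 (PLUS_EXPR, type, base, step);
     bound = force_gimple_operand_gsi (&gsi, bound, true, NULL_TREE,
                                       true, GSI_SAME_STMT);
     gimple_cond_set_rhs (cond_stmt, bound);
     update_stmt (cond_stmt);

   The helper statements produced while gimplifying BOUND are inserted
   before GSI and, with GSI_SAME_STMT, the iterator keeps pointing at
   COND_STMT afterwards.  */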

#include "gt-gimplify.h"