]>
Commit | Line | Data |
---|---|---|
6de9cd9a DN |
1 | /* Tree lowering pass. This pass converts the GENERIC functions-as-trees |
2 | tree representation into the GIMPLE form. | |
23a5b65a | 3 | Copyright (C) 2002-2014 Free Software Foundation, Inc. |
6de9cd9a DN |
4 | Major work done by Sebastian Pop <s.pop@laposte.net>, |
5 | Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>. | |
6 | ||
7 | This file is part of GCC. | |
8 | ||
9 | GCC is free software; you can redistribute it and/or modify it under | |
10 | the terms of the GNU General Public License as published by the Free | |
9dcd6f09 | 11 | Software Foundation; either version 3, or (at your option) any later |
6de9cd9a DN |
12 | version. |
13 | ||
14 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
15 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
16 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
17 | for more details. | |
18 | ||
19 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
20 | along with GCC; see the file COPYING3. If not see |
21 | <http://www.gnu.org/licenses/>. */ | |
6de9cd9a DN |
22 | |
23 | #include "config.h" | |
24 | #include "system.h" | |
25 | #include "coretypes.h" | |
6de9cd9a | 26 | #include "tree.h" |
d8a2d370 | 27 | #include "expr.h" |
6e2830c3 | 28 | #include "hash-set.h" |
2fb9a547 | 29 | #include "hash-table.h" |
60393bbc AM |
30 | #include "predict.h" |
31 | #include "vec.h" | |
32 | #include "hashtab.h" | |
33 | #include "machmode.h" | |
34 | #include "tm.h" | |
35 | #include "hard-reg-set.h" | |
36 | #include "input.h" | |
37 | #include "function.h" | |
2fb9a547 AM |
38 | #include "basic-block.h" |
39 | #include "tree-ssa-alias.h" | |
40 | #include "internal-fn.h" | |
41 | #include "gimple-fold.h" | |
42 | #include "tree-eh.h" | |
43 | #include "gimple-expr.h" | |
44 | #include "is-a.h" | |
18f429e2 | 45 | #include "gimple.h" |
45b0be94 | 46 | #include "gimplify.h" |
5be5c238 | 47 | #include "gimple-iterator.h" |
d8a2d370 DN |
48 | #include "stringpool.h" |
49 | #include "calls.h" | |
50 | #include "varasm.h" | |
51 | #include "stor-layout.h" | |
52 | #include "stmt.h" | |
53 | #include "print-tree.h" | |
726a989a | 54 | #include "tree-iterator.h" |
6de9cd9a | 55 | #include "tree-inline.h" |
cf835838 | 56 | #include "tree-pretty-print.h" |
6de9cd9a | 57 | #include "langhooks.h" |
442b4905 AM |
58 | #include "bitmap.h" |
59 | #include "gimple-ssa.h" | |
c582198b AM |
60 | #include "hash-map.h" |
61 | #include "plugin-api.h" | |
62 | #include "ipa-ref.h" | |
44de5aeb | 63 | #include "cgraph.h" |
442b4905 AM |
64 | #include "tree-cfg.h" |
65 | #include "tree-ssanames.h" | |
66 | #include "tree-ssa.h" | |
718f9c0f | 67 | #include "diagnostic-core.h" |
cd3ce9b4 | 68 | #include "target.h" |
6be42dd4 | 69 | #include "splay-tree.h" |
0645c1a2 | 70 | #include "omp-low.h" |
4484a35a | 71 | #include "gimple-low.h" |
939b37da | 72 | #include "cilk.h" |
6de9cd9a | 73 | |
7ee2468b SB |
74 | #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */ |
75 | #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */ | |
9b2b7279 | 76 | #include "builtins.h" |
953ff289 DN |
77 | |
/* Per-decl flags stored in an OpenMP gimplification context's splay
   tree, describing how a variable is seen/shared/privatized within
   the construct.  Individual bits may be OR'ed together.  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,
  GOVD_MAP_TO_ONLY = 8192,

  /* Mask selecting the data-sharing class bits.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
98 | ||
726a989a | 99 | |
a68ab351 JJ |
/* Kind of OpenMP region a gimplify_omp_ctx represents.  The values are
   bit-flag-like so tests such as (region_type & ORT_TASK) work.  */
enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_TARGET_DATA = 16,
  ORT_TARGET = 32
};
112 | ||
45852dcc AM |
113 | /* Gimplify hashtable helper. */ |
114 | ||
115 | struct gimplify_hasher : typed_free_remove <elt_t> | |
116 | { | |
117 | typedef elt_t value_type; | |
118 | typedef elt_t compare_type; | |
119 | static inline hashval_t hash (const value_type *); | |
120 | static inline bool equal (const value_type *, const compare_type *); | |
121 | }; | |
122 | ||
123 | struct gimplify_ctx | |
124 | { | |
125 | struct gimplify_ctx *prev_context; | |
126 | ||
127 | vec<gimple> bind_expr_stack; | |
128 | tree temps; | |
129 | gimple_seq conditional_cleanups; | |
130 | tree exit_label; | |
131 | tree return_temp; | |
132 | ||
133 | vec<tree> case_labels; | |
134 | /* The formal temporary table. Should this be persistent? */ | |
c203e8a7 | 135 | hash_table<gimplify_hasher> *temp_htab; |
45852dcc AM |
136 | |
137 | int conditions; | |
138 | bool save_stack; | |
139 | bool into_ssa; | |
140 | bool allow_rhs_cond_expr; | |
141 | bool in_cleanup_point_expr; | |
142 | }; | |
143 | ||
953ff289 | 144 | struct gimplify_omp_ctx |
6de9cd9a | 145 | { |
953ff289 DN |
146 | struct gimplify_omp_ctx *outer_context; |
147 | splay_tree variables; | |
6e2830c3 | 148 | hash_set<tree> *privatized_types; |
953ff289 DN |
149 | location_t location; |
150 | enum omp_clause_default_kind default_kind; | |
a68ab351 | 151 | enum omp_region_type region_type; |
acf0174b | 152 | bool combined_loop; |
9cf32741 | 153 | bool distribute; |
953ff289 DN |
154 | }; |
155 | ||
45852dcc | 156 | static struct gimplify_ctx *gimplify_ctxp; |
953ff289 DN |
157 | static struct gimplify_omp_ctx *gimplify_omp_ctxp; |
158 | ||
ad19c4be | 159 | /* Forward declaration. */ |
726a989a | 160 | static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool); |
eb6127a4 | 161 | |
a1a6c5b2 JJ |
162 | /* Shorter alias name for the above function for use in gimplify.c |
163 | only. */ | |
164 | ||
165 | static inline void | |
166 | gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs) | |
167 | { | |
168 | gimple_seq_add_stmt_without_update (seq_p, gs); | |
169 | } | |
170 | ||
726a989a RB |
171 | /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is |
172 | NULL, a new sequence is allocated. This function is | |
173 | similar to gimple_seq_add_seq, but does not scan the operands. | |
174 | During gimplification, we need to manipulate statement sequences | |
175 | before the def/use vectors have been constructed. */ | |
176 | ||
177 | static void | |
178 | gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src) | |
179 | { | |
180 | gimple_stmt_iterator si; | |
181 | ||
182 | if (src == NULL) | |
183 | return; | |
184 | ||
726a989a RB |
185 | si = gsi_last (*dst_p); |
186 | gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT); | |
187 | } | |
188 | ||
45852dcc AM |
189 | |
190 | /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing | |
191 | and popping gimplify contexts. */ | |
192 | ||
193 | static struct gimplify_ctx *ctx_pool = NULL; | |
194 | ||
195 | /* Return a gimplify context struct from the pool. */ | |
196 | ||
197 | static inline struct gimplify_ctx * | |
198 | ctx_alloc (void) | |
199 | { | |
200 | struct gimplify_ctx * c = ctx_pool; | |
201 | ||
202 | if (c) | |
203 | ctx_pool = c->prev_context; | |
204 | else | |
205 | c = XNEW (struct gimplify_ctx); | |
206 | ||
207 | memset (c, '\0', sizeof (*c)); | |
208 | return c; | |
209 | } | |
210 | ||
211 | /* Put gimplify context C back into the pool. */ | |
212 | ||
213 | static inline void | |
214 | ctx_free (struct gimplify_ctx *c) | |
215 | { | |
216 | c->prev_context = ctx_pool; | |
217 | ctx_pool = c; | |
218 | } | |
219 | ||
220 | /* Free allocated ctx stack memory. */ | |
221 | ||
222 | void | |
223 | free_gimplify_stack (void) | |
224 | { | |
225 | struct gimplify_ctx *c; | |
226 | ||
227 | while ((c = ctx_pool)) | |
228 | { | |
229 | ctx_pool = c->prev_context; | |
230 | free (c); | |
231 | } | |
232 | } | |
233 | ||
234 | ||
6de9cd9a DN |
235 | /* Set up a context for the gimplifier. */ |
236 | ||
237 | void | |
45852dcc | 238 | push_gimplify_context (bool in_ssa, bool rhs_cond_ok) |
6de9cd9a | 239 | { |
45852dcc AM |
240 | struct gimplify_ctx *c = ctx_alloc (); |
241 | ||
953ff289 | 242 | c->prev_context = gimplify_ctxp; |
953ff289 | 243 | gimplify_ctxp = c; |
45852dcc AM |
244 | gimplify_ctxp->into_ssa = in_ssa; |
245 | gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok; | |
6de9cd9a DN |
246 | } |
247 | ||
248 | /* Tear down a context for the gimplifier. If BODY is non-null, then | |
249 | put the temporaries into the outer BIND_EXPR. Otherwise, put them | |
726a989a RB |
250 | in the local_decls. |
251 | ||
252 | BODY is not a sequence, but the first tuple in a sequence. */ | |
6de9cd9a DN |
253 | |
254 | void | |
726a989a | 255 | pop_gimplify_context (gimple body) |
6de9cd9a | 256 | { |
953ff289 | 257 | struct gimplify_ctx *c = gimplify_ctxp; |
17ad5b5e | 258 | |
9771b263 DN |
259 | gcc_assert (c |
260 | && (!c->bind_expr_stack.exists () | |
261 | || c->bind_expr_stack.is_empty ())); | |
262 | c->bind_expr_stack.release (); | |
953ff289 | 263 | gimplify_ctxp = c->prev_context; |
6de9cd9a DN |
264 | |
265 | if (body) | |
5123ad09 | 266 | declare_vars (c->temps, body, false); |
6de9cd9a | 267 | else |
953ff289 | 268 | record_vars (c->temps); |
6de9cd9a | 269 | |
c203e8a7 TS |
270 | delete c->temp_htab; |
271 | c->temp_htab = NULL; | |
45852dcc | 272 | ctx_free (c); |
6de9cd9a DN |
273 | } |
274 | ||
ad19c4be EB |
275 | /* Push a GIMPLE_BIND tuple onto the stack of bindings. */ |
276 | ||
c24b7de9 | 277 | static void |
726a989a | 278 | gimple_push_bind_expr (gimple gimple_bind) |
6de9cd9a | 279 | { |
9771b263 DN |
280 | gimplify_ctxp->bind_expr_stack.reserve (8); |
281 | gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind); | |
6de9cd9a DN |
282 | } |
283 | ||
ad19c4be EB |
284 | /* Pop the first element off the stack of bindings. */ |
285 | ||
c24b7de9 | 286 | static void |
6de9cd9a DN |
287 | gimple_pop_bind_expr (void) |
288 | { | |
9771b263 | 289 | gimplify_ctxp->bind_expr_stack.pop (); |
6de9cd9a DN |
290 | } |
291 | ||
ad19c4be EB |
292 | /* Return the first element of the stack of bindings. */ |
293 | ||
726a989a | 294 | gimple |
6de9cd9a DN |
295 | gimple_current_bind_expr (void) |
296 | { | |
9771b263 | 297 | return gimplify_ctxp->bind_expr_stack.last (); |
726a989a RB |
298 | } |
299 | ||
ad19c4be | 300 | /* Return the stack of bindings created during gimplification. */ |
726a989a | 301 | |
9771b263 | 302 | vec<gimple> |
726a989a RB |
303 | gimple_bind_expr_stack (void) |
304 | { | |
305 | return gimplify_ctxp->bind_expr_stack; | |
6de9cd9a DN |
306 | } |
307 | ||
ad19c4be | 308 | /* Return true iff there is a COND_EXPR between us and the innermost |
6de9cd9a DN |
309 | CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */ |
310 | ||
311 | static bool | |
312 | gimple_conditional_context (void) | |
313 | { | |
314 | return gimplify_ctxp->conditions > 0; | |
315 | } | |
316 | ||
317 | /* Note that we've entered a COND_EXPR. */ | |
318 | ||
319 | static void | |
320 | gimple_push_condition (void) | |
321 | { | |
726a989a | 322 | #ifdef ENABLE_GIMPLE_CHECKING |
d775bc45 | 323 | if (gimplify_ctxp->conditions == 0) |
726a989a | 324 | gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups)); |
d775bc45 | 325 | #endif |
6de9cd9a DN |
326 | ++(gimplify_ctxp->conditions); |
327 | } | |
328 | ||
329 | /* Note that we've left a COND_EXPR. If we're back at unconditional scope | |
330 | now, add any conditional cleanups we've seen to the prequeue. */ | |
331 | ||
332 | static void | |
726a989a | 333 | gimple_pop_condition (gimple_seq *pre_p) |
6de9cd9a DN |
334 | { |
335 | int conds = --(gimplify_ctxp->conditions); | |
aa4a53af | 336 | |
282899df | 337 | gcc_assert (conds >= 0); |
6de9cd9a DN |
338 | if (conds == 0) |
339 | { | |
726a989a RB |
340 | gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups); |
341 | gimplify_ctxp->conditional_cleanups = NULL; | |
6de9cd9a | 342 | } |
6de9cd9a DN |
343 | } |
344 | ||
953ff289 DN |
345 | /* A stable comparison routine for use with splay trees and DECLs. */ |
346 | ||
347 | static int | |
348 | splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb) | |
349 | { | |
350 | tree a = (tree) xa; | |
351 | tree b = (tree) xb; | |
352 | ||
353 | return DECL_UID (a) - DECL_UID (b); | |
354 | } | |
355 | ||
356 | /* Create a new omp construct that deals with variable remapping. */ | |
357 | ||
358 | static struct gimplify_omp_ctx * | |
a68ab351 | 359 | new_omp_context (enum omp_region_type region_type) |
953ff289 DN |
360 | { |
361 | struct gimplify_omp_ctx *c; | |
362 | ||
363 | c = XCNEW (struct gimplify_omp_ctx); | |
364 | c->outer_context = gimplify_omp_ctxp; | |
365 | c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); | |
6e2830c3 | 366 | c->privatized_types = new hash_set<tree>; |
953ff289 | 367 | c->location = input_location; |
a68ab351 | 368 | c->region_type = region_type; |
f22f4340 | 369 | if ((region_type & ORT_TASK) == 0) |
a68ab351 JJ |
370 | c->default_kind = OMP_CLAUSE_DEFAULT_SHARED; |
371 | else | |
372 | c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED; | |
953ff289 DN |
373 | |
374 | return c; | |
375 | } | |
376 | ||
377 | /* Destroy an omp construct that deals with variable remapping. */ | |
378 | ||
379 | static void | |
380 | delete_omp_context (struct gimplify_omp_ctx *c) | |
381 | { | |
382 | splay_tree_delete (c->variables); | |
6e2830c3 | 383 | delete c->privatized_types; |
953ff289 DN |
384 | XDELETE (c); |
385 | } | |
386 | ||
387 | static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int); | |
388 | static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool); | |
389 | ||
726a989a RB |
390 | /* Both gimplify the statement T and append it to *SEQ_P. This function |
391 | behaves exactly as gimplify_stmt, but you don't have to pass T as a | |
392 | reference. */ | |
cd3ce9b4 JM |
393 | |
394 | void | |
726a989a RB |
395 | gimplify_and_add (tree t, gimple_seq *seq_p) |
396 | { | |
397 | gimplify_stmt (&t, seq_p); | |
398 | } | |
399 | ||
400 | /* Gimplify statement T into sequence *SEQ_P, and return the first | |
401 | tuple in the sequence of generated tuples for this statement. | |
402 | Return NULL if gimplifying T produced no tuples. */ | |
403 | ||
404 | static gimple | |
405 | gimplify_and_return_first (tree t, gimple_seq *seq_p) | |
cd3ce9b4 | 406 | { |
726a989a RB |
407 | gimple_stmt_iterator last = gsi_last (*seq_p); |
408 | ||
409 | gimplify_and_add (t, seq_p); | |
410 | ||
411 | if (!gsi_end_p (last)) | |
412 | { | |
413 | gsi_next (&last); | |
414 | return gsi_stmt (last); | |
415 | } | |
416 | else | |
417 | return gimple_seq_first_stmt (*seq_p); | |
cd3ce9b4 JM |
418 | } |
419 | ||
216820a4 RG |
420 | /* Returns true iff T is a valid RHS for an assignment to an un-renamed |
421 | LHS, or for a call argument. */ | |
422 | ||
423 | static bool | |
424 | is_gimple_mem_rhs (tree t) | |
425 | { | |
426 | /* If we're dealing with a renamable type, either source or dest must be | |
427 | a renamed variable. */ | |
428 | if (is_gimple_reg_type (TREE_TYPE (t))) | |
429 | return is_gimple_val (t); | |
430 | else | |
431 | return is_gimple_val (t) || is_gimple_lvalue (t); | |
432 | } | |
433 | ||
726a989a | 434 | /* Return true if T is a CALL_EXPR or an expression that can be |
12947319 | 435 | assigned to a temporary. Note that this predicate should only be |
726a989a RB |
436 | used during gimplification. See the rationale for this in |
437 | gimplify_modify_expr. */ | |
438 | ||
439 | static bool | |
ba4d8f9d | 440 | is_gimple_reg_rhs_or_call (tree t) |
726a989a | 441 | { |
ba4d8f9d RG |
442 | return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS |
443 | || TREE_CODE (t) == CALL_EXPR); | |
726a989a RB |
444 | } |
445 | ||
446 | /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that | |
447 | this predicate should only be used during gimplification. See the | |
448 | rationale for this in gimplify_modify_expr. */ | |
449 | ||
450 | static bool | |
ba4d8f9d | 451 | is_gimple_mem_rhs_or_call (tree t) |
726a989a RB |
452 | { |
453 | /* If we're dealing with a renamable type, either source or dest must be | |
050bbfeb RG |
454 | a renamed variable. */ |
455 | if (is_gimple_reg_type (TREE_TYPE (t))) | |
726a989a RB |
456 | return is_gimple_val (t); |
457 | else | |
ba4d8f9d RG |
458 | return (is_gimple_val (t) || is_gimple_lvalue (t) |
459 | || TREE_CODE (t) == CALL_EXPR); | |
726a989a RB |
460 | } |
461 | ||
2ad728d2 RG |
462 | /* Create a temporary with a name derived from VAL. Subroutine of |
463 | lookup_tmp_var; nobody else should call this function. */ | |
464 | ||
465 | static inline tree | |
947ca6a0 | 466 | create_tmp_from_val (tree val) |
2ad728d2 RG |
467 | { |
468 | /* Drop all qualifiers and address-space information from the value type. */ | |
469 | tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val)); | |
470 | tree var = create_tmp_var (type, get_name (val)); | |
947ca6a0 RB |
471 | if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE |
472 | || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE) | |
2ad728d2 RG |
473 | DECL_GIMPLE_REG_P (var) = 1; |
474 | return var; | |
475 | } | |
476 | ||
477 | /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse | |
478 | an existing expression temporary. */ | |
479 | ||
480 | static tree | |
481 | lookup_tmp_var (tree val, bool is_formal) | |
482 | { | |
483 | tree ret; | |
484 | ||
485 | /* If not optimizing, never really reuse a temporary. local-alloc | |
486 | won't allocate any variable that is used in more than one basic | |
487 | block, which means it will go into memory, causing much extra | |
488 | work in reload and final and poorer code generation, outweighing | |
489 | the extra memory allocation here. */ | |
490 | if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val)) | |
947ca6a0 | 491 | ret = create_tmp_from_val (val); |
2ad728d2 RG |
492 | else |
493 | { | |
494 | elt_t elt, *elt_p; | |
4a8fb1a1 | 495 | elt_t **slot; |
2ad728d2 RG |
496 | |
497 | elt.val = val; | |
c203e8a7 TS |
498 | if (!gimplify_ctxp->temp_htab) |
499 | gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000); | |
500 | slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT); | |
2ad728d2 RG |
501 | if (*slot == NULL) |
502 | { | |
503 | elt_p = XNEW (elt_t); | |
504 | elt_p->val = val; | |
947ca6a0 | 505 | elt_p->temp = ret = create_tmp_from_val (val); |
4a8fb1a1 | 506 | *slot = elt_p; |
2ad728d2 RG |
507 | } |
508 | else | |
509 | { | |
4a8fb1a1 | 510 | elt_p = *slot; |
2ad728d2 RG |
511 | ret = elt_p->temp; |
512 | } | |
513 | } | |
514 | ||
515 | return ret; | |
516 | } | |
517 | ||
ba4d8f9d | 518 | /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */ |
6de9cd9a DN |
519 | |
520 | static tree | |
726a989a RB |
521 | internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p, |
522 | bool is_formal) | |
6de9cd9a DN |
523 | { |
524 | tree t, mod; | |
6de9cd9a | 525 | |
726a989a RB |
526 | /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we |
527 | can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */ | |
ba4d8f9d | 528 | gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call, |
726a989a | 529 | fb_rvalue); |
6de9cd9a | 530 | |
2ad728d2 RG |
531 | if (gimplify_ctxp->into_ssa |
532 | && is_gimple_reg_type (TREE_TYPE (val))) | |
533 | t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL); | |
534 | else | |
535 | t = lookup_tmp_var (val, is_formal); | |
e41d82f5 | 536 | |
2e929cf3 | 537 | mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val)); |
6de9cd9a | 538 | |
8400e75e | 539 | SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location)); |
6de9cd9a | 540 | |
fff34d35 RK |
541 | /* gimplify_modify_expr might want to reduce this further. */ |
542 | gimplify_and_add (mod, pre_p); | |
726a989a | 543 | ggc_free (mod); |
8b11a64c | 544 | |
6de9cd9a DN |
545 | return t; |
546 | } | |
547 | ||
ad19c4be | 548 | /* Return a formal temporary variable initialized with VAL. PRE_P is as |
ba4d8f9d RG |
549 | in gimplify_expr. Only use this function if: |
550 | ||
551 | 1) The value of the unfactored expression represented by VAL will not | |
552 | change between the initialization and use of the temporary, and | |
553 | 2) The temporary will not be otherwise modified. | |
554 | ||
555 | For instance, #1 means that this is inappropriate for SAVE_EXPR temps, | |
556 | and #2 means it is inappropriate for && temps. | |
557 | ||
558 | For other cases, use get_initialized_tmp_var instead. */ | |
50674e96 | 559 | |
6de9cd9a | 560 | tree |
726a989a | 561 | get_formal_tmp_var (tree val, gimple_seq *pre_p) |
6de9cd9a DN |
562 | { |
563 | return internal_get_tmp_var (val, pre_p, NULL, true); | |
564 | } | |
565 | ||
ad19c4be | 566 | /* Return a temporary variable initialized with VAL. PRE_P and POST_P |
6de9cd9a DN |
567 | are as in gimplify_expr. */ |
568 | ||
569 | tree | |
726a989a | 570 | get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p) |
6de9cd9a DN |
571 | { |
572 | return internal_get_tmp_var (val, pre_p, post_p, false); | |
573 | } | |
574 | ||
ad19c4be EB |
575 | /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true, |
576 | generate debug info for them; otherwise don't. */ | |
6de9cd9a DN |
577 | |
578 | void | |
726a989a | 579 | declare_vars (tree vars, gimple scope, bool debug_info) |
6de9cd9a DN |
580 | { |
581 | tree last = vars; | |
582 | if (last) | |
583 | { | |
5123ad09 | 584 | tree temps, block; |
6de9cd9a | 585 | |
726a989a | 586 | gcc_assert (gimple_code (scope) == GIMPLE_BIND); |
6de9cd9a DN |
587 | |
588 | temps = nreverse (last); | |
5123ad09 | 589 | |
524d9a45 | 590 | block = gimple_bind_block (scope); |
726a989a | 591 | gcc_assert (!block || TREE_CODE (block) == BLOCK); |
5123ad09 EB |
592 | if (!block || !debug_info) |
593 | { | |
910ad8de | 594 | DECL_CHAIN (last) = gimple_bind_vars (scope); |
726a989a | 595 | gimple_bind_set_vars (scope, temps); |
5123ad09 EB |
596 | } |
597 | else | |
598 | { | |
599 | /* We need to attach the nodes both to the BIND_EXPR and to its | |
600 | associated BLOCK for debugging purposes. The key point here | |
601 | is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR | |
602 | is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */ | |
603 | if (BLOCK_VARS (block)) | |
604 | BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps); | |
605 | else | |
606 | { | |
726a989a RB |
607 | gimple_bind_set_vars (scope, |
608 | chainon (gimple_bind_vars (scope), temps)); | |
5123ad09 EB |
609 | BLOCK_VARS (block) = temps; |
610 | } | |
611 | } | |
6de9cd9a DN |
612 | } |
613 | } | |
614 | ||
a441447f OH |
615 | /* For VAR a VAR_DECL of variable size, try to find a constant upper bound |
616 | for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if | |
617 | no such upper bound can be obtained. */ | |
618 | ||
619 | static void | |
620 | force_constant_size (tree var) | |
621 | { | |
622 | /* The only attempt we make is by querying the maximum size of objects | |
623 | of the variable's type. */ | |
624 | ||
625 | HOST_WIDE_INT max_size; | |
626 | ||
627 | gcc_assert (TREE_CODE (var) == VAR_DECL); | |
628 | ||
629 | max_size = max_int_size_in_bytes (TREE_TYPE (var)); | |
630 | ||
631 | gcc_assert (max_size >= 0); | |
632 | ||
633 | DECL_SIZE_UNIT (var) | |
634 | = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size); | |
635 | DECL_SIZE (var) | |
636 | = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT); | |
637 | } | |
638 | ||
ad19c4be EB |
639 | /* Push the temporary variable TMP into the current binding. */ |
640 | ||
45b62594 RB |
641 | void |
642 | gimple_add_tmp_var_fn (struct function *fn, tree tmp) | |
643 | { | |
644 | gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp)); | |
645 | ||
646 | /* Later processing assumes that the object size is constant, which might | |
647 | not be true at this point. Force the use of a constant upper bound in | |
648 | this case. */ | |
649 | if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp))) | |
650 | force_constant_size (tmp); | |
651 | ||
652 | DECL_CONTEXT (tmp) = fn->decl; | |
653 | DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1; | |
654 | ||
655 | record_vars_into (tmp, fn->decl); | |
656 | } | |
657 | ||
658 | /* Push the temporary variable TMP into the current binding. */ | |
659 | ||
6de9cd9a DN |
660 | void |
661 | gimple_add_tmp_var (tree tmp) | |
662 | { | |
910ad8de | 663 | gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp)); |
6de9cd9a | 664 | |
a441447f OH |
665 | /* Later processing assumes that the object size is constant, which might |
666 | not be true at this point. Force the use of a constant upper bound in | |
667 | this case. */ | |
cc269bb6 | 668 | if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp))) |
a441447f OH |
669 | force_constant_size (tmp); |
670 | ||
6de9cd9a | 671 | DECL_CONTEXT (tmp) = current_function_decl; |
48eb4e53 | 672 | DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1; |
6de9cd9a DN |
673 | |
674 | if (gimplify_ctxp) | |
675 | { | |
910ad8de | 676 | DECL_CHAIN (tmp) = gimplify_ctxp->temps; |
6de9cd9a | 677 | gimplify_ctxp->temps = tmp; |
953ff289 DN |
678 | |
679 | /* Mark temporaries local within the nearest enclosing parallel. */ | |
680 | if (gimplify_omp_ctxp) | |
681 | { | |
682 | struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; | |
74bf76ed JJ |
683 | while (ctx |
684 | && (ctx->region_type == ORT_WORKSHARE | |
685 | || ctx->region_type == ORT_SIMD)) | |
953ff289 DN |
686 | ctx = ctx->outer_context; |
687 | if (ctx) | |
688 | omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN); | |
689 | } | |
6de9cd9a DN |
690 | } |
691 | else if (cfun) | |
692 | record_vars (tmp); | |
693 | else | |
726a989a RB |
694 | { |
695 | gimple_seq body_seq; | |
696 | ||
697 | /* This case is for nested functions. We need to expose the locals | |
698 | they create. */ | |
699 | body_seq = gimple_body (current_function_decl); | |
700 | declare_vars (tmp, gimple_seq_first_stmt (body_seq), false); | |
701 | } | |
702 | } | |
703 | ||
726a989a | 704 | |
616f1431 EB |
705 | \f |
706 | /* This page contains routines to unshare tree nodes, i.e. to duplicate tree | |
707 | nodes that are referenced more than once in GENERIC functions. This is | |
708 | necessary because gimplification (translation into GIMPLE) is performed | |
709 | by modifying tree nodes in-place, so gimplication of a shared node in a | |
710 | first context could generate an invalid GIMPLE form in a second context. | |
711 | ||
712 | This is achieved with a simple mark/copy/unmark algorithm that walks the | |
713 | GENERIC representation top-down, marks nodes with TREE_VISITED the first | |
714 | time it encounters them, duplicates them if they already have TREE_VISITED | |
715 | set, and finally removes the TREE_VISITED marks it has set. | |
716 | ||
717 | The algorithm works only at the function level, i.e. it generates a GENERIC | |
718 | representation of a function with no nodes shared within the function when | |
719 | passed a GENERIC function (except for nodes that are allowed to be shared). | |
720 | ||
721 | At the global level, it is also necessary to unshare tree nodes that are | |
722 | referenced in more than one function, for the same aforementioned reason. | |
723 | This requires some cooperation from the front-end. There are 2 strategies: | |
724 | ||
725 | 1. Manual unsharing. The front-end needs to call unshare_expr on every | |
726 | expression that might end up being shared across functions. | |
727 | ||
728 | 2. Deep unsharing. This is an extension of regular unsharing. Instead | |
729 | of calling unshare_expr on expressions that might be shared across | |
730 | functions, the front-end pre-marks them with TREE_VISITED. This will | |
731 | ensure that they are unshared on the first reference within functions | |
732 | when the regular unsharing algorithm runs. The counterpart is that | |
733 | this algorithm must look deeper than for manual unsharing, which is | |
734 | specified by LANG_HOOKS_DEEP_UNSHARING. | |
735 | ||
736 | If there are only few specific cases of node sharing across functions, it is | |
737 | probably easier for a front-end to unshare the expressions manually. On the | |
738 | contrary, if the expressions generated at the global level are as widespread | |
739 | as expressions generated within functions, deep unsharing is very likely the | |
740 | way to go. */ | |
741 | ||
742 | /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes. | |
3ad065ef EB |
743 | These nodes model computations that must be done once. If we were to |
744 | unshare something like SAVE_EXPR(i++), the gimplification process would | |
745 | create wrong code. However, if DATA is non-null, it must hold a pointer | |
746 | set that is used to unshare the subtrees of these nodes. */ | |
6de9cd9a DN |
747 | |
748 | static tree | |
749 | mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data) | |
750 | { | |
616f1431 EB |
751 | tree t = *tp; |
752 | enum tree_code code = TREE_CODE (t); | |
753 | ||
6687b740 EB |
754 | /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but |
755 | copy their subtrees if we can make sure to do it only once. */ | |
756 | if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR) | |
616f1431 | 757 | { |
6e2830c3 | 758 | if (data && !((hash_set<tree> *)data)->add (t)) |
616f1431 EB |
759 | ; |
760 | else | |
761 | *walk_subtrees = 0; | |
762 | } | |
763 | ||
764 | /* Stop at types, decls, constants like copy_tree_r. */ | |
765 | else if (TREE_CODE_CLASS (code) == tcc_type | |
766 | || TREE_CODE_CLASS (code) == tcc_declaration | |
767 | || TREE_CODE_CLASS (code) == tcc_constant | |
768 | /* We can't do anything sensible with a BLOCK used as an | |
769 | expression, but we also can't just die when we see it | |
770 | because of non-expression uses. So we avert our eyes | |
771 | and cross our fingers. Silly Java. */ | |
772 | || code == BLOCK) | |
6de9cd9a | 773 | *walk_subtrees = 0; |
616f1431 EB |
774 | |
775 | /* Cope with the statement expression extension. */ | |
776 | else if (code == STATEMENT_LIST) | |
777 | ; | |
778 | ||
779 | /* Leave the bulk of the work to copy_tree_r itself. */ | |
6de9cd9a | 780 | else |
6687b740 | 781 | copy_tree_r (tp, walk_subtrees, NULL); |
6de9cd9a DN |
782 | |
783 | return NULL_TREE; | |
784 | } | |
785 | ||
3ad065ef EB |
786 | /* Callback for walk_tree to unshare most of the shared trees rooted at *TP. |
787 | If *TP has been visited already, then *TP is deeply copied by calling | |
788 | mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */ | |
6de9cd9a DN |
789 | |
790 | static tree | |
616f1431 | 791 | copy_if_shared_r (tree *tp, int *walk_subtrees, void *data) |
6de9cd9a | 792 | { |
f0638e1d RH |
793 | tree t = *tp; |
794 | enum tree_code code = TREE_CODE (t); | |
795 | ||
44de5aeb RK |
796 | /* Skip types, decls, and constants. But we do want to look at their |
797 | types and the bounds of types. Mark them as visited so we properly | |
798 | unmark their subtrees on the unmark pass. If we've already seen them, | |
799 | don't look down further. */ | |
6615c446 JO |
800 | if (TREE_CODE_CLASS (code) == tcc_type |
801 | || TREE_CODE_CLASS (code) == tcc_declaration | |
802 | || TREE_CODE_CLASS (code) == tcc_constant) | |
44de5aeb RK |
803 | { |
804 | if (TREE_VISITED (t)) | |
805 | *walk_subtrees = 0; | |
806 | else | |
807 | TREE_VISITED (t) = 1; | |
808 | } | |
f0638e1d | 809 | |
6de9cd9a DN |
810 | /* If this node has been visited already, unshare it and don't look |
811 | any deeper. */ | |
f0638e1d | 812 | else if (TREE_VISITED (t)) |
6de9cd9a | 813 | { |
616f1431 | 814 | walk_tree (tp, mostly_copy_tree_r, data, NULL); |
6de9cd9a DN |
815 | *walk_subtrees = 0; |
816 | } | |
f0638e1d | 817 | |
616f1431 | 818 | /* Otherwise, mark the node as visited and keep looking. */ |
6de9cd9a | 819 | else |
77c9db77 | 820 | TREE_VISITED (t) = 1; |
f0638e1d | 821 | |
6de9cd9a DN |
822 | return NULL_TREE; |
823 | } | |
824 | ||
3ad065ef EB |
825 | /* Unshare most of the shared trees rooted at *TP. DATA is passed to the |
826 | copy_if_shared_r callback unmodified. */ | |
6de9cd9a | 827 | |
616f1431 | 828 | static inline void |
3ad065ef | 829 | copy_if_shared (tree *tp, void *data) |
616f1431 | 830 | { |
3ad065ef | 831 | walk_tree (tp, copy_if_shared_r, data, NULL); |
6de9cd9a DN |
832 | } |
833 | ||
3ad065ef EB |
834 | /* Unshare all the trees in the body of FNDECL, as well as in the bodies of |
835 | any nested functions. */ | |
44de5aeb RK |
836 | |
837 | static void | |
3ad065ef | 838 | unshare_body (tree fndecl) |
44de5aeb | 839 | { |
d52f5295 | 840 | struct cgraph_node *cgn = cgraph_node::get (fndecl); |
3ad065ef EB |
841 | /* If the language requires deep unsharing, we need a pointer set to make |
842 | sure we don't repeatedly unshare subtrees of unshareable nodes. */ | |
6e2830c3 TS |
843 | hash_set<tree> *visited |
844 | = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL; | |
44de5aeb | 845 | |
3ad065ef EB |
846 | copy_if_shared (&DECL_SAVED_TREE (fndecl), visited); |
847 | copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited); | |
848 | copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited); | |
849 | ||
6e2830c3 | 850 | delete visited; |
616f1431 | 851 | |
3ad065ef | 852 | if (cgn) |
48eb4e53 | 853 | for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) |
67348ccc | 854 | unshare_body (cgn->decl); |
44de5aeb RK |
855 | } |
856 | ||
616f1431 EB |
857 | /* Callback for walk_tree to unmark the visited trees rooted at *TP. |
858 | Subtrees are walked until the first unvisited node is encountered. */ | |
859 | ||
860 | static tree | |
861 | unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) | |
862 | { | |
863 | tree t = *tp; | |
864 | ||
865 | /* If this node has been visited, unmark it and keep looking. */ | |
866 | if (TREE_VISITED (t)) | |
867 | TREE_VISITED (t) = 0; | |
868 | ||
869 | /* Otherwise, don't look any deeper. */ | |
870 | else | |
871 | *walk_subtrees = 0; | |
872 | ||
873 | return NULL_TREE; | |
874 | } | |
875 | ||
876 | /* Unmark the visited trees rooted at *TP. */ | |
877 | ||
878 | static inline void | |
879 | unmark_visited (tree *tp) | |
880 | { | |
881 | walk_tree (tp, unmark_visited_r, NULL, NULL); | |
882 | } | |
883 | ||
44de5aeb RK |
884 | /* Likewise, but mark all trees as not visited. */ |
885 | ||
886 | static void | |
3ad065ef | 887 | unvisit_body (tree fndecl) |
44de5aeb | 888 | { |
d52f5295 | 889 | struct cgraph_node *cgn = cgraph_node::get (fndecl); |
44de5aeb | 890 | |
3ad065ef EB |
891 | unmark_visited (&DECL_SAVED_TREE (fndecl)); |
892 | unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl))); | |
893 | unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl))); | |
616f1431 | 894 | |
3ad065ef | 895 | if (cgn) |
48eb4e53 | 896 | for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) |
67348ccc | 897 | unvisit_body (cgn->decl); |
44de5aeb RK |
898 | } |
899 | ||
6de9cd9a DN |
900 | /* Unconditionally make an unshared copy of EXPR. This is used when using |
901 | stored expressions which span multiple functions, such as BINFO_VTABLE, | |
902 | as the normal unsharing process can't tell that they're shared. */ | |
903 | ||
904 | tree | |
905 | unshare_expr (tree expr) | |
906 | { | |
907 | walk_tree (&expr, mostly_copy_tree_r, NULL, NULL); | |
908 | return expr; | |
909 | } | |
d1f98542 RB |
910 | |
911 | /* Worker for unshare_expr_without_location. */ | |
912 | ||
913 | static tree | |
914 | prune_expr_location (tree *tp, int *walk_subtrees, void *) | |
915 | { | |
916 | if (EXPR_P (*tp)) | |
917 | SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION); | |
918 | else | |
919 | *walk_subtrees = 0; | |
920 | return NULL_TREE; | |
921 | } | |
922 | ||
923 | /* Similar to unshare_expr but also prune all expression locations | |
924 | from EXPR. */ | |
925 | ||
926 | tree | |
927 | unshare_expr_without_location (tree expr) | |
928 | { | |
929 | walk_tree (&expr, mostly_copy_tree_r, NULL, NULL); | |
930 | if (EXPR_P (expr)) | |
931 | walk_tree (&expr, prune_expr_location, NULL, NULL); | |
932 | return expr; | |
933 | } | |
6de9cd9a DN |
934 | \f |
935 | /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both | |
936 | contain statements and have a value. Assign its value to a temporary | |
ad19c4be | 937 | and give it void_type_node. Return the temporary, or NULL_TREE if |
6de9cd9a DN |
938 | WRAPPER was already void. */ |
939 | ||
940 | tree | |
325c3691 | 941 | voidify_wrapper_expr (tree wrapper, tree temp) |
6de9cd9a | 942 | { |
4832214a JM |
943 | tree type = TREE_TYPE (wrapper); |
944 | if (type && !VOID_TYPE_P (type)) | |
6de9cd9a | 945 | { |
c6c7698d | 946 | tree *p; |
6de9cd9a | 947 | |
c6c7698d JM |
948 | /* Set p to point to the body of the wrapper. Loop until we find |
949 | something that isn't a wrapper. */ | |
950 | for (p = &wrapper; p && *p; ) | |
d3147f64 | 951 | { |
c6c7698d | 952 | switch (TREE_CODE (*p)) |
6de9cd9a | 953 | { |
c6c7698d JM |
954 | case BIND_EXPR: |
955 | TREE_SIDE_EFFECTS (*p) = 1; | |
956 | TREE_TYPE (*p) = void_type_node; | |
957 | /* For a BIND_EXPR, the body is operand 1. */ | |
958 | p = &BIND_EXPR_BODY (*p); | |
959 | break; | |
960 | ||
961 | case CLEANUP_POINT_EXPR: | |
962 | case TRY_FINALLY_EXPR: | |
963 | case TRY_CATCH_EXPR: | |
6de9cd9a DN |
964 | TREE_SIDE_EFFECTS (*p) = 1; |
965 | TREE_TYPE (*p) = void_type_node; | |
c6c7698d JM |
966 | p = &TREE_OPERAND (*p, 0); |
967 | break; | |
968 | ||
969 | case STATEMENT_LIST: | |
970 | { | |
971 | tree_stmt_iterator i = tsi_last (*p); | |
972 | TREE_SIDE_EFFECTS (*p) = 1; | |
973 | TREE_TYPE (*p) = void_type_node; | |
974 | p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i); | |
975 | } | |
976 | break; | |
977 | ||
978 | case COMPOUND_EXPR: | |
ad19c4be EB |
979 | /* Advance to the last statement. Set all container types to |
980 | void. */ | |
c6c7698d JM |
981 | for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1)) |
982 | { | |
983 | TREE_SIDE_EFFECTS (*p) = 1; | |
984 | TREE_TYPE (*p) = void_type_node; | |
985 | } | |
986 | break; | |
987 | ||
0a35513e AH |
988 | case TRANSACTION_EXPR: |
989 | TREE_SIDE_EFFECTS (*p) = 1; | |
990 | TREE_TYPE (*p) = void_type_node; | |
991 | p = &TRANSACTION_EXPR_BODY (*p); | |
992 | break; | |
993 | ||
c6c7698d | 994 | default: |
5f23640f TR |
995 | /* Assume that any tree upon which voidify_wrapper_expr is |
996 | directly called is a wrapper, and that its body is op0. */ | |
997 | if (p == &wrapper) | |
998 | { | |
999 | TREE_SIDE_EFFECTS (*p) = 1; | |
1000 | TREE_TYPE (*p) = void_type_node; | |
1001 | p = &TREE_OPERAND (*p, 0); | |
1002 | break; | |
1003 | } | |
c6c7698d | 1004 | goto out; |
6de9cd9a DN |
1005 | } |
1006 | } | |
1007 | ||
c6c7698d | 1008 | out: |
325c3691 | 1009 | if (p == NULL || IS_EMPTY_STMT (*p)) |
c6c7698d JM |
1010 | temp = NULL_TREE; |
1011 | else if (temp) | |
6de9cd9a | 1012 | { |
c6c7698d JM |
1013 | /* The wrapper is on the RHS of an assignment that we're pushing |
1014 | down. */ | |
1015 | gcc_assert (TREE_CODE (temp) == INIT_EXPR | |
1016 | || TREE_CODE (temp) == MODIFY_EXPR); | |
726a989a | 1017 | TREE_OPERAND (temp, 1) = *p; |
c6c7698d | 1018 | *p = temp; |
6de9cd9a DN |
1019 | } |
1020 | else | |
1021 | { | |
c6c7698d JM |
1022 | temp = create_tmp_var (type, "retval"); |
1023 | *p = build2 (INIT_EXPR, type, temp, *p); | |
6de9cd9a DN |
1024 | } |
1025 | ||
6de9cd9a DN |
1026 | return temp; |
1027 | } | |
1028 | ||
1029 | return NULL_TREE; | |
1030 | } | |
1031 | ||
1032 | /* Prepare calls to builtins to SAVE and RESTORE the stack as well as | |
1ea7e6ad | 1033 | a temporary through which they communicate. */ |
6de9cd9a DN |
1034 | |
1035 | static void | |
726a989a | 1036 | build_stack_save_restore (gimple *save, gimple *restore) |
6de9cd9a | 1037 | { |
726a989a | 1038 | tree tmp_var; |
6de9cd9a | 1039 | |
e79983f4 | 1040 | *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0); |
6de9cd9a | 1041 | tmp_var = create_tmp_var (ptr_type_node, "saved_stack"); |
726a989a | 1042 | gimple_call_set_lhs (*save, tmp_var); |
6de9cd9a | 1043 | |
ad19c4be | 1044 | *restore |
e79983f4 | 1045 | = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE), |
ad19c4be | 1046 | 1, tmp_var); |
6de9cd9a DN |
1047 | } |
1048 | ||
1049 | /* Gimplify a BIND_EXPR. Just voidify and recurse. */ | |
1050 | ||
1051 | static enum gimplify_status | |
726a989a | 1052 | gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p) |
6de9cd9a DN |
1053 | { |
1054 | tree bind_expr = *expr_p; | |
6de9cd9a DN |
1055 | bool old_save_stack = gimplify_ctxp->save_stack; |
1056 | tree t; | |
726a989a | 1057 | gimple gimple_bind; |
47598145 MM |
1058 | gimple_seq body, cleanup; |
1059 | gimple stack_save; | |
a5852bea | 1060 | location_t start_locus = 0, end_locus = 0; |
6de9cd9a | 1061 | |
c6c7698d | 1062 | tree temp = voidify_wrapper_expr (bind_expr, NULL); |
325c3691 | 1063 | |
6de9cd9a | 1064 | /* Mark variables seen in this bind expr. */ |
910ad8de | 1065 | for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t)) |
e41d82f5 | 1066 | { |
820cc88f | 1067 | if (TREE_CODE (t) == VAR_DECL) |
8cb86b65 JJ |
1068 | { |
1069 | struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; | |
1070 | ||
1071 | /* Mark variable as local. */ | |
144f4153 | 1072 | if (ctx && !DECL_EXTERNAL (t) |
8cb86b65 JJ |
1073 | && (! DECL_SEEN_IN_BIND_EXPR_P (t) |
1074 | || splay_tree_lookup (ctx->variables, | |
1075 | (splay_tree_key) t) == NULL)) | |
c74559df JJ |
1076 | { |
1077 | if (ctx->region_type == ORT_SIMD | |
1078 | && TREE_ADDRESSABLE (t) | |
1079 | && !TREE_STATIC (t)) | |
1080 | omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN); | |
1081 | else | |
1082 | omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN); | |
1083 | } | |
8cb86b65 JJ |
1084 | |
1085 | DECL_SEEN_IN_BIND_EXPR_P (t) = 1; | |
fc3103e7 JJ |
1086 | |
1087 | if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun) | |
1088 | cfun->has_local_explicit_reg_vars = true; | |
8cb86b65 | 1089 | } |
e41d82f5 RH |
1090 | |
1091 | /* Preliminarily mark non-addressed complex variables as eligible | |
1092 | for promotion to gimple registers. We'll transform their uses | |
bd2e63a1 RG |
1093 | as we find them. */ |
1094 | if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE | |
1095 | || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE) | |
e41d82f5 RH |
1096 | && !TREE_THIS_VOLATILE (t) |
1097 | && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t)) | |
1098 | && !needs_to_live_in_memory (t)) | |
0890b981 | 1099 | DECL_GIMPLE_REG_P (t) = 1; |
e41d82f5 | 1100 | } |
6de9cd9a | 1101 | |
726a989a RB |
1102 | gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL, |
1103 | BIND_EXPR_BLOCK (bind_expr)); | |
1104 | gimple_push_bind_expr (gimple_bind); | |
1105 | ||
6de9cd9a DN |
1106 | gimplify_ctxp->save_stack = false; |
1107 | ||
726a989a RB |
1108 | /* Gimplify the body into the GIMPLE_BIND tuple's body. */ |
1109 | body = NULL; | |
1110 | gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body); | |
1111 | gimple_bind_set_body (gimple_bind, body); | |
6de9cd9a | 1112 | |
a5852bea OH |
1113 | /* Source location wise, the cleanup code (stack_restore and clobbers) |
1114 | belongs to the end of the block, so propagate what we have. The | |
1115 | stack_save operation belongs to the beginning of block, which we can | |
1116 | infer from the bind_expr directly if the block has no explicit | |
1117 | assignment. */ | |
1118 | if (BIND_EXPR_BLOCK (bind_expr)) | |
1119 | { | |
1120 | end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr)); | |
1121 | start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr)); | |
1122 | } | |
1123 | if (start_locus == 0) | |
1124 | start_locus = EXPR_LOCATION (bind_expr); | |
1125 | ||
47598145 MM |
1126 | cleanup = NULL; |
1127 | stack_save = NULL; | |
6de9cd9a DN |
1128 | if (gimplify_ctxp->save_stack) |
1129 | { | |
47598145 | 1130 | gimple stack_restore; |
6de9cd9a DN |
1131 | |
1132 | /* Save stack on entry and restore it on exit. Add a try_finally | |
98906124 | 1133 | block to achieve this. */ |
6de9cd9a DN |
1134 | build_stack_save_restore (&stack_save, &stack_restore); |
1135 | ||
a5852bea OH |
1136 | gimple_set_location (stack_save, start_locus); |
1137 | gimple_set_location (stack_restore, end_locus); | |
1138 | ||
726a989a | 1139 | gimplify_seq_add_stmt (&cleanup, stack_restore); |
47598145 MM |
1140 | } |
1141 | ||
1142 | /* Add clobbers for all variables that go out of scope. */ | |
1143 | for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t)) | |
1144 | { | |
1145 | if (TREE_CODE (t) == VAR_DECL | |
1146 | && !is_global_var (t) | |
1147 | && DECL_CONTEXT (t) == current_function_decl | |
1148 | && !DECL_HARD_REGISTER (t) | |
1149 | && !TREE_THIS_VOLATILE (t) | |
1150 | && !DECL_HAS_VALUE_EXPR_P (t) | |
1151 | /* Only care for variables that have to be in memory. Others | |
1152 | will be rewritten into SSA names, hence moved to the top-level. */ | |
87e2a8fd XDL |
1153 | && !is_gimple_reg (t) |
1154 | && flag_stack_reuse != SR_NONE) | |
47598145 | 1155 | { |
a5852bea OH |
1156 | tree clobber = build_constructor (TREE_TYPE (t), NULL); |
1157 | gimple clobber_stmt; | |
47598145 | 1158 | TREE_THIS_VOLATILE (clobber) = 1; |
a5852bea OH |
1159 | clobber_stmt = gimple_build_assign (t, clobber); |
1160 | gimple_set_location (clobber_stmt, end_locus); | |
1161 | gimplify_seq_add_stmt (&cleanup, clobber_stmt); | |
47598145 MM |
1162 | } |
1163 | } | |
1164 | ||
1165 | if (cleanup) | |
1166 | { | |
1167 | gimple gs; | |
1168 | gimple_seq new_body; | |
1169 | ||
1170 | new_body = NULL; | |
726a989a RB |
1171 | gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup, |
1172 | GIMPLE_TRY_FINALLY); | |
6de9cd9a | 1173 | |
47598145 MM |
1174 | if (stack_save) |
1175 | gimplify_seq_add_stmt (&new_body, stack_save); | |
726a989a RB |
1176 | gimplify_seq_add_stmt (&new_body, gs); |
1177 | gimple_bind_set_body (gimple_bind, new_body); | |
6de9cd9a DN |
1178 | } |
1179 | ||
1180 | gimplify_ctxp->save_stack = old_save_stack; | |
1181 | gimple_pop_bind_expr (); | |
1182 | ||
726a989a RB |
1183 | gimplify_seq_add_stmt (pre_p, gimple_bind); |
1184 | ||
6de9cd9a DN |
1185 | if (temp) |
1186 | { | |
1187 | *expr_p = temp; | |
6de9cd9a DN |
1188 | return GS_OK; |
1189 | } | |
726a989a RB |
1190 | |
1191 | *expr_p = NULL_TREE; | |
1192 | return GS_ALL_DONE; | |
6de9cd9a DN |
1193 | } |
1194 | ||
1195 | /* Gimplify a RETURN_EXPR. If the expression to be returned is not a | |
1196 | GIMPLE value, it is assigned to a new temporary and the statement is | |
1197 | re-written to return the temporary. | |
1198 | ||
726a989a | 1199 | PRE_P points to the sequence where side effects that must happen before |
6de9cd9a DN |
1200 | STMT should be stored. */ |
1201 | ||
1202 | static enum gimplify_status | |
726a989a | 1203 | gimplify_return_expr (tree stmt, gimple_seq *pre_p) |
6de9cd9a | 1204 | { |
726a989a | 1205 | gimple ret; |
6de9cd9a | 1206 | tree ret_expr = TREE_OPERAND (stmt, 0); |
71877985 | 1207 | tree result_decl, result; |
6de9cd9a | 1208 | |
726a989a RB |
1209 | if (ret_expr == error_mark_node) |
1210 | return GS_ERROR; | |
1211 | ||
939b37da BI |
1212 | /* Implicit _Cilk_sync must be inserted right before any return statement |
1213 | if there is a _Cilk_spawn in the function. If the user has provided a | |
1214 | _Cilk_sync, the optimizer should remove this duplicate one. */ | |
1215 | if (fn_contains_cilk_spawn_p (cfun)) | |
1216 | { | |
1217 | tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node); | |
1218 | gimplify_and_add (impl_sync, pre_p); | |
1219 | } | |
1220 | ||
726a989a RB |
1221 | if (!ret_expr |
1222 | || TREE_CODE (ret_expr) == RESULT_DECL | |
55e99d52 | 1223 | || ret_expr == error_mark_node) |
726a989a RB |
1224 | { |
1225 | gimple ret = gimple_build_return (ret_expr); | |
1226 | gimple_set_no_warning (ret, TREE_NO_WARNING (stmt)); | |
1227 | gimplify_seq_add_stmt (pre_p, ret); | |
1228 | return GS_ALL_DONE; | |
1229 | } | |
6de9cd9a | 1230 | |
6de9cd9a | 1231 | if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))) |
71877985 | 1232 | result_decl = NULL_TREE; |
6de9cd9a DN |
1233 | else |
1234 | { | |
726a989a RB |
1235 | result_decl = TREE_OPERAND (ret_expr, 0); |
1236 | ||
1237 | /* See through a return by reference. */ | |
cc77ae10 | 1238 | if (TREE_CODE (result_decl) == INDIRECT_REF) |
cc77ae10 | 1239 | result_decl = TREE_OPERAND (result_decl, 0); |
282899df NS |
1240 | |
1241 | gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR | |
1242 | || TREE_CODE (ret_expr) == INIT_EXPR) | |
1243 | && TREE_CODE (result_decl) == RESULT_DECL); | |
6de9cd9a DN |
1244 | } |
1245 | ||
71877985 RH |
1246 | /* If aggregate_value_p is true, then we can return the bare RESULT_DECL. |
1247 | Recall that aggregate_value_p is FALSE for any aggregate type that is | |
1248 | returned in registers. If we're returning values in registers, then | |
1249 | we don't want to extend the lifetime of the RESULT_DECL, particularly | |
d3147f64 | 1250 | across another call. In addition, for those aggregates for which |
535a42b1 | 1251 | hard_function_value generates a PARALLEL, we'll die during normal |
71877985 RH |
1252 | expansion of structure assignments; there's special code in expand_return |
1253 | to handle this case that does not exist in expand_expr. */ | |
ca361dec EB |
1254 | if (!result_decl) |
1255 | result = NULL_TREE; | |
1256 | else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl))) | |
1257 | { | |
1258 | if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST) | |
1259 | { | |
1260 | if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl))) | |
1261 | gimplify_type_sizes (TREE_TYPE (result_decl), pre_p); | |
1262 | /* Note that we don't use gimplify_vla_decl because the RESULT_DECL | |
1263 | should be effectively allocated by the caller, i.e. all calls to | |
1264 | this function must be subject to the Return Slot Optimization. */ | |
1265 | gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p); | |
1266 | gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p); | |
1267 | } | |
1268 | result = result_decl; | |
1269 | } | |
71877985 RH |
1270 | else if (gimplify_ctxp->return_temp) |
1271 | result = gimplify_ctxp->return_temp; | |
1272 | else | |
1273 | { | |
acd63801 | 1274 | result = create_tmp_reg (TREE_TYPE (result_decl), NULL); |
ff98621c RH |
1275 | |
1276 | /* ??? With complex control flow (usually involving abnormal edges), | |
1277 | we can wind up warning about an uninitialized value for this. Due | |
1278 | to how this variable is constructed and initialized, this is never | |
1279 | true. Give up and never warn. */ | |
1280 | TREE_NO_WARNING (result) = 1; | |
1281 | ||
71877985 RH |
1282 | gimplify_ctxp->return_temp = result; |
1283 | } | |
1284 | ||
726a989a | 1285 | /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use. |
71877985 RH |
1286 | Then gimplify the whole thing. */ |
1287 | if (result != result_decl) | |
726a989a | 1288 | TREE_OPERAND (ret_expr, 0) = result; |
fff34d35 RK |
1289 | |
1290 | gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p); | |
6de9cd9a | 1291 | |
726a989a RB |
1292 | ret = gimple_build_return (result); |
1293 | gimple_set_no_warning (ret, TREE_NO_WARNING (stmt)); | |
1294 | gimplify_seq_add_stmt (pre_p, ret); | |
6de9cd9a | 1295 | |
6de9cd9a DN |
1296 | return GS_ALL_DONE; |
1297 | } | |
1298 | ||
ad19c4be EB |
1299 | /* Gimplify a variable-length array DECL. */ |
1300 | ||
786025ea | 1301 | static void |
726a989a | 1302 | gimplify_vla_decl (tree decl, gimple_seq *seq_p) |
786025ea JJ |
1303 | { |
1304 | /* This is a variable-sized decl. Simplify its size and mark it | |
98906124 | 1305 | for deferred expansion. */ |
786025ea JJ |
1306 | tree t, addr, ptr_type; |
1307 | ||
726a989a RB |
1308 | gimplify_one_sizepos (&DECL_SIZE (decl), seq_p); |
1309 | gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p); | |
786025ea | 1310 | |
0138d6b2 JM |
1311 | /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */ |
1312 | if (DECL_HAS_VALUE_EXPR_P (decl)) | |
1313 | return; | |
1314 | ||
786025ea JJ |
1315 | /* All occurrences of this decl in final gimplified code will be |
1316 | replaced by indirection. Setting DECL_VALUE_EXPR does two | |
1317 | things: First, it lets the rest of the gimplifier know what | |
1318 | replacement to use. Second, it lets the debug info know | |
1319 | where to find the value. */ | |
1320 | ptr_type = build_pointer_type (TREE_TYPE (decl)); | |
1321 | addr = create_tmp_var (ptr_type, get_name (decl)); | |
1322 | DECL_IGNORED_P (addr) = 0; | |
1323 | t = build_fold_indirect_ref (addr); | |
31408f60 | 1324 | TREE_THIS_NOTRAP (t) = 1; |
786025ea JJ |
1325 | SET_DECL_VALUE_EXPR (decl, t); |
1326 | DECL_HAS_VALUE_EXPR_P (decl) = 1; | |
1327 | ||
e79983f4 | 1328 | t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN); |
13e49da9 TV |
1329 | t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl), |
1330 | size_int (DECL_ALIGN (decl))); | |
d3c12306 | 1331 | /* The call has been built for a variable-sized object. */ |
63d2a353 | 1332 | CALL_ALLOCA_FOR_VAR_P (t) = 1; |
786025ea | 1333 | t = fold_convert (ptr_type, t); |
726a989a | 1334 | t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t); |
786025ea | 1335 | |
726a989a | 1336 | gimplify_and_add (t, seq_p); |
786025ea JJ |
1337 | |
1338 | /* Indicate that we need to restore the stack level when the | |
1339 | enclosing BIND_EXPR is exited. */ | |
1340 | gimplify_ctxp->save_stack = true; | |
1341 | } | |
1342 | ||
45b0be94 AM |
1343 | /* A helper function to be called via walk_tree. Mark all labels under *TP |
1344 | as being forced. To be called for DECL_INITIAL of static variables. */ | |
1345 | ||
1346 | static tree | |
1347 | force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) | |
1348 | { | |
1349 | if (TYPE_P (*tp)) | |
1350 | *walk_subtrees = 0; | |
1351 | if (TREE_CODE (*tp) == LABEL_DECL) | |
1352 | FORCED_LABEL (*tp) = 1; | |
1353 | ||
1354 | return NULL_TREE; | |
1355 | } | |
1356 | ||
ad19c4be | 1357 | /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation |
350fae66 RK |
1358 | and initialization explicit. */ |
1359 | ||
1360 | static enum gimplify_status | |
726a989a | 1361 | gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p) |
350fae66 RK |
1362 | { |
1363 | tree stmt = *stmt_p; | |
1364 | tree decl = DECL_EXPR_DECL (stmt); | |
1365 | ||
1366 | *stmt_p = NULL_TREE; | |
1367 | ||
1368 | if (TREE_TYPE (decl) == error_mark_node) | |
1369 | return GS_ERROR; | |
1370 | ||
8e0a600b JJ |
1371 | if ((TREE_CODE (decl) == TYPE_DECL |
1372 | || TREE_CODE (decl) == VAR_DECL) | |
1373 | && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl))) | |
726a989a | 1374 | gimplify_type_sizes (TREE_TYPE (decl), seq_p); |
350fae66 | 1375 | |
d400d17e EB |
1376 | /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified |
1377 | in case its size expressions contain problematic nodes like CALL_EXPR. */ | |
1378 | if (TREE_CODE (decl) == TYPE_DECL | |
1379 | && DECL_ORIGINAL_TYPE (decl) | |
1380 | && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl))) | |
1381 | gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p); | |
1382 | ||
8e0a600b | 1383 | if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl)) |
350fae66 RK |
1384 | { |
1385 | tree init = DECL_INITIAL (decl); | |
1386 | ||
b38f3813 EB |
1387 | if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST |
1388 | || (!TREE_STATIC (decl) | |
1389 | && flag_stack_check == GENERIC_STACK_CHECK | |
1390 | && compare_tree_int (DECL_SIZE_UNIT (decl), | |
1391 | STACK_CHECK_MAX_VAR_SIZE) > 0)) | |
726a989a | 1392 | gimplify_vla_decl (decl, seq_p); |
350fae66 | 1393 | |
22192559 JM |
1394 | /* Some front ends do not explicitly declare all anonymous |
1395 | artificial variables. We compensate here by declaring the | |
1396 | variables, though it would be better if the front ends would | |
1397 | explicitly declare them. */ | |
1398 | if (!DECL_SEEN_IN_BIND_EXPR_P (decl) | |
1399 | && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE) | |
1400 | gimple_add_tmp_var (decl); | |
1401 | ||
350fae66 RK |
1402 | if (init && init != error_mark_node) |
1403 | { | |
1404 | if (!TREE_STATIC (decl)) | |
1405 | { | |
1406 | DECL_INITIAL (decl) = NULL_TREE; | |
dae7ec87 | 1407 | init = build2 (INIT_EXPR, void_type_node, decl, init); |
726a989a RB |
1408 | gimplify_and_add (init, seq_p); |
1409 | ggc_free (init); | |
350fae66 RK |
1410 | } |
1411 | else | |
1412 | /* We must still examine initializers for static variables | |
1413 | as they may contain a label address. */ | |
1414 | walk_tree (&init, force_labels_r, NULL, NULL); | |
1415 | } | |
350fae66 RK |
1416 | } |
1417 | ||
1418 | return GS_ALL_DONE; | |
1419 | } | |
1420 | ||
6de9cd9a DN |
1421 | /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body |
1422 | and replacing the LOOP_EXPR with goto, but if the loop contains an | |
1423 | EXIT_EXPR, we need to append a label for it to jump to. */ | |
1424 | ||
1425 | static enum gimplify_status | |
726a989a | 1426 | gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p) |
6de9cd9a DN |
1427 | { |
1428 | tree saved_label = gimplify_ctxp->exit_label; | |
c2255bc4 | 1429 | tree start_label = create_artificial_label (UNKNOWN_LOCATION); |
6de9cd9a | 1430 | |
726a989a | 1431 | gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label)); |
6de9cd9a DN |
1432 | |
1433 | gimplify_ctxp->exit_label = NULL_TREE; | |
1434 | ||
fff34d35 | 1435 | gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p); |
6de9cd9a | 1436 | |
726a989a RB |
1437 | gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label)); |
1438 | ||
6de9cd9a | 1439 | if (gimplify_ctxp->exit_label) |
ad19c4be EB |
1440 | gimplify_seq_add_stmt (pre_p, |
1441 | gimple_build_label (gimplify_ctxp->exit_label)); | |
726a989a RB |
1442 | |
1443 | gimplify_ctxp->exit_label = saved_label; | |
1444 | ||
1445 | *expr_p = NULL; | |
1446 | return GS_ALL_DONE; | |
1447 | } | |
1448 | ||
ad19c4be | 1449 | /* Gimplify a statement list onto a sequence. These may be created either |
726a989a RB |
1450 | by an enlightened front-end, or by shortcut_cond_expr. */ |
1451 | ||
1452 | static enum gimplify_status | |
1453 | gimplify_statement_list (tree *expr_p, gimple_seq *pre_p) | |
1454 | { | |
1455 | tree temp = voidify_wrapper_expr (*expr_p, NULL); | |
1456 | ||
1457 | tree_stmt_iterator i = tsi_start (*expr_p); | |
1458 | ||
1459 | while (!tsi_end_p (i)) | |
6de9cd9a | 1460 | { |
726a989a RB |
1461 | gimplify_stmt (tsi_stmt_ptr (i), pre_p); |
1462 | tsi_delink (&i); | |
6de9cd9a | 1463 | } |
6de9cd9a | 1464 | |
726a989a RB |
1465 | if (temp) |
1466 | { | |
1467 | *expr_p = temp; | |
1468 | return GS_OK; | |
1469 | } | |
6de9cd9a DN |
1470 | |
1471 | return GS_ALL_DONE; | |
1472 | } | |
0f1f6967 | 1473 | |
68e72840 SB |
1474 | \f |
1475 | /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can | |
6de9cd9a DN |
1476 | branch to. */ |
1477 | ||
1478 | static enum gimplify_status | |
726a989a | 1479 | gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p) |
6de9cd9a DN |
1480 | { |
1481 | tree switch_expr = *expr_p; | |
726a989a | 1482 | gimple_seq switch_body_seq = NULL; |
6de9cd9a | 1483 | enum gimplify_status ret; |
0cd2402d SB |
1484 | tree index_type = TREE_TYPE (switch_expr); |
1485 | if (index_type == NULL_TREE) | |
1486 | index_type = TREE_TYPE (SWITCH_COND (switch_expr)); | |
6de9cd9a | 1487 | |
726a989a RB |
1488 | ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val, |
1489 | fb_rvalue); | |
1490 | if (ret == GS_ERROR || ret == GS_UNHANDLED) | |
1491 | return ret; | |
6de9cd9a DN |
1492 | |
1493 | if (SWITCH_BODY (switch_expr)) | |
1494 | { | |
9771b263 DN |
1495 | vec<tree> labels; |
1496 | vec<tree> saved_labels; | |
726a989a | 1497 | tree default_case = NULL_TREE; |
726a989a | 1498 | gimple gimple_switch; |
b8698a0f | 1499 | |
6de9cd9a DN |
1500 | /* If someone can be bothered to fill in the labels, they can |
1501 | be bothered to null out the body too. */ | |
282899df | 1502 | gcc_assert (!SWITCH_LABELS (switch_expr)); |
6de9cd9a | 1503 | |
0cd2402d | 1504 | /* Save old labels, get new ones from body, then restore the old |
726a989a | 1505 | labels. Save all the things from the switch body to append after. */ |
6de9cd9a | 1506 | saved_labels = gimplify_ctxp->case_labels; |
9771b263 | 1507 | gimplify_ctxp->case_labels.create (8); |
6de9cd9a | 1508 | |
726a989a | 1509 | gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq); |
6de9cd9a DN |
1510 | labels = gimplify_ctxp->case_labels; |
1511 | gimplify_ctxp->case_labels = saved_labels; | |
b8698a0f | 1512 | |
68e72840 SB |
1513 | preprocess_case_label_vec_for_gimple (labels, index_type, |
1514 | &default_case); | |
32f579f6 | 1515 | |
726a989a | 1516 | if (!default_case) |
6de9cd9a | 1517 | { |
68e72840 | 1518 | gimple new_default; |
6de9cd9a | 1519 | |
68e72840 SB |
1520 | default_case |
1521 | = build_case_label (NULL_TREE, NULL_TREE, | |
1522 | create_artificial_label (UNKNOWN_LOCATION)); | |
1523 | new_default = gimple_build_label (CASE_LABEL (default_case)); | |
1524 | gimplify_seq_add_stmt (&switch_body_seq, new_default); | |
32f579f6 | 1525 | } |
f667741c | 1526 | |
fd8d363e SB |
1527 | gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr), |
1528 | default_case, labels); | |
726a989a RB |
1529 | gimplify_seq_add_stmt (pre_p, gimple_switch); |
1530 | gimplify_seq_add_seq (pre_p, switch_body_seq); | |
9771b263 | 1531 | labels.release (); |
6de9cd9a | 1532 | } |
282899df NS |
1533 | else |
1534 | gcc_assert (SWITCH_LABELS (switch_expr)); | |
6de9cd9a | 1535 | |
726a989a | 1536 | return GS_ALL_DONE; |
6de9cd9a DN |
1537 | } |
1538 | ||
ad19c4be | 1539 | /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */ |
726a989a | 1540 | |
6de9cd9a | 1541 | static enum gimplify_status |
726a989a | 1542 | gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p) |
6de9cd9a | 1543 | { |
953ff289 | 1544 | struct gimplify_ctx *ctxp; |
726a989a | 1545 | gimple gimple_label; |
953ff289 DN |
1546 | |
1547 | /* Invalid OpenMP programs can play Duff's Device type games with | |
1548 | #pragma omp parallel. At least in the C front end, we don't | |
1549 | detect such invalid branches until after gimplification. */ | |
1550 | for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context) | |
9771b263 | 1551 | if (ctxp->case_labels.exists ()) |
953ff289 | 1552 | break; |
282899df | 1553 | |
726a989a | 1554 | gimple_label = gimple_build_label (CASE_LABEL (*expr_p)); |
9771b263 | 1555 | ctxp->case_labels.safe_push (*expr_p); |
726a989a RB |
1556 | gimplify_seq_add_stmt (pre_p, gimple_label); |
1557 | ||
6de9cd9a DN |
1558 | return GS_ALL_DONE; |
1559 | } | |
1560 | ||
6de9cd9a DN |
1561 | /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first |
1562 | if necessary. */ | |
1563 | ||
1564 | tree | |
1565 | build_and_jump (tree *label_p) | |
1566 | { | |
1567 | if (label_p == NULL) | |
1568 | /* If there's nowhere to jump, just fall through. */ | |
65355d53 | 1569 | return NULL_TREE; |
6de9cd9a DN |
1570 | |
1571 | if (*label_p == NULL_TREE) | |
1572 | { | |
c2255bc4 | 1573 | tree label = create_artificial_label (UNKNOWN_LOCATION); |
6de9cd9a DN |
1574 | *label_p = label; |
1575 | } | |
1576 | ||
1577 | return build1 (GOTO_EXPR, void_type_node, *label_p); | |
1578 | } | |
1579 | ||
1580 | /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR. | |
1581 | This also involves building a label to jump to and communicating it to | |
1582 | gimplify_loop_expr through gimplify_ctxp->exit_label. */ | |
1583 | ||
1584 | static enum gimplify_status | |
1585 | gimplify_exit_expr (tree *expr_p) | |
1586 | { | |
1587 | tree cond = TREE_OPERAND (*expr_p, 0); | |
1588 | tree expr; | |
1589 | ||
1590 | expr = build_and_jump (&gimplify_ctxp->exit_label); | |
b4257cfc | 1591 | expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE); |
6de9cd9a DN |
1592 | *expr_p = expr; |
1593 | ||
1594 | return GS_OK; | |
1595 | } | |
1596 | ||
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral references, let get_unwidened pick the narrowest type
     the reference can be read in (this is where the bit-field case from
     the head comment is handled); otherwise use the field's own type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
6de9cd9a | 1647 | |
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast:
     first &array becomes array[L], then we retake its address with
     the element-pointer type.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
6de9cd9a | 1701 | |
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  Always returns GS_OK.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
1748 | ||
/* Nonlocal VLAs seen in the current function.  Used by
   gimplify_var_or_parm_decl to avoid creating more than one debug
   copy per VLA.  */
static hash_set<tree> *nonlocal_vlas;

/* The VAR_DECLs created for nonlocal VLAs for debug info purposes.
   Chained through DECL_CHAIN.  */
static tree nonlocal_vla_vars;
1754 | ||
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OpenMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  The conditions select VLAs (variable
	 DECL_SIZE_UNIT) declared in an outer function whose value expr
	 is an indirection through a VAR_DECL.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Skip enclosing worksharing/simd contexts; only create the
	     debug copy when not inside any remaining OpenMP context.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  /* hash_set::add returns false when DECL was not yet in the
	     set, so each VLA gets exactly one debug copy.  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      /* Chain the copy onto the list of debug-only VLA decls.  */
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1821 | ||
66c14933 EB |
1822 | /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */ |
1823 | ||
1824 | static void | |
2fb9a547 AM |
1825 | recalculate_side_effects (tree t) |
1826 | { | |
1827 | enum tree_code code = TREE_CODE (t); | |
1828 | int len = TREE_OPERAND_LENGTH (t); | |
1829 | int i; | |
1830 | ||
1831 | switch (TREE_CODE_CLASS (code)) | |
1832 | { | |
1833 | case tcc_expression: | |
1834 | switch (code) | |
1835 | { | |
1836 | case INIT_EXPR: | |
1837 | case MODIFY_EXPR: | |
1838 | case VA_ARG_EXPR: | |
1839 | case PREDECREMENT_EXPR: | |
1840 | case PREINCREMENT_EXPR: | |
1841 | case POSTDECREMENT_EXPR: | |
1842 | case POSTINCREMENT_EXPR: | |
1843 | /* All of these have side-effects, no matter what their | |
1844 | operands are. */ | |
1845 | return; | |
1846 | ||
1847 | default: | |
1848 | break; | |
1849 | } | |
1850 | /* Fall through. */ | |
1851 | ||
1852 | case tcc_comparison: /* a comparison expression */ | |
1853 | case tcc_unary: /* a unary arithmetic expression */ | |
1854 | case tcc_binary: /* a binary arithmetic expression */ | |
1855 | case tcc_reference: /* a reference */ | |
1856 | case tcc_vl_exp: /* a function call */ | |
1857 | TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t); | |
1858 | for (i = 0; i < len; ++i) | |
1859 | { | |
1860 | tree op = TREE_OPERAND (t, i); | |
1861 | if (op && TREE_SIDE_EFFECTS (op)) | |
1862 | TREE_SIDE_EFFECTS (t) = 1; | |
1863 | } | |
1864 | break; | |
1865 | ||
1866 | case tcc_constant: | |
1867 | /* No side-effects. */ | |
1868 | return; | |
1869 | ||
1870 | default: | |
1871 | gcc_unreachable (); | |
1872 | } | |
1873 | } | |
1874 | ||
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */

  /* Step 1: gimplify the variable bounds/sizes/offsets, outermost ref
     first (the stack holds inner refs at higher indices).  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      /* Operand 2 already present: just (re-)gimplify it.  */
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      /* Operand 3 already present: just (re-)gimplify it.  */
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      /* Operand 2 already present: just (re-)gimplify it.  */
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
2077 | ||
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.

   ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  Redirect
     POST_P to a local queue that is flushed at the end.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Capture the pre-modification value in a temporary; this is the
	 value of the whole postfix expression.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      /* POINTER_PLUS_EXPR has no MINUS counterpart, hence the negation
	 above.  */
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* Emit the assignment now, flush the deferred post queue after it,
	 and make the saved original value the expression's result.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2175 | ||
d25cee4d RH |
2176 | /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */ |
2177 | ||
2178 | static void | |
2179 | maybe_with_size_expr (tree *expr_p) | |
2180 | { | |
61025d1b RK |
2181 | tree expr = *expr_p; |
2182 | tree type = TREE_TYPE (expr); | |
2183 | tree size; | |
d25cee4d | 2184 | |
61025d1b RK |
2185 | /* If we've already wrapped this or the type is error_mark_node, we can't do |
2186 | anything. */ | |
2187 | if (TREE_CODE (expr) == WITH_SIZE_EXPR | |
2188 | || type == error_mark_node) | |
d25cee4d RH |
2189 | return; |
2190 | ||
61025d1b | 2191 | /* If the size isn't known or is a constant, we have nothing to do. */ |
d25cee4d | 2192 | size = TYPE_SIZE_UNIT (type); |
61025d1b RK |
2193 | if (!size || TREE_CODE (size) == INTEGER_CST) |
2194 | return; | |
2195 | ||
2196 | /* Otherwise, make a WITH_SIZE_EXPR. */ | |
2197 | size = unshare_expr (size); | |
2198 | size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr); | |
2199 | *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size); | |
d25cee4d RH |
2200 | } |
2201 | ||
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
{
  /* Predicate the gimplified argument must satisfy, and the matching
     fallback, chosen below by the argument's type.  */
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.  */
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
    test = is_gimple_val, fb = fb_rvalue;
  else
    {
      test = is_gimple_lvalue, fb = fb_either;
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
	{
	  tree init = TARGET_EXPR_INITIAL (*arg_p);
	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    *arg_p = init;
	}
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (arg_p);

  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
  /* Make sure arguments have the same location as the function call
     itself.  */
  protected_set_expr_location (*arg_p, call_location);

  /* There is a sequence point before a function call.  Side effects in
     the argument list must occur before the actual call.  So, when
     gimplifying arguments, force gimplify_expr to use an internal
     post queue which is then appended to the end of PRE_P.  */
  return gimplify_expr (arg_p, pre_p, NULL, test, fb);
}
2246 | ||
88ac13da TS |
2247 | /* Don't fold STMT inside ORT_TARGET, because it can break code by adding decl |
2248 | references that weren't in the source. We'll do it during omplower pass | |
2249 | instead. */ | |
2250 | ||
2251 | static bool | |
2252 | maybe_fold_stmt (gimple_stmt_iterator *gsi) | |
2253 | { | |
2254 | struct gimplify_omp_ctx *ctx; | |
2255 | for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context) | |
2256 | if (ctx->region_type == ORT_TARGET) | |
2257 | return false; | |
2258 | return fold_stmt (gsi); | |
2259 | } | |
2260 | ||
726a989a | 2261 | /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P. |
90051e16 | 2262 | WANT_VALUE is true if the result of the call is desired. */ |
6de9cd9a DN |
2263 | |
2264 | static enum gimplify_status | |
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gimple call;
  /* Set when the callee is __builtin_va_start; its second argument must
     stay a plain PARM_DECL and is skipped during argument gimplification.  */
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  Internal calls have
     no CALL_EXPR_FN; they are identified by CALL_EXPR_IFN instead.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
	{
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}
      /* NOTE(review): this declaration shadows the function-scope 'call'
	 above; harmless as written because this branch returns immediately,
	 but the inner declaration could simply assign the outer variable.  */
      gimple call = gimple_build_call_internal_vec (ifn, vargs);
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_VA_START:
	{
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      /* fold_builtin_next_arg diagnosed a problem; replace the
		 call with an empty statement.  */
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}
      case BUILT_IN_LINE:
	{
	  /* Fold __builtin_LINE () to the line of the call site.  */
	  *expr_p = build_int_cst (TREE_TYPE (*expr_p),
				   LOCATION_LINE (EXPR_LOCATION (*expr_p)));
	  return GS_OK;
	}
      case BUILT_IN_FILE:
	{
	  /* Fold __builtin_FILE () to the file of the call site.  */
	  const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
	  *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
	  return GS_OK;
	}
      case BUILT_IN_FUNCTION:
	{
	  /* Fold __builtin_FUNCTION () to the enclosing function's name.  */
	  const char *function;
	  function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
	  *expr_p = build_string_literal (strlen (function) + 1, function);
	  return GS_OK;
	}
      default:
	;
      }
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Walk the formal parameter chain in parallel with the actual
     arguments; afterwards !p && i < nargs means there are excess
     (variadic) actual arguments.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Finally, gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
	   PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
	   PUSH_ARGS_REVERSED ? i-- : i++)
	{
	  enum gimplify_status t;

	  /* Avoid gimplifying the second argument to va_start, which needs to
	     be the plain PARM_DECL.  */
	  if ((i != 1) || !builtin_va_start_p)
	    {
	      t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
2525 | ||
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      /* The "no" label is shared by both sub-conditions; if the caller
	 did not supply one, emit a local label after the expression.  */
      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      /* Mirror of the && case: here it is the "yes" label that may need
	 to be generated locally.  */
      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate becomes a two-way conditional jump.
	 build_and_jump creates the label lazily through the pointer.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* Emit the locally generated label, if any, after the expression.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2635 | ||
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  /* Whether the respective arms contain anything worth executing.  */
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through. */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  /* Assemble the result: condition jumps, then arm, jump/label plumbing,
     else arm, terminal label.  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  /* Carry the then-arm's trailing location onto the jump so the
	     debugger does not attribute it to the else arm.  */
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2812 | ||
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.
   Truth operators, comparisons and ANNOTATE_EXPRs are retyped (and their
   operands boolified recursively); anything else is wrapped in a
   conversion to boolean_type_node.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* Special-case the pattern __builtin_expect (...) != 0, which is how
     front ends express a boolean __builtin_expect.  */
  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Skip the implicit cast to long that wraps the predicate.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      /* Loop annotations wrap a condition; boolify the wrapped condition
	 and retype the annotation itself.  */
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
2899 | ||
/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  Both arms are
   gimplified unconditionally (this is only valid because the caller has
   checked that neither arm has side effects or could trap).  Returns the
   worst gimplify_status seen among the three operands.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
			is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}
2930 | ||
ad19c4be | 2931 | /* Return true if evaluating EXPR could trap. |
aea74440 JJ |
2932 | EXPR is GENERIC, while tree_could_trap_p can be called |
2933 | only on GIMPLE. */ | |
2934 | ||
2935 | static bool | |
2936 | generic_expr_could_trap_p (tree expr) | |
2937 | { | |
2938 | unsigned i, n; | |
2939 | ||
2940 | if (!expr || is_gimple_val (expr)) | |
2941 | return false; | |
2942 | ||
2943 | if (!EXPR_P (expr) || tree_could_trap_p (expr)) | |
2944 | return true; | |
2945 | ||
2946 | n = TREE_OPERAND_LENGTH (expr); | |
2947 | for (i = 0; i < n; i++) | |
2948 | if (generic_expr_could_trap_p (TREE_OPERAND (expr, i))) | |
2949 | return true; | |
2950 | ||
2951 | return false; | |
2952 | } | |
2953 | ||
206048bd | 2954 | /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;' |
6de9cd9a DN |
2955 | into |
2956 | ||
2957 | if (p) if (p) | |
2958 | t1 = a; a; | |
2959 | else or else | |
2960 | t1 = b; b; | |
2961 | t1; | |
2962 | ||
2963 | The second form is used when *EXPR_P is of type void. | |
2964 | ||
2965 | PRE_P points to the list where side effects that must happen before | |
dae7ec87 | 2966 | *EXPR_P should be stored. */ |
6de9cd9a DN |
2967 | |
2968 | static enum gimplify_status | |
726a989a | 2969 | gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback) |
6de9cd9a DN |
2970 | { |
2971 | tree expr = *expr_p; | |
06ec59e6 EB |
2972 | tree type = TREE_TYPE (expr); |
2973 | location_t loc = EXPR_LOCATION (expr); | |
2974 | tree tmp, arm1, arm2; | |
6de9cd9a | 2975 | enum gimplify_status ret; |
726a989a RB |
2976 | tree label_true, label_false, label_cont; |
2977 | bool have_then_clause_p, have_else_clause_p; | |
2978 | gimple gimple_cond; | |
2979 | enum tree_code pred_code; | |
2980 | gimple_seq seq = NULL; | |
26d44ae2 RH |
2981 | |
2982 | /* If this COND_EXPR has a value, copy the values into a temporary within | |
2983 | the arms. */ | |
06ec59e6 | 2984 | if (!VOID_TYPE_P (type)) |
26d44ae2 | 2985 | { |
06ec59e6 | 2986 | tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2); |
aff98faf AO |
2987 | tree result; |
2988 | ||
06ec59e6 EB |
2989 | /* If either an rvalue is ok or we do not require an lvalue, create the |
2990 | temporary. But we cannot do that if the type is addressable. */ | |
2991 | if (((fallback & fb_rvalue) || !(fallback & fb_lvalue)) | |
c3e203cf | 2992 | && !TREE_ADDRESSABLE (type)) |
aff98faf | 2993 | { |
aea74440 JJ |
2994 | if (gimplify_ctxp->allow_rhs_cond_expr |
2995 | /* If either branch has side effects or could trap, it can't be | |
2996 | evaluated unconditionally. */ | |
06ec59e6 EB |
2997 | && !TREE_SIDE_EFFECTS (then_) |
2998 | && !generic_expr_could_trap_p (then_) | |
2999 | && !TREE_SIDE_EFFECTS (else_) | |
3000 | && !generic_expr_could_trap_p (else_)) | |
aea74440 JJ |
3001 | return gimplify_pure_cond_expr (expr_p, pre_p); |
3002 | ||
06ec59e6 EB |
3003 | tmp = create_tmp_var (type, "iftmp"); |
3004 | result = tmp; | |
aff98faf | 3005 | } |
06ec59e6 EB |
3006 | |
3007 | /* Otherwise, only create and copy references to the values. */ | |
26d44ae2 RH |
3008 | else |
3009 | { | |
06ec59e6 | 3010 | type = build_pointer_type (type); |
aff98faf | 3011 | |
06ec59e6 EB |
3012 | if (!VOID_TYPE_P (TREE_TYPE (then_))) |
3013 | then_ = build_fold_addr_expr_loc (loc, then_); | |
aff98faf | 3014 | |
06ec59e6 EB |
3015 | if (!VOID_TYPE_P (TREE_TYPE (else_))) |
3016 | else_ = build_fold_addr_expr_loc (loc, else_); | |
3017 | ||
3018 | expr | |
3019 | = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_); | |
aea74440 | 3020 | |
726a989a | 3021 | tmp = create_tmp_var (type, "iftmp"); |
70f34814 | 3022 | result = build_simple_mem_ref_loc (loc, tmp); |
26d44ae2 RH |
3023 | } |
3024 | ||
06ec59e6 EB |
3025 | /* Build the new then clause, `tmp = then_;'. But don't build the |
3026 | assignment if the value is void; in C++ it can be if it's a throw. */ | |
3027 | if (!VOID_TYPE_P (TREE_TYPE (then_))) | |
3028 | TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_); | |
26d44ae2 | 3029 | |
06ec59e6 EB |
3030 | /* Similarly, build the new else clause, `tmp = else_;'. */ |
3031 | if (!VOID_TYPE_P (TREE_TYPE (else_))) | |
3032 | TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_); | |
26d44ae2 RH |
3033 | |
3034 | TREE_TYPE (expr) = void_type_node; | |
3035 | recalculate_side_effects (expr); | |
3036 | ||
d91ba7b0 | 3037 | /* Move the COND_EXPR to the prequeue. */ |
726a989a | 3038 | gimplify_stmt (&expr, pre_p); |
26d44ae2 | 3039 | |
aff98faf | 3040 | *expr_p = result; |
726a989a | 3041 | return GS_ALL_DONE; |
26d44ae2 RH |
3042 | } |
3043 | ||
f2f81d57 EB |
3044 | /* Remove any COMPOUND_EXPR so the following cases will be caught. */ |
3045 | STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0)); | |
3046 | if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR) | |
3047 | gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true); | |
3048 | ||
26d44ae2 RH |
3049 | /* Make sure the condition has BOOLEAN_TYPE. */ |
3050 | TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); | |
3051 | ||
3052 | /* Break apart && and || conditions. */ | |
3053 | if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR | |
3054 | || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR) | |
3055 | { | |
3056 | expr = shortcut_cond_expr (expr); | |
3057 | ||
3058 | if (expr != *expr_p) | |
3059 | { | |
3060 | *expr_p = expr; | |
3061 | ||
3062 | /* We can't rely on gimplify_expr to re-gimplify the expanded | |
3063 | form properly, as cleanups might cause the target labels to be | |
3064 | wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to | |
3065 | set up a conditional context. */ | |
3066 | gimple_push_condition (); | |
726a989a | 3067 | gimplify_stmt (expr_p, &seq); |
26d44ae2 | 3068 | gimple_pop_condition (pre_p); |
726a989a | 3069 | gimple_seq_add_seq (pre_p, seq); |
26d44ae2 RH |
3070 | |
3071 | return GS_ALL_DONE; | |
3072 | } | |
3073 | } | |
3074 | ||
3075 | /* Now do the normal gimplification. */ | |
26d44ae2 | 3076 | |
726a989a RB |
3077 | /* Gimplify condition. */ |
3078 | ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr, | |
3079 | fb_rvalue); | |
26d44ae2 | 3080 | if (ret == GS_ERROR) |
726a989a RB |
3081 | return GS_ERROR; |
3082 | gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE); | |
3083 | ||
3084 | gimple_push_condition (); | |
26d44ae2 | 3085 | |
726a989a RB |
3086 | have_then_clause_p = have_else_clause_p = false; |
3087 | if (TREE_OPERAND (expr, 1) != NULL | |
3088 | && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR | |
3089 | && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL | |
3090 | && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) | |
3091 | == current_function_decl) | |
3092 | /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR | |
3093 | have different locations, otherwise we end up with incorrect | |
3094 | location information on the branches. */ | |
3095 | && (optimize | |
3096 | || !EXPR_HAS_LOCATION (expr) | |
3097 | || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1)) | |
3098 | || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1)))) | |
3099 | { | |
3100 | label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1)); | |
3101 | have_then_clause_p = true; | |
26d44ae2 RH |
3102 | } |
3103 | else | |
c2255bc4 | 3104 | label_true = create_artificial_label (UNKNOWN_LOCATION); |
726a989a RB |
3105 | if (TREE_OPERAND (expr, 2) != NULL |
3106 | && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR | |
3107 | && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL | |
3108 | && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) | |
3109 | == current_function_decl) | |
3110 | /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR | |
3111 | have different locations, otherwise we end up with incorrect | |
3112 | location information on the branches. */ | |
3113 | && (optimize | |
3114 | || !EXPR_HAS_LOCATION (expr) | |
3115 | || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2)) | |
3116 | || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2)))) | |
3117 | { | |
3118 | label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2)); | |
3119 | have_else_clause_p = true; | |
3120 | } | |
3121 | else | |
c2255bc4 | 3122 | label_false = create_artificial_label (UNKNOWN_LOCATION); |
26d44ae2 | 3123 | |
726a989a RB |
3124 | gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1, |
3125 | &arm2); | |
26d44ae2 | 3126 | |
726a989a RB |
3127 | gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true, |
3128 | label_false); | |
26d44ae2 | 3129 | |
726a989a RB |
3130 | gimplify_seq_add_stmt (&seq, gimple_cond); |
3131 | label_cont = NULL_TREE; | |
3132 | if (!have_then_clause_p) | |
3133 | { | |
3134 | /* For if (...) {} else { code; } put label_true after | |
3135 | the else block. */ | |
3136 | if (TREE_OPERAND (expr, 1) == NULL_TREE | |
3137 | && !have_else_clause_p | |
3138 | && TREE_OPERAND (expr, 2) != NULL_TREE) | |
3139 | label_cont = label_true; | |
3140 | else | |
3141 | { | |
3142 | gimplify_seq_add_stmt (&seq, gimple_build_label (label_true)); | |
3143 | have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq); | |
3144 | /* For if (...) { code; } else {} or | |
3145 | if (...) { code; } else goto label; or | |
3146 | if (...) { code; return; } else { ... } | |
3147 | label_cont isn't needed. */ | |
3148 | if (!have_else_clause_p | |
3149 | && TREE_OPERAND (expr, 2) != NULL_TREE | |
3150 | && gimple_seq_may_fallthru (seq)) | |
3151 | { | |
3152 | gimple g; | |
c2255bc4 | 3153 | label_cont = create_artificial_label (UNKNOWN_LOCATION); |
726a989a RB |
3154 | |
3155 | g = gimple_build_goto (label_cont); | |
3156 | ||
3157 | /* GIMPLE_COND's are very low level; they have embedded | |
3158 | gotos. This particular embedded goto should not be marked | |
3159 | with the location of the original COND_EXPR, as it would | |
3160 | correspond to the COND_EXPR's condition, not the ELSE or the | |
3161 | THEN arms. To avoid marking it with the wrong location, flag | |
3162 | it as "no location". */ | |
3163 | gimple_set_do_not_emit_location (g); | |
3164 | ||
3165 | gimplify_seq_add_stmt (&seq, g); | |
3166 | } | |
3167 | } | |
3168 | } | |
3169 | if (!have_else_clause_p) | |
3170 | { | |
3171 | gimplify_seq_add_stmt (&seq, gimple_build_label (label_false)); | |
3172 | have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq); | |
3173 | } | |
3174 | if (label_cont) | |
3175 | gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont)); | |
3176 | ||
3177 | gimple_pop_condition (pre_p); | |
3178 | gimple_seq_add_seq (pre_p, seq); | |
3179 | ||
3180 | if (ret == GS_ERROR) | |
3181 | ; /* Do nothing. */ | |
3182 | else if (have_then_clause_p || have_else_clause_p) | |
3183 | ret = GS_ALL_DONE; | |
3184 | else | |
3185 | { | |
3186 | /* Both arms are empty; replace the COND_EXPR with its predicate. */ | |
3187 | expr = TREE_OPERAND (expr, 0); | |
3188 | gimplify_stmt (&expr, pre_p); | |
3189 | } | |
3190 | ||
3191 | *expr_p = NULL; | |
3192 | return ret; | |
3193 | } | |
3194 | ||
f76d6e6f EB |
3195 | /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression, |
3196 | to be marked addressable. | |
3197 | ||
3198 | We cannot rely on such an expression being directly markable if a temporary | |
3199 | has been created by the gimplification. In this case, we create another | |
3200 | temporary and initialize it with a copy, which will become a store after we | |
3201 | mark it addressable. This can happen if the front-end passed us something | |
3202 | that it could not mark addressable yet, like a Fortran pass-by-reference | |
3203 | parameter (int) floatvar. */ | |
3204 | ||
3205 | static void | |
3206 | prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p) | |
3207 | { | |
3208 | while (handled_component_p (*expr_p)) | |
3209 | expr_p = &TREE_OPERAND (*expr_p, 0); | |
3210 | if (is_gimple_reg (*expr_p)) | |
947ca6a0 RB |
3211 | { |
3212 | tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL); | |
3213 | DECL_GIMPLE_REG_P (var) = 0; | |
3214 | *expr_p = var; | |
3215 | } | |
f76d6e6f EB |
3216 | } |
3217 | ||
726a989a RB |
3218 | /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with |
3219 | a call to __builtin_memcpy. */ | |
3220 | ||
3221 | static enum gimplify_status | |
3222 | gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value, | |
3223 | gimple_seq *seq_p) | |
26d44ae2 | 3224 | { |
5039610b | 3225 | tree t, to, to_ptr, from, from_ptr; |
726a989a | 3226 | gimple gs; |
db3927fb | 3227 | location_t loc = EXPR_LOCATION (*expr_p); |
26d44ae2 | 3228 | |
726a989a RB |
3229 | to = TREE_OPERAND (*expr_p, 0); |
3230 | from = TREE_OPERAND (*expr_p, 1); | |
26d44ae2 | 3231 | |
f76d6e6f EB |
3232 | /* Mark the RHS addressable. Beware that it may not be possible to do so |
3233 | directly if a temporary has been created by the gimplification. */ | |
3234 | prepare_gimple_addressable (&from, seq_p); | |
3235 | ||
628c189e | 3236 | mark_addressable (from); |
db3927fb AH |
3237 | from_ptr = build_fold_addr_expr_loc (loc, from); |
3238 | gimplify_arg (&from_ptr, seq_p, loc); | |
26d44ae2 | 3239 | |
628c189e | 3240 | mark_addressable (to); |
db3927fb AH |
3241 | to_ptr = build_fold_addr_expr_loc (loc, to); |
3242 | gimplify_arg (&to_ptr, seq_p, loc); | |
726a989a | 3243 | |
e79983f4 | 3244 | t = builtin_decl_implicit (BUILT_IN_MEMCPY); |
726a989a RB |
3245 | |
3246 | gs = gimple_build_call (t, 3, to_ptr, from_ptr, size); | |
26d44ae2 RH |
3247 | |
3248 | if (want_value) | |
3249 | { | |
726a989a RB |
3250 | /* tmp = memcpy() */ |
3251 | t = create_tmp_var (TREE_TYPE (to_ptr), NULL); | |
3252 | gimple_call_set_lhs (gs, t); | |
3253 | gimplify_seq_add_stmt (seq_p, gs); | |
3254 | ||
70f34814 | 3255 | *expr_p = build_simple_mem_ref (t); |
726a989a | 3256 | return GS_ALL_DONE; |
26d44ae2 RH |
3257 | } |
3258 | ||
726a989a RB |
3259 | gimplify_seq_add_stmt (seq_p, gs); |
3260 | *expr_p = NULL; | |
3261 | return GS_ALL_DONE; | |
26d44ae2 RH |
3262 | } |
3263 | ||
3264 | /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with | |
3265 | a call to __builtin_memset. In this case we know that the RHS is | |
3266 | a CONSTRUCTOR with an empty element list. */ | |
3267 | ||
3268 | static enum gimplify_status | |
726a989a RB |
3269 | gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value, |
3270 | gimple_seq *seq_p) | |
26d44ae2 | 3271 | { |
1a13360e | 3272 | tree t, from, to, to_ptr; |
726a989a | 3273 | gimple gs; |
db3927fb | 3274 | location_t loc = EXPR_LOCATION (*expr_p); |
26d44ae2 | 3275 | |
1a13360e OH |
3276 | /* Assert our assumptions, to abort instead of producing wrong code |
3277 | silently if they are not met. Beware that the RHS CONSTRUCTOR might | |
3278 | not be immediately exposed. */ | |
b8698a0f | 3279 | from = TREE_OPERAND (*expr_p, 1); |
1a13360e OH |
3280 | if (TREE_CODE (from) == WITH_SIZE_EXPR) |
3281 | from = TREE_OPERAND (from, 0); | |
3282 | ||
3283 | gcc_assert (TREE_CODE (from) == CONSTRUCTOR | |
9771b263 | 3284 | && vec_safe_is_empty (CONSTRUCTOR_ELTS (from))); |
1a13360e OH |
3285 | |
3286 | /* Now proceed. */ | |
726a989a | 3287 | to = TREE_OPERAND (*expr_p, 0); |
26d44ae2 | 3288 | |
db3927fb AH |
3289 | to_ptr = build_fold_addr_expr_loc (loc, to); |
3290 | gimplify_arg (&to_ptr, seq_p, loc); | |
e79983f4 | 3291 | t = builtin_decl_implicit (BUILT_IN_MEMSET); |
726a989a RB |
3292 | |
3293 | gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size); | |
26d44ae2 RH |
3294 | |
3295 | if (want_value) | |
3296 | { | |
726a989a RB |
3297 | /* tmp = memset() */ |
3298 | t = create_tmp_var (TREE_TYPE (to_ptr), NULL); | |
3299 | gimple_call_set_lhs (gs, t); | |
3300 | gimplify_seq_add_stmt (seq_p, gs); | |
3301 | ||
3302 | *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t); | |
3303 | return GS_ALL_DONE; | |
26d44ae2 RH |
3304 | } |
3305 | ||
726a989a RB |
3306 | gimplify_seq_add_stmt (seq_p, gs); |
3307 | *expr_p = NULL; | |
3308 | return GS_ALL_DONE; | |
26d44ae2 RH |
3309 | } |
3310 | ||
57d1dd87 RH |
3311 | /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree, |
3312 | determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an | |
ad19c4be | 3313 | assignment. Return non-null if we detect a potential overlap. */ |
57d1dd87 RH |
3314 | |
3315 | struct gimplify_init_ctor_preeval_data | |
3316 | { | |
3317 | /* The base decl of the lhs object. May be NULL, in which case we | |
3318 | have to assume the lhs is indirect. */ | |
3319 | tree lhs_base_decl; | |
3320 | ||
3321 | /* The alias set of the lhs object. */ | |
4862826d | 3322 | alias_set_type lhs_alias_set; |
57d1dd87 RH |
3323 | }; |
3324 | ||
3325 | static tree | |
3326 | gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata) | |
3327 | { | |
3328 | struct gimplify_init_ctor_preeval_data *data | |
3329 | = (struct gimplify_init_ctor_preeval_data *) xdata; | |
3330 | tree t = *tp; | |
3331 | ||
3332 | /* If we find the base object, obviously we have overlap. */ | |
3333 | if (data->lhs_base_decl == t) | |
3334 | return t; | |
3335 | ||
3336 | /* If the constructor component is indirect, determine if we have a | |
3337 | potential overlap with the lhs. The only bits of information we | |
3338 | have to go on at this point are addressability and alias sets. */ | |
70f34814 RG |
3339 | if ((INDIRECT_REF_P (t) |
3340 | || TREE_CODE (t) == MEM_REF) | |
57d1dd87 RH |
3341 | && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) |
3342 | && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t))) | |
3343 | return t; | |
3344 | ||
df10ee2a | 3345 | /* If the constructor component is a call, determine if it can hide a |
70f34814 RG |
3346 | potential overlap with the lhs through an INDIRECT_REF like above. |
3347 | ??? Ugh - this is completely broken. In fact this whole analysis | |
3348 | doesn't look conservative. */ | |
df10ee2a EB |
3349 | if (TREE_CODE (t) == CALL_EXPR) |
3350 | { | |
3351 | tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t))); | |
3352 | ||
3353 | for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type)) | |
3354 | if (POINTER_TYPE_P (TREE_VALUE (type)) | |
3355 | && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) | |
3356 | && alias_sets_conflict_p (data->lhs_alias_set, | |
3357 | get_alias_set | |
3358 | (TREE_TYPE (TREE_VALUE (type))))) | |
3359 | return t; | |
3360 | } | |
3361 | ||
6615c446 | 3362 | if (IS_TYPE_OR_DECL_P (t)) |
57d1dd87 RH |
3363 | *walk_subtrees = 0; |
3364 | return NULL; | |
3365 | } | |
3366 | ||
726a989a | 3367 | /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR, |
57d1dd87 RH |
3368 | force values that overlap with the lhs (as described by *DATA) |
3369 | into temporaries. */ | |
3370 | ||
3371 | static void | |
726a989a | 3372 | gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, |
57d1dd87 RH |
3373 | struct gimplify_init_ctor_preeval_data *data) |
3374 | { | |
3375 | enum gimplify_status one; | |
3376 | ||
51eed280 PB |
3377 | /* If the value is constant, then there's nothing to pre-evaluate. */ |
3378 | if (TREE_CONSTANT (*expr_p)) | |
3379 | { | |
3380 | /* Ensure it does not have side effects, it might contain a reference to | |
3381 | the object we're initializing. */ | |
3382 | gcc_assert (!TREE_SIDE_EFFECTS (*expr_p)); | |
3383 | return; | |
3384 | } | |
57d1dd87 RH |
3385 | |
3386 | /* If the type has non-trivial constructors, we can't pre-evaluate. */ | |
3387 | if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p))) | |
3388 | return; | |
3389 | ||
3390 | /* Recurse for nested constructors. */ | |
3391 | if (TREE_CODE (*expr_p) == CONSTRUCTOR) | |
3392 | { | |
4038c495 GB |
3393 | unsigned HOST_WIDE_INT ix; |
3394 | constructor_elt *ce; | |
9771b263 | 3395 | vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p); |
4038c495 | 3396 | |
9771b263 | 3397 | FOR_EACH_VEC_SAFE_ELT (v, ix, ce) |
4038c495 | 3398 | gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data); |
726a989a | 3399 | |
57d1dd87 RH |
3400 | return; |
3401 | } | |
3402 | ||
0461b801 EB |
3403 | /* If this is a variable sized type, we must remember the size. */ |
3404 | maybe_with_size_expr (expr_p); | |
57d1dd87 RH |
3405 | |
3406 | /* Gimplify the constructor element to something appropriate for the rhs | |
726a989a | 3407 | of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know |
d3147f64 | 3408 | the gimplifier will consider this a store to memory. Doing this |
57d1dd87 RH |
3409 | gimplification now means that we won't have to deal with complicated |
3410 | language-specific trees, nor trees like SAVE_EXPR that can induce | |
b01d837f | 3411 | exponential search behavior. */ |
57d1dd87 RH |
3412 | one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue); |
3413 | if (one == GS_ERROR) | |
3414 | { | |
3415 | *expr_p = NULL; | |
3416 | return; | |
3417 | } | |
3418 | ||
3419 | /* If we gimplified to a bare decl, we can be sure that it doesn't overlap | |
3420 | with the lhs, since "a = { .x=a }" doesn't make sense. This will | |
3421 | always be true for all scalars, since is_gimple_mem_rhs insists on a | |
3422 | temporary variable for them. */ | |
3423 | if (DECL_P (*expr_p)) | |
3424 | return; | |
3425 | ||
3426 | /* If this is of variable size, we have no choice but to assume it doesn't | |
3427 | overlap since we can't make a temporary for it. */ | |
4c923c28 | 3428 | if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST) |
57d1dd87 RH |
3429 | return; |
3430 | ||
3431 | /* Otherwise, we must search for overlap ... */ | |
3432 | if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL)) | |
3433 | return; | |
3434 | ||
3435 | /* ... and if found, force the value into a temporary. */ | |
3436 | *expr_p = get_formal_tmp_var (*expr_p, pre_p); | |
3437 | } | |
3438 | ||
6fa91b48 SB |
3439 | /* A subroutine of gimplify_init_ctor_eval. Create a loop for |
3440 | a RANGE_EXPR in a CONSTRUCTOR for an array. | |
3441 | ||
3442 | var = lower; | |
3443 | loop_entry: | |
3444 | object[var] = value; | |
3445 | if (var == upper) | |
3446 | goto loop_exit; | |
3447 | var = var + 1; | |
3448 | goto loop_entry; | |
3449 | loop_exit: | |
3450 | ||
3451 | We increment var _after_ the loop exit check because we might otherwise | |
3452 | fail if upper == TYPE_MAX_VALUE (type for upper). | |
3453 | ||
3454 | Note that we never have to deal with SAVE_EXPRs here, because this has | |
3455 | already been taken care of for us, in gimplify_init_ctor_preeval(). */ | |
3456 | ||
9771b263 | 3457 | static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *, |
726a989a | 3458 | gimple_seq *, bool); |
6fa91b48 SB |
3459 | |
3460 | static void | |
3461 | gimplify_init_ctor_eval_range (tree object, tree lower, tree upper, | |
3462 | tree value, tree array_elt_type, | |
726a989a | 3463 | gimple_seq *pre_p, bool cleared) |
6fa91b48 | 3464 | { |
726a989a | 3465 | tree loop_entry_label, loop_exit_label, fall_thru_label; |
b56b9fe3 | 3466 | tree var, var_type, cref, tmp; |
6fa91b48 | 3467 | |
c2255bc4 AH |
3468 | loop_entry_label = create_artificial_label (UNKNOWN_LOCATION); |
3469 | loop_exit_label = create_artificial_label (UNKNOWN_LOCATION); | |
3470 | fall_thru_label = create_artificial_label (UNKNOWN_LOCATION); | |
6fa91b48 SB |
3471 | |
3472 | /* Create and initialize the index variable. */ | |
3473 | var_type = TREE_TYPE (upper); | |
3474 | var = create_tmp_var (var_type, NULL); | |
726a989a | 3475 | gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower)); |
6fa91b48 SB |
3476 | |
3477 | /* Add the loop entry label. */ | |
726a989a | 3478 | gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label)); |
6fa91b48 SB |
3479 | |
3480 | /* Build the reference. */ | |
3481 | cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object), | |
3482 | var, NULL_TREE, NULL_TREE); | |
3483 | ||
3484 | /* If we are a constructor, just call gimplify_init_ctor_eval to do | |
3485 | the store. Otherwise just assign value to the reference. */ | |
3486 | ||
3487 | if (TREE_CODE (value) == CONSTRUCTOR) | |
3488 | /* NB we might have to call ourself recursively through | |
3489 | gimplify_init_ctor_eval if the value is a constructor. */ | |
3490 | gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value), | |
3491 | pre_p, cleared); | |
3492 | else | |
726a989a | 3493 | gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value)); |
6fa91b48 SB |
3494 | |
3495 | /* We exit the loop when the index var is equal to the upper bound. */ | |
726a989a RB |
3496 | gimplify_seq_add_stmt (pre_p, |
3497 | gimple_build_cond (EQ_EXPR, var, upper, | |
3498 | loop_exit_label, fall_thru_label)); | |
3499 | ||
3500 | gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label)); | |
6fa91b48 SB |
3501 | |
3502 | /* Otherwise, increment the index var... */ | |
b56b9fe3 RS |
3503 | tmp = build2 (PLUS_EXPR, var_type, var, |
3504 | fold_convert (var_type, integer_one_node)); | |
726a989a | 3505 | gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp)); |
6fa91b48 SB |
3506 | |
3507 | /* ...and jump back to the loop entry. */ | |
726a989a | 3508 | gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label)); |
6fa91b48 SB |
3509 | |
3510 | /* Add the loop exit label. */ | |
726a989a | 3511 | gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label)); |
6fa91b48 SB |
3512 | } |
3513 | ||
292a398f | 3514 | /* Return true if FDECL is accessing a field that is zero sized. */ |
b8698a0f | 3515 | |
292a398f | 3516 | static bool |
22ea9ec0 | 3517 | zero_sized_field_decl (const_tree fdecl) |
292a398f | 3518 | { |
b8698a0f | 3519 | if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl) |
292a398f DB |
3520 | && integer_zerop (DECL_SIZE (fdecl))) |
3521 | return true; | |
3522 | return false; | |
3523 | } | |
3524 | ||
d06526b7 | 3525 | /* Return true if TYPE is zero sized. */ |
b8698a0f | 3526 | |
d06526b7 | 3527 | static bool |
22ea9ec0 | 3528 | zero_sized_type (const_tree type) |
d06526b7 AP |
3529 | { |
3530 | if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type) | |
3531 | && integer_zerop (TYPE_SIZE (type))) | |
3532 | return true; | |
3533 | return false; | |
3534 | } | |
3535 | ||
57d1dd87 RH |
3536 | /* A subroutine of gimplify_init_constructor. Generate individual |
3537 | MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the | |
4038c495 | 3538 | assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the |
57d1dd87 RH |
3539 | CONSTRUCTOR. CLEARED is true if the entire LHS object has been |
3540 | zeroed first. */ | |
3541 | ||
3542 | static void | |
9771b263 | 3543 | gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts, |
726a989a | 3544 | gimple_seq *pre_p, bool cleared) |
57d1dd87 RH |
3545 | { |
3546 | tree array_elt_type = NULL; | |
4038c495 GB |
3547 | unsigned HOST_WIDE_INT ix; |
3548 | tree purpose, value; | |
57d1dd87 RH |
3549 | |
3550 | if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE) | |
3551 | array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object))); | |
3552 | ||
4038c495 | 3553 | FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value) |
57d1dd87 | 3554 | { |
726a989a | 3555 | tree cref; |
57d1dd87 RH |
3556 | |
3557 | /* NULL values are created above for gimplification errors. */ | |
3558 | if (value == NULL) | |
3559 | continue; | |
3560 | ||
3561 | if (cleared && initializer_zerop (value)) | |
3562 | continue; | |
3563 | ||
6fa91b48 SB |
3564 | /* ??? Here's to hoping the front end fills in all of the indices, |
3565 | so we don't have to figure out what's missing ourselves. */ | |
3566 | gcc_assert (purpose); | |
3567 | ||
816fa80a OH |
3568 | /* Skip zero-sized fields, unless value has side-effects. This can |
3569 | happen with calls to functions returning a zero-sized type, which | |
3570 | we shouldn't discard. As a number of downstream passes don't | |
3571 | expect sets of zero-sized fields, we rely on the gimplification of | |
3572 | the MODIFY_EXPR we make below to drop the assignment statement. */ | |
3573 | if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose)) | |
292a398f DB |
3574 | continue; |
3575 | ||
6fa91b48 SB |
3576 | /* If we have a RANGE_EXPR, we have to build a loop to assign the |
3577 | whole range. */ | |
3578 | if (TREE_CODE (purpose) == RANGE_EXPR) | |
57d1dd87 | 3579 | { |
6fa91b48 SB |
3580 | tree lower = TREE_OPERAND (purpose, 0); |
3581 | tree upper = TREE_OPERAND (purpose, 1); | |
3582 | ||
3583 | /* If the lower bound is equal to upper, just treat it as if | |
3584 | upper was the index. */ | |
3585 | if (simple_cst_equal (lower, upper)) | |
3586 | purpose = upper; | |
3587 | else | |
3588 | { | |
3589 | gimplify_init_ctor_eval_range (object, lower, upper, value, | |
3590 | array_elt_type, pre_p, cleared); | |
3591 | continue; | |
3592 | } | |
3593 | } | |
57d1dd87 | 3594 | |
6fa91b48 SB |
3595 | if (array_elt_type) |
3596 | { | |
1a1640db RG |
3597 | /* Do not use bitsizetype for ARRAY_REF indices. */ |
3598 | if (TYPE_DOMAIN (TREE_TYPE (object))) | |
ad19c4be EB |
3599 | purpose |
3600 | = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))), | |
3601 | purpose); | |
b4257cfc RG |
3602 | cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object), |
3603 | purpose, NULL_TREE, NULL_TREE); | |
57d1dd87 RH |
3604 | } |
3605 | else | |
cf0efa6a ILT |
3606 | { |
3607 | gcc_assert (TREE_CODE (purpose) == FIELD_DECL); | |
b4257cfc RG |
3608 | cref = build3 (COMPONENT_REF, TREE_TYPE (purpose), |
3609 | unshare_expr (object), purpose, NULL_TREE); | |
cf0efa6a | 3610 | } |
57d1dd87 | 3611 | |
cf0efa6a ILT |
3612 | if (TREE_CODE (value) == CONSTRUCTOR |
3613 | && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE) | |
57d1dd87 RH |
3614 | gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value), |
3615 | pre_p, cleared); | |
3616 | else | |
3617 | { | |
726a989a | 3618 | tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value); |
57d1dd87 | 3619 | gimplify_and_add (init, pre_p); |
726a989a | 3620 | ggc_free (init); |
57d1dd87 RH |
3621 | } |
3622 | } | |
3623 | } | |
3624 | ||
ad19c4be | 3625 | /* Return the appropriate RHS predicate for this LHS. */ |
726a989a | 3626 | |
18f429e2 | 3627 | gimple_predicate |
726a989a RB |
3628 | rhs_predicate_for (tree lhs) |
3629 | { | |
ba4d8f9d RG |
3630 | if (is_gimple_reg (lhs)) |
3631 | return is_gimple_reg_rhs_or_call; | |
726a989a | 3632 | else |
ba4d8f9d | 3633 | return is_gimple_mem_rhs_or_call; |
726a989a RB |
3634 | } |
3635 | ||
2ec5deb5 PB |
3636 | /* Gimplify a C99 compound literal expression. This just means adding |
3637 | the DECL_EXPR before the current statement and using its anonymous | |
3638 | decl instead. */ | |
3639 | ||
3640 | static enum gimplify_status | |
a845a7f5 | 3641 | gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p, |
4c53d183 | 3642 | bool (*gimple_test_f) (tree), |
a845a7f5 | 3643 | fallback_t fallback) |
2ec5deb5 PB |
3644 | { |
3645 | tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p); | |
3646 | tree decl = DECL_EXPR_DECL (decl_s); | |
4c53d183 | 3647 | tree init = DECL_INITIAL (decl); |
2ec5deb5 PB |
3648 | /* Mark the decl as addressable if the compound literal |
3649 | expression is addressable now, otherwise it is marked too late | |
3650 | after we gimplify the initialization expression. */ | |
3651 | if (TREE_ADDRESSABLE (*expr_p)) | |
3652 | TREE_ADDRESSABLE (decl) = 1; | |
4c53d183 MM |
3653 | /* Otherwise, if we don't need an lvalue and have a literal directly |
3654 | substitute it. Check if it matches the gimple predicate, as | |
3655 | otherwise we'd generate a new temporary, and we can as well just | |
3656 | use the decl we already have. */ | |
3657 | else if (!TREE_ADDRESSABLE (decl) | |
3658 | && init | |
3659 | && (fallback & fb_lvalue) == 0 | |
3660 | && gimple_test_f (init)) | |
3661 | { | |
3662 | *expr_p = init; | |
3663 | return GS_OK; | |
3664 | } | |
2ec5deb5 PB |
3665 | |
3666 | /* Preliminarily mark non-addressed complex variables as eligible | |
3667 | for promotion to gimple registers. We'll transform their uses | |
3668 | as we find them. */ | |
3669 | if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE | |
3670 | || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE) | |
3671 | && !TREE_THIS_VOLATILE (decl) | |
3672 | && !needs_to_live_in_memory (decl)) | |
3673 | DECL_GIMPLE_REG_P (decl) = 1; | |
3674 | ||
a845a7f5 ILT |
3675 | /* If the decl is not addressable, then it is being used in some |
3676 | expression or on the right hand side of a statement, and it can | |
3677 | be put into a readonly data section. */ | |
3678 | if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0) | |
3679 | TREE_READONLY (decl) = 1; | |
3680 | ||
2ec5deb5 PB |
3681 | /* This decl isn't mentioned in the enclosing block, so add it to the |
3682 | list of temps. FIXME it seems a bit of a kludge to say that | |
3683 | anonymous artificial vars aren't pushed, but everything else is. */ | |
3684 | if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl)) | |
3685 | gimple_add_tmp_var (decl); | |
3686 | ||
3687 | gimplify_and_add (decl_s, pre_p); | |
3688 | *expr_p = decl; | |
3689 | return GS_OK; | |
3690 | } | |
3691 | ||
3692 | /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR, | |
3693 | return a new CONSTRUCTOR if something changed. */ | |
3694 | ||
3695 | static tree | |
3696 | optimize_compound_literals_in_ctor (tree orig_ctor) | |
3697 | { | |
3698 | tree ctor = orig_ctor; | |
9771b263 DN |
3699 | vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor); |
3700 | unsigned int idx, num = vec_safe_length (elts); | |
2ec5deb5 PB |
3701 | |
3702 | for (idx = 0; idx < num; idx++) | |
3703 | { | |
9771b263 | 3704 | tree value = (*elts)[idx].value; |
2ec5deb5 PB |
3705 | tree newval = value; |
3706 | if (TREE_CODE (value) == CONSTRUCTOR) | |
3707 | newval = optimize_compound_literals_in_ctor (value); | |
3708 | else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR) | |
3709 | { | |
3710 | tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value); | |
3711 | tree decl = DECL_EXPR_DECL (decl_s); | |
3712 | tree init = DECL_INITIAL (decl); | |
3713 | ||
3714 | if (!TREE_ADDRESSABLE (value) | |
3715 | && !TREE_ADDRESSABLE (decl) | |
6f8f67e9 JJ |
3716 | && init |
3717 | && TREE_CODE (init) == CONSTRUCTOR) | |
2ec5deb5 PB |
3718 | newval = optimize_compound_literals_in_ctor (init); |
3719 | } | |
3720 | if (newval == value) | |
3721 | continue; | |
3722 | ||
3723 | if (ctor == orig_ctor) | |
3724 | { | |
3725 | ctor = copy_node (orig_ctor); | |
9771b263 | 3726 | CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts); |
2ec5deb5 PB |
3727 | elts = CONSTRUCTOR_ELTS (ctor); |
3728 | } | |
9771b263 | 3729 | (*elts)[idx].value = newval; |
2ec5deb5 PB |
3730 | } |
3731 | return ctor; | |
3732 | } | |
3733 | ||
26d44ae2 RH |
3734 | /* A subroutine of gimplify_modify_expr. Break out elements of a |
3735 | CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs. | |
3736 | ||
3737 | Note that we still need to clear any elements that don't have explicit | |
3738 | initializers, so if not all elements are initialized we keep the | |
ffed8a01 AH |
3739 | original MODIFY_EXPR, we just remove all of the constructor elements. |
3740 | ||
3741 | If NOTIFY_TEMP_CREATION is true, do not gimplify, just return | |
3742 | GS_ERROR if we would have to create a temporary when gimplifying | |
3743 | this constructor. Otherwise, return GS_OK. | |
3744 | ||
3745 | If NOTIFY_TEMP_CREATION is false, just do the gimplification. */ | |
26d44ae2 RH |
3746 | |
3747 | static enum gimplify_status | |
726a989a RB |
3748 | gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, |
3749 | bool want_value, bool notify_temp_creation) | |
26d44ae2 | 3750 | { |
f5a1f0d0 | 3751 | tree object, ctor, type; |
26d44ae2 | 3752 | enum gimplify_status ret; |
9771b263 | 3753 | vec<constructor_elt, va_gc> *elts; |
26d44ae2 | 3754 | |
f5a1f0d0 | 3755 | gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR); |
26d44ae2 | 3756 | |
ffed8a01 AH |
3757 | if (!notify_temp_creation) |
3758 | { | |
726a989a | 3759 | ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, |
ffed8a01 AH |
3760 | is_gimple_lvalue, fb_lvalue); |
3761 | if (ret == GS_ERROR) | |
3762 | return ret; | |
3763 | } | |
57d1dd87 | 3764 | |
726a989a | 3765 | object = TREE_OPERAND (*expr_p, 0); |
f5a1f0d0 PB |
3766 | ctor = TREE_OPERAND (*expr_p, 1) = |
3767 | optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1)); | |
3768 | type = TREE_TYPE (ctor); | |
3769 | elts = CONSTRUCTOR_ELTS (ctor); | |
26d44ae2 | 3770 | ret = GS_ALL_DONE; |
726a989a | 3771 | |
26d44ae2 RH |
3772 | switch (TREE_CODE (type)) |
3773 | { | |
3774 | case RECORD_TYPE: | |
3775 | case UNION_TYPE: | |
3776 | case QUAL_UNION_TYPE: | |
3777 | case ARRAY_TYPE: | |
3778 | { | |
57d1dd87 | 3779 | struct gimplify_init_ctor_preeval_data preeval_data; |
953d0c90 RS |
3780 | HOST_WIDE_INT num_ctor_elements, num_nonzero_elements; |
3781 | bool cleared, complete_p, valid_const_initializer; | |
26d44ae2 RH |
3782 | |
3783 | /* Aggregate types must lower constructors to initialization of | |
3784 | individual elements. The exception is that a CONSTRUCTOR node | |
3785 | with no elements indicates zero-initialization of the whole. */ | |
9771b263 | 3786 | if (vec_safe_is_empty (elts)) |
ffed8a01 AH |
3787 | { |
3788 | if (notify_temp_creation) | |
3789 | return GS_OK; | |
3790 | break; | |
3791 | } | |
b8698a0f | 3792 | |
fe24d485 OH |
3793 | /* Fetch information about the constructor to direct later processing. |
3794 | We might want to make static versions of it in various cases, and | |
3795 | can only do so if it known to be a valid constant initializer. */ | |
3796 | valid_const_initializer | |
3797 | = categorize_ctor_elements (ctor, &num_nonzero_elements, | |
953d0c90 | 3798 | &num_ctor_elements, &complete_p); |
26d44ae2 RH |
3799 | |
3800 | /* If a const aggregate variable is being initialized, then it | |
3801 | should never be a lose to promote the variable to be static. */ | |
fe24d485 | 3802 | if (valid_const_initializer |
6f642f98 | 3803 | && num_nonzero_elements > 1 |
26d44ae2 | 3804 | && TREE_READONLY (object) |
d0ea0759 SE |
3805 | && TREE_CODE (object) == VAR_DECL |
3806 | && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))) | |
26d44ae2 | 3807 | { |
ffed8a01 AH |
3808 | if (notify_temp_creation) |
3809 | return GS_ERROR; | |
26d44ae2 RH |
3810 | DECL_INITIAL (object) = ctor; |
3811 | TREE_STATIC (object) = 1; | |
3812 | if (!DECL_NAME (object)) | |
3813 | DECL_NAME (object) = create_tmp_var_name ("C"); | |
3814 | walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL); | |
3815 | ||
3816 | /* ??? C++ doesn't automatically append a .<number> to the | |
6bdf3519 | 3817 | assembler name, and even when it does, it looks at FE private |
26d44ae2 RH |
3818 | data structures to figure out what that number should be, |
3819 | which are not set for this variable. I suppose this is | |
3820 | important for local statics for inline functions, which aren't | |
3821 | "local" in the object file sense. So in order to get a unique | |
3822 | TU-local symbol, we must invoke the lhd version now. */ | |
3823 | lhd_set_decl_assembler_name (object); | |
3824 | ||
3825 | *expr_p = NULL_TREE; | |
3826 | break; | |
3827 | } | |
3828 | ||
cce70747 JC |
3829 | /* If there are "lots" of initialized elements, even discounting |
3830 | those that are not address constants (and thus *must* be | |
3831 | computed at runtime), then partition the constructor into | |
3832 | constant and non-constant parts. Block copy the constant | |
3833 | parts in, then generate code for the non-constant parts. */ | |
3834 | /* TODO. There's code in cp/typeck.c to do this. */ | |
3835 | ||
953d0c90 RS |
3836 | if (int_size_in_bytes (TREE_TYPE (ctor)) < 0) |
3837 | /* store_constructor will ignore the clearing of variable-sized | |
3838 | objects. Initializers for such objects must explicitly set | |
3839 | every field that needs to be set. */ | |
3840 | cleared = false; | |
d368135f | 3841 | else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor)) |
953d0c90 | 3842 | /* If the constructor isn't complete, clear the whole object |
d368135f | 3843 | beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it. |
953d0c90 RS |
3844 | |
3845 | ??? This ought not to be needed. For any element not present | |
3846 | in the initializer, we should simply set them to zero. Except | |
3847 | we'd need to *find* the elements that are not present, and that | |
3848 | requires trickery to avoid quadratic compile-time behavior in | |
3849 | large cases or excessive memory use in small cases. */ | |
73ed17ff | 3850 | cleared = true; |
953d0c90 | 3851 | else if (num_ctor_elements - num_nonzero_elements |
e04ad03d | 3852 | > CLEAR_RATIO (optimize_function_for_speed_p (cfun)) |
953d0c90 RS |
3853 | && num_nonzero_elements < num_ctor_elements / 4) |
3854 | /* If there are "lots" of zeros, it's more efficient to clear | |
3855 | the memory and then set the nonzero elements. */ | |
cce70747 | 3856 | cleared = true; |
953d0c90 RS |
3857 | else |
3858 | cleared = false; | |
cce70747 | 3859 | |
26d44ae2 RH |
3860 | /* If there are "lots" of initialized elements, and all of them |
3861 | are valid address constants, then the entire initializer can | |
cce70747 JC |
3862 | be dropped to memory, and then memcpy'd out. Don't do this |
3863 | for sparse arrays, though, as it's more efficient to follow | |
3864 | the standard CONSTRUCTOR behavior of memset followed by | |
8afd015a JM |
3865 | individual element initialization. Also don't do this for small |
3866 | all-zero initializers (which aren't big enough to merit | |
3867 | clearing), and don't try to make bitwise copies of | |
d5e254e1 IE |
3868 | TREE_ADDRESSABLE types. |
3869 | ||
3870 | We cannot apply such transformation when compiling chkp static | |
3871 | initializer because creation of initializer image in the memory | |
3872 | will require static initialization of bounds for it. It should | |
3873 | result in another gimplification of similar initializer and we | |
3874 | may fall into infinite loop. */ | |
8afd015a JM |
3875 | if (valid_const_initializer |
3876 | && !(cleared || num_nonzero_elements == 0) | |
d5e254e1 IE |
3877 | && !TREE_ADDRESSABLE (type) |
3878 | && (!current_function_decl | |
3879 | || !lookup_attribute ("chkp ctor", | |
3880 | DECL_ATTRIBUTES (current_function_decl)))) | |
26d44ae2 RH |
3881 | { |
3882 | HOST_WIDE_INT size = int_size_in_bytes (type); | |
3883 | unsigned int align; | |
3884 | ||
3885 | /* ??? We can still get unbounded array types, at least | |
3886 | from the C++ front end. This seems wrong, but attempt | |
3887 | to work around it for now. */ | |
3888 | if (size < 0) | |
3889 | { | |
3890 | size = int_size_in_bytes (TREE_TYPE (object)); | |
3891 | if (size >= 0) | |
3892 | TREE_TYPE (ctor) = type = TREE_TYPE (object); | |
3893 | } | |
3894 | ||
3895 | /* Find the maximum alignment we can assume for the object. */ | |
3896 | /* ??? Make use of DECL_OFFSET_ALIGN. */ | |
3897 | if (DECL_P (object)) | |
3898 | align = DECL_ALIGN (object); | |
3899 | else | |
3900 | align = TYPE_ALIGN (type); | |
3901 | ||
f301837e EB |
3902 | /* Do a block move either if the size is so small as to make |
3903 | each individual move a sub-unit move on average, or if it | |
3904 | is so large as to make individual moves inefficient. */ | |
329ad380 JJ |
3905 | if (size > 0 |
3906 | && num_nonzero_elements > 1 | |
f301837e EB |
3907 | && (size < num_nonzero_elements |
3908 | || !can_move_by_pieces (size, align))) | |
26d44ae2 | 3909 | { |
ffed8a01 AH |
3910 | if (notify_temp_creation) |
3911 | return GS_ERROR; | |
3912 | ||
46314d3e EB |
3913 | walk_tree (&ctor, force_labels_r, NULL, NULL); |
3914 | ctor = tree_output_constant_def (ctor); | |
3915 | if (!useless_type_conversion_p (type, TREE_TYPE (ctor))) | |
3916 | ctor = build1 (VIEW_CONVERT_EXPR, type, ctor); | |
3917 | TREE_OPERAND (*expr_p, 1) = ctor; | |
57d1dd87 RH |
3918 | |
3919 | /* This is no longer an assignment of a CONSTRUCTOR, but | |
3920 | we still may have processing to do on the LHS. So | |
3921 | pretend we didn't do anything here to let that happen. */ | |
3922 | return GS_UNHANDLED; | |
26d44ae2 RH |
3923 | } |
3924 | } | |
3925 | ||
558af7ca EB |
3926 | /* If the target is volatile, we have non-zero elements and more than |
3927 | one field to assign, initialize the target from a temporary. */ | |
61c7cbf8 RG |
3928 | if (TREE_THIS_VOLATILE (object) |
3929 | && !TREE_ADDRESSABLE (type) | |
558af7ca | 3930 | && num_nonzero_elements > 0 |
9771b263 | 3931 | && vec_safe_length (elts) > 1) |
61c7cbf8 RG |
3932 | { |
3933 | tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL); | |
3934 | TREE_OPERAND (*expr_p, 0) = temp; | |
3935 | *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p), | |
3936 | *expr_p, | |
3937 | build2 (MODIFY_EXPR, void_type_node, | |
3938 | object, temp)); | |
3939 | return GS_OK; | |
3940 | } | |
3941 | ||
ffed8a01 AH |
3942 | if (notify_temp_creation) |
3943 | return GS_OK; | |
3944 | ||
675c873b EB |
3945 | /* If there are nonzero elements and if needed, pre-evaluate to capture |
3946 | elements overlapping with the lhs into temporaries. We must do this | |
3947 | before clearing to fetch the values before they are zeroed-out. */ | |
3948 | if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR) | |
85d89e76 OH |
3949 | { |
3950 | preeval_data.lhs_base_decl = get_base_address (object); | |
3951 | if (!DECL_P (preeval_data.lhs_base_decl)) | |
3952 | preeval_data.lhs_base_decl = NULL; | |
3953 | preeval_data.lhs_alias_set = get_alias_set (object); | |
3954 | ||
726a989a | 3955 | gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1), |
85d89e76 OH |
3956 | pre_p, post_p, &preeval_data); |
3957 | } | |
3958 | ||
26d44ae2 RH |
3959 | if (cleared) |
3960 | { | |
3961 | /* Zap the CONSTRUCTOR element list, which simplifies this case. | |
3962 | Note that we still have to gimplify, in order to handle the | |
57d1dd87 | 3963 | case of variable sized types. Avoid shared tree structures. */ |
4038c495 | 3964 | CONSTRUCTOR_ELTS (ctor) = NULL; |
726a989a | 3965 | TREE_SIDE_EFFECTS (ctor) = 0; |
57d1dd87 | 3966 | object = unshare_expr (object); |
726a989a | 3967 | gimplify_stmt (expr_p, pre_p); |
26d44ae2 RH |
3968 | } |
3969 | ||
6fa91b48 SB |
3970 | /* If we have not block cleared the object, or if there are nonzero |
3971 | elements in the constructor, add assignments to the individual | |
3972 | scalar fields of the object. */ | |
3973 | if (!cleared || num_nonzero_elements > 0) | |
85d89e76 | 3974 | gimplify_init_ctor_eval (object, elts, pre_p, cleared); |
26d44ae2 RH |
3975 | |
3976 | *expr_p = NULL_TREE; | |
3977 | } | |
3978 | break; | |
3979 | ||
3980 | case COMPLEX_TYPE: | |
3981 | { | |
3982 | tree r, i; | |
3983 | ||
ffed8a01 AH |
3984 | if (notify_temp_creation) |
3985 | return GS_OK; | |
3986 | ||
26d44ae2 | 3987 | /* Extract the real and imaginary parts out of the ctor. */ |
9771b263 DN |
3988 | gcc_assert (elts->length () == 2); |
3989 | r = (*elts)[0].value; | |
3990 | i = (*elts)[1].value; | |
26d44ae2 RH |
3991 | if (r == NULL || i == NULL) |
3992 | { | |
e8160c9a | 3993 | tree zero = build_zero_cst (TREE_TYPE (type)); |
26d44ae2 RH |
3994 | if (r == NULL) |
3995 | r = zero; | |
3996 | if (i == NULL) | |
3997 | i = zero; | |
3998 | } | |
3999 | ||
4000 | /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to | |
4001 | represent creation of a complex value. */ | |
4002 | if (TREE_CONSTANT (r) && TREE_CONSTANT (i)) | |
4003 | { | |
4004 | ctor = build_complex (type, r, i); | |
4005 | TREE_OPERAND (*expr_p, 1) = ctor; | |
4006 | } | |
4007 | else | |
4008 | { | |
b4257cfc | 4009 | ctor = build2 (COMPLEX_EXPR, type, r, i); |
26d44ae2 | 4010 | TREE_OPERAND (*expr_p, 1) = ctor; |
726a989a RB |
4011 | ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), |
4012 | pre_p, | |
4013 | post_p, | |
17ad5b5e RH |
4014 | rhs_predicate_for (TREE_OPERAND (*expr_p, 0)), |
4015 | fb_rvalue); | |
26d44ae2 RH |
4016 | } |
4017 | } | |
4018 | break; | |
506e2710 | 4019 | |
26d44ae2 | 4020 | case VECTOR_TYPE: |
4038c495 GB |
4021 | { |
4022 | unsigned HOST_WIDE_INT ix; | |
4023 | constructor_elt *ce; | |
e89be13b | 4024 | |
ffed8a01 AH |
4025 | if (notify_temp_creation) |
4026 | return GS_OK; | |
4027 | ||
4038c495 GB |
4028 | /* Go ahead and simplify constant constructors to VECTOR_CST. */ |
4029 | if (TREE_CONSTANT (ctor)) | |
4030 | { | |
4031 | bool constant_p = true; | |
4032 | tree value; | |
4033 | ||
4034 | /* Even when ctor is constant, it might contain non-*_CST | |
9f1da821 RS |
4035 | elements, such as addresses or trapping values like |
4036 | 1.0/0.0 - 1.0/0.0. Such expressions don't belong | |
4037 | in VECTOR_CST nodes. */ | |
4038c495 GB |
4038 | FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value) |
4039 | if (!CONSTANT_CLASS_P (value)) | |
4040 | { | |
4041 | constant_p = false; | |
4042 | break; | |
4043 | } | |
e89be13b | 4044 | |
4038c495 GB |
4045 | if (constant_p) |
4046 | { | |
4047 | TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts); | |
4048 | break; | |
4049 | } | |
84816907 | 4050 | |
9f1da821 | 4051 | TREE_CONSTANT (ctor) = 0; |
4038c495 | 4052 | } |
e89be13b | 4053 | |
4038c495 GB |
4054 | /* Vector types use CONSTRUCTOR all the way through gimple |
4055 | compilation as a general initializer. */ | |
9771b263 | 4056 | FOR_EACH_VEC_SAFE_ELT (elts, ix, ce) |
4038c495 GB |
4057 | { |
4058 | enum gimplify_status tret; | |
726a989a RB |
4059 | tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val, |
4060 | fb_rvalue); | |
4038c495 GB |
4061 | if (tret == GS_ERROR) |
4062 | ret = GS_ERROR; | |
4063 | } | |
726a989a RB |
4064 | if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0))) |
4065 | TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p); | |
4038c495 | 4066 | } |
26d44ae2 | 4067 | break; |
6de9cd9a | 4068 | |
26d44ae2 RH |
4069 | default: |
4070 | /* So how did we get a CONSTRUCTOR for a scalar type? */ | |
282899df | 4071 | gcc_unreachable (); |
26d44ae2 | 4072 | } |
6de9cd9a | 4073 | |
26d44ae2 RH |
4074 | if (ret == GS_ERROR) |
4075 | return GS_ERROR; | |
4076 | else if (want_value) | |
4077 | { | |
26d44ae2 RH |
4078 | *expr_p = object; |
4079 | return GS_OK; | |
6de9cd9a | 4080 | } |
26d44ae2 | 4081 | else |
726a989a RB |
4082 | { |
4083 | /* If we have gimplified both sides of the initializer but have | |
4084 | not emitted an assignment, do so now. */ | |
4085 | if (*expr_p) | |
4086 | { | |
4087 | tree lhs = TREE_OPERAND (*expr_p, 0); | |
4088 | tree rhs = TREE_OPERAND (*expr_p, 1); | |
4089 | gimple init = gimple_build_assign (lhs, rhs); | |
4090 | gimplify_seq_add_stmt (pre_p, init); | |
4091 | *expr_p = NULL; | |
4092 | } | |
4093 | ||
4094 | return GS_ALL_DONE; | |
4095 | } | |
26d44ae2 | 4096 | } |
6de9cd9a | 4097 | |
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Thin wrapper: all the folding work is done by
     gimple_fold_indirect_ref; this name documents the rhs-only
     restriction stated above.  */
  return gimple_fold_indirect_ref (t);
}
4110 | ||
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   *EXPR_P is the MODIFY_EXPR/INIT_EXPR being gimplified; FROM_P and
   TO_P point at its RHS and LHS operands respectively.  Side effects
   are emitted to the PRE_P and POST_P sequences.  WANT_VALUE is true
   when the caller needs the value of the assignment as an rvalue.

   Returns GS_UNHANDLED if no simplification applied, GS_ERROR on
   failure, GS_OK otherwise (or, for the CONSTRUCTOR and container
   cases, whatever status the delegated handling produces).  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* Keep re-examining the (possibly rewritten) RHS until one full pass
     makes no further change.  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  The notify_temp_creation=true call only
		 probes; it does not emit code.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		/* Preserve the volatility of the original indirection on
		   the folded replacement.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere? I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* Rebuild each non-void arm as "result = arm" so the store
		 happens inside the branch.  */
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized.  */
		use_target = true;
	      else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		       && (!is_gimple_variable (*to_p)
			   || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* Turn the container's value into an assignment to *TO_P;
	       voidify_wrapper_expr rewrites in place, so *EXPR_P must
	       come back unchanged.  */
	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU - an addressable compound literal is left alone.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
4392 | ||
216820a4 RG |
4393 | |
4394 | /* Return true if T looks like a valid GIMPLE statement. */ | |
4395 | ||
4396 | static bool | |
4397 | is_gimple_stmt (tree t) | |
4398 | { | |
4399 | const enum tree_code code = TREE_CODE (t); | |
4400 | ||
4401 | switch (code) | |
4402 | { | |
4403 | case NOP_EXPR: | |
4404 | /* The only valid NOP_EXPR is the empty statement. */ | |
4405 | return IS_EMPTY_STMT (t); | |
4406 | ||
4407 | case BIND_EXPR: | |
4408 | case COND_EXPR: | |
4409 | /* These are only valid if they're void. */ | |
4410 | return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t)); | |
4411 | ||
4412 | case SWITCH_EXPR: | |
4413 | case GOTO_EXPR: | |
4414 | case RETURN_EXPR: | |
4415 | case LABEL_EXPR: | |
4416 | case CASE_LABEL_EXPR: | |
4417 | case TRY_CATCH_EXPR: | |
4418 | case TRY_FINALLY_EXPR: | |
4419 | case EH_FILTER_EXPR: | |
4420 | case CATCH_EXPR: | |
4421 | case ASM_EXPR: | |
4422 | case STATEMENT_LIST: | |
4423 | case OMP_PARALLEL: | |
4424 | case OMP_FOR: | |
74bf76ed | 4425 | case OMP_SIMD: |
c02065fc | 4426 | case CILK_SIMD: |
acf0174b | 4427 | case OMP_DISTRIBUTE: |
216820a4 RG |
4428 | case OMP_SECTIONS: |
4429 | case OMP_SECTION: | |
4430 | case OMP_SINGLE: | |
4431 | case OMP_MASTER: | |
acf0174b | 4432 | case OMP_TASKGROUP: |
216820a4 RG |
4433 | case OMP_ORDERED: |
4434 | case OMP_CRITICAL: | |
4435 | case OMP_TASK: | |
4436 | /* These are always void. */ | |
4437 | return true; | |
4438 | ||
4439 | case CALL_EXPR: | |
4440 | case MODIFY_EXPR: | |
4441 | case PREDICT_EXPR: | |
4442 | /* These are valid regardless of their type. */ | |
4443 | return true; | |
4444 | ||
4445 | default: | |
4446 | return false; | |
4447 | } | |
4448 | } | |
4449 | ||
4450 | ||
/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
   DECL_GIMPLE_REG_P set.

   IMPORTANT NOTE: This promotion is performed by introducing a load of the
   other, unmodified part of the complex object just before the total store.
   As a consequence, if the object is still uninitialized, an undefined value
   will be loaded into a register, which may result in a spurious exception
   if the register is floating-point and the value happens to be a signaling
   NaN for example.  Then the fully-fledged complex operations lowering pass
   followed by a DCE pass are necessary in order to fix things up.

   The replacement assignment is appended to PRE_P.  If WANT_VALUE is
   true, *EXPR_P is set to the stored rhs, otherwise to NULL_TREE.  */

static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
				   bool want_value)
{
  enum tree_code code, ocode;
  tree lhs, rhs, new_rhs, other, realpart, imagpart;

  lhs = TREE_OPERAND (*expr_p, 0);
  rhs = TREE_OPERAND (*expr_p, 1);
  /* CODE is REALPART_EXPR or IMAGPART_EXPR; strip it so LHS becomes
     the underlying complex variable.  */
  code = TREE_CODE (lhs);
  lhs = TREE_OPERAND (lhs, 0);

  /* Load the part that is NOT being written into a formal temporary,
     suppressing uninitialized-use warnings on it.  */
  ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
  other = build1 (ocode, TREE_TYPE (rhs), lhs);
  TREE_NO_WARNING (other) = 1;
  other = get_formal_tmp_var (other, pre_p);

  realpart = code == REALPART_EXPR ? rhs : other;
  imagpart = code == REALPART_EXPR ? other : rhs;

  /* Build the full complex value; use a COMPLEX_CST when both parts
     are constant, a COMPLEX_EXPR otherwise.  */
  if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
    new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
  else
    new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);

  /* Emit the total store "lhs = new_rhs" before the original statement.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
  *expr_p = (want_value) ? rhs : NULL_TREE;

  return GS_ALL_DONE;
}
4493 | ||
206048bd | 4494 | /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P. |
6de9cd9a DN |
4495 | |
4496 | modify_expr | |
4497 | : varname '=' rhs | |
4498 | | '*' ID '=' rhs | |
4499 | ||
4500 | PRE_P points to the list where side effects that must happen before | |
4501 | *EXPR_P should be stored. | |
4502 | ||
4503 | POST_P points to the list where side effects that must happen after | |
4504 | *EXPR_P should be stored. | |
4505 | ||
4506 | WANT_VALUE is nonzero iff we want to use the value of this expression | |
4507 | in another expression. */ | |
4508 | ||
4509 | static enum gimplify_status | |
726a989a RB |
4510 | gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, |
4511 | bool want_value) | |
6de9cd9a | 4512 | { |
726a989a RB |
4513 | tree *from_p = &TREE_OPERAND (*expr_p, 1); |
4514 | tree *to_p = &TREE_OPERAND (*expr_p, 0); | |
44de5aeb | 4515 | enum gimplify_status ret = GS_UNHANDLED; |
726a989a | 4516 | gimple assign; |
db3927fb | 4517 | location_t loc = EXPR_LOCATION (*expr_p); |
6da8be89 | 4518 | gimple_stmt_iterator gsi; |
6de9cd9a | 4519 | |
282899df NS |
4520 | gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR |
4521 | || TREE_CODE (*expr_p) == INIT_EXPR); | |
6de9cd9a | 4522 | |
d0ad58f9 JM |
4523 | /* Trying to simplify a clobber using normal logic doesn't work, |
4524 | so handle it here. */ | |
4525 | if (TREE_CLOBBER_P (*from_p)) | |
4526 | { | |
5d751b0c JJ |
4527 | ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue); |
4528 | if (ret == GS_ERROR) | |
4529 | return ret; | |
4530 | gcc_assert (!want_value | |
4531 | && (TREE_CODE (*to_p) == VAR_DECL | |
4532 | || TREE_CODE (*to_p) == MEM_REF)); | |
d0ad58f9 JM |
4533 | gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p)); |
4534 | *expr_p = NULL; | |
4535 | return GS_ALL_DONE; | |
4536 | } | |
4537 | ||
1b24a790 RG |
4538 | /* Insert pointer conversions required by the middle-end that are not |
4539 | required by the frontend. This fixes middle-end type checking for | |
4540 | for example gcc.dg/redecl-6.c. */ | |
daad0278 | 4541 | if (POINTER_TYPE_P (TREE_TYPE (*to_p))) |
1b24a790 RG |
4542 | { |
4543 | STRIP_USELESS_TYPE_CONVERSION (*from_p); | |
4544 | if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p))) | |
db3927fb | 4545 | *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p); |
1b24a790 RG |
4546 | } |
4547 | ||
83d7e8f0 JM |
4548 | /* See if any simplifications can be done based on what the RHS is. */ |
4549 | ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p, | |
4550 | want_value); | |
4551 | if (ret != GS_UNHANDLED) | |
4552 | return ret; | |
4553 | ||
4554 | /* For zero sized types only gimplify the left hand side and right hand | |
4555 | side as statements and throw away the assignment. Do this after | |
4556 | gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable | |
4557 | types properly. */ | |
753b34d7 | 4558 | if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value) |
412f8986 | 4559 | { |
726a989a RB |
4560 | gimplify_stmt (from_p, pre_p); |
4561 | gimplify_stmt (to_p, pre_p); | |
412f8986 AP |
4562 | *expr_p = NULL_TREE; |
4563 | return GS_ALL_DONE; | |
4564 | } | |
6de9cd9a | 4565 | |
d25cee4d RH |
4566 | /* If the value being copied is of variable width, compute the length |
4567 | of the copy into a WITH_SIZE_EXPR. Note that we need to do this | |
4568 | before gimplifying any of the operands so that we can resolve any | |
4569 | PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses | |
4570 | the size of the expression to be copied, not of the destination, so | |
726a989a | 4571 | that is what we must do here. */ |
d25cee4d | 4572 | maybe_with_size_expr (from_p); |
6de9cd9a | 4573 | |
44de5aeb RK |
4574 | ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue); |
4575 | if (ret == GS_ERROR) | |
4576 | return ret; | |
6de9cd9a | 4577 | |
726a989a RB |
4578 | /* As a special case, we have to temporarily allow for assignments |
4579 | with a CALL_EXPR on the RHS. Since in GIMPLE a function call is | |
4580 | a toplevel statement, when gimplifying the GENERIC expression | |
4581 | MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple | |
4582 | GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>. | |
4583 | ||
4584 | Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To | |
4585 | prevent gimplify_expr from trying to create a new temporary for | |
4586 | foo's LHS, we tell it that it should only gimplify until it | |
4587 | reaches the CALL_EXPR. On return from gimplify_expr, the newly | |
4588 | created GIMPLE_CALL <foo> will be the last statement in *PRE_P | |
4589 | and all we need to do here is set 'a' to be its LHS. */ | |
4590 | ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p), | |
4591 | fb_rvalue); | |
6de9cd9a DN |
4592 | if (ret == GS_ERROR) |
4593 | return ret; | |
4594 | ||
44de5aeb RK |
4595 | /* Now see if the above changed *from_p to something we handle specially. */ |
4596 | ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p, | |
4597 | want_value); | |
6de9cd9a DN |
4598 | if (ret != GS_UNHANDLED) |
4599 | return ret; | |
4600 | ||
d25cee4d RH |
4601 | /* If we've got a variable sized assignment between two lvalues (i.e. does |
4602 | not involve a call), then we can make things a bit more straightforward | |
4603 | by converting the assignment to memcpy or memset. */ | |
4604 | if (TREE_CODE (*from_p) == WITH_SIZE_EXPR) | |
4605 | { | |
4606 | tree from = TREE_OPERAND (*from_p, 0); | |
4607 | tree size = TREE_OPERAND (*from_p, 1); | |
4608 | ||
4609 | if (TREE_CODE (from) == CONSTRUCTOR) | |
726a989a RB |
4610 | return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p); |
4611 | ||
e847cc68 | 4612 | if (is_gimple_addressable (from)) |
d25cee4d RH |
4613 | { |
4614 | *from_p = from; | |
726a989a RB |
4615 | return gimplify_modify_expr_to_memcpy (expr_p, size, want_value, |
4616 | pre_p); | |
d25cee4d RH |
4617 | } |
4618 | } | |
4619 | ||
e41d82f5 RH |
4620 | /* Transform partial stores to non-addressable complex variables into |
4621 | total stores. This allows us to use real instead of virtual operands | |
4622 | for these variables, which improves optimization. */ | |
4623 | if ((TREE_CODE (*to_p) == REALPART_EXPR | |
4624 | || TREE_CODE (*to_p) == IMAGPART_EXPR) | |
4625 | && is_gimple_reg (TREE_OPERAND (*to_p, 0))) | |
4626 | return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value); | |
4627 | ||
f173837a EB |
4628 | /* Try to alleviate the effects of the gimplification creating artificial |
4629 | temporaries (see for example is_gimple_reg_rhs) on the debug info. */ | |
4630 | if (!gimplify_ctxp->into_ssa | |
f2896bc9 | 4631 | && TREE_CODE (*from_p) == VAR_DECL |
726a989a RB |
4632 | && DECL_IGNORED_P (*from_p) |
4633 | && DECL_P (*to_p) | |
4634 | && !DECL_IGNORED_P (*to_p)) | |
f173837a EB |
4635 | { |
4636 | if (!DECL_NAME (*from_p) && DECL_NAME (*to_p)) | |
4637 | DECL_NAME (*from_p) | |
4638 | = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p))); | |
839b422f | 4639 | DECL_HAS_DEBUG_EXPR_P (*from_p) = 1; |
f173837a | 4640 | SET_DECL_DEBUG_EXPR (*from_p, *to_p); |
726a989a RB |
4641 | } |
4642 | ||
8f0fe813 NS |
4643 | if (want_value && TREE_THIS_VOLATILE (*to_p)) |
4644 | *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p); | |
4645 | ||
726a989a RB |
4646 | if (TREE_CODE (*from_p) == CALL_EXPR) |
4647 | { | |
4648 | /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL | |
4649 | instead of a GIMPLE_ASSIGN. */ | |
1304953e JJ |
4650 | if (CALL_EXPR_FN (*from_p) == NULL_TREE) |
4651 | { | |
4652 | /* Gimplify internal functions created in the FEs. */ | |
4653 | int nargs = call_expr_nargs (*from_p), i; | |
4654 | enum internal_fn ifn = CALL_EXPR_IFN (*from_p); | |
4655 | auto_vec<tree> vargs (nargs); | |
4656 | ||
4657 | for (i = 0; i < nargs; i++) | |
4658 | { | |
4659 | gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p, | |
4660 | EXPR_LOCATION (*from_p)); | |
4661 | vargs.quick_push (CALL_EXPR_ARG (*from_p, i)); | |
4662 | } | |
4663 | assign = gimple_build_call_internal_vec (ifn, vargs); | |
4664 | gimple_set_location (assign, EXPR_LOCATION (*expr_p)); | |
4665 | } | |
ed9c79e1 JJ |
4666 | else |
4667 | { | |
1304953e JJ |
4668 | tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p)); |
4669 | CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0); | |
4670 | STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p)); | |
4671 | tree fndecl = get_callee_fndecl (*from_p); | |
4672 | if (fndecl | |
4673 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
4674 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT | |
4675 | && call_expr_nargs (*from_p) == 3) | |
4676 | assign = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3, | |
4677 | CALL_EXPR_ARG (*from_p, 0), | |
4678 | CALL_EXPR_ARG (*from_p, 1), | |
4679 | CALL_EXPR_ARG (*from_p, 2)); | |
4680 | else | |
4681 | { | |
4682 | assign = gimple_build_call_from_tree (*from_p); | |
4683 | gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype)); | |
4684 | } | |
ed9c79e1 | 4685 | } |
f6b64c35 | 4686 | notice_special_calls (assign); |
5de8da9b AO |
4687 | if (!gimple_call_noreturn_p (assign)) |
4688 | gimple_call_set_lhs (assign, *to_p); | |
f173837a | 4689 | } |
726a989a | 4690 | else |
c2255bc4 AH |
4691 | { |
4692 | assign = gimple_build_assign (*to_p, *from_p); | |
4693 | gimple_set_location (assign, EXPR_LOCATION (*expr_p)); | |
4694 | } | |
f173837a | 4695 | |
726a989a | 4696 | if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p)) |
6de9cd9a | 4697 | { |
2ad728d2 RG |
4698 | /* We should have got an SSA name from the start. */ |
4699 | gcc_assert (TREE_CODE (*to_p) == SSA_NAME); | |
726a989a | 4700 | } |
07beea0d | 4701 | |
6da8be89 MM |
4702 | gimplify_seq_add_stmt (pre_p, assign); |
4703 | gsi = gsi_last (*pre_p); | |
88ac13da | 4704 | maybe_fold_stmt (&gsi); |
6da8be89 | 4705 | |
726a989a RB |
4706 | if (want_value) |
4707 | { | |
8f0fe813 | 4708 | *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p); |
17ad5b5e | 4709 | return GS_OK; |
6de9cd9a | 4710 | } |
726a989a RB |
4711 | else |
4712 | *expr_p = NULL; | |
6de9cd9a | 4713 | |
17ad5b5e | 4714 | return GS_ALL_DONE; |
6de9cd9a DN |
4715 | } |
4716 | ||
ad19c4be EB |
4717 | /* Gimplify a comparison between two variable-sized objects. Do this |
4718 | with a call to BUILT_IN_MEMCMP. */ | |
44de5aeb RK |
4719 | |
4720 | static enum gimplify_status | |
4721 | gimplify_variable_sized_compare (tree *expr_p) | |
4722 | { | |
692ad9aa | 4723 | location_t loc = EXPR_LOCATION (*expr_p); |
44de5aeb RK |
4724 | tree op0 = TREE_OPERAND (*expr_p, 0); |
4725 | tree op1 = TREE_OPERAND (*expr_p, 1); | |
692ad9aa | 4726 | tree t, arg, dest, src, expr; |
5039610b SL |
4727 | |
4728 | arg = TYPE_SIZE_UNIT (TREE_TYPE (op0)); | |
4729 | arg = unshare_expr (arg); | |
4730 | arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0); | |
db3927fb AH |
4731 | src = build_fold_addr_expr_loc (loc, op1); |
4732 | dest = build_fold_addr_expr_loc (loc, op0); | |
e79983f4 | 4733 | t = builtin_decl_implicit (BUILT_IN_MEMCMP); |
db3927fb | 4734 | t = build_call_expr_loc (loc, t, 3, dest, src, arg); |
692ad9aa EB |
4735 | |
4736 | expr | |
b4257cfc | 4737 | = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node); |
692ad9aa EB |
4738 | SET_EXPR_LOCATION (expr, loc); |
4739 | *expr_p = expr; | |
44de5aeb RK |
4740 | |
4741 | return GS_OK; | |
4742 | } | |
4743 | ||
ad19c4be EB |
4744 | /* Gimplify a comparison between two aggregate objects of integral scalar |
4745 | mode as a comparison between the bitwise equivalent scalar values. */ | |
61c25908 OH |
4746 | |
4747 | static enum gimplify_status | |
4748 | gimplify_scalar_mode_aggregate_compare (tree *expr_p) | |
4749 | { | |
db3927fb | 4750 | location_t loc = EXPR_LOCATION (*expr_p); |
61c25908 OH |
4751 | tree op0 = TREE_OPERAND (*expr_p, 0); |
4752 | tree op1 = TREE_OPERAND (*expr_p, 1); | |
4753 | ||
4754 | tree type = TREE_TYPE (op0); | |
4755 | tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1); | |
4756 | ||
db3927fb AH |
4757 | op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0); |
4758 | op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1); | |
61c25908 OH |
4759 | |
4760 | *expr_p | |
db3927fb | 4761 | = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1); |
61c25908 OH |
4762 | |
4763 | return GS_OK; | |
4764 | } | |
4765 | ||
ad19c4be EB |
4766 | /* Gimplify an expression sequence. This function gimplifies each |
4767 | expression and rewrites the original expression with the last | |
6de9cd9a DN |
4768 | expression of the sequence in GIMPLE form. |
4769 | ||
4770 | PRE_P points to the list where the side effects for all the | |
4771 | expressions in the sequence will be emitted. | |
d3147f64 | 4772 | |
6de9cd9a | 4773 | WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */ |
6de9cd9a DN |
4774 | |
4775 | static enum gimplify_status | |
726a989a | 4776 | gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) |
6de9cd9a DN |
4777 | { |
4778 | tree t = *expr_p; | |
4779 | ||
4780 | do | |
4781 | { | |
4782 | tree *sub_p = &TREE_OPERAND (t, 0); | |
4783 | ||
4784 | if (TREE_CODE (*sub_p) == COMPOUND_EXPR) | |
4785 | gimplify_compound_expr (sub_p, pre_p, false); | |
4786 | else | |
726a989a | 4787 | gimplify_stmt (sub_p, pre_p); |
6de9cd9a DN |
4788 | |
4789 | t = TREE_OPERAND (t, 1); | |
4790 | } | |
4791 | while (TREE_CODE (t) == COMPOUND_EXPR); | |
4792 | ||
4793 | *expr_p = t; | |
4794 | if (want_value) | |
4795 | return GS_OK; | |
4796 | else | |
4797 | { | |
726a989a | 4798 | gimplify_stmt (expr_p, pre_p); |
6de9cd9a DN |
4799 | return GS_ALL_DONE; |
4800 | } | |
4801 | } | |
4802 | ||
726a989a RB |
4803 | /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to |
4804 | gimplify. After gimplification, EXPR_P will point to a new temporary | |
4805 | that holds the original value of the SAVE_EXPR node. | |
6de9cd9a | 4806 | |
726a989a | 4807 | PRE_P points to the list where side effects that must happen before |
ad19c4be | 4808 | *EXPR_P should be stored. */ |
6de9cd9a DN |
4809 | |
4810 | static enum gimplify_status | |
726a989a | 4811 | gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) |
6de9cd9a DN |
4812 | { |
4813 | enum gimplify_status ret = GS_ALL_DONE; | |
4814 | tree val; | |
4815 | ||
282899df | 4816 | gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR); |
6de9cd9a DN |
4817 | val = TREE_OPERAND (*expr_p, 0); |
4818 | ||
7f5e6307 RH |
4819 | /* If the SAVE_EXPR has not been resolved, then evaluate it once. */ |
4820 | if (!SAVE_EXPR_RESOLVED_P (*expr_p)) | |
17ad5b5e | 4821 | { |
7f5e6307 RH |
4822 | /* The operand may be a void-valued expression such as SAVE_EXPRs |
4823 | generated by the Java frontend for class initialization. It is | |
4824 | being executed only for its side-effects. */ | |
4825 | if (TREE_TYPE (val) == void_type_node) | |
4826 | { | |
4827 | ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, | |
4828 | is_gimple_stmt, fb_none); | |
7f5e6307 RH |
4829 | val = NULL; |
4830 | } | |
4831 | else | |
4832 | val = get_initialized_tmp_var (val, pre_p, post_p); | |
4833 | ||
4834 | TREE_OPERAND (*expr_p, 0) = val; | |
4835 | SAVE_EXPR_RESOLVED_P (*expr_p) = 1; | |
17ad5b5e | 4836 | } |
6de9cd9a | 4837 | |
7f5e6307 RH |
4838 | *expr_p = val; |
4839 | ||
6de9cd9a DN |
4840 | return ret; |
4841 | } | |
4842 | ||
ad19c4be | 4843 | /* Rewrite the ADDR_EXPR node pointed to by EXPR_P |
6de9cd9a DN |
4844 | |
4845 | unary_expr | |
4846 | : ... | |
4847 | | '&' varname | |
4848 | ... | |
4849 | ||
4850 | PRE_P points to the list where side effects that must happen before | |
4851 | *EXPR_P should be stored. | |
4852 | ||
4853 | POST_P points to the list where side effects that must happen after | |
4854 | *EXPR_P should be stored. */ | |
4855 | ||
4856 | static enum gimplify_status | |
726a989a | 4857 | gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) |
6de9cd9a DN |
4858 | { |
4859 | tree expr = *expr_p; | |
4860 | tree op0 = TREE_OPERAND (expr, 0); | |
4861 | enum gimplify_status ret; | |
db3927fb | 4862 | location_t loc = EXPR_LOCATION (*expr_p); |
6de9cd9a DN |
4863 | |
4864 | switch (TREE_CODE (op0)) | |
4865 | { | |
4866 | case INDIRECT_REF: | |
67f23620 | 4867 | do_indirect_ref: |
6de9cd9a DN |
4868 | /* Check if we are dealing with an expression of the form '&*ptr'. |
4869 | While the front end folds away '&*ptr' into 'ptr', these | |
4870 | expressions may be generated internally by the compiler (e.g., | |
4871 | builtins like __builtin_va_end). */ | |
67f23620 RH |
4872 | /* Caution: the silent array decomposition semantics we allow for |
4873 | ADDR_EXPR means we can't always discard the pair. */ | |
c87ac7e8 AO |
4874 | /* Gimplification of the ADDR_EXPR operand may drop |
4875 | cv-qualification conversions, so make sure we add them if | |
4876 | needed. */ | |
67f23620 RH |
4877 | { |
4878 | tree op00 = TREE_OPERAND (op0, 0); | |
4879 | tree t_expr = TREE_TYPE (expr); | |
4880 | tree t_op00 = TREE_TYPE (op00); | |
4881 | ||
f4088621 | 4882 | if (!useless_type_conversion_p (t_expr, t_op00)) |
db3927fb | 4883 | op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00); |
67f23620 RH |
4884 | *expr_p = op00; |
4885 | ret = GS_OK; | |
4886 | } | |
6de9cd9a DN |
4887 | break; |
4888 | ||
44de5aeb RK |
4889 | case VIEW_CONVERT_EXPR: |
4890 | /* Take the address of our operand and then convert it to the type of | |
af72267c RK |
4891 | this ADDR_EXPR. |
4892 | ||
4893 | ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at | |
4894 | all clear. The impact of this transformation is even less clear. */ | |
91804752 EB |
4895 | |
4896 | /* If the operand is a useless conversion, look through it. Doing so | |
4897 | guarantees that the ADDR_EXPR and its operand will remain of the | |
4898 | same type. */ | |
4899 | if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0))) | |
317c0092 | 4900 | op0 = TREE_OPERAND (op0, 0); |
91804752 | 4901 | |
db3927fb AH |
4902 | *expr_p = fold_convert_loc (loc, TREE_TYPE (expr), |
4903 | build_fold_addr_expr_loc (loc, | |
4904 | TREE_OPERAND (op0, 0))); | |
44de5aeb | 4905 | ret = GS_OK; |
6de9cd9a DN |
4906 | break; |
4907 | ||
4908 | default: | |
4909 | /* We use fb_either here because the C frontend sometimes takes | |
5201931e JM |
4910 | the address of a call that returns a struct; see |
4911 | gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make | |
4912 | the implied temporary explicit. */ | |
936d04b6 | 4913 | |
f76d6e6f | 4914 | /* Make the operand addressable. */ |
6de9cd9a | 4915 | ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p, |
e847cc68 | 4916 | is_gimple_addressable, fb_either); |
8b17cc05 RG |
4917 | if (ret == GS_ERROR) |
4918 | break; | |
67f23620 | 4919 | |
f76d6e6f EB |
4920 | /* Then mark it. Beware that it may not be possible to do so directly |
4921 | if a temporary has been created by the gimplification. */ | |
4922 | prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p); | |
9e51aaf5 | 4923 | |
8b17cc05 | 4924 | op0 = TREE_OPERAND (expr, 0); |
6de9cd9a | 4925 | |
8b17cc05 RG |
4926 | /* For various reasons, the gimplification of the expression |
4927 | may have made a new INDIRECT_REF. */ | |
4928 | if (TREE_CODE (op0) == INDIRECT_REF) | |
4929 | goto do_indirect_ref; | |
4930 | ||
6b8b9e42 RG |
4931 | mark_addressable (TREE_OPERAND (expr, 0)); |
4932 | ||
4933 | /* The FEs may end up building ADDR_EXPRs early on a decl with | |
4934 | an incomplete type. Re-build ADDR_EXPRs in canonical form | |
4935 | here. */ | |
4936 | if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr)))) | |
4937 | *expr_p = build_fold_addr_expr (op0); | |
4938 | ||
8b17cc05 | 4939 | /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */ |
6b8b9e42 RG |
4940 | recompute_tree_invariant_for_addr_expr (*expr_p); |
4941 | ||
4942 | /* If we re-built the ADDR_EXPR add a conversion to the original type | |
4943 | if required. */ | |
4944 | if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p))) | |
4945 | *expr_p = fold_convert (TREE_TYPE (expr), *expr_p); | |
8b17cc05 | 4946 | |
6de9cd9a DN |
4947 | break; |
4948 | } | |
4949 | ||
6de9cd9a DN |
4950 | return ret; |
4951 | } | |
4952 | ||
4953 | /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple | |
4954 | value; output operands should be a gimple lvalue. */ | |
4955 | ||
4956 | static enum gimplify_status | |
726a989a | 4957 | gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) |
6de9cd9a | 4958 | { |
726a989a RB |
4959 | tree expr; |
4960 | int noutputs; | |
4961 | const char **oconstraints; | |
6de9cd9a DN |
4962 | int i; |
4963 | tree link; | |
4964 | const char *constraint; | |
4965 | bool allows_mem, allows_reg, is_inout; | |
4966 | enum gimplify_status ret, tret; | |
726a989a | 4967 | gimple stmt; |
9771b263 DN |
4968 | vec<tree, va_gc> *inputs; |
4969 | vec<tree, va_gc> *outputs; | |
4970 | vec<tree, va_gc> *clobbers; | |
4971 | vec<tree, va_gc> *labels; | |
726a989a | 4972 | tree link_next; |
b8698a0f | 4973 | |
726a989a RB |
4974 | expr = *expr_p; |
4975 | noutputs = list_length (ASM_OUTPUTS (expr)); | |
4976 | oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); | |
4977 | ||
9771b263 DN |
4978 | inputs = NULL; |
4979 | outputs = NULL; | |
4980 | clobbers = NULL; | |
4981 | labels = NULL; | |
6de9cd9a | 4982 | |
6de9cd9a | 4983 | ret = GS_ALL_DONE; |
726a989a RB |
4984 | link_next = NULL_TREE; |
4985 | for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next) | |
6de9cd9a | 4986 | { |
2c68ba8e | 4987 | bool ok; |
726a989a RB |
4988 | size_t constraint_len; |
4989 | ||
4990 | link_next = TREE_CHAIN (link); | |
4991 | ||
4992 | oconstraints[i] | |
4993 | = constraint | |
6de9cd9a | 4994 | = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); |
6db081f1 AP |
4995 | constraint_len = strlen (constraint); |
4996 | if (constraint_len == 0) | |
4997 | continue; | |
6de9cd9a | 4998 | |
2c68ba8e LB |
4999 | ok = parse_output_constraint (&constraint, i, 0, 0, |
5000 | &allows_mem, &allows_reg, &is_inout); | |
5001 | if (!ok) | |
5002 | { | |
5003 | ret = GS_ERROR; | |
5004 | is_inout = false; | |
5005 | } | |
6de9cd9a DN |
5006 | |
5007 | if (!allows_reg && allows_mem) | |
936d04b6 | 5008 | mark_addressable (TREE_VALUE (link)); |
6de9cd9a DN |
5009 | |
5010 | tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, | |
5011 | is_inout ? is_gimple_min_lval : is_gimple_lvalue, | |
5012 | fb_lvalue | fb_mayfail); | |
5013 | if (tret == GS_ERROR) | |
5014 | { | |
5015 | error ("invalid lvalue in asm output %d", i); | |
5016 | ret = tret; | |
5017 | } | |
5018 | ||
9771b263 | 5019 | vec_safe_push (outputs, link); |
726a989a RB |
5020 | TREE_CHAIN (link) = NULL_TREE; |
5021 | ||
6de9cd9a DN |
5022 | if (is_inout) |
5023 | { | |
5024 | /* An input/output operand. To give the optimizers more | |
5025 | flexibility, split it into separate input and output | |
5026 | operands. */ | |
5027 | tree input; | |
5028 | char buf[10]; | |
6de9cd9a DN |
5029 | |
5030 | /* Turn the in/out constraint into an output constraint. */ | |
5031 | char *p = xstrdup (constraint); | |
5032 | p[0] = '='; | |
5033 | TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p); | |
6de9cd9a DN |
5034 | |
5035 | /* And add a matching input constraint. */ | |
5036 | if (allows_reg) | |
5037 | { | |
5038 | sprintf (buf, "%d", i); | |
372d72d9 JJ |
5039 | |
5040 | /* If there are multiple alternatives in the constraint, | |
5041 | handle each of them individually. Those that allow register | |
5042 | will be replaced with operand number, the others will stay | |
5043 | unchanged. */ | |
5044 | if (strchr (p, ',') != NULL) | |
5045 | { | |
5046 | size_t len = 0, buflen = strlen (buf); | |
5047 | char *beg, *end, *str, *dst; | |
5048 | ||
5049 | for (beg = p + 1;;) | |
5050 | { | |
5051 | end = strchr (beg, ','); | |
5052 | if (end == NULL) | |
5053 | end = strchr (beg, '\0'); | |
5054 | if ((size_t) (end - beg) < buflen) | |
5055 | len += buflen + 1; | |
5056 | else | |
5057 | len += end - beg + 1; | |
5058 | if (*end) | |
5059 | beg = end + 1; | |
5060 | else | |
5061 | break; | |
5062 | } | |
5063 | ||
858904db | 5064 | str = (char *) alloca (len); |
372d72d9 JJ |
5065 | for (beg = p + 1, dst = str;;) |
5066 | { | |
5067 | const char *tem; | |
5068 | bool mem_p, reg_p, inout_p; | |
5069 | ||
5070 | end = strchr (beg, ','); | |
5071 | if (end) | |
5072 | *end = '\0'; | |
5073 | beg[-1] = '='; | |
5074 | tem = beg - 1; | |
5075 | parse_output_constraint (&tem, i, 0, 0, | |
5076 | &mem_p, ®_p, &inout_p); | |
5077 | if (dst != str) | |
5078 | *dst++ = ','; | |
5079 | if (reg_p) | |
5080 | { | |
5081 | memcpy (dst, buf, buflen); | |
5082 | dst += buflen; | |
5083 | } | |
5084 | else | |
5085 | { | |
5086 | if (end) | |
5087 | len = end - beg; | |
5088 | else | |
5089 | len = strlen (beg); | |
5090 | memcpy (dst, beg, len); | |
5091 | dst += len; | |
5092 | } | |
5093 | if (end) | |
5094 | beg = end + 1; | |
5095 | else | |
5096 | break; | |
5097 | } | |
5098 | *dst = '\0'; | |
5099 | input = build_string (dst - str, str); | |
5100 | } | |
5101 | else | |
5102 | input = build_string (strlen (buf), buf); | |
6de9cd9a DN |
5103 | } |
5104 | else | |
5105 | input = build_string (constraint_len - 1, constraint + 1); | |
372d72d9 JJ |
5106 | |
5107 | free (p); | |
5108 | ||
6de9cd9a DN |
5109 | input = build_tree_list (build_tree_list (NULL_TREE, input), |
5110 | unshare_expr (TREE_VALUE (link))); | |
5111 | ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input); | |
5112 | } | |
5113 | } | |
5114 | ||
726a989a RB |
5115 | link_next = NULL_TREE; |
5116 | for (link = ASM_INPUTS (expr); link; ++i, link = link_next) | |
6de9cd9a | 5117 | { |
726a989a RB |
5118 | link_next = TREE_CHAIN (link); |
5119 | constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); | |
6de9cd9a DN |
5120 | parse_input_constraint (&constraint, 0, 0, noutputs, 0, |
5121 | oconstraints, &allows_mem, &allows_reg); | |
5122 | ||
f497c16c JJ |
5123 | /* If we can't make copies, we can only accept memory. */ |
5124 | if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link)))) | |
5125 | { | |
5126 | if (allows_mem) | |
5127 | allows_reg = 0; | |
5128 | else | |
5129 | { | |
5130 | error ("impossible constraint in %<asm%>"); | |
5131 | error ("non-memory input %d must stay in memory", i); | |
5132 | return GS_ERROR; | |
5133 | } | |
5134 | } | |
5135 | ||
6de9cd9a DN |
5136 | /* If the operand is a memory input, it should be an lvalue. */ |
5137 | if (!allows_reg && allows_mem) | |
5138 | { | |
502c5084 JJ |
5139 | tree inputv = TREE_VALUE (link); |
5140 | STRIP_NOPS (inputv); | |
5141 | if (TREE_CODE (inputv) == PREDECREMENT_EXPR | |
5142 | || TREE_CODE (inputv) == PREINCREMENT_EXPR | |
5143 | || TREE_CODE (inputv) == POSTDECREMENT_EXPR | |
5144 | || TREE_CODE (inputv) == POSTINCREMENT_EXPR) | |
5145 | TREE_VALUE (link) = error_mark_node; | |
6de9cd9a DN |
5146 | tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, |
5147 | is_gimple_lvalue, fb_lvalue | fb_mayfail); | |
936d04b6 | 5148 | mark_addressable (TREE_VALUE (link)); |
6de9cd9a DN |
5149 | if (tret == GS_ERROR) |
5150 | { | |
6a3799eb AH |
5151 | if (EXPR_HAS_LOCATION (TREE_VALUE (link))) |
5152 | input_location = EXPR_LOCATION (TREE_VALUE (link)); | |
6de9cd9a DN |
5153 | error ("memory input %d is not directly addressable", i); |
5154 | ret = tret; | |
5155 | } | |
5156 | } | |
5157 | else | |
5158 | { | |
5159 | tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, | |
e670d9e4 | 5160 | is_gimple_asm_val, fb_rvalue); |
6de9cd9a DN |
5161 | if (tret == GS_ERROR) |
5162 | ret = tret; | |
5163 | } | |
726a989a RB |
5164 | |
5165 | TREE_CHAIN (link) = NULL_TREE; | |
9771b263 | 5166 | vec_safe_push (inputs, link); |
6de9cd9a | 5167 | } |
b8698a0f | 5168 | |
ca081cc8 EB |
5169 | link_next = NULL_TREE; |
5170 | for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next) | |
5171 | { | |
5172 | link_next = TREE_CHAIN (link); | |
5173 | TREE_CHAIN (link) = NULL_TREE; | |
5174 | vec_safe_push (clobbers, link); | |
5175 | } | |
1c384bf1 | 5176 | |
ca081cc8 EB |
5177 | link_next = NULL_TREE; |
5178 | for (link = ASM_LABELS (expr); link; ++i, link = link_next) | |
5179 | { | |
5180 | link_next = TREE_CHAIN (link); | |
5181 | TREE_CHAIN (link) = NULL_TREE; | |
5182 | vec_safe_push (labels, link); | |
5183 | } | |
726a989a | 5184 | |
a406865a RG |
5185 | /* Do not add ASMs with errors to the gimple IL stream. */ |
5186 | if (ret != GS_ERROR) | |
5187 | { | |
5188 | stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)), | |
1c384bf1 | 5189 | inputs, outputs, clobbers, labels); |
726a989a | 5190 | |
a406865a RG |
5191 | gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr)); |
5192 | gimple_asm_set_input (stmt, ASM_INPUT_P (expr)); | |
5193 | ||
5194 | gimplify_seq_add_stmt (pre_p, stmt); | |
5195 | } | |
6de9cd9a DN |
5196 | |
5197 | return ret; | |
5198 | } | |
5199 | ||
5200 | /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding | |
726a989a | 5201 | GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while |
6de9cd9a DN |
5202 | gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we |
5203 | return to this function. | |
5204 | ||
5205 | FIXME should we complexify the prequeue handling instead? Or use flags | |
5206 | for all the cleanups and let the optimizer tighten them up? The current | |
5207 | code seems pretty fragile; it will break on a cleanup within any | |
5208 | non-conditional nesting. But any such nesting would be broken, anyway; | |
5209 | we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct | |
5210 | and continues out of it. We can do that at the RTL level, though, so | |
5211 | having an optimizer to tighten up try/finally regions would be a Good | |
5212 | Thing. */ | |
5213 | ||
5214 | static enum gimplify_status | |
726a989a | 5215 | gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p) |
6de9cd9a | 5216 | { |
726a989a RB |
5217 | gimple_stmt_iterator iter; |
5218 | gimple_seq body_sequence = NULL; | |
6de9cd9a | 5219 | |
325c3691 | 5220 | tree temp = voidify_wrapper_expr (*expr_p, NULL); |
6de9cd9a DN |
5221 | |
5222 | /* We only care about the number of conditions between the innermost | |
df77f454 JM |
5223 | CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and |
5224 | any cleanups collected outside the CLEANUP_POINT_EXPR. */ | |
6de9cd9a | 5225 | int old_conds = gimplify_ctxp->conditions; |
726a989a | 5226 | gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups; |
32be32af | 5227 | bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr; |
6de9cd9a | 5228 | gimplify_ctxp->conditions = 0; |
726a989a | 5229 | gimplify_ctxp->conditional_cleanups = NULL; |
32be32af | 5230 | gimplify_ctxp->in_cleanup_point_expr = true; |
6de9cd9a | 5231 | |
726a989a | 5232 | gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence); |
6de9cd9a DN |
5233 | |
5234 | gimplify_ctxp->conditions = old_conds; | |
df77f454 | 5235 | gimplify_ctxp->conditional_cleanups = old_cleanups; |
32be32af | 5236 | gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr; |
6de9cd9a | 5237 | |
726a989a | 5238 | for (iter = gsi_start (body_sequence); !gsi_end_p (iter); ) |
6de9cd9a | 5239 | { |
726a989a | 5240 | gimple wce = gsi_stmt (iter); |
6de9cd9a | 5241 | |
726a989a | 5242 | if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR) |
6de9cd9a | 5243 | { |
726a989a | 5244 | if (gsi_one_before_end_p (iter)) |
6de9cd9a | 5245 | { |
726a989a RB |
5246 | /* Note that gsi_insert_seq_before and gsi_remove do not |
5247 | scan operands, unlike some other sequence mutators. */ | |
ae0595b0 RG |
5248 | if (!gimple_wce_cleanup_eh_only (wce)) |
5249 | gsi_insert_seq_before_without_update (&iter, | |
5250 | gimple_wce_cleanup (wce), | |
5251 | GSI_SAME_STMT); | |
726a989a | 5252 | gsi_remove (&iter, true); |
6de9cd9a DN |
5253 | break; |
5254 | } | |
5255 | else | |
5256 | { | |
daa6e488 | 5257 | gimple_statement_try *gtry; |
726a989a RB |
5258 | gimple_seq seq; |
5259 | enum gimple_try_flags kind; | |
40aac948 | 5260 | |
726a989a RB |
5261 | if (gimple_wce_cleanup_eh_only (wce)) |
5262 | kind = GIMPLE_TRY_CATCH; | |
40aac948 | 5263 | else |
726a989a RB |
5264 | kind = GIMPLE_TRY_FINALLY; |
5265 | seq = gsi_split_seq_after (iter); | |
5266 | ||
82d6e6fc | 5267 | gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind); |
726a989a RB |
5268 | /* Do not use gsi_replace here, as it may scan operands. |
5269 | We want to do a simple structural modification only. */ | |
355a7673 | 5270 | gsi_set_stmt (&iter, gtry); |
daa6e488 | 5271 | iter = gsi_start (gtry->eval); |
6de9cd9a DN |
5272 | } |
5273 | } | |
5274 | else | |
726a989a | 5275 | gsi_next (&iter); |
6de9cd9a DN |
5276 | } |
5277 | ||
726a989a | 5278 | gimplify_seq_add_seq (pre_p, body_sequence); |
6de9cd9a DN |
5279 | if (temp) |
5280 | { | |
5281 | *expr_p = temp; | |
6de9cd9a DN |
5282 | return GS_OK; |
5283 | } | |
5284 | else | |
5285 | { | |
726a989a | 5286 | *expr_p = NULL; |
6de9cd9a DN |
5287 | return GS_ALL_DONE; |
5288 | } | |
5289 | } | |
5290 | ||
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   VAR is the temporary the cleanup protects; PRE_P receives the
   GIMPLE_WITH_CLEANUP_EXPR (or, in conditional context, the flag-set
   statement).  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
  gimple wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
      gimple ffalse = gimple_build_assign (flag, boolean_false_node);
      gimple ftrue = gimple_build_assign (flag, boolean_true_node);

      /* Guard the cleanup with the flag: only run it when FLAG was set
	 by the (conditionally executed) initialization.  */
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);

      /* flag = 0 and the WCE go before the whole conditional; flag = 1
	 goes at the point of initialization (PRE_P).  */
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
      gimplify_seq_add_stmt (pre_p, ftrue);

      /* Because of this manipulation, and the EH edges that jump
	 threading cannot redirect, the temporary (VAR) will appear
	 to be used uninitialized.  Don't warn.  */
      TREE_NO_WARNING (var) = 1;
    }
  else
    {
      /* Unconditional context: just wrap the gimplified cleanup in a
	 GIMPLE_WITH_CLEANUP_EXPR, honoring EH_ONLY.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
5353 | ||
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.
   *EXPR_P is rewritten to the TARGET_EXPR's slot; the gimplified
   initialization is appended to PRE_P (POST_P is passed through to
   gimplify_expr).  Returns GS_OK on success, GS_ERROR if gimplifying
   the initializer failed.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise wrap the initializer in an INIT_EXPR assigning to
	     the slot; the wrapper tree is freed once fully gimplified.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    /* Defer: a non-EH cleanup may be merged with the clobber
	       below into a single pushed cleanup.  */
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp)
	  && flag_stack_reuse == SR_ALL)
	{
	  tree clobber = build_constructor (TREE_TYPE (temp),
					    NULL);
	  TREE_THIS_VOLATILE (clobber) = true;
	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	  if (cleanup)
	    cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
			      clobber);
	  else
	    cleanup = clobber;
	}

      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
5441 | ||
5442 | /* Gimplification of expression trees. */ | |
5443 | ||
726a989a RB |
5444 | /* Gimplify an expression which appears at statement context. The |
5445 | corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is | |
5446 | NULL, a new sequence is allocated. | |
6de9cd9a | 5447 | |
726a989a RB |
5448 | Return true if we actually added a statement to the queue. */ |
5449 | ||
5450 | bool | |
5451 | gimplify_stmt (tree *stmt_p, gimple_seq *seq_p) | |
6de9cd9a | 5452 | { |
726a989a | 5453 | gimple_seq_node last; |
6de9cd9a | 5454 | |
726a989a RB |
5455 | last = gimple_seq_last (*seq_p); |
5456 | gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none); | |
5457 | return last != gimple_seq_last (*seq_p); | |
6de9cd9a DN |
5458 | } |
5459 | ||
953ff289 DN |
5460 | /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels |
5461 | to CTX. If entries already exist, force them to be some flavor of private. | |
5462 | If there is no enclosing parallel, do nothing. */ | |
5463 | ||
5464 | void | |
5465 | omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl) | |
5466 | { | |
5467 | splay_tree_node n; | |
5468 | ||
5469 | if (decl == NULL || !DECL_P (decl)) | |
5470 | return; | |
5471 | ||
5472 | do | |
5473 | { | |
5474 | n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); | |
5475 | if (n != NULL) | |
5476 | { | |
5477 | if (n->value & GOVD_SHARED) | |
5478 | n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN); | |
acf0174b JJ |
5479 | else if (n->value & GOVD_MAP) |
5480 | n->value |= GOVD_MAP_TO_ONLY; | |
953ff289 DN |
5481 | else |
5482 | return; | |
5483 | } | |
acf0174b JJ |
5484 | else if (ctx->region_type == ORT_TARGET) |
5485 | omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY); | |
74bf76ed | 5486 | else if (ctx->region_type != ORT_WORKSHARE |
acf0174b JJ |
5487 | && ctx->region_type != ORT_SIMD |
5488 | && ctx->region_type != ORT_TARGET_DATA) | |
953ff289 DN |
5489 | omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE); |
5490 | ||
5491 | ctx = ctx->outer_context; | |
5492 | } | |
5493 | while (ctx); | |
5494 | } | |
5495 | ||
5496 | /* Similarly for each of the type sizes of TYPE. */ | |
5497 | ||
5498 | static void | |
5499 | omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type) | |
5500 | { | |
5501 | if (type == NULL || type == error_mark_node) | |
5502 | return; | |
5503 | type = TYPE_MAIN_VARIANT (type); | |
5504 | ||
6e2830c3 | 5505 | if (ctx->privatized_types->add (type)) |
953ff289 DN |
5506 | return; |
5507 | ||
5508 | switch (TREE_CODE (type)) | |
5509 | { | |
5510 | case INTEGER_TYPE: | |
5511 | case ENUMERAL_TYPE: | |
5512 | case BOOLEAN_TYPE: | |
953ff289 | 5513 | case REAL_TYPE: |
325217ed | 5514 | case FIXED_POINT_TYPE: |
953ff289 DN |
5515 | omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type)); |
5516 | omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type)); | |
5517 | break; | |
5518 | ||
5519 | case ARRAY_TYPE: | |
5520 | omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type)); | |
5521 | omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type)); | |
5522 | break; | |
5523 | ||
5524 | case RECORD_TYPE: | |
5525 | case UNION_TYPE: | |
5526 | case QUAL_UNION_TYPE: | |
5527 | { | |
5528 | tree field; | |
910ad8de | 5529 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
953ff289 DN |
5530 | if (TREE_CODE (field) == FIELD_DECL) |
5531 | { | |
5532 | omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field)); | |
5533 | omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field)); | |
5534 | } | |
5535 | } | |
5536 | break; | |
5537 | ||
5538 | case POINTER_TYPE: | |
5539 | case REFERENCE_TYPE: | |
5540 | omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type)); | |
5541 | break; | |
5542 | ||
5543 | default: | |
5544 | break; | |
5545 | } | |
5546 | ||
5547 | omp_firstprivatize_variable (ctx, TYPE_SIZE (type)); | |
5548 | omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type)); | |
5549 | lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type); | |
5550 | } | |
5551 | ||
/* Add an entry for DECL in the OpenMP context CTX with FLAGS.
   FLAGS is a mask of GOVD_* bits describing the data-sharing class.
   Handles re-registration (merging FLAGS into an existing entry),
   variable-sized decls (registering the pointer replacement and the
   size expressions too), and by-reference privatization.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl))
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && n->value != GOVD_ALIGNED)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  */
      nflags = n->value | flags;
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  nflags = flags & GOVD_MAP
		   ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
		   : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The VLA decl's DECL_VALUE_EXPR is *ptr; register PTR.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (TREE_CODE (t) != INTEGER_CST)
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* N is non-NULL here only for a pre-existing GOVD_ALIGNED entry;
     merge into it rather than inserting a duplicate key.  */
  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
5652 | ||
/* Notice a threadprivate variable DECL used in OpenMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.  Always returns false: threadprivate variables
   are never remapped, so callers should gimplify them normally.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  /* Threadprivate variables are not allowed inside target regions;
     diagnose each enclosing target once, using a dummy splay-tree
     entry (value 0) to suppress repeats.  */
  for (octx = ctx; octx; octx = octx->outer_context)
    if (octx->region_type == ORT_TARGET)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    error ("threadprivate variable %qE used in target region",
		   DECL_NAME (decl));
	    error_at (octx->location, "enclosing target region");
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	if (decl2)
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  /* In an untied task, a use of a threadprivate variable is an error
     (the task may migrate between threads); again diagnose only once.  */
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      error_at (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}
5694 | ||
/* Record the fact that DECL was used within the OpenMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (ctx->region_type == ORT_TARGET)
    {
      /* In a target region, anything not already mapped becomes
	 implicitly mapped (or an error if its type isn't mappable).  */
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
	    {
	      error ("%qD referenced in target region does not have "
		     "a mappable type", decl);
	      omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
	    }
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | flags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  n->value |= flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      /* First sighting of DECL in this context: compute its implicit
	 data-sharing class from the effective default clause.  */
      enum omp_clause_default_kind default_kind, kind;
      struct gimplify_omp_ctx *octx;

      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_TARGET_DATA)
	goto do_outer;

      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
	 remapped firstprivate instead of shared.  To some extent this is
	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
      default_kind = ctx->default_kind;
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
	default_kind = kind;

      switch (default_kind)
	{
	case OMP_CLAUSE_DEFAULT_NONE:
	  if ((ctx->region_type & ORT_PARALLEL) != 0)
	    {
	      error ("%qE not specified in enclosing parallel",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing parallel");
	    }
	  else if ((ctx->region_type & ORT_TASK) != 0)
	    {
	      error ("%qE not specified in enclosing task",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing task");
	    }
	  else if (ctx->region_type == ORT_TEAMS)
	    {
	      error ("%qE not specified in enclosing teams construct",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing teams construct");
	    }
	  else
	    gcc_unreachable ();
	  /* After diagnosing, treat the variable as shared so
	     compilation can continue.  */
	  /* FALLTHRU */
	case OMP_CLAUSE_DEFAULT_SHARED:
	  flags |= GOVD_SHARED;
	  break;
	case OMP_CLAUSE_DEFAULT_PRIVATE:
	  flags |= GOVD_PRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
	  flags |= GOVD_FIRSTPRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
	  gcc_assert ((ctx->region_type & ORT_TASK) != 0);
	  if (ctx->outer_context)
	    omp_notice_variable (ctx->outer_context, decl, in_code);
	  /* Walk outward to see whether DECL is shared in an enclosing
	     parallel/teams; if it is non-shared anywhere on the way,
	     it must be firstprivate in the task.  */
	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
		continue;
	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  break;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		break;
	    }
	  if (flags & GOVD_FIRSTPRIVATE)
	    break;
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!is_global_var (decl)
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    {
	      /* Local to the enclosing function and not registered in
		 any enclosing construct: firstprivate.  */
	      flags |= GOVD_FIRSTPRIVATE;
	      break;
	    }
	  flags |= GOVD_SHARED;
	  break;
	default:
	  gcc_unreachable ();
	}

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* First real use of a variable-length decl registered GOVD_LOCAL by
     its bind: mark its pointer replacement variable as seen too.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
5882 | ||
/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.
   SIMD is 0 for non-simd loops, 1 for simd, 2 where linear is the
   predetermined sharing (the exact encoding is used only to select
   diagnostics — assumption from the comparisons below; confirm against
   callers).  Returns true when the innermost relevant context has
   resolved the variable (so callers need not privatize it again).  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      /* Force the variable private so we only diagnose once.  */
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  /* An explicit clause on the construct itself (or on the
	     combined parallel) must not conflict with the iteration
	     variable's predetermined sharing.  */
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
	    error ("iteration variable %qE should not be lastprivate",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
	    error ("iteration variable %qE should not be private",
		   DECL_NAME (decl));
	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE is predetermined linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* Not recorded here; only worksharing/simd contexts delegate the
     question to their enclosing context.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
5943 | ||
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.  COPYPRIVATE selects the assumption
   made for by-reference variables when no enclosing context is
   found (see below).  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	return !(is_global_var (decl)
		 /* References might be private, but might be shared too,
		    when checking for copyprivate, assume they might be
		    private, otherwise assume they might be shared.  */
		 || (!copyprivate
		     && lang_hooks.decls.omp_privatize_by_reference (decl)));

      /* NOTE(review): `continue` in a do-while jumps to the loop
	 condition, so a TARGET/TARGET_DATA context ends the walk here
	 (falling out to `return false`) rather than being skipped,
	 since its region_type is neither ORT_WORKSHARE nor ORT_SIMD —
	 confirm this is the intended behavior.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
	continue;

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
      if (n != NULL)
	return (n->value & GOVD_SHARED) == 0;
    }
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_SIMD);
  return false;
}
5975 | ||
953ff289 DN |
5976 | /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new |
5977 | and previous omp contexts. */ | |
5978 | ||
5979 | static void | |
726a989a | 5980 | gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p, |
a68ab351 | 5981 | enum omp_region_type region_type) |
953ff289 DN |
5982 | { |
5983 | struct gimplify_omp_ctx *ctx, *outer_ctx; | |
5984 | tree c; | |
5985 | ||
a68ab351 | 5986 | ctx = new_omp_context (region_type); |
953ff289 DN |
5987 | outer_ctx = ctx->outer_context; |
5988 | ||
5989 | while ((c = *list_p) != NULL) | |
5990 | { | |
953ff289 DN |
5991 | bool remove = false; |
5992 | bool notice_outer = true; | |
07b7aade | 5993 | const char *check_non_private = NULL; |
953ff289 DN |
5994 | unsigned int flags; |
5995 | tree decl; | |
5996 | ||
aaf46ef9 | 5997 | switch (OMP_CLAUSE_CODE (c)) |
953ff289 DN |
5998 | { |
5999 | case OMP_CLAUSE_PRIVATE: | |
6000 | flags = GOVD_PRIVATE | GOVD_EXPLICIT; | |
a68ab351 JJ |
6001 | if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c))) |
6002 | { | |
6003 | flags |= GOVD_PRIVATE_OUTER_REF; | |
6004 | OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1; | |
6005 | } | |
6006 | else | |
6007 | notice_outer = false; | |
953ff289 DN |
6008 | goto do_add; |
6009 | case OMP_CLAUSE_SHARED: | |
6010 | flags = GOVD_SHARED | GOVD_EXPLICIT; | |
6011 | goto do_add; | |
6012 | case OMP_CLAUSE_FIRSTPRIVATE: | |
6013 | flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT; | |
07b7aade | 6014 | check_non_private = "firstprivate"; |
953ff289 DN |
6015 | goto do_add; |
6016 | case OMP_CLAUSE_LASTPRIVATE: | |
6017 | flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT; | |
07b7aade | 6018 | check_non_private = "lastprivate"; |
953ff289 DN |
6019 | goto do_add; |
6020 | case OMP_CLAUSE_REDUCTION: | |
6021 | flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT; | |
07b7aade | 6022 | check_non_private = "reduction"; |
953ff289 | 6023 | goto do_add; |
acf0174b JJ |
6024 | case OMP_CLAUSE_LINEAR: |
6025 | if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL, | |
6026 | is_gimple_val, fb_rvalue) == GS_ERROR) | |
6027 | { | |
6028 | remove = true; | |
6029 | break; | |
6030 | } | |
6031 | flags = GOVD_LINEAR | GOVD_EXPLICIT; | |
6032 | goto do_add; | |
6033 | ||
6034 | case OMP_CLAUSE_MAP: | |
b46ebd6c JJ |
6035 | decl = OMP_CLAUSE_DECL (c); |
6036 | if (error_operand_p (decl)) | |
6037 | { | |
6038 | remove = true; | |
6039 | break; | |
6040 | } | |
6041 | if (OMP_CLAUSE_SIZE (c) == NULL_TREE) | |
6042 | OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl) | |
6043 | : TYPE_SIZE_UNIT (TREE_TYPE (decl)); | |
6044 | if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, | |
6045 | NULL, is_gimple_val, fb_rvalue) == GS_ERROR) | |
acf0174b JJ |
6046 | { |
6047 | remove = true; | |
6048 | break; | |
6049 | } | |
acf0174b JJ |
6050 | if (!DECL_P (decl)) |
6051 | { | |
6052 | if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, | |
6053 | NULL, is_gimple_lvalue, fb_lvalue) | |
6054 | == GS_ERROR) | |
6055 | { | |
6056 | remove = true; | |
6057 | break; | |
6058 | } | |
6059 | break; | |
6060 | } | |
6061 | flags = GOVD_MAP | GOVD_EXPLICIT; | |
6062 | goto do_add; | |
6063 | ||
6064 | case OMP_CLAUSE_DEPEND: | |
6065 | if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR) | |
6066 | { | |
6067 | gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p, | |
6068 | NULL, is_gimple_val, fb_rvalue); | |
6069 | OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1); | |
6070 | } | |
6071 | if (error_operand_p (OMP_CLAUSE_DECL (c))) | |
6072 | { | |
6073 | remove = true; | |
6074 | break; | |
6075 | } | |
6076 | OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c)); | |
6077 | if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL, | |
6078 | is_gimple_val, fb_rvalue) == GS_ERROR) | |
6079 | { | |
6080 | remove = true; | |
6081 | break; | |
6082 | } | |
6083 | break; | |
6084 | ||
6085 | case OMP_CLAUSE_TO: | |
6086 | case OMP_CLAUSE_FROM: | |
b46ebd6c JJ |
6087 | decl = OMP_CLAUSE_DECL (c); |
6088 | if (error_operand_p (decl)) | |
acf0174b JJ |
6089 | { |
6090 | remove = true; | |
6091 | break; | |
6092 | } | |
b46ebd6c JJ |
6093 | if (OMP_CLAUSE_SIZE (c) == NULL_TREE) |
6094 | OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl) | |
6095 | : TYPE_SIZE_UNIT (TREE_TYPE (decl)); | |
6096 | if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, | |
6097 | NULL, is_gimple_val, fb_rvalue) == GS_ERROR) | |
acf0174b JJ |
6098 | { |
6099 | remove = true; | |
6100 | break; | |
6101 | } | |
6102 | if (!DECL_P (decl)) | |
6103 | { | |
6104 | if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, | |
6105 | NULL, is_gimple_lvalue, fb_lvalue) | |
6106 | == GS_ERROR) | |
6107 | { | |
6108 | remove = true; | |
6109 | break; | |
6110 | } | |
6111 | break; | |
6112 | } | |
6113 | goto do_notice; | |
953ff289 DN |
6114 | |
6115 | do_add: | |
6116 | decl = OMP_CLAUSE_DECL (c); | |
b504a918 | 6117 | if (error_operand_p (decl)) |
953ff289 DN |
6118 | { |
6119 | remove = true; | |
6120 | break; | |
6121 | } | |
6122 | omp_add_variable (ctx, decl, flags); | |
693d710f | 6123 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
953ff289 DN |
6124 | && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) |
6125 | { | |
6126 | omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c), | |
693d710f | 6127 | GOVD_LOCAL | GOVD_SEEN); |
953ff289 | 6128 | gimplify_omp_ctxp = ctx; |
45852dcc | 6129 | push_gimplify_context (); |
726a989a | 6130 | |
355a7673 MM |
6131 | OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; |
6132 | OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; | |
726a989a RB |
6133 | |
6134 | gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c), | |
6135 | &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)); | |
6136 | pop_gimplify_context | |
6137 | (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))); | |
45852dcc | 6138 | push_gimplify_context (); |
726a989a RB |
6139 | gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c), |
6140 | &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)); | |
b8698a0f | 6141 | pop_gimplify_context |
726a989a RB |
6142 | (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c))); |
6143 | OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE; | |
6144 | OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE; | |
6145 | ||
953ff289 DN |
6146 | gimplify_omp_ctxp = outer_ctx; |
6147 | } | |
a68ab351 JJ |
6148 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE |
6149 | && OMP_CLAUSE_LASTPRIVATE_STMT (c)) | |
6150 | { | |
6151 | gimplify_omp_ctxp = ctx; | |
45852dcc | 6152 | push_gimplify_context (); |
a68ab351 JJ |
6153 | if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR) |
6154 | { | |
6155 | tree bind = build3 (BIND_EXPR, void_type_node, NULL, | |
6156 | NULL, NULL); | |
6157 | TREE_SIDE_EFFECTS (bind) = 1; | |
6158 | BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c); | |
6159 | OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind; | |
6160 | } | |
726a989a RB |
6161 | gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c), |
6162 | &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)); | |
6163 | pop_gimplify_context | |
6164 | (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))); | |
6165 | OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE; | |
6166 | ||
dd2fc525 JJ |
6167 | gimplify_omp_ctxp = outer_ctx; |
6168 | } | |
6169 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR | |
6170 | && OMP_CLAUSE_LINEAR_STMT (c)) | |
6171 | { | |
6172 | gimplify_omp_ctxp = ctx; | |
6173 | push_gimplify_context (); | |
6174 | if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR) | |
6175 | { | |
6176 | tree bind = build3 (BIND_EXPR, void_type_node, NULL, | |
6177 | NULL, NULL); | |
6178 | TREE_SIDE_EFFECTS (bind) = 1; | |
6179 | BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c); | |
6180 | OMP_CLAUSE_LINEAR_STMT (c) = bind; | |
6181 | } | |
6182 | gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c), | |
6183 | &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)); | |
6184 | pop_gimplify_context | |
6185 | (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))); | |
6186 | OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE; | |
6187 | ||
a68ab351 JJ |
6188 | gimplify_omp_ctxp = outer_ctx; |
6189 | } | |
953ff289 DN |
6190 | if (notice_outer) |
6191 | goto do_notice; | |
6192 | break; | |
6193 | ||
6194 | case OMP_CLAUSE_COPYIN: | |
6195 | case OMP_CLAUSE_COPYPRIVATE: | |
6196 | decl = OMP_CLAUSE_DECL (c); | |
b504a918 | 6197 | if (error_operand_p (decl)) |
953ff289 DN |
6198 | { |
6199 | remove = true; | |
6200 | break; | |
6201 | } | |
cab37c89 JJ |
6202 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE |
6203 | && !remove | |
6204 | && !omp_check_private (ctx, decl, true)) | |
6205 | { | |
6206 | remove = true; | |
6207 | if (is_global_var (decl)) | |
6208 | { | |
6209 | if (DECL_THREAD_LOCAL_P (decl)) | |
6210 | remove = false; | |
6211 | else if (DECL_HAS_VALUE_EXPR_P (decl)) | |
6212 | { | |
6213 | tree value = get_base_address (DECL_VALUE_EXPR (decl)); | |
6214 | ||
6215 | if (value | |
6216 | && DECL_P (value) | |
6217 | && DECL_THREAD_LOCAL_P (value)) | |
6218 | remove = false; | |
6219 | } | |
6220 | } | |
6221 | if (remove) | |
6222 | error_at (OMP_CLAUSE_LOCATION (c), | |
6223 | "copyprivate variable %qE is not threadprivate" | |
6224 | " or private in outer context", DECL_NAME (decl)); | |
6225 | } | |
953ff289 DN |
6226 | do_notice: |
6227 | if (outer_ctx) | |
6228 | omp_notice_variable (outer_ctx, decl, true); | |
07b7aade | 6229 | if (check_non_private |
a68ab351 | 6230 | && region_type == ORT_WORKSHARE |
cab37c89 | 6231 | && omp_check_private (ctx, decl, false)) |
07b7aade | 6232 | { |
4f1e4960 JM |
6233 | error ("%s variable %qE is private in outer context", |
6234 | check_non_private, DECL_NAME (decl)); | |
07b7aade JJ |
6235 | remove = true; |
6236 | } | |
953ff289 DN |
6237 | break; |
6238 | ||
20906c66 | 6239 | case OMP_CLAUSE_FINAL: |
953ff289 | 6240 | case OMP_CLAUSE_IF: |
d568d1a8 RS |
6241 | OMP_CLAUSE_OPERAND (c, 0) |
6242 | = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0)); | |
6243 | /* Fall through. */ | |
6244 | ||
6245 | case OMP_CLAUSE_SCHEDULE: | |
953ff289 | 6246 | case OMP_CLAUSE_NUM_THREADS: |
acf0174b JJ |
6247 | case OMP_CLAUSE_NUM_TEAMS: |
6248 | case OMP_CLAUSE_THREAD_LIMIT: | |
6249 | case OMP_CLAUSE_DIST_SCHEDULE: | |
6250 | case OMP_CLAUSE_DEVICE: | |
9a771876 | 6251 | case OMP_CLAUSE__CILK_FOR_COUNT_: |
726a989a RB |
6252 | if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL, |
6253 | is_gimple_val, fb_rvalue) == GS_ERROR) | |
acf0174b | 6254 | remove = true; |
953ff289 DN |
6255 | break; |
6256 | ||
6257 | case OMP_CLAUSE_NOWAIT: | |
6258 | case OMP_CLAUSE_ORDERED: | |
a68ab351 JJ |
6259 | case OMP_CLAUSE_UNTIED: |
6260 | case OMP_CLAUSE_COLLAPSE: | |
20906c66 | 6261 | case OMP_CLAUSE_MERGEABLE: |
acf0174b | 6262 | case OMP_CLAUSE_PROC_BIND: |
74bf76ed | 6263 | case OMP_CLAUSE_SAFELEN: |
953ff289 DN |
6264 | break; |
6265 | ||
acf0174b JJ |
6266 | case OMP_CLAUSE_ALIGNED: |
6267 | decl = OMP_CLAUSE_DECL (c); | |
6268 | if (error_operand_p (decl)) | |
6269 | { | |
6270 | remove = true; | |
6271 | break; | |
6272 | } | |
b46ebd6c JJ |
6273 | if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL, |
6274 | is_gimple_val, fb_rvalue) == GS_ERROR) | |
6275 | { | |
6276 | remove = true; | |
6277 | break; | |
6278 | } | |
acf0174b JJ |
6279 | if (!is_global_var (decl) |
6280 | && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE) | |
6281 | omp_add_variable (ctx, decl, GOVD_ALIGNED); | |
6282 | break; | |
6283 | ||
953ff289 DN |
6284 | case OMP_CLAUSE_DEFAULT: |
6285 | ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c); | |
6286 | break; | |
6287 | ||
6288 | default: | |
6289 | gcc_unreachable (); | |
6290 | } | |
6291 | ||
6292 | if (remove) | |
6293 | *list_p = OMP_CLAUSE_CHAIN (c); | |
6294 | else | |
6295 | list_p = &OMP_CLAUSE_CHAIN (c); | |
6296 | } | |
6297 | ||
6298 | gimplify_omp_ctxp = ctx; | |
6299 | } | |
6300 | ||
f014c653 JJ |
/* Closure passed through splay_tree_foreach to
   gimplify_adjust_omp_clauses_1.  */

struct gimplify_adjust_omp_clauses_data
{
  /* Tail pointer of the clause chain; new implicit clauses are
     pushed onto the list through this.  */
  tree *list_p;
  /* Sequence that receives any statements emitted while finishing
     a clause (passed on to lang_hooks.decls.omp_finish_clause).  */
  gimple_seq *pre_p;
};
6306 | ||
953ff289 DN |
/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.

   Splay-tree callback invoked once per variable recorded in the
   gimplify context.  N->key is the decl, N->value its GOVD_* flag set;
   DATA is a gimplify_adjust_omp_clauses_data closure.  For each decl
   that was implicitly referenced (GOVD_SEEN, not GOVD_EXPLICIT or
   GOVD_LOCAL), synthesize the matching explicit clause and push it on
   the clause list.  Always returns 0 so the traversal continues.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses already exist; locals need no clause at all.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* A variable that was never referenced gets no implicit clause.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    /* Let the frontend decide whether to emit a debug-only PRIVATE
       clause instead of the computed sharing class.  */
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Translate the GOVD_* sharing class into a clause code.  The order
     of these tests matters: MAP wins over the data-sharing bits.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    code = OMP_CLAUSE_MAP;
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global is shared by default anyway; only emit an explicit
	     SHARED clause if some enclosing context privatizes, reduces,
	     linearizes or maps it.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    code = OMP_CLAUSE_FIRSTPRIVATE;
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    /* ALIGNED by itself produces no data-sharing clause.  */
    return 0;
  else
    gcc_unreachable ();

  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = *list_p;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_MAP)
    {
      OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
				     ? OMP_CLAUSE_MAP_TO
				     : OMP_CLAUSE_MAP_TOFROM;
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: its DECL_VALUE_EXPR is *ptr (see the
	     matching code in gimplify_adjust_omp_clauses); map the
	     pointed-to storage and add a companion MAP_POINTER clause
	     for the pointer itself.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* The variable is both firstprivate and lastprivate; chain a
	 LASTPRIVATE clause marked as also-firstprivate behind the
	 FIRSTPRIVATE one.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = *list_p;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* omp_finish_clause must run in the outer gimplify context;
	 restore ours afterwards.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  gimplify_omp_ctxp = ctx;
  return 0;
}
6430 | ||
/* Post-process the explicit clause list *LIST_P of the OMP construct
   whose gimplify context is current: drop clauses for variables never
   actually used, fix up clause flags and variable-sized mappings, then
   append implicit data-sharing clauses (via
   gimplify_adjust_omp_clauses_1) and pop/delete the context.
   PRE_P receives any statements new clauses need.  */

static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Unused variable: drop the clause entirely.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  /* Rewrite into a debug-only PRIVATE clause.  */
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		  && ctx->outer_context
		  && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		       && OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		{
		  if (ctx->outer_context->combined_loop
		      && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		    {
		      /* Linear var on the inner loop of a combined
			 construct: make sure the enclosing context
			 treats it as firstprivate (and lastprivate
			 where permitted).  */
		      n = splay_tree_lookup (ctx->outer_context->variables,
					     (splay_tree_key) decl);
		      if (n == NULL
			  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
			{
			  int flags = GOVD_FIRSTPRIVATE;
			  /* #pragma omp distribute does not allow
			     lastprivate clause.  */
			  if (!ctx->outer_context->distribute)
			    flags |= GOVD_LASTPRIVATE;
			  if (n == NULL)
			    omp_add_variable (ctx->outer_context, decl,
					      flags | GOVD_SEEN);
			  else
			    n->value |= flags | GOVD_SEEN;
			}
		    }
		  else if (!is_global_var (decl))
		    omp_notice_variable (ctx->outer_context, decl, true);
		}
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  /* Any sharing class other than firstprivate makes the
		     aligned clause unusable here.  */
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
	    {
	      /* For variable-sized decls DECL_VALUE_EXPR is *ptr; map
		 the storage through the pointer and add a trailing
		 MAP_POINTER clause for the pointer itself.  Note C
		 is advanced past the new clause so it is not
		 re-processed.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					  OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (nc) = decl;
	      OMP_CLAUSE_SIZE (nc) = size_zero_node;
	      OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
	      OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
	      OMP_CLAUSE_CHAIN (c) = nc;
	      c = nc;
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Same variable-sized rewrite as OMP_CLAUSE_MAP, minus
		 the extra MAP_POINTER clause.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* Nothing to adjust for these.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
6650 | ||
6651 | /* Gimplify the contents of an OMP_PARALLEL statement. This involves | |
6652 | gimplification of the body, as well as scanning the body for used | |
6653 | variables. We need to do this scan now, because variable-sized | |
6654 | decls will be decomposed during gimplification. */ | |
6655 | ||
726a989a RB |
6656 | static void |
6657 | gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p) | |
953ff289 DN |
6658 | { |
6659 | tree expr = *expr_p; | |
726a989a RB |
6660 | gimple g; |
6661 | gimple_seq body = NULL; | |
953ff289 | 6662 | |
a68ab351 JJ |
6663 | gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, |
6664 | OMP_PARALLEL_COMBINED (expr) | |
6665 | ? ORT_COMBINED_PARALLEL | |
6666 | : ORT_PARALLEL); | |
953ff289 | 6667 | |
45852dcc | 6668 | push_gimplify_context (); |
953ff289 | 6669 | |
726a989a RB |
6670 | g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body); |
6671 | if (gimple_code (g) == GIMPLE_BIND) | |
6672 | pop_gimplify_context (g); | |
50674e96 | 6673 | else |
726a989a | 6674 | pop_gimplify_context (NULL); |
953ff289 | 6675 | |
f014c653 | 6676 | gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr)); |
953ff289 | 6677 | |
726a989a RB |
6678 | g = gimple_build_omp_parallel (body, |
6679 | OMP_PARALLEL_CLAUSES (expr), | |
6680 | NULL_TREE, NULL_TREE); | |
6681 | if (OMP_PARALLEL_COMBINED (expr)) | |
6682 | gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED); | |
6683 | gimplify_seq_add_stmt (pre_p, g); | |
6684 | *expr_p = NULL_TREE; | |
953ff289 DN |
6685 | } |
6686 | ||
a68ab351 JJ |
6687 | /* Gimplify the contents of an OMP_TASK statement. This involves |
6688 | gimplification of the body, as well as scanning the body for used | |
6689 | variables. We need to do this scan now, because variable-sized | |
6690 | decls will be decomposed during gimplification. */ | |
953ff289 | 6691 | |
726a989a RB |
6692 | static void |
6693 | gimplify_omp_task (tree *expr_p, gimple_seq *pre_p) | |
953ff289 | 6694 | { |
a68ab351 | 6695 | tree expr = *expr_p; |
726a989a RB |
6696 | gimple g; |
6697 | gimple_seq body = NULL; | |
953ff289 | 6698 | |
f22f4340 JJ |
6699 | gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, |
6700 | find_omp_clause (OMP_TASK_CLAUSES (expr), | |
6701 | OMP_CLAUSE_UNTIED) | |
6702 | ? ORT_UNTIED_TASK : ORT_TASK); | |
953ff289 | 6703 | |
45852dcc | 6704 | push_gimplify_context (); |
953ff289 | 6705 | |
726a989a RB |
6706 | g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body); |
6707 | if (gimple_code (g) == GIMPLE_BIND) | |
6708 | pop_gimplify_context (g); | |
953ff289 | 6709 | else |
726a989a | 6710 | pop_gimplify_context (NULL); |
953ff289 | 6711 | |
f014c653 | 6712 | gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr)); |
917948d3 | 6713 | |
726a989a RB |
6714 | g = gimple_build_omp_task (body, |
6715 | OMP_TASK_CLAUSES (expr), | |
6716 | NULL_TREE, NULL_TREE, | |
6717 | NULL_TREE, NULL_TREE, NULL_TREE); | |
6718 | gimplify_seq_add_stmt (pre_p, g); | |
6719 | *expr_p = NULL_TREE; | |
a68ab351 JJ |
6720 | } |
6721 | ||
acf0174b JJ |
6722 | /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD |
6723 | with non-NULL OMP_FOR_INIT. */ | |
6724 | ||
6725 | static tree | |
6726 | find_combined_omp_for (tree *tp, int *walk_subtrees, void *) | |
6727 | { | |
6728 | *walk_subtrees = 0; | |
6729 | switch (TREE_CODE (*tp)) | |
6730 | { | |
6731 | case OMP_FOR: | |
6732 | *walk_subtrees = 1; | |
6733 | /* FALLTHRU */ | |
6734 | case OMP_SIMD: | |
6735 | if (OMP_FOR_INIT (*tp) != NULL_TREE) | |
6736 | return *tp; | |
6737 | break; | |
6738 | case BIND_EXPR: | |
6739 | case STATEMENT_LIST: | |
6740 | case OMP_PARALLEL: | |
6741 | *walk_subtrees = 1; | |
6742 | break; | |
6743 | default: | |
6744 | break; | |
6745 | } | |
6746 | return NULL_TREE; | |
6747 | } | |
6748 | ||
a68ab351 JJ |
6749 | /* Gimplify the gross structure of an OMP_FOR statement. */ |
6750 | ||
6751 | static enum gimplify_status | |
726a989a | 6752 | gimplify_omp_for (tree *expr_p, gimple_seq *pre_p) |
a68ab351 | 6753 | { |
acf0174b | 6754 | tree for_stmt, orig_for_stmt, decl, var, t; |
32e8bb8e ILT |
6755 | enum gimplify_status ret = GS_ALL_DONE; |
6756 | enum gimplify_status tret; | |
726a989a RB |
6757 | gimple gfor; |
6758 | gimple_seq for_body, for_pre_body; | |
a68ab351 | 6759 | int i; |
74bf76ed JJ |
6760 | bool simd; |
6761 | bitmap has_decl_expr = NULL; | |
a68ab351 | 6762 | |
acf0174b | 6763 | orig_for_stmt = for_stmt = *expr_p; |
a68ab351 | 6764 | |
f7468577 JJ |
6765 | simd = (TREE_CODE (for_stmt) == OMP_SIMD |
6766 | || TREE_CODE (for_stmt) == CILK_SIMD); | |
a68ab351 | 6767 | gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, |
74bf76ed | 6768 | simd ? ORT_SIMD : ORT_WORKSHARE); |
9cf32741 JJ |
6769 | if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE) |
6770 | gimplify_omp_ctxp->distribute = true; | |
917948d3 | 6771 | |
726a989a RB |
6772 | /* Handle OMP_FOR_INIT. */ |
6773 | for_pre_body = NULL; | |
74bf76ed JJ |
6774 | if (simd && OMP_FOR_PRE_BODY (for_stmt)) |
6775 | { | |
6776 | has_decl_expr = BITMAP_ALLOC (NULL); | |
6777 | if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR | |
6778 | && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))) | |
acf0174b | 6779 | == VAR_DECL) |
74bf76ed JJ |
6780 | { |
6781 | t = OMP_FOR_PRE_BODY (for_stmt); | |
6782 | bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t))); | |
6783 | } | |
6784 | else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST) | |
6785 | { | |
6786 | tree_stmt_iterator si; | |
6787 | for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si); | |
6788 | tsi_next (&si)) | |
6789 | { | |
6790 | t = tsi_stmt (si); | |
6791 | if (TREE_CODE (t) == DECL_EXPR | |
6792 | && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL) | |
6793 | bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t))); | |
6794 | } | |
6795 | } | |
6796 | } | |
726a989a RB |
6797 | gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body); |
6798 | OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE; | |
a68ab351 | 6799 | |
acf0174b JJ |
6800 | if (OMP_FOR_INIT (for_stmt) == NULL_TREE) |
6801 | { | |
6802 | for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for, | |
6803 | NULL, NULL); | |
6804 | gcc_assert (for_stmt != NULL_TREE); | |
6805 | gimplify_omp_ctxp->combined_loop = true; | |
6806 | } | |
6807 | ||
355a7673 | 6808 | for_body = NULL; |
a68ab351 JJ |
6809 | gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) |
6810 | == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt))); | |
6811 | gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) | |
6812 | == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt))); | |
6813 | for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) | |
6814 | { | |
6815 | t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); | |
726a989a RB |
6816 | gcc_assert (TREE_CODE (t) == MODIFY_EXPR); |
6817 | decl = TREE_OPERAND (t, 0); | |
a68ab351 JJ |
6818 | gcc_assert (DECL_P (decl)); |
6819 | gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)) | |
6820 | || POINTER_TYPE_P (TREE_TYPE (decl))); | |
6821 | ||
6822 | /* Make sure the iteration variable is private. */ | |
74bf76ed | 6823 | tree c = NULL_TREE; |
f7468577 | 6824 | tree c2 = NULL_TREE; |
acf0174b JJ |
6825 | if (orig_for_stmt != for_stmt) |
6826 | /* Do this only on innermost construct for combined ones. */; | |
6827 | else if (simd) | |
74bf76ed JJ |
6828 | { |
6829 | splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables, | |
6830 | (splay_tree_key)decl); | |
f7468577 JJ |
6831 | omp_is_private (gimplify_omp_ctxp, decl, |
6832 | 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) | |
6833 | != 1)); | |
74bf76ed JJ |
6834 | if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0) |
6835 | omp_notice_variable (gimplify_omp_ctxp, decl, true); | |
6836 | else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) | |
6837 | { | |
6838 | c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR); | |
6839 | OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1; | |
6840 | if (has_decl_expr | |
6841 | && bitmap_bit_p (has_decl_expr, DECL_UID (decl))) | |
6842 | OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; | |
6843 | OMP_CLAUSE_DECL (c) = decl; | |
6844 | OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt); | |
6845 | OMP_FOR_CLAUSES (for_stmt) = c; | |
6846 | omp_add_variable (gimplify_omp_ctxp, decl, | |
6847 | GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN); | |
6848 | } | |
6849 | else | |
6850 | { | |
6851 | bool lastprivate | |
6852 | = (!has_decl_expr | |
6853 | || !bitmap_bit_p (has_decl_expr, DECL_UID (decl))); | |
56ad0e38 JJ |
6854 | if (lastprivate |
6855 | && gimplify_omp_ctxp->outer_context | |
6856 | && gimplify_omp_ctxp->outer_context->region_type | |
6857 | == ORT_WORKSHARE | |
6858 | && gimplify_omp_ctxp->outer_context->combined_loop | |
6859 | && !gimplify_omp_ctxp->outer_context->distribute) | |
6860 | { | |
6861 | struct gimplify_omp_ctx *outer | |
6862 | = gimplify_omp_ctxp->outer_context; | |
6863 | n = splay_tree_lookup (outer->variables, | |
6864 | (splay_tree_key) decl); | |
6865 | if (n != NULL | |
6866 | && (n->value & GOVD_DATA_SHARE_CLASS) == GOVD_LOCAL) | |
6867 | lastprivate = false; | |
6868 | else if (omp_check_private (outer, decl, false)) | |
6869 | error ("lastprivate variable %qE is private in outer " | |
6870 | "context", DECL_NAME (decl)); | |
6871 | else | |
6872 | { | |
6873 | omp_add_variable (outer, decl, | |
6874 | GOVD_LASTPRIVATE | GOVD_SEEN); | |
6875 | if (outer->outer_context) | |
6876 | omp_notice_variable (outer->outer_context, decl, true); | |
6877 | } | |
6878 | } | |
74bf76ed JJ |
6879 | c = build_omp_clause (input_location, |
6880 | lastprivate ? OMP_CLAUSE_LASTPRIVATE | |
6881 | : OMP_CLAUSE_PRIVATE); | |
6882 | OMP_CLAUSE_DECL (c) = decl; | |
6883 | OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt); | |
f7468577 | 6884 | OMP_FOR_CLAUSES (for_stmt) = c; |
74bf76ed JJ |
6885 | omp_add_variable (gimplify_omp_ctxp, decl, |
6886 | (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE) | |
f7468577 | 6887 | | GOVD_EXPLICIT | GOVD_SEEN); |
74bf76ed JJ |
6888 | c = NULL_TREE; |
6889 | } | |
6890 | } | |
f7468577 | 6891 | else if (omp_is_private (gimplify_omp_ctxp, decl, 0)) |
a68ab351 JJ |
6892 | omp_notice_variable (gimplify_omp_ctxp, decl, true); |
6893 | else | |
6894 | omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN); | |
6895 | ||
6896 | /* If DECL is not a gimple register, create a temporary variable to act | |
6897 | as an iteration counter. This is valid, since DECL cannot be | |
56ad0e38 JJ |
6898 | modified in the body of the loop. Similarly for any iteration vars |
6899 | in simd with collapse > 1 where the iterator vars must be | |
6900 | lastprivate. */ | |
acf0174b JJ |
6901 | if (orig_for_stmt != for_stmt) |
6902 | var = decl; | |
56ad0e38 JJ |
6903 | else if (!is_gimple_reg (decl) |
6904 | || (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)) | |
a68ab351 JJ |
6905 | { |
6906 | var = create_tmp_var (TREE_TYPE (decl), get_name (decl)); | |
726a989a | 6907 | TREE_OPERAND (t, 0) = var; |
b8698a0f | 6908 | |
726a989a | 6909 | gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var)); |
953ff289 | 6910 | |
f7468577 JJ |
6911 | if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) |
6912 | { | |
6913 | c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR); | |
6914 | OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1; | |
6915 | OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1; | |
6916 | OMP_CLAUSE_DECL (c2) = var; | |
6917 | OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt); | |
6918 | OMP_FOR_CLAUSES (for_stmt) = c2; | |
6919 | omp_add_variable (gimplify_omp_ctxp, var, | |
6920 | GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN); | |
6921 | if (c == NULL_TREE) | |
6922 | { | |
6923 | c = c2; | |
6924 | c2 = NULL_TREE; | |
6925 | } | |
6926 | } | |
6927 | else | |
6928 | omp_add_variable (gimplify_omp_ctxp, var, | |
6929 | GOVD_PRIVATE | GOVD_SEEN); | |
a68ab351 JJ |
6930 | } |
6931 | else | |
6932 | var = decl; | |
07beea0d | 6933 | |
32e8bb8e | 6934 | tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, |
726a989a | 6935 | is_gimple_val, fb_rvalue); |
32e8bb8e | 6936 | ret = MIN (ret, tret); |
726a989a RB |
6937 | if (ret == GS_ERROR) |
6938 | return ret; | |
953ff289 | 6939 | |
726a989a | 6940 | /* Handle OMP_FOR_COND. */ |
a68ab351 JJ |
6941 | t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); |
6942 | gcc_assert (COMPARISON_CLASS_P (t)); | |
726a989a | 6943 | gcc_assert (TREE_OPERAND (t, 0) == decl); |
b56b9fe3 | 6944 | |
32e8bb8e | 6945 | tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, |
726a989a | 6946 | is_gimple_val, fb_rvalue); |
32e8bb8e | 6947 | ret = MIN (ret, tret); |
917948d3 | 6948 | |
726a989a | 6949 | /* Handle OMP_FOR_INCR. */ |
a68ab351 | 6950 | t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); |
953ff289 DN |
6951 | switch (TREE_CODE (t)) |
6952 | { | |
a68ab351 JJ |
6953 | case PREINCREMENT_EXPR: |
6954 | case POSTINCREMENT_EXPR: | |
c02065fc AH |
6955 | { |
6956 | tree decl = TREE_OPERAND (t, 0); | |
da6f124d JJ |
6957 | /* c_omp_for_incr_canonicalize_ptr() should have been |
6958 | called to massage things appropriately. */ | |
c02065fc AH |
6959 | gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl))); |
6960 | ||
6961 | if (orig_for_stmt != for_stmt) | |
6962 | break; | |
6963 | t = build_int_cst (TREE_TYPE (decl), 1); | |
6964 | if (c) | |
6965 | OMP_CLAUSE_LINEAR_STEP (c) = t; | |
6966 | t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t); | |
6967 | t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t); | |
6968 | TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t; | |
acf0174b | 6969 | break; |
c02065fc | 6970 | } |
a68ab351 JJ |
6971 | |
6972 | case PREDECREMENT_EXPR: | |
6973 | case POSTDECREMENT_EXPR: | |
da6f124d JJ |
6974 | /* c_omp_for_incr_canonicalize_ptr() should have been |
6975 | called to massage things appropriately. */ | |
6976 | gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl))); | |
acf0174b JJ |
6977 | if (orig_for_stmt != for_stmt) |
6978 | break; | |
a68ab351 | 6979 | t = build_int_cst (TREE_TYPE (decl), -1); |
74bf76ed JJ |
6980 | if (c) |
6981 | OMP_CLAUSE_LINEAR_STEP (c) = t; | |
a68ab351 | 6982 | t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t); |
726a989a | 6983 | t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t); |
a68ab351 JJ |
6984 | TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t; |
6985 | break; | |
6986 | ||
726a989a RB |
6987 | case MODIFY_EXPR: |
6988 | gcc_assert (TREE_OPERAND (t, 0) == decl); | |
6989 | TREE_OPERAND (t, 0) = var; | |
a68ab351 | 6990 | |
726a989a | 6991 | t = TREE_OPERAND (t, 1); |
a68ab351 | 6992 | switch (TREE_CODE (t)) |
953ff289 | 6993 | { |
a68ab351 JJ |
6994 | case PLUS_EXPR: |
6995 | if (TREE_OPERAND (t, 1) == decl) | |
6996 | { | |
6997 | TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0); | |
6998 | TREE_OPERAND (t, 0) = var; | |
6999 | break; | |
7000 | } | |
7001 | ||
7002 | /* Fallthru. */ | |
7003 | case MINUS_EXPR: | |
7004 | case POINTER_PLUS_EXPR: | |
7005 | gcc_assert (TREE_OPERAND (t, 0) == decl); | |
917948d3 | 7006 | TREE_OPERAND (t, 0) = var; |
953ff289 | 7007 | break; |
a68ab351 JJ |
7008 | default: |
7009 | gcc_unreachable (); | |
953ff289 | 7010 | } |
917948d3 | 7011 | |
32e8bb8e | 7012 | tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, |
726a989a | 7013 | is_gimple_val, fb_rvalue); |
32e8bb8e | 7014 | ret = MIN (ret, tret); |
74bf76ed JJ |
7015 | if (c) |
7016 | { | |
da6f124d JJ |
7017 | tree step = TREE_OPERAND (t, 1); |
7018 | tree stept = TREE_TYPE (decl); | |
7019 | if (POINTER_TYPE_P (stept)) | |
7020 | stept = sizetype; | |
7021 | step = fold_convert (stept, step); | |
74bf76ed | 7022 | if (TREE_CODE (t) == MINUS_EXPR) |
da6f124d JJ |
7023 | step = fold_build1 (NEGATE_EXPR, stept, step); |
7024 | OMP_CLAUSE_LINEAR_STEP (c) = step; | |
7025 | if (step != TREE_OPERAND (t, 1)) | |
74bf76ed | 7026 | { |
74bf76ed JJ |
7027 | tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), |
7028 | &for_pre_body, NULL, | |
7029 | is_gimple_val, fb_rvalue); | |
7030 | ret = MIN (ret, tret); | |
7031 | } | |
7032 | } | |
953ff289 | 7033 | break; |
a68ab351 | 7034 | |
953ff289 DN |
7035 | default: |
7036 | gcc_unreachable (); | |
7037 | } | |
7038 | ||
f7468577 JJ |
7039 | if (c2) |
7040 | { | |
7041 | gcc_assert (c); | |
7042 | OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c); | |
7043 | } | |
7044 | ||
acf0174b JJ |
7045 | if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1) |
7046 | && orig_for_stmt == for_stmt) | |
a68ab351 | 7047 | { |
a68ab351 | 7048 | for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c)) |
f7468577 JJ |
7049 | if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE |
7050 | && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL) | |
7051 | || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR | |
7052 | && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c) | |
7053 | && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL)) | |
7054 | && OMP_CLAUSE_DECL (c) == decl) | |
726a989a RB |
7055 | { |
7056 | t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); | |
7057 | gcc_assert (TREE_CODE (t) == MODIFY_EXPR); | |
7058 | gcc_assert (TREE_OPERAND (t, 0) == var); | |
7059 | t = TREE_OPERAND (t, 1); | |
7060 | gcc_assert (TREE_CODE (t) == PLUS_EXPR | |
7061 | || TREE_CODE (t) == MINUS_EXPR | |
7062 | || TREE_CODE (t) == POINTER_PLUS_EXPR); | |
7063 | gcc_assert (TREE_OPERAND (t, 0) == var); | |
7064 | t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl, | |
7065 | TREE_OPERAND (t, 1)); | |
f7468577 JJ |
7066 | gimple_seq *seq; |
7067 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE) | |
7068 | seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c); | |
7069 | else | |
7070 | seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c); | |
7071 | gimplify_assign (decl, t, seq); | |
a68ab351 JJ |
7072 | } |
7073 | } | |
953ff289 DN |
7074 | } |
7075 | ||
74bf76ed JJ |
7076 | BITMAP_FREE (has_decl_expr); |
7077 | ||
acf0174b | 7078 | gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body); |
726a989a | 7079 | |
acf0174b JJ |
7080 | if (orig_for_stmt != for_stmt) |
7081 | for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) | |
7082 | { | |
7083 | t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); | |
7084 | decl = TREE_OPERAND (t, 0); | |
7085 | var = create_tmp_var (TREE_TYPE (decl), get_name (decl)); | |
7086 | omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN); | |
7087 | TREE_OPERAND (t, 0) = var; | |
7088 | t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); | |
7089 | TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1)); | |
7090 | TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var; | |
7091 | } | |
7092 | ||
f014c653 | 7093 | gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt)); |
953ff289 | 7094 | |
74bf76ed | 7095 | int kind; |
acf0174b | 7096 | switch (TREE_CODE (orig_for_stmt)) |
74bf76ed JJ |
7097 | { |
7098 | case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break; | |
7099 | case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break; | |
c02065fc | 7100 | case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break; |
9a771876 | 7101 | case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break; |
acf0174b | 7102 | case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break; |
74bf76ed JJ |
7103 | default: |
7104 | gcc_unreachable (); | |
7105 | } | |
acf0174b | 7106 | gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt), |
726a989a RB |
7107 | TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)), |
7108 | for_pre_body); | |
acf0174b JJ |
7109 | if (orig_for_stmt != for_stmt) |
7110 | gimple_omp_for_set_combined_p (gfor, true); | |
7111 | if (gimplify_omp_ctxp | |
7112 | && (gimplify_omp_ctxp->combined_loop | |
7113 | || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL | |
7114 | && gimplify_omp_ctxp->outer_context | |
7115 | && gimplify_omp_ctxp->outer_context->combined_loop))) | |
7116 | { | |
7117 | gimple_omp_for_set_combined_into_p (gfor, true); | |
7118 | if (gimplify_omp_ctxp->combined_loop) | |
7119 | gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD); | |
7120 | else | |
7121 | gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR); | |
7122 | } | |
726a989a RB |
7123 | |
7124 | for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) | |
7125 | { | |
7126 | t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); | |
7127 | gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0)); | |
7128 | gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1)); | |
7129 | t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); | |
7130 | gimple_omp_for_set_cond (gfor, i, TREE_CODE (t)); | |
7131 | gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1)); | |
7132 | t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); | |
7133 | gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1)); | |
7134 | } | |
7135 | ||
7136 | gimplify_seq_add_stmt (pre_p, gfor); | |
74bf76ed JJ |
7137 | if (ret != GS_ALL_DONE) |
7138 | return GS_ERROR; | |
7139 | *expr_p = NULL_TREE; | |
7140 | return GS_ALL_DONE; | |
953ff289 DN |
7141 | } |
7142 | ||
acf0174b JJ |
7143 | /* Gimplify the gross structure of other OpenMP constructs. |
7144 | In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA | |
7145 | and OMP_TEAMS. */ | |
953ff289 | 7146 | |
726a989a RB |
7147 | static void |
7148 | gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p) | |
953ff289 | 7149 | { |
726a989a RB |
7150 | tree expr = *expr_p; |
7151 | gimple stmt; | |
7152 | gimple_seq body = NULL; | |
acf0174b | 7153 | enum omp_region_type ort = ORT_WORKSHARE; |
953ff289 | 7154 | |
acf0174b JJ |
7155 | switch (TREE_CODE (expr)) |
7156 | { | |
7157 | case OMP_SECTIONS: | |
7158 | case OMP_SINGLE: | |
7159 | break; | |
7160 | case OMP_TARGET: | |
7161 | ort = ORT_TARGET; | |
7162 | break; | |
7163 | case OMP_TARGET_DATA: | |
7164 | ort = ORT_TARGET_DATA; | |
7165 | break; | |
7166 | case OMP_TEAMS: | |
7167 | ort = ORT_TEAMS; | |
7168 | break; | |
7169 | default: | |
7170 | gcc_unreachable (); | |
7171 | } | |
7172 | gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort); | |
7173 | if (ort == ORT_TARGET || ort == ORT_TARGET_DATA) | |
7174 | { | |
45852dcc | 7175 | push_gimplify_context (); |
acf0174b JJ |
7176 | gimple g = gimplify_and_return_first (OMP_BODY (expr), &body); |
7177 | if (gimple_code (g) == GIMPLE_BIND) | |
7178 | pop_gimplify_context (g); | |
7179 | else | |
7180 | pop_gimplify_context (NULL); | |
7181 | if (ort == ORT_TARGET_DATA) | |
7182 | { | |
7183 | gimple_seq cleanup = NULL; | |
7184 | tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA); | |
7185 | g = gimple_build_call (fn, 0); | |
7186 | gimple_seq_add_stmt (&cleanup, g); | |
7187 | g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY); | |
7188 | body = NULL; | |
7189 | gimple_seq_add_stmt (&body, g); | |
7190 | } | |
7191 | } | |
7192 | else | |
7193 | gimplify_and_add (OMP_BODY (expr), &body); | |
f014c653 | 7194 | gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr)); |
953ff289 | 7195 | |
acf0174b JJ |
7196 | switch (TREE_CODE (expr)) |
7197 | { | |
7198 | case OMP_SECTIONS: | |
7199 | stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr)); | |
7200 | break; | |
7201 | case OMP_SINGLE: | |
7202 | stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr)); | |
7203 | break; | |
7204 | case OMP_TARGET: | |
7205 | stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION, | |
7206 | OMP_CLAUSES (expr)); | |
7207 | break; | |
7208 | case OMP_TARGET_DATA: | |
7209 | stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA, | |
7210 | OMP_CLAUSES (expr)); | |
7211 | break; | |
7212 | case OMP_TEAMS: | |
7213 | stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr)); | |
7214 | break; | |
7215 | default: | |
7216 | gcc_unreachable (); | |
7217 | } | |
7218 | ||
7219 | gimplify_seq_add_stmt (pre_p, stmt); | |
7220 | *expr_p = NULL_TREE; | |
7221 | } | |
7222 | ||
7223 | /* Gimplify the gross structure of OpenMP target update construct. */ | |
7224 | ||
7225 | static void | |
7226 | gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p) | |
7227 | { | |
7228 | tree expr = *expr_p; | |
7229 | gimple stmt; | |
7230 | ||
7231 | gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p, | |
7232 | ORT_WORKSHARE); | |
f014c653 | 7233 | gimplify_adjust_omp_clauses (pre_p, &OMP_TARGET_UPDATE_CLAUSES (expr)); |
acf0174b JJ |
7234 | stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE, |
7235 | OMP_TARGET_UPDATE_CLAUSES (expr)); | |
726a989a RB |
7236 | |
7237 | gimplify_seq_add_stmt (pre_p, stmt); | |
acf0174b | 7238 | *expr_p = NULL_TREE; |
953ff289 DN |
7239 | } |
7240 | ||
7241 | /* A subroutine of gimplify_omp_atomic. The front end is supposed to have | |
b8698a0f | 7242 | stabilized the lhs of the atomic operation as *ADDR. Return true if |
953ff289 DN |
7243 | EXPR is this stabilized form. */ |
7244 | ||
7245 | static bool | |
a509ebb5 | 7246 | goa_lhs_expr_p (tree expr, tree addr) |
953ff289 DN |
7247 | { |
7248 | /* Also include casts to other type variants. The C front end is fond | |
b8698a0f | 7249 | of adding these for e.g. volatile variables. This is like |
953ff289 | 7250 | STRIP_TYPE_NOPS but includes the main variant lookup. */ |
9600efe1 | 7251 | STRIP_USELESS_TYPE_CONVERSION (expr); |
953ff289 | 7252 | |
78e47463 JJ |
7253 | if (TREE_CODE (expr) == INDIRECT_REF) |
7254 | { | |
7255 | expr = TREE_OPERAND (expr, 0); | |
7256 | while (expr != addr | |
1043771b | 7257 | && (CONVERT_EXPR_P (expr) |
78e47463 JJ |
7258 | || TREE_CODE (expr) == NON_LVALUE_EXPR) |
7259 | && TREE_CODE (expr) == TREE_CODE (addr) | |
9600efe1 | 7260 | && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr))) |
78e47463 JJ |
7261 | { |
7262 | expr = TREE_OPERAND (expr, 0); | |
7263 | addr = TREE_OPERAND (addr, 0); | |
7264 | } | |
251923f5 JJ |
7265 | if (expr == addr) |
7266 | return true; | |
71458b8a JJ |
7267 | return (TREE_CODE (addr) == ADDR_EXPR |
7268 | && TREE_CODE (expr) == ADDR_EXPR | |
251923f5 | 7269 | && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0)); |
78e47463 | 7270 | } |
953ff289 DN |
7271 | if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0)) |
7272 | return true; | |
7273 | return false; | |
7274 | } | |
7275 | ||
ad19c4be EB |
7276 | /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an |
7277 | expression does not involve the lhs, evaluate it into a temporary. | |
7278 | Return 1 if the lhs appeared as a subexpression, 0 if it did not, | |
7279 | or -1 if an error was encountered. */ | |
953ff289 DN |
7280 | |
7281 | static int | |
726a989a RB |
7282 | goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr, |
7283 | tree lhs_var) | |
953ff289 DN |
7284 | { |
7285 | tree expr = *expr_p; | |
7286 | int saw_lhs; | |
7287 | ||
7288 | if (goa_lhs_expr_p (expr, lhs_addr)) | |
7289 | { | |
7290 | *expr_p = lhs_var; | |
7291 | return 1; | |
7292 | } | |
7293 | if (is_gimple_val (expr)) | |
7294 | return 0; | |
b8698a0f | 7295 | |
953ff289 DN |
7296 | saw_lhs = 0; |
7297 | switch (TREE_CODE_CLASS (TREE_CODE (expr))) | |
7298 | { | |
7299 | case tcc_binary: | |
067dd3c9 | 7300 | case tcc_comparison: |
726a989a RB |
7301 | saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr, |
7302 | lhs_var); | |
953ff289 | 7303 | case tcc_unary: |
726a989a RB |
7304 | saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr, |
7305 | lhs_var); | |
953ff289 | 7306 | break; |
067dd3c9 JJ |
7307 | case tcc_expression: |
7308 | switch (TREE_CODE (expr)) | |
7309 | { | |
7310 | case TRUTH_ANDIF_EXPR: | |
7311 | case TRUTH_ORIF_EXPR: | |
f2b11865 JJ |
7312 | case TRUTH_AND_EXPR: |
7313 | case TRUTH_OR_EXPR: | |
7314 | case TRUTH_XOR_EXPR: | |
067dd3c9 JJ |
7315 | saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, |
7316 | lhs_addr, lhs_var); | |
f2b11865 | 7317 | case TRUTH_NOT_EXPR: |
067dd3c9 JJ |
7318 | saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, |
7319 | lhs_addr, lhs_var); | |
7320 | break; | |
4063e61b JM |
7321 | case COMPOUND_EXPR: |
7322 | /* Break out any preevaluations from cp_build_modify_expr. */ | |
7323 | for (; TREE_CODE (expr) == COMPOUND_EXPR; | |
7324 | expr = TREE_OPERAND (expr, 1)) | |
7325 | gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p); | |
7326 | *expr_p = expr; | |
7327 | return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var); | |
067dd3c9 JJ |
7328 | default: |
7329 | break; | |
7330 | } | |
7331 | break; | |
953ff289 DN |
7332 | default: |
7333 | break; | |
7334 | } | |
7335 | ||
7336 | if (saw_lhs == 0) | |
7337 | { | |
7338 | enum gimplify_status gs; | |
7339 | gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue); | |
7340 | if (gs != GS_ALL_DONE) | |
7341 | saw_lhs = -1; | |
7342 | } | |
7343 | ||
7344 | return saw_lhs; | |
7345 | } | |
7346 | ||
953ff289 DN |
7347 | /* Gimplify an OMP_ATOMIC statement. */ |
7348 | ||
7349 | static enum gimplify_status | |
726a989a | 7350 | gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p) |
953ff289 DN |
7351 | { |
7352 | tree addr = TREE_OPERAND (*expr_p, 0); | |
20906c66 JJ |
7353 | tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ |
7354 | ? NULL : TREE_OPERAND (*expr_p, 1); | |
953ff289 | 7355 | tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr))); |
726a989a | 7356 | tree tmp_load; |
20906c66 | 7357 | gimple loadstmt, storestmt; |
953ff289 | 7358 | |
20906c66 JJ |
7359 | tmp_load = create_tmp_reg (type, NULL); |
7360 | if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0) | |
7361 | return GS_ERROR; | |
7362 | ||
7363 | if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue) | |
7364 | != GS_ALL_DONE) | |
7365 | return GS_ERROR; | |
953ff289 | 7366 | |
20906c66 JJ |
7367 | loadstmt = gimple_build_omp_atomic_load (tmp_load, addr); |
7368 | gimplify_seq_add_stmt (pre_p, loadstmt); | |
7369 | if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue) | |
7370 | != GS_ALL_DONE) | |
7371 | return GS_ERROR; | |
953ff289 | 7372 | |
20906c66 JJ |
7373 | if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ) |
7374 | rhs = tmp_load; | |
7375 | storestmt = gimple_build_omp_atomic_store (rhs); | |
7376 | gimplify_seq_add_stmt (pre_p, storestmt); | |
acf0174b JJ |
7377 | if (OMP_ATOMIC_SEQ_CST (*expr_p)) |
7378 | { | |
7379 | gimple_omp_atomic_set_seq_cst (loadstmt); | |
7380 | gimple_omp_atomic_set_seq_cst (storestmt); | |
7381 | } | |
20906c66 JJ |
7382 | switch (TREE_CODE (*expr_p)) |
7383 | { | |
7384 | case OMP_ATOMIC_READ: | |
7385 | case OMP_ATOMIC_CAPTURE_OLD: | |
7386 | *expr_p = tmp_load; | |
7387 | gimple_omp_atomic_set_need_value (loadstmt); | |
7388 | break; | |
7389 | case OMP_ATOMIC_CAPTURE_NEW: | |
7390 | *expr_p = rhs; | |
7391 | gimple_omp_atomic_set_need_value (storestmt); | |
7392 | break; | |
7393 | default: | |
7394 | *expr_p = NULL; | |
7395 | break; | |
7396 | } | |
a509ebb5 | 7397 | |
acf0174b | 7398 | return GS_ALL_DONE; |
953ff289 | 7399 | } |
6de9cd9a | 7400 | |
0a35513e AH |
7401 | /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the |
7402 | body, and adding some EH bits. */ | |
7403 | ||
7404 | static enum gimplify_status | |
7405 | gimplify_transaction (tree *expr_p, gimple_seq *pre_p) | |
7406 | { | |
7407 | tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr); | |
7408 | gimple g; | |
7409 | gimple_seq body = NULL; | |
0a35513e AH |
7410 | int subcode = 0; |
7411 | ||
7412 | /* Wrap the transaction body in a BIND_EXPR so we have a context | |
7413 | where to put decls for OpenMP. */ | |
7414 | if (TREE_CODE (tbody) != BIND_EXPR) | |
7415 | { | |
7416 | tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL); | |
7417 | TREE_SIDE_EFFECTS (bind) = 1; | |
7418 | SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody)); | |
7419 | TRANSACTION_EXPR_BODY (expr) = bind; | |
7420 | } | |
7421 | ||
45852dcc | 7422 | push_gimplify_context (); |
0a35513e AH |
7423 | temp = voidify_wrapper_expr (*expr_p, NULL); |
7424 | ||
7425 | g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body); | |
7426 | pop_gimplify_context (g); | |
7427 | ||
7428 | g = gimple_build_transaction (body, NULL); | |
7429 | if (TRANSACTION_EXPR_OUTER (expr)) | |
7430 | subcode = GTMA_IS_OUTER; | |
7431 | else if (TRANSACTION_EXPR_RELAXED (expr)) | |
7432 | subcode = GTMA_IS_RELAXED; | |
7433 | gimple_transaction_set_subcode (g, subcode); | |
7434 | ||
7435 | gimplify_seq_add_stmt (pre_p, g); | |
7436 | ||
7437 | if (temp) | |
7438 | { | |
7439 | *expr_p = temp; | |
7440 | return GS_OK; | |
7441 | } | |
7442 | ||
7443 | *expr_p = NULL_TREE; | |
7444 | return GS_ALL_DONE; | |
7445 | } | |
7446 | ||
ad19c4be | 7447 | /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the |
726a989a RB |
7448 | expression produces a value to be used as an operand inside a GIMPLE |
7449 | statement, the value will be stored back in *EXPR_P. This value will | |
7450 | be a tree of class tcc_declaration, tcc_constant, tcc_reference or | |
7451 | an SSA_NAME. The corresponding sequence of GIMPLE statements is | |
7452 | emitted in PRE_P and POST_P. | |
7453 | ||
7454 | Additionally, this process may overwrite parts of the input | |
7455 | expression during gimplification. Ideally, it should be | |
7456 | possible to do non-destructive gimplification. | |
7457 | ||
7458 | EXPR_P points to the GENERIC expression to convert to GIMPLE. If | |
7459 | the expression needs to evaluate to a value to be used as | |
7460 | an operand in a GIMPLE statement, this value will be stored in | |
7461 | *EXPR_P on exit. This happens when the caller specifies one | |
7462 | of fb_lvalue or fb_rvalue fallback flags. | |
7463 | ||
7464 | PRE_P will contain the sequence of GIMPLE statements corresponding | |
7465 | to the evaluation of EXPR and all the side-effects that must | |
7466 | be executed before the main expression. On exit, the last | |
7467 | statement of PRE_P is the core statement being gimplified. For | |
7468 | instance, when gimplifying 'if (++a)' the last statement in | |
7469 | PRE_P will be 'if (t.1)' where t.1 is the result of | |
7470 | pre-incrementing 'a'. | |
7471 | ||
7472 | POST_P will contain the sequence of GIMPLE statements corresponding | |
7473 | to the evaluation of all the side-effects that must be executed | |
7474 | after the main expression. If this is NULL, the post | |
7475 | side-effects are stored at the end of PRE_P. | |
7476 | ||
7477 | The reason why the output is split in two is to handle post | |
7478 | side-effects explicitly. In some cases, an expression may have | |
7479 | inner and outer post side-effects which need to be emitted in | |
7480 | an order different from the one given by the recursive | |
7481 | traversal. For instance, for the expression (*p--)++ the post | |
7482 | side-effects of '--' must actually occur *after* the post | |
7483 | side-effects of '++'. However, gimplification will first visit | |
7484 | the inner expression, so if a separate POST sequence was not | |
7485 | used, the resulting sequence would be: | |
7486 | ||
7487 | 1 t.1 = *p | |
7488 | 2 p = p - 1 | |
7489 | 3 t.2 = t.1 + 1 | |
7490 | 4 *p = t.2 | |
7491 | ||
7492 | However, the post-decrement operation in line #2 must not be | |
7493 | evaluated until after the store to *p at line #4, so the | |
7494 | correct sequence should be: | |
7495 | ||
7496 | 1 t.1 = *p | |
7497 | 2 t.2 = t.1 + 1 | |
7498 | 3 *p = t.2 | |
7499 | 4 p = p - 1 | |
7500 | ||
7501 | So, by specifying a separate post queue, it is possible | |
7502 | to emit the post side-effects in the correct order. | |
7503 | If POST_P is NULL, an internal queue will be used. Before | |
7504 | returning to the caller, the sequence POST_P is appended to | |
7505 | the main output sequence PRE_P. | |
7506 | ||
7507 | GIMPLE_TEST_F points to a function that takes a tree T and | |
7508 | returns nonzero if T is in the GIMPLE form requested by the | |
12947319 | 7509 | caller. The GIMPLE predicates are in gimple.c. |
726a989a RB |
7510 | |
7511 | FALLBACK tells the function what sort of a temporary we want if | |
7512 | gimplification cannot produce an expression that complies with | |
7513 | GIMPLE_TEST_F. | |
7514 | ||
7515 | fb_none means that no temporary should be generated | |
7516 | fb_rvalue means that an rvalue is OK to generate | |
7517 | fb_lvalue means that an lvalue is OK to generate | |
7518 | fb_either means that either is OK, but an lvalue is preferable. | |
7519 | fb_mayfail means that gimplification may fail (in which case | |
7520 | GS_ERROR will be returned) | |
7521 | ||
7522 | The return value is either GS_ERROR or GS_ALL_DONE, since this | |
7523 | function iterates until EXPR is completely gimplified or an error | |
7524 | occurs. */ | |
6de9cd9a DN |
7525 | |
7526 | enum gimplify_status | |
726a989a RB |
7527 | gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, |
7528 | bool (*gimple_test_f) (tree), fallback_t fallback) | |
6de9cd9a DN |
7529 | { |
7530 | tree tmp; | |
726a989a RB |
7531 | gimple_seq internal_pre = NULL; |
7532 | gimple_seq internal_post = NULL; | |
6de9cd9a | 7533 | tree save_expr; |
726a989a | 7534 | bool is_statement; |
6de9cd9a DN |
7535 | location_t saved_location; |
7536 | enum gimplify_status ret; | |
726a989a | 7537 | gimple_stmt_iterator pre_last_gsi, post_last_gsi; |
6de9cd9a DN |
7538 | |
7539 | save_expr = *expr_p; | |
7540 | if (save_expr == NULL_TREE) | |
7541 | return GS_ALL_DONE; | |
7542 | ||
726a989a RB |
7543 | /* If we are gimplifying a top-level statement, PRE_P must be valid. */ |
7544 | is_statement = gimple_test_f == is_gimple_stmt; | |
7545 | if (is_statement) | |
7546 | gcc_assert (pre_p); | |
7547 | ||
7548 | /* Consistency checks. */ | |
7549 | if (gimple_test_f == is_gimple_reg) | |
7550 | gcc_assert (fallback & (fb_rvalue | fb_lvalue)); | |
7551 | else if (gimple_test_f == is_gimple_val | |
726a989a RB |
7552 | || gimple_test_f == is_gimple_call_addr |
7553 | || gimple_test_f == is_gimple_condexpr | |
7554 | || gimple_test_f == is_gimple_mem_rhs | |
ba4d8f9d | 7555 | || gimple_test_f == is_gimple_mem_rhs_or_call |
726a989a | 7556 | || gimple_test_f == is_gimple_reg_rhs |
ba4d8f9d | 7557 | || gimple_test_f == is_gimple_reg_rhs_or_call |
70f34814 RG |
7558 | || gimple_test_f == is_gimple_asm_val |
7559 | || gimple_test_f == is_gimple_mem_ref_addr) | |
726a989a RB |
7560 | gcc_assert (fallback & fb_rvalue); |
7561 | else if (gimple_test_f == is_gimple_min_lval | |
7562 | || gimple_test_f == is_gimple_lvalue) | |
7563 | gcc_assert (fallback & fb_lvalue); | |
7564 | else if (gimple_test_f == is_gimple_addressable) | |
7565 | gcc_assert (fallback & fb_either); | |
7566 | else if (gimple_test_f == is_gimple_stmt) | |
7567 | gcc_assert (fallback == fb_none); | |
7568 | else | |
7569 | { | |
7570 | /* We should have recognized the GIMPLE_TEST_F predicate to | |
7571 | know what kind of fallback to use in case a temporary is | |
7572 | needed to hold the value or address of *EXPR_P. */ | |
7573 | gcc_unreachable (); | |
7574 | } | |
7575 | ||
6de9cd9a DN |
7576 | /* We used to check the predicate here and return immediately if it |
7577 | succeeds. This is wrong; the design is for gimplification to be | |
7578 | idempotent, and for the predicates to only test for valid forms, not | |
7579 | whether they are fully simplified. */ | |
6de9cd9a DN |
7580 | if (pre_p == NULL) |
7581 | pre_p = &internal_pre; | |
726a989a | 7582 | |
6de9cd9a DN |
7583 | if (post_p == NULL) |
7584 | post_p = &internal_post; | |
7585 | ||
726a989a RB |
7586 | /* Remember the last statements added to PRE_P and POST_P. Every |
7587 | new statement added by the gimplification helpers needs to be | |
7588 | annotated with location information. To centralize the | |
7589 | responsibility, we remember the last statement that had been | |
7590 | added to both queues before gimplifying *EXPR_P. If | |
7591 | gimplification produces new statements in PRE_P and POST_P, those | |
7592 | statements will be annotated with the same location information | |
7593 | as *EXPR_P. */ | |
7594 | pre_last_gsi = gsi_last (*pre_p); | |
7595 | post_last_gsi = gsi_last (*post_p); | |
7596 | ||
6de9cd9a | 7597 | saved_location = input_location; |
a281759f PB |
7598 | if (save_expr != error_mark_node |
7599 | && EXPR_HAS_LOCATION (*expr_p)) | |
7600 | input_location = EXPR_LOCATION (*expr_p); | |
6de9cd9a DN |
7601 | |
7602 | /* Loop over the specific gimplifiers until the toplevel node | |
7603 | remains the same. */ | |
7604 | do | |
7605 | { | |
73d6ddef RK |
7606 | /* Strip away as many useless type conversions as possible |
7607 | at the toplevel. */ | |
7608 | STRIP_USELESS_TYPE_CONVERSION (*expr_p); | |
6de9cd9a DN |
7609 | |
7610 | /* Remember the expr. */ | |
7611 | save_expr = *expr_p; | |
7612 | ||
7613 | /* Die, die, die, my darling. */ | |
7614 | if (save_expr == error_mark_node | |
726a989a | 7615 | || (TREE_TYPE (save_expr) |
65355d53 | 7616 | && TREE_TYPE (save_expr) == error_mark_node)) |
6de9cd9a DN |
7617 | { |
7618 | ret = GS_ERROR; | |
7619 | break; | |
7620 | } | |
7621 | ||
7622 | /* Do any language-specific gimplification. */ | |
32e8bb8e ILT |
7623 | ret = ((enum gimplify_status) |
7624 | lang_hooks.gimplify_expr (expr_p, pre_p, post_p)); | |
6de9cd9a DN |
7625 | if (ret == GS_OK) |
7626 | { | |
7627 | if (*expr_p == NULL_TREE) | |
7628 | break; | |
7629 | if (*expr_p != save_expr) | |
7630 | continue; | |
7631 | } | |
7632 | else if (ret != GS_UNHANDLED) | |
7633 | break; | |
7634 | ||
941f78d1 JM |
7635 | /* Make sure that all the cases set 'ret' appropriately. */ |
7636 | ret = GS_UNHANDLED; | |
6de9cd9a DN |
7637 | switch (TREE_CODE (*expr_p)) |
7638 | { | |
7639 | /* First deal with the special cases. */ | |
7640 | ||
7641 | case POSTINCREMENT_EXPR: | |
7642 | case POSTDECREMENT_EXPR: | |
7643 | case PREINCREMENT_EXPR: | |
7644 | case PREDECREMENT_EXPR: | |
7645 | ret = gimplify_self_mod_expr (expr_p, pre_p, post_p, | |
cc3c4f62 RB |
7646 | fallback != fb_none, |
7647 | TREE_TYPE (*expr_p)); | |
6de9cd9a DN |
7648 | break; |
7649 | ||
0bd34ae4 RB |
7650 | case VIEW_CONVERT_EXPR: |
7651 | if (is_gimple_reg_type (TREE_TYPE (*expr_p)) | |
7652 | && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))) | |
7653 | { | |
7654 | ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, | |
7655 | post_p, is_gimple_val, fb_rvalue); | |
7656 | recalculate_side_effects (*expr_p); | |
7657 | break; | |
7658 | } | |
7659 | /* Fallthru. */ | |
7660 | ||
6de9cd9a | 7661 | case ARRAY_REF: |
44de5aeb RK |
7662 | case ARRAY_RANGE_REF: |
7663 | case REALPART_EXPR: | |
7664 | case IMAGPART_EXPR: | |
6de9cd9a DN |
7665 | case COMPONENT_REF: |
7666 | ret = gimplify_compound_lval (expr_p, pre_p, post_p, | |
90051e16 | 7667 | fallback ? fallback : fb_rvalue); |
6de9cd9a DN |
7668 | break; |
7669 | ||
7670 | case COND_EXPR: | |
dae7ec87 | 7671 | ret = gimplify_cond_expr (expr_p, pre_p, fallback); |
726a989a | 7672 | |
0223e4f5 JM |
7673 | /* C99 code may assign to an array in a structure value of a |
7674 | conditional expression, and this has undefined behavior | |
7675 | only on execution, so create a temporary if an lvalue is | |
7676 | required. */ | |
7677 | if (fallback == fb_lvalue) | |
7678 | { | |
7679 | *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); | |
936d04b6 | 7680 | mark_addressable (*expr_p); |
941f78d1 | 7681 | ret = GS_OK; |
0223e4f5 | 7682 | } |
6de9cd9a DN |
7683 | break; |
7684 | ||
7685 | case CALL_EXPR: | |
90051e16 | 7686 | ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none); |
726a989a | 7687 | |
0223e4f5 JM |
7688 | /* C99 code may assign to an array in a structure returned |
7689 | from a function, and this has undefined behavior only on | |
7690 | execution, so create a temporary if an lvalue is | |
7691 | required. */ | |
7692 | if (fallback == fb_lvalue) | |
7693 | { | |
7694 | *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); | |
936d04b6 | 7695 | mark_addressable (*expr_p); |
941f78d1 | 7696 | ret = GS_OK; |
0223e4f5 | 7697 | } |
6de9cd9a DN |
7698 | break; |
7699 | ||
7700 | case TREE_LIST: | |
282899df | 7701 | gcc_unreachable (); |
6de9cd9a DN |
7702 | |
7703 | case COMPOUND_EXPR: | |
7704 | ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none); | |
7705 | break; | |
7706 | ||
2ec5deb5 | 7707 | case COMPOUND_LITERAL_EXPR: |
4c53d183 MM |
7708 | ret = gimplify_compound_literal_expr (expr_p, pre_p, |
7709 | gimple_test_f, fallback); | |
2ec5deb5 PB |
7710 | break; |
7711 | ||
6de9cd9a DN |
7712 | case MODIFY_EXPR: |
7713 | case INIT_EXPR: | |
ebad5233 JM |
7714 | ret = gimplify_modify_expr (expr_p, pre_p, post_p, |
7715 | fallback != fb_none); | |
6de9cd9a DN |
7716 | break; |
7717 | ||
7718 | case TRUTH_ANDIF_EXPR: | |
7719 | case TRUTH_ORIF_EXPR: | |
1d15f620 KT |
7720 | { |
7721 | /* Preserve the original type of the expression and the | |
7722 | source location of the outer expression. */ | |
7723 | tree org_type = TREE_TYPE (*expr_p); | |
7724 | *expr_p = gimple_boolify (*expr_p); | |
4b4455e5 | 7725 | *expr_p = build3_loc (input_location, COND_EXPR, |
1d15f620 KT |
7726 | org_type, *expr_p, |
7727 | fold_convert_loc | |
4b4455e5 | 7728 | (input_location, |
1d15f620 KT |
7729 | org_type, boolean_true_node), |
7730 | fold_convert_loc | |
4b4455e5 | 7731 | (input_location, |
1d15f620 KT |
7732 | org_type, boolean_false_node)); |
7733 | ret = GS_OK; | |
7734 | break; | |
7735 | } | |
6de9cd9a DN |
7736 | |
7737 | case TRUTH_NOT_EXPR: | |
3c6cbf7a | 7738 | { |
53020648 RG |
7739 | tree type = TREE_TYPE (*expr_p); |
7740 | /* The parsers are careful to generate TRUTH_NOT_EXPR | |
7741 | only with operands that are always zero or one. | |
7742 | We do not fold here but handle the only interesting case | |
7743 | manually, as fold may re-introduce the TRUTH_NOT_EXPR. */ | |
3c6cbf7a | 7744 | *expr_p = gimple_boolify (*expr_p); |
53020648 RG |
7745 | if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1) |
7746 | *expr_p = build1_loc (input_location, BIT_NOT_EXPR, | |
7747 | TREE_TYPE (*expr_p), | |
7748 | TREE_OPERAND (*expr_p, 0)); | |
7749 | else | |
7750 | *expr_p = build2_loc (input_location, BIT_XOR_EXPR, | |
7751 | TREE_TYPE (*expr_p), | |
7752 | TREE_OPERAND (*expr_p, 0), | |
7753 | build_int_cst (TREE_TYPE (*expr_p), 1)); | |
7754 | if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p))) | |
7755 | *expr_p = fold_convert_loc (input_location, type, *expr_p); | |
7756 | ret = GS_OK; | |
bd5d002e | 7757 | break; |
3c6cbf7a | 7758 | } |
67339062 | 7759 | |
6de9cd9a DN |
7760 | case ADDR_EXPR: |
7761 | ret = gimplify_addr_expr (expr_p, pre_p, post_p); | |
7762 | break; | |
7763 | ||
8170608b TB |
7764 | case ANNOTATE_EXPR: |
7765 | { | |
7766 | tree cond = TREE_OPERAND (*expr_p, 0); | |
718c4601 | 7767 | tree kind = TREE_OPERAND (*expr_p, 1); |
664ceb1e JJ |
7768 | tree type = TREE_TYPE (cond); |
7769 | if (!INTEGRAL_TYPE_P (type)) | |
7770 | { | |
7771 | *expr_p = cond; | |
7772 | ret = GS_OK; | |
7773 | break; | |
7774 | } | |
7775 | tree tmp = create_tmp_var (type, NULL); | |
8170608b | 7776 | gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p)); |
718c4601 EB |
7777 | gimple call |
7778 | = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind); | |
8170608b TB |
7779 | gimple_call_set_lhs (call, tmp); |
7780 | gimplify_seq_add_stmt (pre_p, call); | |
7781 | *expr_p = tmp; | |
7782 | ret = GS_ALL_DONE; | |
7783 | break; | |
7784 | } | |
7785 | ||
6de9cd9a | 7786 | case VA_ARG_EXPR: |
cd3ce9b4 | 7787 | ret = gimplify_va_arg_expr (expr_p, pre_p, post_p); |
6de9cd9a DN |
7788 | break; |
7789 | ||
1043771b | 7790 | CASE_CONVERT: |
6de9cd9a DN |
7791 | if (IS_EMPTY_STMT (*expr_p)) |
7792 | { | |
7793 | ret = GS_ALL_DONE; | |
7794 | break; | |
7795 | } | |
7796 | ||
7797 | if (VOID_TYPE_P (TREE_TYPE (*expr_p)) | |
7798 | || fallback == fb_none) | |
7799 | { | |
7800 | /* Just strip a conversion to void (or in void context) and | |
7801 | try again. */ | |
7802 | *expr_p = TREE_OPERAND (*expr_p, 0); | |
941f78d1 | 7803 | ret = GS_OK; |
6de9cd9a DN |
7804 | break; |
7805 | } | |
7806 | ||
7807 | ret = gimplify_conversion (expr_p); | |
7808 | if (ret == GS_ERROR) | |
7809 | break; | |
7810 | if (*expr_p != save_expr) | |
7811 | break; | |
7812 | /* FALLTHRU */ | |
7813 | ||
7814 | case FIX_TRUNC_EXPR: | |
6de9cd9a DN |
7815 | /* unary_expr: ... | '(' cast ')' val | ... */ |
7816 | ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, | |
7817 | is_gimple_val, fb_rvalue); | |
7818 | recalculate_side_effects (*expr_p); | |
7819 | break; | |
7820 | ||
6a720599 | 7821 | case INDIRECT_REF: |
70f34814 RG |
7822 | { |
7823 | bool volatilep = TREE_THIS_VOLATILE (*expr_p); | |
3748f5c9 | 7824 | bool notrap = TREE_THIS_NOTRAP (*expr_p); |
70f34814 RG |
7825 | tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0)); |
7826 | ||
7827 | *expr_p = fold_indirect_ref_loc (input_location, *expr_p); | |
7828 | if (*expr_p != save_expr) | |
7829 | { | |
7830 | ret = GS_OK; | |
7831 | break; | |
7832 | } | |
7833 | ||
7834 | ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, | |
7835 | is_gimple_reg, fb_rvalue); | |
dca26746 RG |
7836 | if (ret == GS_ERROR) |
7837 | break; | |
70f34814 | 7838 | |
dca26746 | 7839 | recalculate_side_effects (*expr_p); |
70f34814 RG |
7840 | *expr_p = fold_build2_loc (input_location, MEM_REF, |
7841 | TREE_TYPE (*expr_p), | |
7842 | TREE_OPERAND (*expr_p, 0), | |
7843 | build_int_cst (saved_ptr_type, 0)); | |
7844 | TREE_THIS_VOLATILE (*expr_p) = volatilep; | |
3748f5c9 | 7845 | TREE_THIS_NOTRAP (*expr_p) = notrap; |
70f34814 RG |
7846 | ret = GS_OK; |
7847 | break; | |
7848 | } | |
7849 | ||
7850 | /* We arrive here through the various re-gimplifcation paths. */ | |
7851 | case MEM_REF: | |
7852 | /* First try re-folding the whole thing. */ | |
7853 | tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p), | |
7854 | TREE_OPERAND (*expr_p, 0), | |
7855 | TREE_OPERAND (*expr_p, 1)); | |
7856 | if (tmp) | |
941f78d1 | 7857 | { |
70f34814 RG |
7858 | *expr_p = tmp; |
7859 | recalculate_side_effects (*expr_p); | |
941f78d1 JM |
7860 | ret = GS_OK; |
7861 | break; | |
7862 | } | |
01718e96 RG |
7863 | /* Avoid re-gimplifying the address operand if it is already |
7864 | in suitable form. Re-gimplifying would mark the address | |
7865 | operand addressable. Always gimplify when not in SSA form | |
7866 | as we still may have to gimplify decls with value-exprs. */ | |
7867 | if (!gimplify_ctxp || !gimplify_ctxp->into_ssa | |
7868 | || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0))) | |
7869 | { | |
7870 | ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, | |
7871 | is_gimple_mem_ref_addr, fb_rvalue); | |
7872 | if (ret == GS_ERROR) | |
7873 | break; | |
7874 | } | |
6de9cd9a | 7875 | recalculate_side_effects (*expr_p); |
70f34814 | 7876 | ret = GS_ALL_DONE; |
6de9cd9a DN |
7877 | break; |
7878 | ||
01718e96 | 7879 | /* Constants need not be gimplified. */ |
6de9cd9a DN |
7880 | case INTEGER_CST: |
7881 | case REAL_CST: | |
325217ed | 7882 | case FIXED_CST: |
6de9cd9a DN |
7883 | case STRING_CST: |
7884 | case COMPLEX_CST: | |
7885 | case VECTOR_CST: | |
3f5c390d RB |
7886 | /* Drop the overflow flag on constants, we do not want |
7887 | that in the GIMPLE IL. */ | |
7888 | if (TREE_OVERFLOW_P (*expr_p)) | |
7889 | *expr_p = drop_tree_overflow (*expr_p); | |
6de9cd9a DN |
7890 | ret = GS_ALL_DONE; |
7891 | break; | |
7892 | ||
7893 | case CONST_DECL: | |
0534fa56 | 7894 | /* If we require an lvalue, such as for ADDR_EXPR, retain the |
2a7e31df | 7895 | CONST_DECL node. Otherwise the decl is replaceable by its |
0534fa56 RH |
7896 | value. */ |
7897 | /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */ | |
7898 | if (fallback & fb_lvalue) | |
7899 | ret = GS_ALL_DONE; | |
7900 | else | |
941f78d1 JM |
7901 | { |
7902 | *expr_p = DECL_INITIAL (*expr_p); | |
7903 | ret = GS_OK; | |
7904 | } | |
6de9cd9a DN |
7905 | break; |
7906 | ||
350fae66 | 7907 | case DECL_EXPR: |
726a989a | 7908 | ret = gimplify_decl_expr (expr_p, pre_p); |
350fae66 RK |
7909 | break; |
7910 | ||
6de9cd9a | 7911 | case BIND_EXPR: |
c6c7698d | 7912 | ret = gimplify_bind_expr (expr_p, pre_p); |
6de9cd9a DN |
7913 | break; |
7914 | ||
7915 | case LOOP_EXPR: | |
7916 | ret = gimplify_loop_expr (expr_p, pre_p); | |
7917 | break; | |
7918 | ||
7919 | case SWITCH_EXPR: | |
7920 | ret = gimplify_switch_expr (expr_p, pre_p); | |
7921 | break; | |
7922 | ||
6de9cd9a DN |
7923 | case EXIT_EXPR: |
7924 | ret = gimplify_exit_expr (expr_p); | |
7925 | break; | |
7926 | ||
7927 | case GOTO_EXPR: | |
7928 | /* If the target is not LABEL, then it is a computed jump | |
7929 | and the target needs to be gimplified. */ | |
7930 | if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL) | |
8c50b495 JJ |
7931 | { |
7932 | ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p, | |
7933 | NULL, is_gimple_val, fb_rvalue); | |
7934 | if (ret == GS_ERROR) | |
7935 | break; | |
7936 | } | |
726a989a RB |
7937 | gimplify_seq_add_stmt (pre_p, |
7938 | gimple_build_goto (GOTO_DESTINATION (*expr_p))); | |
941f78d1 | 7939 | ret = GS_ALL_DONE; |
6de9cd9a DN |
7940 | break; |
7941 | ||
2e28e797 | 7942 | case PREDICT_EXPR: |
726a989a RB |
7943 | gimplify_seq_add_stmt (pre_p, |
7944 | gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p), | |
7945 | PREDICT_EXPR_OUTCOME (*expr_p))); | |
7946 | ret = GS_ALL_DONE; | |
7947 | break; | |
2e28e797 | 7948 | |
6de9cd9a DN |
7949 | case LABEL_EXPR: |
7950 | ret = GS_ALL_DONE; | |
282899df NS |
7951 | gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p)) |
7952 | == current_function_decl); | |
726a989a RB |
7953 | gimplify_seq_add_stmt (pre_p, |
7954 | gimple_build_label (LABEL_EXPR_LABEL (*expr_p))); | |
6de9cd9a DN |
7955 | break; |
7956 | ||
7957 | case CASE_LABEL_EXPR: | |
726a989a | 7958 | ret = gimplify_case_label_expr (expr_p, pre_p); |
6de9cd9a DN |
7959 | break; |
7960 | ||
7961 | case RETURN_EXPR: | |
7962 | ret = gimplify_return_expr (*expr_p, pre_p); | |
7963 | break; | |
7964 | ||
7965 | case CONSTRUCTOR: | |
48eb4e53 RK |
7966 | /* Don't reduce this in place; let gimplify_init_constructor work its |
7967 | magic. Buf if we're just elaborating this for side effects, just | |
7968 | gimplify any element that has side-effects. */ | |
7969 | if (fallback == fb_none) | |
7970 | { | |
4038c495 | 7971 | unsigned HOST_WIDE_INT ix; |
ac47786e | 7972 | tree val; |
08330ec2 | 7973 | tree temp = NULL_TREE; |
ac47786e NF |
7974 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val) |
7975 | if (TREE_SIDE_EFFECTS (val)) | |
7976 | append_to_statement_list (val, &temp); | |
48eb4e53 | 7977 | |
08330ec2 | 7978 | *expr_p = temp; |
941f78d1 | 7979 | ret = temp ? GS_OK : GS_ALL_DONE; |
48eb4e53 | 7980 | } |
ca0b7d18 AP |
7981 | /* C99 code may assign to an array in a constructed |
7982 | structure or union, and this has undefined behavior only | |
7983 | on execution, so create a temporary if an lvalue is | |
7984 | required. */ | |
7985 | else if (fallback == fb_lvalue) | |
7986 | { | |
7987 | *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); | |
936d04b6 | 7988 | mark_addressable (*expr_p); |
941f78d1 | 7989 | ret = GS_OK; |
ca0b7d18 | 7990 | } |
08330ec2 AP |
7991 | else |
7992 | ret = GS_ALL_DONE; | |
6de9cd9a DN |
7993 | break; |
7994 | ||
7995 | /* The following are special cases that are not handled by the | |
7996 | original GIMPLE grammar. */ | |
7997 | ||
7998 | /* SAVE_EXPR nodes are converted into a GIMPLE identifier and | |
7999 | eliminated. */ | |
8000 | case SAVE_EXPR: | |
8001 | ret = gimplify_save_expr (expr_p, pre_p, post_p); | |
8002 | break; | |
8003 | ||
8004 | case BIT_FIELD_REF: | |
ea814c66 EB |
8005 | ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, |
8006 | post_p, is_gimple_lvalue, fb_either); | |
8007 | recalculate_side_effects (*expr_p); | |
6de9cd9a DN |
8008 | break; |
8009 | ||
150e3929 RG |
8010 | case TARGET_MEM_REF: |
8011 | { | |
8012 | enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE; | |
8013 | ||
23a534a1 | 8014 | if (TMR_BASE (*expr_p)) |
150e3929 | 8015 | r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p, |
4d948885 | 8016 | post_p, is_gimple_mem_ref_addr, fb_either); |
150e3929 RG |
8017 | if (TMR_INDEX (*expr_p)) |
8018 | r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p, | |
8019 | post_p, is_gimple_val, fb_rvalue); | |
4d948885 RG |
8020 | if (TMR_INDEX2 (*expr_p)) |
8021 | r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p, | |
8022 | post_p, is_gimple_val, fb_rvalue); | |
150e3929 RG |
8023 | /* TMR_STEP and TMR_OFFSET are always integer constants. */ |
8024 | ret = MIN (r0, r1); | |
8025 | } | |
8026 | break; | |
8027 | ||
6de9cd9a DN |
8028 | case NON_LVALUE_EXPR: |
8029 | /* This should have been stripped above. */ | |
282899df | 8030 | gcc_unreachable (); |
6de9cd9a DN |
8031 | |
8032 | case ASM_EXPR: | |
8033 | ret = gimplify_asm_expr (expr_p, pre_p, post_p); | |
8034 | break; | |
8035 | ||
8036 | case TRY_FINALLY_EXPR: | |
8037 | case TRY_CATCH_EXPR: | |
726a989a RB |
8038 | { |
8039 | gimple_seq eval, cleanup; | |
8040 | gimple try_; | |
8041 | ||
820055a0 DC |
8042 | /* Calls to destructors are generated automatically in FINALLY/CATCH |
8043 | block. They should have location as UNKNOWN_LOCATION. However, | |
8044 | gimplify_call_expr will reset these call stmts to input_location | |
8045 | if it finds stmt's location is unknown. To prevent resetting for | |
8046 | destructors, we set the input_location to unknown. | |
8047 | Note that this only affects the destructor calls in FINALLY/CATCH | |
8048 | block, and will automatically reset to its original value by the | |
8049 | end of gimplify_expr. */ | |
8050 | input_location = UNKNOWN_LOCATION; | |
726a989a RB |
8051 | eval = cleanup = NULL; |
8052 | gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval); | |
8053 | gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup); | |
cc8b343d JJ |
8054 | /* Don't create bogus GIMPLE_TRY with empty cleanup. */ |
8055 | if (gimple_seq_empty_p (cleanup)) | |
8056 | { | |
8057 | gimple_seq_add_seq (pre_p, eval); | |
8058 | ret = GS_ALL_DONE; | |
8059 | break; | |
8060 | } | |
726a989a RB |
8061 | try_ = gimple_build_try (eval, cleanup, |
8062 | TREE_CODE (*expr_p) == TRY_FINALLY_EXPR | |
8063 | ? GIMPLE_TRY_FINALLY | |
8064 | : GIMPLE_TRY_CATCH); | |
e368f44f DC |
8065 | if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION) |
8066 | gimple_set_location (try_, saved_location); | |
8067 | else | |
8068 | gimple_set_location (try_, EXPR_LOCATION (save_expr)); | |
726a989a RB |
8069 | if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR) |
8070 | gimple_try_set_catch_is_cleanup (try_, | |
8071 | TRY_CATCH_IS_CLEANUP (*expr_p)); | |
8072 | gimplify_seq_add_stmt (pre_p, try_); | |
8073 | ret = GS_ALL_DONE; | |
8074 | break; | |
8075 | } | |
6de9cd9a DN |
8076 | |
8077 | case CLEANUP_POINT_EXPR: | |
8078 | ret = gimplify_cleanup_point_expr (expr_p, pre_p); | |
8079 | break; | |
8080 | ||
8081 | case TARGET_EXPR: | |
8082 | ret = gimplify_target_expr (expr_p, pre_p, post_p); | |
8083 | break; | |
8084 | ||
8085 | case CATCH_EXPR: | |
726a989a RB |
8086 | { |
8087 | gimple c; | |
8088 | gimple_seq handler = NULL; | |
8089 | gimplify_and_add (CATCH_BODY (*expr_p), &handler); | |
8090 | c = gimple_build_catch (CATCH_TYPES (*expr_p), handler); | |
8091 | gimplify_seq_add_stmt (pre_p, c); | |
8092 | ret = GS_ALL_DONE; | |
8093 | break; | |
8094 | } | |
6de9cd9a DN |
8095 | |
8096 | case EH_FILTER_EXPR: | |
726a989a RB |
8097 | { |
8098 | gimple ehf; | |
8099 | gimple_seq failure = NULL; | |
8100 | ||
8101 | gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure); | |
8102 | ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure); | |
d665b6e5 | 8103 | gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p)); |
726a989a RB |
8104 | gimplify_seq_add_stmt (pre_p, ehf); |
8105 | ret = GS_ALL_DONE; | |
8106 | break; | |
8107 | } | |
6de9cd9a | 8108 | |
0f59171d RH |
8109 | case OBJ_TYPE_REF: |
8110 | { | |
8111 | enum gimplify_status r0, r1; | |
726a989a RB |
8112 | r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, |
8113 | post_p, is_gimple_val, fb_rvalue); | |
8114 | r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, | |
8115 | post_p, is_gimple_val, fb_rvalue); | |
0f3a057a | 8116 | TREE_SIDE_EFFECTS (*expr_p) = 0; |
0f59171d RH |
8117 | ret = MIN (r0, r1); |
8118 | } | |
6de9cd9a DN |
8119 | break; |
8120 | ||
6de9cd9a DN |
8121 | case LABEL_DECL: |
8122 | /* We get here when taking the address of a label. We mark | |
8123 | the label as "forced"; meaning it can never be removed and | |
8124 | it is a potential target for any computed goto. */ | |
8125 | FORCED_LABEL (*expr_p) = 1; | |
8126 | ret = GS_ALL_DONE; | |
8127 | break; | |
8128 | ||
8129 | case STATEMENT_LIST: | |
c6c7698d | 8130 | ret = gimplify_statement_list (expr_p, pre_p); |
6de9cd9a DN |
8131 | break; |
8132 | ||
d25cee4d RH |
8133 | case WITH_SIZE_EXPR: |
8134 | { | |
70e2829d KH |
8135 | gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, |
8136 | post_p == &internal_post ? NULL : post_p, | |
8137 | gimple_test_f, fallback); | |
8138 | gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, | |
8139 | is_gimple_val, fb_rvalue); | |
941f78d1 | 8140 | ret = GS_ALL_DONE; |
d25cee4d RH |
8141 | } |
8142 | break; | |
8143 | ||
6de9cd9a | 8144 | case VAR_DECL: |
4744afba | 8145 | case PARM_DECL: |
a9f7c570 | 8146 | ret = gimplify_var_or_parm_decl (expr_p); |
6de9cd9a DN |
8147 | break; |
8148 | ||
077b0dfb JJ |
8149 | case RESULT_DECL: |
8150 | /* When within an OpenMP context, notice uses of variables. */ | |
8151 | if (gimplify_omp_ctxp) | |
8152 | omp_notice_variable (gimplify_omp_ctxp, *expr_p, true); | |
8153 | ret = GS_ALL_DONE; | |
8154 | break; | |
8155 | ||
71956db3 RH |
8156 | case SSA_NAME: |
8157 | /* Allow callbacks into the gimplifier during optimization. */ | |
8158 | ret = GS_ALL_DONE; | |
8159 | break; | |
8160 | ||
953ff289 | 8161 | case OMP_PARALLEL: |
726a989a RB |
8162 | gimplify_omp_parallel (expr_p, pre_p); |
8163 | ret = GS_ALL_DONE; | |
953ff289 DN |
8164 | break; |
8165 | ||
a68ab351 | 8166 | case OMP_TASK: |
726a989a RB |
8167 | gimplify_omp_task (expr_p, pre_p); |
8168 | ret = GS_ALL_DONE; | |
a68ab351 JJ |
8169 | break; |
8170 | ||
953ff289 | 8171 | case OMP_FOR: |
74bf76ed | 8172 | case OMP_SIMD: |
c02065fc | 8173 | case CILK_SIMD: |
9a771876 | 8174 | case CILK_FOR: |
acf0174b | 8175 | case OMP_DISTRIBUTE: |
953ff289 DN |
8176 | ret = gimplify_omp_for (expr_p, pre_p); |
8177 | break; | |
8178 | ||
8179 | case OMP_SECTIONS: | |
8180 | case OMP_SINGLE: | |
acf0174b JJ |
8181 | case OMP_TARGET: |
8182 | case OMP_TARGET_DATA: | |
8183 | case OMP_TEAMS: | |
726a989a RB |
8184 | gimplify_omp_workshare (expr_p, pre_p); |
8185 | ret = GS_ALL_DONE; | |
953ff289 DN |
8186 | break; |
8187 | ||
acf0174b JJ |
8188 | case OMP_TARGET_UPDATE: |
8189 | gimplify_omp_target_update (expr_p, pre_p); | |
8190 | ret = GS_ALL_DONE; | |
8191 | break; | |
8192 | ||
953ff289 DN |
8193 | case OMP_SECTION: |
8194 | case OMP_MASTER: | |
acf0174b | 8195 | case OMP_TASKGROUP: |
953ff289 DN |
8196 | case OMP_ORDERED: |
8197 | case OMP_CRITICAL: | |
726a989a RB |
8198 | { |
8199 | gimple_seq body = NULL; | |
8200 | gimple g; | |
8201 | ||
8202 | gimplify_and_add (OMP_BODY (*expr_p), &body); | |
8203 | switch (TREE_CODE (*expr_p)) | |
8204 | { | |
8205 | case OMP_SECTION: | |
8206 | g = gimple_build_omp_section (body); | |
8207 | break; | |
8208 | case OMP_MASTER: | |
8209 | g = gimple_build_omp_master (body); | |
8210 | break; | |
acf0174b JJ |
8211 | case OMP_TASKGROUP: |
8212 | { | |
8213 | gimple_seq cleanup = NULL; | |
8214 | tree fn | |
8215 | = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END); | |
8216 | g = gimple_build_call (fn, 0); | |
8217 | gimple_seq_add_stmt (&cleanup, g); | |
8218 | g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY); | |
8219 | body = NULL; | |
8220 | gimple_seq_add_stmt (&body, g); | |
8221 | g = gimple_build_omp_taskgroup (body); | |
8222 | } | |
8223 | break; | |
726a989a RB |
8224 | case OMP_ORDERED: |
8225 | g = gimple_build_omp_ordered (body); | |
8226 | break; | |
8227 | case OMP_CRITICAL: | |
8228 | g = gimple_build_omp_critical (body, | |
8229 | OMP_CRITICAL_NAME (*expr_p)); | |
8230 | break; | |
8231 | default: | |
8232 | gcc_unreachable (); | |
8233 | } | |
8234 | gimplify_seq_add_stmt (pre_p, g); | |
8235 | ret = GS_ALL_DONE; | |
8236 | break; | |
8237 | } | |
953ff289 DN |
8238 | |
8239 | case OMP_ATOMIC: | |
20906c66 JJ |
8240 | case OMP_ATOMIC_READ: |
8241 | case OMP_ATOMIC_CAPTURE_OLD: | |
8242 | case OMP_ATOMIC_CAPTURE_NEW: | |
953ff289 DN |
8243 | ret = gimplify_omp_atomic (expr_p, pre_p); |
8244 | break; | |
8245 | ||
0a35513e AH |
8246 | case TRANSACTION_EXPR: |
8247 | ret = gimplify_transaction (expr_p, pre_p); | |
8248 | break; | |
8249 | ||
16949072 RG |
8250 | case TRUTH_AND_EXPR: |
8251 | case TRUTH_OR_EXPR: | |
8252 | case TRUTH_XOR_EXPR: | |
1d15f620 | 8253 | { |
bd5d002e | 8254 | tree orig_type = TREE_TYPE (*expr_p); |
fc1f4caf | 8255 | tree new_type, xop0, xop1; |
1d15f620 | 8256 | *expr_p = gimple_boolify (*expr_p); |
fc1f4caf KT |
8257 | new_type = TREE_TYPE (*expr_p); |
8258 | if (!useless_type_conversion_p (orig_type, new_type)) | |
1d15f620 | 8259 | { |
4b4455e5 | 8260 | *expr_p = fold_convert_loc (input_location, orig_type, *expr_p); |
1d15f620 KT |
8261 | ret = GS_OK; |
8262 | break; | |
8263 | } | |
da5fb469 | 8264 | |
bd5d002e RG |
8265 | /* Boolified binary truth expressions are semantically equivalent |
8266 | to bitwise binary expressions. Canonicalize them to the | |
8267 | bitwise variant. */ | |
8268 | switch (TREE_CODE (*expr_p)) | |
8269 | { | |
8270 | case TRUTH_AND_EXPR: | |
8271 | TREE_SET_CODE (*expr_p, BIT_AND_EXPR); | |
8272 | break; | |
8273 | case TRUTH_OR_EXPR: | |
8274 | TREE_SET_CODE (*expr_p, BIT_IOR_EXPR); | |
8275 | break; | |
8276 | case TRUTH_XOR_EXPR: | |
8277 | TREE_SET_CODE (*expr_p, BIT_XOR_EXPR); | |
8278 | break; | |
8279 | default: | |
8280 | break; | |
8281 | } | |
fc1f4caf KT |
8282 | /* Now make sure that operands have compatible type to |
8283 | expression's new_type. */ | |
8284 | xop0 = TREE_OPERAND (*expr_p, 0); | |
8285 | xop1 = TREE_OPERAND (*expr_p, 1); | |
8286 | if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0))) | |
8287 | TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location, | |
8288 | new_type, | |
8289 | xop0); | |
8290 | if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1))) | |
8291 | TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location, | |
8292 | new_type, | |
8293 | xop1); | |
bd5d002e RG |
8294 | /* Continue classified as tcc_binary. */ |
8295 | goto expr_2; | |
da5fb469 | 8296 | } |
16949072 RG |
8297 | |
8298 | case FMA_EXPR: | |
e6ed43b0 | 8299 | case VEC_COND_EXPR: |
2205ed25 | 8300 | case VEC_PERM_EXPR: |
16949072 RG |
8301 | /* Classified as tcc_expression. */ |
8302 | goto expr_3; | |
8303 | ||
5be014d5 | 8304 | case POINTER_PLUS_EXPR: |
315f5f1b RG |
8305 | { |
8306 | enum gimplify_status r0, r1; | |
8307 | r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, | |
8308 | post_p, is_gimple_val, fb_rvalue); | |
8309 | r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, | |
8310 | post_p, is_gimple_val, fb_rvalue); | |
8311 | recalculate_side_effects (*expr_p); | |
8312 | ret = MIN (r0, r1); | |
8313 | /* Convert &X + CST to invariant &MEM[&X, CST]. Do this | |
8314 | after gimplifying operands - this is similar to how | |
8315 | it would be folding all gimplified stmts on creation | |
8316 | to have them canonicalized, which is what we eventually | |
8317 | should do anyway. */ | |
8318 | if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST | |
8319 | && is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0))) | |
8320 | { | |
8321 | *expr_p = build_fold_addr_expr_with_type_loc | |
8322 | (input_location, | |
8323 | fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)), | |
8324 | TREE_OPERAND (*expr_p, 0), | |
8325 | fold_convert (ptr_type_node, | |
8326 | TREE_OPERAND (*expr_p, 1))), | |
8327 | TREE_TYPE (*expr_p)); | |
8328 | ret = MIN (ret, GS_OK); | |
8329 | } | |
8330 | break; | |
8331 | } | |
726a989a | 8332 | |
939b37da BI |
8333 | case CILK_SYNC_STMT: |
8334 | { | |
8335 | if (!fn_contains_cilk_spawn_p (cfun)) | |
8336 | { | |
8337 | error_at (EXPR_LOCATION (*expr_p), | |
8338 | "expected %<_Cilk_spawn%> before %<_Cilk_sync%>"); | |
8339 | ret = GS_ERROR; | |
8340 | } | |
8341 | else | |
8342 | { | |
8343 | gimplify_cilk_sync (expr_p, pre_p); | |
8344 | ret = GS_ALL_DONE; | |
8345 | } | |
8346 | break; | |
8347 | } | |
8348 | ||
6de9cd9a | 8349 | default: |
282899df | 8350 | switch (TREE_CODE_CLASS (TREE_CODE (*expr_p))) |
6de9cd9a | 8351 | { |
6615c446 | 8352 | case tcc_comparison: |
61c25908 OH |
8353 | /* Handle comparison of objects of non scalar mode aggregates |
8354 | with a call to memcmp. It would be nice to only have to do | |
8355 | this for variable-sized objects, but then we'd have to allow | |
8356 | the same nest of reference nodes we allow for MODIFY_EXPR and | |
8357 | that's too complex. | |
8358 | ||
8359 | Compare scalar mode aggregates as scalar mode values. Using | |
8360 | memcmp for them would be very inefficient at best, and is | |
8361 | plain wrong if bitfields are involved. */ | |
726a989a RB |
8362 | { |
8363 | tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1)); | |
61c25908 | 8364 | |
544d960a AS |
8365 | /* Vector comparisons need no boolification. */ |
8366 | if (TREE_CODE (type) == VECTOR_TYPE) | |
8367 | goto expr_2; | |
8368 | else if (!AGGREGATE_TYPE_P (type)) | |
7f3ff782 KT |
8369 | { |
8370 | tree org_type = TREE_TYPE (*expr_p); | |
8371 | *expr_p = gimple_boolify (*expr_p); | |
8372 | if (!useless_type_conversion_p (org_type, | |
8373 | TREE_TYPE (*expr_p))) | |
8374 | { | |
8375 | *expr_p = fold_convert_loc (input_location, | |
8376 | org_type, *expr_p); | |
8377 | ret = GS_OK; | |
8378 | } | |
8379 | else | |
8380 | goto expr_2; | |
8381 | } | |
726a989a RB |
8382 | else if (TYPE_MODE (type) != BLKmode) |
8383 | ret = gimplify_scalar_mode_aggregate_compare (expr_p); | |
8384 | else | |
8385 | ret = gimplify_variable_sized_compare (expr_p); | |
61c25908 | 8386 | |
726a989a | 8387 | break; |
61c25908 | 8388 | } |
d3147f64 | 8389 | |
282899df NS |
8390 | /* If *EXPR_P does not need to be special-cased, handle it |
8391 | according to its class. */ | |
6615c446 | 8392 | case tcc_unary: |
282899df NS |
8393 | ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, |
8394 | post_p, is_gimple_val, fb_rvalue); | |
8395 | break; | |
6de9cd9a | 8396 | |
6615c446 | 8397 | case tcc_binary: |
282899df NS |
8398 | expr_2: |
8399 | { | |
8400 | enum gimplify_status r0, r1; | |
d3147f64 | 8401 | |
282899df | 8402 | r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, |
726a989a | 8403 | post_p, is_gimple_val, fb_rvalue); |
282899df NS |
8404 | r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, |
8405 | post_p, is_gimple_val, fb_rvalue); | |
d3147f64 | 8406 | |
282899df NS |
8407 | ret = MIN (r0, r1); |
8408 | break; | |
8409 | } | |
d3147f64 | 8410 | |
16949072 RG |
8411 | expr_3: |
8412 | { | |
8413 | enum gimplify_status r0, r1, r2; | |
8414 | ||
8415 | r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, | |
8416 | post_p, is_gimple_val, fb_rvalue); | |
8417 | r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, | |
8418 | post_p, is_gimple_val, fb_rvalue); | |
8419 | r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, | |
8420 | post_p, is_gimple_val, fb_rvalue); | |
8421 | ||
8422 | ret = MIN (MIN (r0, r1), r2); | |
8423 | break; | |
8424 | } | |
8425 | ||
6615c446 JO |
8426 | case tcc_declaration: |
8427 | case tcc_constant: | |
6de9cd9a | 8428 | ret = GS_ALL_DONE; |
282899df | 8429 | goto dont_recalculate; |
d3147f64 | 8430 | |
282899df | 8431 | default: |
16949072 | 8432 | gcc_unreachable (); |
6de9cd9a | 8433 | } |
6de9cd9a DN |
8434 | |
8435 | recalculate_side_effects (*expr_p); | |
726a989a | 8436 | |
282899df | 8437 | dont_recalculate: |
6de9cd9a DN |
8438 | break; |
8439 | } | |
d3147f64 | 8440 | |
941f78d1 | 8441 | gcc_assert (*expr_p || ret != GS_OK); |
6de9cd9a DN |
8442 | } |
8443 | while (ret == GS_OK); | |
8444 | ||
8445 | /* If we encountered an error_mark somewhere nested inside, either | |
8446 | stub out the statement or propagate the error back out. */ | |
8447 | if (ret == GS_ERROR) | |
8448 | { | |
8449 | if (is_statement) | |
65355d53 | 8450 | *expr_p = NULL; |
6de9cd9a DN |
8451 | goto out; |
8452 | } | |
8453 | ||
6de9cd9a DN |
8454 | /* This was only valid as a return value from the langhook, which |
8455 | we handled. Make sure it doesn't escape from any other context. */ | |
282899df | 8456 | gcc_assert (ret != GS_UNHANDLED); |
6de9cd9a | 8457 | |
65355d53 | 8458 | if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p)) |
6de9cd9a DN |
8459 | { |
8460 | /* We aren't looking for a value, and we don't have a valid | |
8461 | statement. If it doesn't have side-effects, throw it away. */ | |
8462 | if (!TREE_SIDE_EFFECTS (*expr_p)) | |
65355d53 | 8463 | *expr_p = NULL; |
6de9cd9a | 8464 | else if (!TREE_THIS_VOLATILE (*expr_p)) |
44de5aeb RK |
8465 | { |
8466 | /* This is probably a _REF that contains something nested that | |
8467 | has side effects. Recurse through the operands to find it. */ | |
8468 | enum tree_code code = TREE_CODE (*expr_p); | |
8469 | ||
282899df | 8470 | switch (code) |
44de5aeb | 8471 | { |
282899df | 8472 | case COMPONENT_REF: |
02a5eac4 EB |
8473 | case REALPART_EXPR: |
8474 | case IMAGPART_EXPR: | |
8475 | case VIEW_CONVERT_EXPR: | |
282899df NS |
8476 | gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, |
8477 | gimple_test_f, fallback); | |
8478 | break; | |
8479 | ||
a9e64c63 EB |
8480 | case ARRAY_REF: |
8481 | case ARRAY_RANGE_REF: | |
44de5aeb RK |
8482 | gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, |
8483 | gimple_test_f, fallback); | |
8484 | gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, | |
282899df NS |
8485 | gimple_test_f, fallback); |
8486 | break; | |
8487 | ||
8488 | default: | |
8489 | /* Anything else with side-effects must be converted to | |
a9e64c63 | 8490 | a valid statement before we get here. */ |
282899df | 8491 | gcc_unreachable (); |
44de5aeb | 8492 | } |
44de5aeb | 8493 | |
65355d53 | 8494 | *expr_p = NULL; |
44de5aeb | 8495 | } |
a9e64c63 EB |
8496 | else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p)) |
8497 | && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode) | |
6de9cd9a | 8498 | { |
a9e64c63 EB |
8499 | /* Historically, the compiler has treated a bare reference |
8500 | to a non-BLKmode volatile lvalue as forcing a load. */ | |
af62f6f9 | 8501 | tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p)); |
726a989a | 8502 | |
c22b1771 | 8503 | /* Normally, we do not want to create a temporary for a |
a38578e1 MM |
8504 | TREE_ADDRESSABLE type because such a type should not be |
8505 | copied by bitwise-assignment. However, we make an | |
8506 | exception here, as all we are doing here is ensuring that | |
8507 | we read the bytes that make up the type. We use | |
8508 | create_tmp_var_raw because create_tmp_var will abort when | |
57b51d4d | 8509 | given a TREE_ADDRESSABLE type. */ |
a38578e1 MM |
8510 | tree tmp = create_tmp_var_raw (type, "vol"); |
8511 | gimple_add_tmp_var (tmp); | |
726a989a RB |
8512 | gimplify_assign (tmp, *expr_p, pre_p); |
8513 | *expr_p = NULL; | |
6de9cd9a DN |
8514 | } |
8515 | else | |
8516 | /* We can't do anything useful with a volatile reference to | |
a9e64c63 EB |
8517 | an incomplete type, so just throw it away. Likewise for |
8518 | a BLKmode type, since any implicit inner load should | |
8519 | already have been turned into an explicit one by the | |
8520 | gimplification process. */ | |
65355d53 | 8521 | *expr_p = NULL; |
6de9cd9a DN |
8522 | } |
8523 | ||
8524 | /* If we are gimplifying at the statement level, we're done. Tack | |
726a989a | 8525 | everything together and return. */ |
325c3691 | 8526 | if (fallback == fb_none || is_statement) |
6de9cd9a | 8527 | { |
726a989a RB |
8528 | /* Since *EXPR_P has been converted into a GIMPLE tuple, clear |
8529 | it out for GC to reclaim it. */ | |
8530 | *expr_p = NULL_TREE; | |
8531 | ||
8532 | if (!gimple_seq_empty_p (internal_pre) | |
8533 | || !gimple_seq_empty_p (internal_post)) | |
be00f578 | 8534 | { |
726a989a RB |
8535 | gimplify_seq_add_seq (&internal_pre, internal_post); |
8536 | gimplify_seq_add_seq (pre_p, internal_pre); | |
be00f578 | 8537 | } |
726a989a RB |
8538 | |
8539 | /* The result of gimplifying *EXPR_P is going to be the last few | |
8540 | statements in *PRE_P and *POST_P. Add location information | |
8541 | to all the statements that were added by the gimplification | |
8542 | helpers. */ | |
8543 | if (!gimple_seq_empty_p (*pre_p)) | |
8544 | annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location); | |
8545 | ||
8546 | if (!gimple_seq_empty_p (*post_p)) | |
8547 | annotate_all_with_location_after (*post_p, post_last_gsi, | |
8548 | input_location); | |
8549 | ||
6de9cd9a DN |
8550 | goto out; |
8551 | } | |
8552 | ||
726a989a RB |
8553 | #ifdef ENABLE_GIMPLE_CHECKING |
8554 | if (*expr_p) | |
8555 | { | |
8556 | enum tree_code code = TREE_CODE (*expr_p); | |
8557 | /* These expressions should already be in gimple IR form. */ | |
8558 | gcc_assert (code != MODIFY_EXPR | |
8559 | && code != ASM_EXPR | |
8560 | && code != BIND_EXPR | |
8561 | && code != CATCH_EXPR | |
6fc4fb06 | 8562 | && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr) |
726a989a RB |
8563 | && code != EH_FILTER_EXPR |
8564 | && code != GOTO_EXPR | |
8565 | && code != LABEL_EXPR | |
8566 | && code != LOOP_EXPR | |
726a989a RB |
8567 | && code != SWITCH_EXPR |
8568 | && code != TRY_FINALLY_EXPR | |
8569 | && code != OMP_CRITICAL | |
8570 | && code != OMP_FOR | |
8571 | && code != OMP_MASTER | |
acf0174b | 8572 | && code != OMP_TASKGROUP |
726a989a RB |
8573 | && code != OMP_ORDERED |
8574 | && code != OMP_PARALLEL | |
8575 | && code != OMP_SECTIONS | |
8576 | && code != OMP_SECTION | |
8577 | && code != OMP_SINGLE); | |
8578 | } | |
8579 | #endif | |
6de9cd9a | 8580 | |
726a989a RB |
8581 | /* Otherwise we're gimplifying a subexpression, so the resulting |
8582 | value is interesting. If it's a valid operand that matches | |
8583 | GIMPLE_TEST_F, we're done. Unless we are handling some | |
8584 | post-effects internally; if that's the case, we need to copy into | |
8585 | a temporary before adding the post-effects to POST_P. */ | |
8586 | if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p)) | |
6de9cd9a DN |
8587 | goto out; |
8588 | ||
8589 | /* Otherwise, we need to create a new temporary for the gimplified | |
8590 | expression. */ | |
8591 | ||
8592 | /* We can't return an lvalue if we have an internal postqueue. The | |
8593 | object the lvalue refers to would (probably) be modified by the | |
8594 | postqueue; we need to copy the value out first, which means an | |
8595 | rvalue. */ | |
726a989a RB |
8596 | if ((fallback & fb_lvalue) |
8597 | && gimple_seq_empty_p (internal_post) | |
e847cc68 | 8598 | && is_gimple_addressable (*expr_p)) |
6de9cd9a DN |
8599 | { |
8600 | /* An lvalue will do. Take the address of the expression, store it | |
8601 | in a temporary, and replace the expression with an INDIRECT_REF of | |
8602 | that temporary. */ | |
db3927fb | 8603 | tmp = build_fold_addr_expr_loc (input_location, *expr_p); |
6de9cd9a | 8604 | gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue); |
7f5ad6d7 | 8605 | *expr_p = build_simple_mem_ref (tmp); |
6de9cd9a | 8606 | } |
ba4d8f9d | 8607 | else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p)) |
6de9cd9a | 8608 | { |
726a989a RB |
8609 | /* An rvalue will do. Assign the gimplified expression into a |
8610 | new temporary TMP and replace the original expression with | |
8611 | TMP. First, make sure that the expression has a type so that | |
8612 | it can be assigned into a temporary. */ | |
282899df | 8613 | gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p))); |
412a1d9e | 8614 | *expr_p = get_formal_tmp_var (*expr_p, pre_p); |
6de9cd9a | 8615 | } |
282899df | 8616 | else |
6de9cd9a | 8617 | { |
726a989a | 8618 | #ifdef ENABLE_GIMPLE_CHECKING |
282899df NS |
8619 | if (!(fallback & fb_mayfail)) |
8620 | { | |
8621 | fprintf (stderr, "gimplification failed:\n"); | |
8622 | print_generic_expr (stderr, *expr_p, 0); | |
8623 | debug_tree (*expr_p); | |
8624 | internal_error ("gimplification failed"); | |
8625 | } | |
8626 | #endif | |
8627 | gcc_assert (fallback & fb_mayfail); | |
726a989a | 8628 | |
282899df | 8629 | /* If this is an asm statement, and the user asked for the |
535a42b1 | 8630 | impossible, don't die. Fail and let gimplify_asm_expr |
282899df | 8631 | issue an error. */ |
6de9cd9a DN |
8632 | ret = GS_ERROR; |
8633 | goto out; | |
8634 | } | |
6de9cd9a | 8635 | |
6de9cd9a | 8636 | /* Make sure the temporary matches our predicate. */ |
282899df | 8637 | gcc_assert ((*gimple_test_f) (*expr_p)); |
6de9cd9a | 8638 | |
726a989a | 8639 | if (!gimple_seq_empty_p (internal_post)) |
6de9cd9a | 8640 | { |
726a989a RB |
8641 | annotate_all_with_location (internal_post, input_location); |
8642 | gimplify_seq_add_seq (pre_p, internal_post); | |
6de9cd9a DN |
8643 | } |
8644 | ||
8645 | out: | |
8646 | input_location = saved_location; | |
8647 | return ret; | |
8648 | } | |
8649 | ||
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.

   Works on the main variant only and then copies the gimplified sizes
   into every other variant, so each size expression is evaluated once.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  /* Nothing to do for a missing or erroneous type.  */
  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: their bounds may be variable (e.g. Ada subtypes).  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      /* Propagate the gimplified bounds to all variants.  */
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Aggregates: gimplify the offset and size of each field, then
	 recurse into the field types themselves.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Every type has a size and a unit size, regardless of its code.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  /* Copy the gimplified sizes to the other variants and mark them done
     so a later call on a variant is a no-op.  */
  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
8752 | ||
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     A PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (is_gimple_sizepos (expr))
    return;

  /* Unshare first: the size expression may be referenced from several
     types/decls, and gimplifying a shared tree in place would corrupt
     the other references.  */
  *expr_p = unshare_expr (expr);

  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
}
6de9cd9a | 8774 | |
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gimple
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* Gimplification contexts must not nest here.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context ();

  if (flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* A nested function may reference VLA types of its enclosing function;
     track those in NONLOCAL_VLAS while gimplifying.  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_bind = gimple_seq_first_stmt (seq);
  if (!outer_bind)
    {
      /* An empty body still needs one statement to hang things on.  */
      outer_bind = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_bind);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_bind) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    ;
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body is fully consumed now.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  /* Tear down any OpenMP context created above (or during gimplification).  */
  if ((flag_openmp || flag_openmp_simd) && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_CHECKING
  if (!seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
8899 | ||
6a1f6c9c | 8900 | typedef char *char_p; /* For DEF_VEC_P. */ |
6a1f6c9c JM |
8901 | |
8902 | /* Return whether we should exclude FNDECL from instrumentation. */ | |
8903 | ||
8904 | static bool | |
8905 | flag_instrument_functions_exclude_p (tree fndecl) | |
8906 | { | |
9771b263 | 8907 | vec<char_p> *v; |
6a1f6c9c | 8908 | |
9771b263 DN |
8909 | v = (vec<char_p> *) flag_instrument_functions_exclude_functions; |
8910 | if (v && v->length () > 0) | |
6a1f6c9c JM |
8911 | { |
8912 | const char *name; | |
8913 | int i; | |
8914 | char *s; | |
8915 | ||
8916 | name = lang_hooks.decl_printable_name (fndecl, 0); | |
9771b263 | 8917 | FOR_EACH_VEC_ELT (*v, i, s) |
6a1f6c9c JM |
8918 | if (strstr (name, s) != NULL) |
8919 | return true; | |
8920 | } | |
8921 | ||
9771b263 DN |
8922 | v = (vec<char_p> *) flag_instrument_functions_exclude_files; |
8923 | if (v && v->length () > 0) | |
6a1f6c9c JM |
8924 | { |
8925 | const char *name; | |
8926 | int i; | |
8927 | char *s; | |
8928 | ||
8929 | name = DECL_SOURCE_FILE (fndecl); | |
9771b263 | 8930 | FOR_EACH_VEC_ELT (*v, i, s) |
6a1f6c9c JM |
8931 | if (strstr (name, s) != NULL) |
8932 | return true; | |
8933 | } | |
8934 | ||
8935 | return false; | |
8936 | } | |
8937 | ||
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gimple bind;

  /* The function must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the return value.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gimple new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gimple call;

      /* Build the exit-hook sequence: capture __builtin_return_address (0)
	 and pass it with the function address to __profile_func_exit.
	 This becomes the cleanup of a GIMPLE_TRY_FINALLY around the body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry-hook sequence the same way, calling
	 __profile_func_enter before the try/finally.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  pop_cfun ();
}
726a989a | 9039 | |
4a7cb16f AM |
9040 | /* Return a dummy expression of type TYPE in order to keep going after an |
9041 | error. */ | |
b184c8f1 | 9042 | |
4a7cb16f AM |
9043 | static tree |
9044 | dummy_object (tree type) | |
b184c8f1 | 9045 | { |
4a7cb16f AM |
9046 | tree t = build_int_cst (build_pointer_type (type), 0); |
9047 | return build2 (MEM_REF, type, t, t); | |
b184c8f1 AM |
9048 | } |
9049 | ||
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the note only once per compilation, not per call site.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      /* Let the target lower the VA_ARG_EXPR to target-specific GIMPLE.  */
      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
bcf71673 | 9142 | |
45b0be94 AM |
9143 | /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P. |
9144 | ||
9145 | DST/SRC are the destination and source respectively. You can pass | |
9146 | ungimplified trees in DST or SRC, in which case they will be | |
9147 | converted to a gimple operand if necessary. | |
9148 | ||
9149 | This function returns the newly created GIMPLE_ASSIGN tuple. */ | |
9150 | ||
9151 | gimple | |
9152 | gimplify_assign (tree dst, tree src, gimple_seq *seq_p) | |
9153 | { | |
9154 | tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); | |
9155 | gimplify_and_add (t, seq_p); | |
9156 | ggc_free (t); | |
9157 | return gimple_seq_last_stmt (*seq_p); | |
9158 | } | |
9159 | ||
18f429e2 AM |
9160 | inline hashval_t |
9161 | gimplify_hasher::hash (const value_type *p) | |
9162 | { | |
9163 | tree t = p->val; | |
9164 | return iterative_hash_expr (t, 0); | |
9165 | } | |
9166 | ||
9167 | inline bool | |
9168 | gimplify_hasher::equal (const value_type *p1, const compare_type *p2) | |
9169 | { | |
9170 | tree t1 = p1->val; | |
9171 | tree t2 = p2->val; | |
9172 | enum tree_code code = TREE_CODE (t1); | |
9173 | ||
9174 | if (TREE_CODE (t2) != code | |
9175 | || TREE_TYPE (t1) != TREE_TYPE (t2)) | |
9176 | return false; | |
9177 | ||
9178 | if (!operand_equal_p (t1, t2, 0)) | |
9179 | return false; | |
9180 | ||
9181 | #ifdef ENABLE_CHECKING | |
9182 | /* Only allow them to compare equal if they also hash equal; otherwise | |
9183 | results are nondeterminate, and we fail bootstrap comparison. */ | |
9184 | gcc_assert (hash (p1) == hash (p2)); | |
9185 | #endif | |
9186 | ||
9187 | return true; | |
9188 | } |