]>
Commit | Line | Data |
---|---|---|
2a0603f1 AM |
1 | /* Gimple decl, type, and expression support functions. |
2 | ||
5624e564 | 3 | Copyright (C) 2007-2015 Free Software Foundation, Inc. |
2a0603f1 AM |
4 | Contributed by Aldy Hernandez <aldyh@redhat.com> |
5 | ||
6 | This file is part of GCC. | |
7 | ||
8 | GCC is free software; you can redistribute it and/or modify it under | |
9 | the terms of the GNU General Public License as published by the Free | |
10 | Software Foundation; either version 3, or (at your option) any later | |
11 | version. | |
12 | ||
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
17 | ||
18 | You should have received a copy of the GNU General Public License | |
19 | along with GCC; see the file COPYING3. If not see | |
20 | <http://www.gnu.org/licenses/>. */ | |
21 | ||
22 | #include "config.h" | |
23 | #include "system.h" | |
24 | #include "coretypes.h" | |
c7131fb2 | 25 | #include "backend.h" |
957060b5 | 26 | #include "hard-reg-set.h" |
40e23961 | 27 | #include "tree.h" |
c7131fb2 | 28 | #include "gimple.h" |
957060b5 AM |
29 | #include "stringpool.h" |
30 | #include "gimple-ssa.h" | |
c7131fb2 AM |
31 | #include "alias.h" |
32 | #include "fold-const.h" | |
2fb9a547 AM |
33 | #include "internal-fn.h" |
34 | #include "tree-eh.h" | |
45b0be94 | 35 | #include "gimplify.h" |
d8a2d370 | 36 | #include "stor-layout.h" |
2a0603f1 AM |
37 | #include "demangle.h" |
38 | ||
39 | /* ----- Type related ----- */ | |
40 | ||
/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
	If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
	From a < b does not follow a > b.

     3) Types define the available set of operations applicable to values.
	A type conversion is useless if the operations for the target type
	are a subset of the operations for the source type.  For example
	casts to void* are useless, casts from void* are not (void* can't
	be dereferenced or offsetted, but copied, hence its set of operations
	is a strict subset of that of all other data pointer types).  Casts
	to const T* are useless (can't be written to), casts from const T*
	to T* are not.  */

bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts between pointers to different address spaces.  */
      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	  != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
	return false;
      /* Do not lose casts to function pointer types.  */
      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
	   || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
	  && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
	       || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
	return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  /* Identical (main variant) types convert trivially.  */
  if (inner_type == outer_type)
    return true;

  /* Changes in machine mode are never useless conversions.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
	  || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
	return false;

      /* Preserve conversions to/from BOOLEAN_TYPE if types are not
	 of precision one.  */
      if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
	   != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
	  && TYPE_PRECISION (outer_type) != 1)
	return false;

      /* We don't need to preserve changes in the types minimum or
	 maximum value in general as these do not generate code
	 unless the types precisions are different.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
	   && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
	   && FIXED_POINT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
	   && POINTER_TYPE_P (outer_type))
    {
      /* We do not care for const qualification of the pointed-to types
	 as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  */
      return true;
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
	   && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
	   && TREE_CODE (outer_type) == VECTOR_TYPE
	   && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
	   && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve string attributes.  */
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
	return false;

      /* Conversions from array types with unknown extent to
	 array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type))
	return false;

      /* Nor are conversions from array types with non-constant size to
	 array types with constant size or to different size.  */
      if (TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
	  && (!TYPE_SIZE (inner_type)
	      || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
	      || !tree_int_cst_equal (TYPE_SIZE (outer_type),
				      TYPE_SIZE (inner_type))))
	return false;

      /* Check conversions between arrays with partially known extents.
	 If the array min/max values are constant they have to match.
	 Otherwise allow conversions to unknown and variable extents.
	 In particular this declares conversions that may change the
	 mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type)
	  && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
	{
	  tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
	  tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

	  /* After gimplification a variable min/max value carries no
	     additional information compared to a NULL value.  All that
	     matters has been lowered to be part of the IL.  */
	  if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
	    inner_min = NULL_TREE;
	  if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
	    outer_min = NULL_TREE;
	  if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
	    inner_max = NULL_TREE;
	  if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
	    outer_max = NULL_TREE;

	  /* Conversions NULL / variable <- cst are useless, but not
	     the other way around.  */
	  if (outer_min
	      && (!inner_min
		  || !tree_int_cst_equal (inner_min, outer_min)))
	    return false;
	  if (outer_max
	      && (!inner_max
		  || !tree_int_cst_equal (inner_max, outer_max)))
	    return false;
	}

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
	    || TREE_CODE (inner_type) == METHOD_TYPE)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type)))
	return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
	  && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
					 TYPE_METHOD_BASETYPE (inner_type)))
	return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!prototype_p (outer_type))
	return true;

      /* If the unqualified argument types are compatible the conversion
	 is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
	return true;

      /* Walk both argument lists in lockstep, comparing unqualified
	 argument types.  */
      for (outer_parm = TYPE_ARG_TYPES (outer_type),
	   inner_parm = TYPE_ARG_TYPES (inner_type);
	   outer_parm && inner_parm;
	   outer_parm = TREE_CHAIN (outer_parm),
	   inner_parm = TREE_CHAIN (inner_parm))
	if (!useless_type_conversion_p
	       (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
		TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
	  return false;

      /* If there is a mismatch in the number of arguments the functions
	 are not compatible.  */
      if (outer_parm || inner_parm)
	return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
	return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates compare only the size.  Accesses to fields do have
     a type information by themselves and thus we only care if we can i.e.
     use the types in move operations.  */
  else if (AGGREGATE_TYPE_P (inner_type)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return (TYPE_MODE (outer_type) != BLKmode
	    || operand_equal_p (TYPE_SIZE (inner_type),
				TYPE_SIZE (outer_type), 0));

  else if (TREE_CODE (inner_type) == OFFSET_TYPE
	   && TREE_CODE (outer_type) == OFFSET_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type))
	   && useless_type_conversion_p
	        (TYPE_OFFSET_BASETYPE (outer_type),
		 TYPE_OFFSET_BASETYPE (inner_type));

  return false;
}
285 | ||
286 | ||
287 | /* ----- Decl related ----- */ | |
288 | ||
289 | /* Set sequence SEQ to be the GIMPLE body for function FN. */ | |
290 | ||
291 | void | |
292 | gimple_set_body (tree fndecl, gimple_seq seq) | |
293 | { | |
294 | struct function *fn = DECL_STRUCT_FUNCTION (fndecl); | |
295 | if (fn == NULL) | |
296 | { | |
297 | /* If FNDECL still does not have a function structure associated | |
298 | with it, then it does not make sense for it to receive a | |
299 | GIMPLE body. */ | |
300 | gcc_assert (seq == NULL); | |
301 | } | |
302 | else | |
303 | fn->gimple_body = seq; | |
304 | } | |
305 | ||
306 | ||
307 | /* Return the body of GIMPLE statements for function FN. After the | |
308 | CFG pass, the function body doesn't exist anymore because it has | |
309 | been split up into basic blocks. In this case, it returns | |
310 | NULL. */ | |
311 | ||
312 | gimple_seq | |
313 | gimple_body (tree fndecl) | |
314 | { | |
315 | struct function *fn = DECL_STRUCT_FUNCTION (fndecl); | |
316 | return fn ? fn->gimple_body : NULL; | |
317 | } | |
318 | ||
319 | /* Return true when FNDECL has Gimple body either in unlowered | |
320 | or CFG form. */ | |
321 | bool | |
322 | gimple_has_body_p (tree fndecl) | |
323 | { | |
324 | struct function *fn = DECL_STRUCT_FUNCTION (fndecl); | |
325 | return (gimple_body (fndecl) || (fn && fn->cfg)); | |
326 | } | |
327 | ||
/* Return a printable name for symbol DECL, or NULL if DECL has no name.

   VERBOSITY controls demangling detail: at 2 or above, demangled names
   include return types and, for functions, parameter types.  If DECL has
   an assembler name, that name is demangled (falling back to the raw
   mangled string when demangling fails); otherwise the plain DECL_NAME
   identifier is returned.

   NOTE(review): the string returned by cplus_demangle_v3 is
   heap-allocated and not freed here — presumably callers treat the
   result as owned-by-nobody/leaked; confirm before changing.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
	{
	  dmgl_opts = DMGL_VERBOSE
		      | DMGL_ANSI
		      | DMGL_GNU_V3
		      | DMGL_RET_POSTFIX;
	  /* Parameter types only make sense for functions.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    dmgl_opts |= DMGL_PARAMS;
	}

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      /* Fall back to the mangled name if demangling failed.  */
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}
358 | ||
359 | ||
/* Create a new VAR_DECL named NAME of type TYPE and copy relevant
   flags and attributes from VAR to it.  The copy shares VAR's source
   location, context and attribute list, is marked as used and as seen
   in a BIND_EXPR, but gets the caller-supplied NAME and TYPE rather
   than VAR's own.  */

tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);

  /* Mirror the addressability, volatility and register-ness of VAR.  */
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  TREE_NO_WARNING (copy) = TREE_NO_WARNING (var);
  /* The copy is considered used and already bound.  */
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
  DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);

  return copy;
}
380 | ||
45b0be94 AM |
/* Strip a plausible source-file suffix from NAME (of length LEN) in
   place.  Rather than knowing the extensions used by every front end,
   truncate at a period followed by one to six trailing characters
   (e.g. ".class" for Java).  NAME is left untouched when no such
   period is found.  */

static inline void
remove_suffix (char *name, int len)
{
  int pos;

  for (pos = 2; pos < 8 && len > pos; pos++)
    if (name[len - pos] == '.')
      {
	name[len - pos] = '\0';
	return;
      }
}
400 | ||
401 | /* Create a new temporary name with PREFIX. Return an identifier. */ | |
402 | ||
403 | static GTY(()) unsigned int tmp_var_id_num; | |
404 | ||
405 | tree | |
406 | create_tmp_var_name (const char *prefix) | |
407 | { | |
408 | char *tmp_name; | |
409 | ||
410 | if (prefix) | |
411 | { | |
412 | char *preftmp = ASTRDUP (prefix); | |
413 | ||
414 | remove_suffix (preftmp, strlen (preftmp)); | |
415 | clean_symbol_name (preftmp); | |
416 | ||
417 | prefix = preftmp; | |
418 | } | |
419 | ||
420 | ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++); | |
421 | return get_identifier (tmp_name); | |
422 | } | |
423 | ||
424 | /* Create a new temporary variable declaration of type TYPE. | |
425 | Do NOT push it into the current binding. */ | |
426 | ||
427 | tree | |
428 | create_tmp_var_raw (tree type, const char *prefix) | |
429 | { | |
430 | tree tmp_var; | |
431 | ||
432 | tmp_var = build_decl (input_location, | |
433 | VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL, | |
434 | type); | |
435 | ||
436 | /* The variable was declared by the compiler. */ | |
437 | DECL_ARTIFICIAL (tmp_var) = 1; | |
438 | /* And we don't want debug info for it. */ | |
439 | DECL_IGNORED_P (tmp_var) = 1; | |
440 | ||
441 | /* Make the variable writable. */ | |
442 | TREE_READONLY (tmp_var) = 0; | |
443 | ||
444 | DECL_EXTERNAL (tmp_var) = 0; | |
445 | TREE_STATIC (tmp_var) = 0; | |
446 | TREE_USED (tmp_var) = 1; | |
447 | ||
448 | return tmp_var; | |
449 | } | |
450 | ||
451 | /* Create a new temporary variable declaration of type TYPE. DO push the | |
452 | variable into the current binding. Further, assume that this is called | |
453 | only from gimplification or optimization, at which point the creation of | |
454 | certain types are bugs. */ | |
455 | ||
456 | tree | |
457 | create_tmp_var (tree type, const char *prefix) | |
458 | { | |
459 | tree tmp_var; | |
460 | ||
461 | /* We don't allow types that are addressable (meaning we can't make copies), | |
462 | or incomplete. We also used to reject every variable size objects here, | |
463 | but now support those for which a constant upper bound can be obtained. | |
464 | The processing for variable sizes is performed in gimple_add_tmp_var, | |
465 | point at which it really matters and possibly reached via paths not going | |
466 | through this function, e.g. after direct calls to create_tmp_var_raw. */ | |
467 | gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type)); | |
468 | ||
469 | tmp_var = create_tmp_var_raw (type, prefix); | |
470 | gimple_add_tmp_var (tmp_var); | |
471 | return tmp_var; | |
472 | } | |
473 | ||
474 | /* Create a new temporary variable declaration of type TYPE by calling | |
475 | create_tmp_var and if TYPE is a vector or a complex number, mark the new | |
476 | temporary as gimple register. */ | |
477 | ||
478 | tree | |
479 | create_tmp_reg (tree type, const char *prefix) | |
480 | { | |
481 | tree tmp; | |
482 | ||
483 | tmp = create_tmp_var (type, prefix); | |
484 | if (TREE_CODE (type) == COMPLEX_TYPE | |
485 | || TREE_CODE (type) == VECTOR_TYPE) | |
486 | DECL_GIMPLE_REG_P (tmp) = 1; | |
487 | ||
488 | return tmp; | |
45b62594 RB |
489 | } |
490 | ||
/* Create a new temporary variable declaration of type TYPE by calling
   create_tmp_var_raw, add it to function FN's local declarations, and
   if TYPE is a vector or a complex number, mark the new temporary as
   gimple register.  Like create_tmp_reg, but targets FN instead of the
   current function.  */

tree
create_tmp_reg_fn (struct function *fn, tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var_fn (fn, tmp);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}
508 | ||
2a0603f1 AM |
509 | |
510 | /* ----- Expression related ----- */ | |
511 | ||
512 | /* Extract the operands and code for expression EXPR into *SUBCODE_P, | |
513 | *OP1_P, *OP2_P and *OP3_P respectively. */ | |
514 | ||
515 | void | |
516 | extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p, | |
517 | tree *op2_p, tree *op3_p) | |
518 | { | |
519 | enum gimple_rhs_class grhs_class; | |
520 | ||
521 | *subcode_p = TREE_CODE (expr); | |
522 | grhs_class = get_gimple_rhs_class (*subcode_p); | |
523 | ||
524 | if (grhs_class == GIMPLE_TERNARY_RHS) | |
525 | { | |
526 | *op1_p = TREE_OPERAND (expr, 0); | |
527 | *op2_p = TREE_OPERAND (expr, 1); | |
528 | *op3_p = TREE_OPERAND (expr, 2); | |
529 | } | |
530 | else if (grhs_class == GIMPLE_BINARY_RHS) | |
531 | { | |
532 | *op1_p = TREE_OPERAND (expr, 0); | |
533 | *op2_p = TREE_OPERAND (expr, 1); | |
534 | *op3_p = NULL_TREE; | |
535 | } | |
536 | else if (grhs_class == GIMPLE_UNARY_RHS) | |
537 | { | |
538 | *op1_p = TREE_OPERAND (expr, 0); | |
539 | *op2_p = NULL_TREE; | |
540 | *op3_p = NULL_TREE; | |
541 | } | |
542 | else if (grhs_class == GIMPLE_SINGLE_RHS) | |
543 | { | |
544 | *op1_p = expr; | |
545 | *op2_p = NULL_TREE; | |
546 | *op3_p = NULL_TREE; | |
547 | } | |
548 | else | |
549 | gcc_unreachable (); | |
550 | } | |
551 | ||
/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree
   COND, storing the comparison code in *CODE_P and the two operands in
   *LHS_P and *RHS_P.  COND must be a comparison, a TRUTH_NOT_EXPR, a
   minimal invariant or an SSA variable; non-comparison forms are
   canonicalized against zero.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
			       tree *lhs_p, tree *rhs_p)
{
  gcc_assert (COMPARISON_CLASS_P (cond)
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)' to VAL == 0.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)' to VAL != 0.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
580 | ||
581 | /* Return true if T is a valid LHS for a GIMPLE assignment expression. */ | |
582 | ||
583 | bool | |
584 | is_gimple_lvalue (tree t) | |
585 | { | |
586 | return (is_gimple_addressable (t) | |
587 | || TREE_CODE (t) == WITH_SIZE_EXPR | |
588 | /* These are complex lvalues, but don't have addresses, so they | |
589 | go here. */ | |
590 | || TREE_CODE (t) == BIT_FIELD_REF); | |
591 | } | |
592 | ||
593 | /* Return true if T is a GIMPLE condition. */ | |
594 | ||
595 | bool | |
596 | is_gimple_condexpr (tree t) | |
597 | { | |
598 | return (is_gimple_val (t) || (COMPARISON_CLASS_P (t) | |
599 | && !tree_could_throw_p (t) | |
600 | && is_gimple_val (TREE_OPERAND (t, 0)) | |
601 | && is_gimple_val (TREE_OPERAND (t, 1)))); | |
602 | } | |
603 | ||
604 | /* Return true if T is a gimple address. */ | |
605 | ||
606 | bool | |
607 | is_gimple_address (const_tree t) | |
608 | { | |
609 | tree op; | |
610 | ||
611 | if (TREE_CODE (t) != ADDR_EXPR) | |
612 | return false; | |
613 | ||
614 | op = TREE_OPERAND (t, 0); | |
615 | while (handled_component_p (op)) | |
616 | { | |
617 | if ((TREE_CODE (op) == ARRAY_REF | |
618 | || TREE_CODE (op) == ARRAY_RANGE_REF) | |
619 | && !is_gimple_val (TREE_OPERAND (op, 1))) | |
620 | return false; | |
621 | ||
622 | op = TREE_OPERAND (op, 0); | |
623 | } | |
624 | ||
625 | if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF) | |
626 | return true; | |
627 | ||
628 | switch (TREE_CODE (op)) | |
629 | { | |
630 | case PARM_DECL: | |
631 | case RESULT_DECL: | |
632 | case LABEL_DECL: | |
633 | case FUNCTION_DECL: | |
634 | case VAR_DECL: | |
635 | case CONST_DECL: | |
636 | return true; | |
637 | ||
638 | default: | |
639 | return false; | |
640 | } | |
641 | } | |
642 | ||
643 | /* Return true if T is a gimple invariant address. */ | |
644 | ||
645 | bool | |
646 | is_gimple_invariant_address (const_tree t) | |
647 | { | |
648 | const_tree op; | |
649 | ||
650 | if (TREE_CODE (t) != ADDR_EXPR) | |
651 | return false; | |
652 | ||
653 | op = strip_invariant_refs (TREE_OPERAND (t, 0)); | |
654 | if (!op) | |
655 | return false; | |
656 | ||
657 | if (TREE_CODE (op) == MEM_REF) | |
658 | { | |
659 | const_tree op0 = TREE_OPERAND (op, 0); | |
660 | return (TREE_CODE (op0) == ADDR_EXPR | |
661 | && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)) | |
662 | || decl_address_invariant_p (TREE_OPERAND (op0, 0)))); | |
663 | } | |
664 | ||
665 | return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op); | |
666 | } | |
667 | ||
668 | /* Return true if T is a gimple invariant address at IPA level | |
669 | (so addresses of variables on stack are not allowed). */ | |
670 | ||
671 | bool | |
672 | is_gimple_ip_invariant_address (const_tree t) | |
673 | { | |
674 | const_tree op; | |
675 | ||
676 | if (TREE_CODE (t) != ADDR_EXPR) | |
677 | return false; | |
678 | ||
679 | op = strip_invariant_refs (TREE_OPERAND (t, 0)); | |
680 | if (!op) | |
681 | return false; | |
682 | ||
683 | if (TREE_CODE (op) == MEM_REF) | |
684 | { | |
685 | const_tree op0 = TREE_OPERAND (op, 0); | |
686 | return (TREE_CODE (op0) == ADDR_EXPR | |
687 | && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)) | |
688 | || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0)))); | |
689 | } | |
690 | ||
691 | return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op); | |
692 | } | |
693 | ||
694 | /* Return true if T is a GIMPLE minimal invariant. It's a restricted | |
695 | form of function invariant. */ | |
696 | ||
697 | bool | |
698 | is_gimple_min_invariant (const_tree t) | |
699 | { | |
700 | if (TREE_CODE (t) == ADDR_EXPR) | |
701 | return is_gimple_invariant_address (t); | |
702 | ||
703 | return is_gimple_constant (t); | |
704 | } | |
705 | ||
706 | /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted | |
707 | form of gimple minimal invariant. */ | |
708 | ||
709 | bool | |
710 | is_gimple_ip_invariant (const_tree t) | |
711 | { | |
712 | if (TREE_CODE (t) == ADDR_EXPR) | |
713 | return is_gimple_ip_invariant_address (t); | |
714 | ||
715 | return is_gimple_constant (t); | |
716 | } | |
717 | ||
/* Return true if T is a non-aggregate register variable: an SSA name
   or a non-virtual, non-volatile variable of register type that does
   not need to live in memory.  */

bool
is_gimple_reg (tree t)
{
  /* Virtual operands model memory, never registers.  */
  if (virtual_operand_p (t))
    return false;

  /* Any non-virtual SSA name is a register by construction.  */
  if (TREE_CODE (t) == SSA_NAME)
    return true;

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that might imply.  All around,
     it seems safest to not do too much optimization with these at the
     tree level at all.  We'll have to rely on the rtl optimizers to
     clean this up, as there we've got all the appropriate bits exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}
766 | ||
767 | ||
768 | /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */ | |
769 | ||
770 | bool | |
771 | is_gimple_val (tree t) | |
772 | { | |
773 | /* Make loads from volatiles and memory vars explicit. */ | |
774 | if (is_gimple_variable (t) | |
775 | && is_gimple_reg_type (TREE_TYPE (t)) | |
776 | && !is_gimple_reg (t)) | |
777 | return false; | |
778 | ||
779 | return (is_gimple_variable (t) || is_gimple_min_invariant (t)); | |
780 | } | |
781 | ||
782 | /* Similarly, but accept hard registers as inputs to asm statements. */ | |
783 | ||
784 | bool | |
785 | is_gimple_asm_val (tree t) | |
786 | { | |
787 | if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)) | |
788 | return true; | |
789 | ||
790 | return is_gimple_val (t); | |
791 | } | |
792 | ||
793 | /* Return true if T is a GIMPLE minimal lvalue. */ | |
794 | ||
795 | bool | |
796 | is_gimple_min_lval (tree t) | |
797 | { | |
798 | if (!(t = CONST_CAST_TREE (strip_invariant_refs (t)))) | |
799 | return false; | |
800 | return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF); | |
801 | } | |
802 | ||
803 | /* Return true if T is a valid function operand of a CALL_EXPR. */ | |
804 | ||
805 | bool | |
806 | is_gimple_call_addr (tree t) | |
807 | { | |
808 | return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t)); | |
809 | } | |
810 | ||
811 | /* Return true if T is a valid address operand of a MEM_REF. */ | |
812 | ||
813 | bool | |
814 | is_gimple_mem_ref_addr (tree t) | |
815 | { | |
816 | return (is_gimple_reg (t) | |
817 | || TREE_CODE (t) == INTEGER_CST | |
818 | || (TREE_CODE (t) == ADDR_EXPR | |
819 | && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0)) | |
820 | || decl_address_invariant_p (TREE_OPERAND (t, 0))))); | |
821 | } | |
45b0be94 AM |
822 | |
/* Mark X addressable.  Unlike the langhook we expect X to be in gimple
   form and we don't do any syntax checking.  Only VAR_DECLs, PARM_DECLs
   and RESULT_DECLs are marked; anything else is silently ignored.  */

void
mark_addressable (tree x)
{
  /* Strip component references down to the base object.  */
  while (handled_component_p (x))
    x = TREE_OPERAND (x, 0);
  /* Look through a MEM_REF of a decl's address.  */
  if (TREE_CODE (x) == MEM_REF
      && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
    x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
  if (TREE_CODE (x) != VAR_DECL
      && TREE_CODE (x) != PARM_DECL
      && TREE_CODE (x) != RESULT_DECL)
    return;
  TREE_ADDRESSABLE (x) = 1;

  /* Also mark the artificial SSA_NAME that points to the partition of X.  */
  if (TREE_CODE (x) == VAR_DECL
      && !DECL_EXTERNAL (x)
      && !TREE_STATIC (x)
      && cfun->gimple_df != NULL
      && cfun->gimple_df->decls_to_pointers != NULL)
    {
      tree *namep = cfun->gimple_df->decls_to_pointers->get (x);
      if (namep)
	TREE_ADDRESSABLE (*namep) = 1;
    }
}
852 | ||
853 | /* Returns true iff T is a valid RHS for an assignment to a renamed | |
854 | user -- or front-end generated artificial -- variable. */ | |
855 | ||
856 | bool | |
857 | is_gimple_reg_rhs (tree t) | |
858 | { | |
859 | return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS; | |
860 | } | |
861 | ||
862 | #include "gt-gimple-expr.h" |