/* C-compiler utilities for types and variables storage layout
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "print-tree.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "stmt.h"
#include "expr.h"
#include "langhooks.h"
#include "params.h"
#include "tree-inline.h"
#include "tree-dump.h"
#include "gimplify.h"

/* Data type for the expressions representing sizes of data types.
   It is the first integer type laid out.  */
tree sizetype_tab[(int) stk_type_kind_last];

/* If nonzero, this is an upper limit on alignment of structure fields.
   The value is measured in bits.  */
unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;

/* Nonzero if all REFERENCE_TYPEs are internal and hence should be allocated
   in the address spaces' address_mode, not pointer_mode.  Set only by
   internal_reference_types, which is called only by a front end.  */
static int reference_types_internal = 0;

static tree self_referential_size (tree);
static void finalize_record_size (record_layout_info);
static void finalize_type_size (tree);
static void place_union_field (record_layout_info, tree);
static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
                             HOST_WIDE_INT, tree);
extern void debug_rli (record_layout_info);
\f
/* Show that REFERENCE_TYPES are internal and should use address_mode.
   Called only by a front end.  */

void
internal_reference_types (void)
{
  reference_types_internal = 1;
}

/* Given a size SIZE that may not be a constant, return a SAVE_EXPR
   to serve as the actual size-expression for a type or decl.  */

tree
variable_size (tree size)
{
  /* Obviously.  */
  if (TREE_CONSTANT (size))
    return size;

  /* If the size is self-referential, we can't make a SAVE_EXPR (see
     save_expr for the rationale).  But we can do something else.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    return self_referential_size (size);

  /* If we are in the global binding level, we can't make a SAVE_EXPR
     since it may end up being shared across functions, so it is up
     to the front end to deal with this case.  */
  if (lang_hooks.decls.global_bindings_p ())
    return size;

  return save_expr (size);
}
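
/* Illustrative sketch (not code from this file; N is a hypothetical
   bitsizetype expression): a front end laying out a C99 variable-length
   array "int a[n]" would pass the non-constant bit size through
   variable_size so that it is evaluated exactly once.  */
#if 0
  tree bits = size_binop (MULT_EXPR, n, bitsize_int (32));
  tree vla_size = variable_size (bits);  /* SAVE_EXPR <n * 32> inside a
                                            function; returned unchanged
                                            at the global binding level.  */
#endif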

/* An array of functions used for self-referential size computation.  */
static GTY(()) vec<tree, va_gc> *size_functions;

/* Return true if T is a self-referential component reference.  */

static bool
self_referential_component_ref_p (tree t)
{
  if (TREE_CODE (t) != COMPONENT_REF)
    return false;

  while (REFERENCE_CLASS_P (t))
    t = TREE_OPERAND (t, 0);

  return (TREE_CODE (t) == PLACEHOLDER_EXPR);
}

/* Similar to copy_tree_r but do not copy component references involving
   PLACEHOLDER_EXPRs.  These nodes are spotted in find_placeholder_in_expr
   and substituted in substitute_in_expr.  */

static tree
copy_self_referential_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  enum tree_code code = TREE_CODE (*tp);

  /* Stop at types, decls, constants like copy_tree_r.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* This is the pattern built in ada/make_aligning_type.  */
  else if (code == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (*tp, 0)) == PLACEHOLDER_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* Default case: the component reference.  */
  else if (self_referential_component_ref_p (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* We're not supposed to have them in self-referential size trees
     because we wouldn't properly control when they are evaluated.
     However, not creating superfluous SAVE_EXPRs requires accurate
     tracking of readonly-ness all the way down to here, which we
     cannot always guarantee in practice.  So punt in this case.  */
  else if (code == SAVE_EXPR)
    return error_mark_node;

  else if (code == STATEMENT_LIST)
    gcc_unreachable ();

  return copy_tree_r (tp, walk_subtrees, data);
}

/* Given a SIZE expression that is self-referential, return an equivalent
   expression to serve as the actual size expression for a type.  */

static tree
self_referential_size (tree size)
{
  static unsigned HOST_WIDE_INT fnno = 0;
  vec<tree> self_refs = vNULL;
  tree param_type_list = NULL, param_decl_list = NULL;
  tree t, ref, return_type, fntype, fnname, fndecl;
  unsigned int i;
  char buf[128];
  vec<tree, va_gc> *args = NULL;

  /* Do not factor out simple operations.  */
  t = skip_simple_constant_arithmetic (size);
  if (TREE_CODE (t) == CALL_EXPR || self_referential_component_ref_p (t))
    return size;

  /* Collect the list of self-references in the expression.  */
  find_placeholder_in_expr (size, &self_refs);
  gcc_assert (self_refs.length () > 0);

  /* Obtain a private copy of the expression.  */
  t = size;
  if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL) != NULL_TREE)
    return size;
  size = t;

  /* Build the parameter and argument lists in parallel; also
     substitute the former for the latter in the expression.  */
  vec_alloc (args, self_refs.length ());
  FOR_EACH_VEC_ELT (self_refs, i, ref)
    {
      tree subst, param_name, param_type, param_decl;

      if (DECL_P (ref))
        {
          /* We shouldn't have true variables here.  */
          gcc_assert (TREE_READONLY (ref));
          subst = ref;
        }
      /* This is the pattern built in ada/make_aligning_type.  */
      else if (TREE_CODE (ref) == ADDR_EXPR)
        subst = ref;
      /* Default case: the component reference.  */
      else
        subst = TREE_OPERAND (ref, 1);

      sprintf (buf, "p%d", i);
      param_name = get_identifier (buf);
      param_type = TREE_TYPE (ref);
      param_decl
        = build_decl (input_location, PARM_DECL, param_name, param_type);
      DECL_ARG_TYPE (param_decl) = param_type;
      DECL_ARTIFICIAL (param_decl) = 1;
      TREE_READONLY (param_decl) = 1;

      size = substitute_in_expr (size, subst, param_decl);

      param_type_list = tree_cons (NULL_TREE, param_type, param_type_list);
      param_decl_list = chainon (param_decl, param_decl_list);
      args->quick_push (ref);
    }

  self_refs.release ();

  /* Append 'void' to indicate that the number of parameters is fixed.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The 3 lists have been created in reverse order.  */
  param_type_list = nreverse (param_type_list);
  param_decl_list = nreverse (param_decl_list);

  /* Build the function type.  */
  return_type = TREE_TYPE (size);
  fntype = build_function_type (return_type, param_type_list);

  /* Build the function declaration.  */
  sprintf (buf, "SZ" HOST_WIDE_INT_PRINT_UNSIGNED, fnno++);
  fnname = get_file_function_name (buf);
  fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype);
  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = fndecl;
  DECL_ARGUMENTS (fndecl) = param_decl_list;
  DECL_RESULT (fndecl)
    = build_decl (input_location, RESULT_DECL, 0, return_type);
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The function has been created by the compiler and we don't
     want to emit debug info for it.  */
  DECL_ARTIFICIAL (fndecl) = 1;
  DECL_IGNORED_P (fndecl) = 1;

  /* It is supposed to be "const" and never throw.  */
  TREE_READONLY (fndecl) = 1;
  TREE_NOTHROW (fndecl) = 1;

  /* We want it to be inlined when this is deemed profitable, as
     well as discarded if every call has been integrated.  */
  DECL_DECLARED_INLINE_P (fndecl) = 1;

  /* It is made up of a unique return statement.  */
  DECL_INITIAL (fndecl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (fndecl)) = fndecl;
  t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl), size);
  DECL_SAVED_TREE (fndecl) = build1 (RETURN_EXPR, void_type_node, t);
  TREE_STATIC (fndecl) = 1;

  /* Put it onto the list of size functions.  */
  vec_safe_push (size_functions, fndecl);

  /* Replace the original expression with a call to the size function.  */
  return build_call_expr_loc_vec (UNKNOWN_LOCATION, fndecl, args);
}
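
/* For illustration (the record and field names here are hypothetical):
   given an Ada-style record whose size expression is
   PLACEHOLDER_EXPR.n * 8, the code above conceptually builds

     static <sizetype> SZ0 (<sizetype> p0) { return p0 * 8; }

   and replaces the size expression with the call SZ0 (REF.n), where
   REF.n is the self-referential component reference collected into
   SELF_REFS.  The name SZ0 follows the "SZ" prefix used above.  */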

/* Take, queue and compile all the size functions.  It is essential that
   the size functions be gimplified at the very end of the compilation
   in order to guarantee transparent handling of self-referential sizes.
   Otherwise the GENERIC inliner would not be able to inline them back
   at each of their call sites, thus creating artificial non-constant
   size expressions which would trigger nasty problems later on.  */

void
finalize_size_functions (void)
{
  unsigned int i;
  tree fndecl;

  for (i = 0; size_functions && size_functions->iterate (i, &fndecl); i++)
    {
      allocate_struct_function (fndecl, false);
      set_cfun (NULL);
      dump_function (TDI_original, fndecl);
      gimplify_function_tree (fndecl);
      cgraph_node::finalize_function (fndecl, false);
    }

  vec_free (size_functions);
}
\f
/* Return the machine mode to use for a nonscalar of SIZE bits.  The
   mode must be in class MCLASS, and have exactly that many value bits;
   it may have padding as well.  If LIMIT is nonzero, modes wider
   than MAX_FIXED_MODE_SIZE will not be used.  */

machine_mode
mode_for_size (unsigned int size, enum mode_class mclass, int limit)
{
  machine_mode mode;
  int i;

  if (limit && size > MAX_FIXED_MODE_SIZE)
    return BLKmode;

  /* Get the first mode which has this size, in the specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_PRECISION (mode) == size)
      return mode;

  if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
    for (i = 0; i < NUM_INT_N_ENTS; i++)
      if (int_n_data[i].bitsize == size
          && int_n_enabled_p[i])
        return int_n_data[i].m;

  return BLKmode;
}
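
/* Usage sketch; the resulting modes are target-dependent assumptions,
   SImode/DImode being what a typical 32-bit target would return.  */
#if 0
  machine_mode m32 = mode_for_size (32, MODE_INT, 1);  /* SImode.  */
  machine_mode m64 = mode_for_size (64, MODE_INT, 1);  /* DImode.  */
  machine_mode m24 = mode_for_size (24, MODE_INT, 1);  /* BLKmode: no
                                                          24-bit integer
                                                          mode.  */
#endif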

/* Similar, except passed a tree node.  */

machine_mode
mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
{
  unsigned HOST_WIDE_INT uhwi;
  unsigned int ui;

  if (!tree_fits_uhwi_p (size))
    return BLKmode;
  uhwi = tree_to_uhwi (size);
  ui = uhwi;
  if (uhwi != ui)
    return BLKmode;
  return mode_for_size (ui, mclass, limit);
}

/* Similar, but never return BLKmode; return the narrowest mode that
   contains at least the requested number of value bits.  */

machine_mode
smallest_mode_for_size (unsigned int size, enum mode_class mclass)
{
  machine_mode mode = VOIDmode;
  int i;

  /* Get the first mode which has at least this size, in the
     specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_PRECISION (mode) >= size)
      break;

  if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
    for (i = 0; i < NUM_INT_N_ENTS; i++)
      if (int_n_data[i].bitsize >= size
          && int_n_data[i].bitsize < GET_MODE_PRECISION (mode)
          && int_n_enabled_p[i])
        mode = int_n_data[i].m;

  if (mode == VOIDmode)
    gcc_unreachable ();

  return mode;
}

/* Find an integer mode of the exact same size, or BLKmode on failure.  */

machine_mode
int_mode_for_mode (machine_mode mode)
{
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
      break;

    case MODE_COMPLEX_INT:
    case MODE_COMPLEX_FLOAT:
    case MODE_FLOAT:
    case MODE_DECIMAL_FLOAT:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_FRACT:
    case MODE_ACCUM:
    case MODE_UFRACT:
    case MODE_UACCUM:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_UACCUM:
    case MODE_POINTER_BOUNDS:
      mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
      break;

    case MODE_RANDOM:
      if (mode == BLKmode)
        break;

      /* ... fall through ...  */

    case MODE_CC:
    default:
      gcc_unreachable ();
    }

  return mode;
}

/* Find a mode that can be used for efficient bitwise operations on MODE.
   Return BLKmode if no such mode exists.  */

machine_mode
bitwise_mode_for_mode (machine_mode mode)
{
  /* Quick exit if we already have a suitable mode.  */
  unsigned int bitsize = GET_MODE_BITSIZE (mode);
  if (SCALAR_INT_MODE_P (mode) && bitsize <= MAX_FIXED_MODE_SIZE)
    return mode;

  /* Reuse the sanity checks from int_mode_for_mode.  */
  gcc_checking_assert ((int_mode_for_mode (mode), true));

  /* Try to replace complex modes with complex modes.  In general we
     expect both components to be processed independently, so we only
     care whether there is a register for the inner mode.  */
  if (COMPLEX_MODE_P (mode))
    {
      machine_mode trial = mode;
      if (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT)
        trial = mode_for_size (bitsize, MODE_COMPLEX_INT, false);
      if (trial != BLKmode
          && have_regs_of_mode[GET_MODE_INNER (trial)])
        return trial;
    }

  /* Try to replace vector modes with vector modes.  Also try using vector
     modes if an integer mode would be too big.  */
  if (VECTOR_MODE_P (mode) || bitsize > MAX_FIXED_MODE_SIZE)
    {
      machine_mode trial = mode;
      if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
        trial = mode_for_size (bitsize, MODE_VECTOR_INT, 0);
      if (trial != BLKmode
          && have_regs_of_mode[trial]
          && targetm.vector_mode_supported_p (trial))
        return trial;
    }

  /* Otherwise fall back on integers while honoring MAX_FIXED_MODE_SIZE.  */
  return mode_for_size (bitsize, MODE_INT, true);
}

/* Find a type that can be used for efficient bitwise operations on MODE.
   Return null if no such mode exists.  */

tree
bitwise_type_for_mode (machine_mode mode)
{
  mode = bitwise_mode_for_mode (mode);
  if (mode == BLKmode)
    return NULL_TREE;

  unsigned int inner_size = GET_MODE_UNIT_BITSIZE (mode);
  tree inner_type = build_nonstandard_integer_type (inner_size, true);

  if (VECTOR_MODE_P (mode))
    return build_vector_type_for_mode (inner_type, mode);

  if (COMPLEX_MODE_P (mode))
    return build_complex_type (inner_type);

  gcc_checking_assert (GET_MODE_INNER (mode) == mode);
  return inner_type;
}
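
/* Illustrative sketch: for a 128-bit float vector mode such as V4SFmode,
   bitwise_mode_for_mode looks for a same-sized integer vector mode with
   a usable register class (which 128-bit integer vector mode comes back
   is a target-dependent assumption here), and bitwise_type_for_mode
   wraps the result in an unsigned integer vector type.  */
#if 0
  machine_mode m = bitwise_mode_for_mode (V4SFmode); /* A 128-bit integer
                                                        vector mode, or
                                                        BLKmode.  */
  tree t = bitwise_type_for_mode (V4SFmode);         /* Matching unsigned
                                                        integer vector type,
                                                        or NULL_TREE.  */
#endif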

/* Find a mode that is suitable for representing a vector with
   NUNITS elements of mode INNERMODE.  Returns BLKmode if there
   is no suitable mode.  */

machine_mode
mode_for_vector (machine_mode innermode, unsigned nunits)
{
  machine_mode mode;

  /* First, look for a supported vector type.  */
  if (SCALAR_FLOAT_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_FLOAT;
  else if (SCALAR_FRACT_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_FRACT;
  else if (SCALAR_UFRACT_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_UFRACT;
  else if (SCALAR_ACCUM_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_ACCUM;
  else if (SCALAR_UACCUM_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_UACCUM;
  else
    mode = MIN_MODE_VECTOR_INT;

  /* Do not check vector_mode_supported_p here.  We'll do that
     later in vector_type_mode.  */
  for (; mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_NUNITS (mode) == nunits
        && GET_MODE_INNER (mode) == innermode)
      break;

  /* For integers, try mapping it to a same-sized scalar mode.  */
  if (mode == VOIDmode
      && GET_MODE_CLASS (innermode) == MODE_INT)
    mode = mode_for_size (nunits * GET_MODE_BITSIZE (innermode),
                          MODE_INT, 0);

  if (mode == VOIDmode
      || (GET_MODE_CLASS (mode) == MODE_INT
          && !have_regs_of_mode[mode]))
    return BLKmode;

  return mode;
}
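
/* Usage sketch (V4SImode is an assumption about the target):  */
#if 0
  machine_mode v = mode_for_vector (SImode, 4); /* V4SImode if the target
                                                   defines it; otherwise the
                                                   same-sized scalar integer
                                                   mode or BLKmode.  */
#endif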

/* Return the alignment of MODE.  This will be bounded by 1 and
   BIGGEST_ALIGNMENT.  */

unsigned int
get_mode_alignment (machine_mode mode)
{
  return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
}

/* Return the natural mode of an array, given that it is SIZE bytes in
   total and has elements of type ELEM_TYPE.  */

static machine_mode
mode_for_array (tree elem_type, tree size)
{
  tree elem_size;
  unsigned HOST_WIDE_INT int_size, int_elem_size;
  bool limit_p;

  /* One-element arrays get the component type's mode.  */
  elem_size = TYPE_SIZE (elem_type);
  if (simple_cst_equal (size, elem_size))
    return TYPE_MODE (elem_type);

  limit_p = true;
  if (tree_fits_uhwi_p (size) && tree_fits_uhwi_p (elem_size))
    {
      int_size = tree_to_uhwi (size);
      int_elem_size = tree_to_uhwi (elem_size);
      if (int_elem_size > 0
          && int_size % int_elem_size == 0
          && targetm.array_mode_supported_p (TYPE_MODE (elem_type),
                                             int_size / int_elem_size))
        limit_p = false;
    }
  return mode_for_size_tree (size, MODE_INT, limit_p);
}
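
/* Worked example (assuming 32-bit int and a target that defines TImode):
   a 4-element array of ints is 128 bits, so mode_for_array can return
   TImode when the target's array_mode_supported_p hook approves a
   4 x SImode array, because LIMIT_P is cleared and the
   MAX_FIXED_MODE_SIZE cap no longer applies; otherwise such arrays
   get BLKmode.  */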
\f
/* Subroutine of layout_decl: Force alignment required for the data type.
   But if the decl itself wants greater alignment, don't override that.  */

static inline void
do_type_align (tree type, tree decl)
{
  if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
    {
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      if (TREE_CODE (decl) == FIELD_DECL)
        DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
    }
}

/* Set the size, mode and alignment of a ..._DECL node.
   TYPE_DECL does need this for C++.
   Note that LABEL_DECL and CONST_DECL nodes do not need this,
   and FUNCTION_DECL nodes have them set up in a special (and simple) way.
   Don't call layout_decl for them.

   KNOWN_ALIGN is the amount of alignment we can assume this
   decl has with no special effort.  It is relevant only for FIELD_DECLs
   and depends on the previous fields.
   All that matters about KNOWN_ALIGN is which powers of 2 divide it.
   If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
   the record will be aligned to suit.  */

void
layout_decl (tree decl, unsigned int known_align)
{
  tree type = TREE_TYPE (decl);
  enum tree_code code = TREE_CODE (decl);
  rtx rtl = NULL_RTX;
  location_t loc = DECL_SOURCE_LOCATION (decl);

  if (code == CONST_DECL)
    return;

  gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
              || code == TYPE_DECL || code == FIELD_DECL);

  rtl = DECL_RTL_IF_SET (decl);

  if (type == error_mark_node)
    type = void_type_node;

  /* Usually the size and mode come from the data type without change,
     however, the front end may set the explicit width of the field, so its
     size may not be the same as the size of its type.  This happens with
     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
     also happens with other fields.  For example, the C++ front end creates
     zero-sized fields corresponding to empty base classes, and depends on
     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
     size in bytes from the size in bits.  If we have already set the mode,
     don't set it again since we can be called twice for FIELD_DECLs.  */

  DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
  if (DECL_MODE (decl) == VOIDmode)
    DECL_MODE (decl) = TYPE_MODE (type);

  if (DECL_SIZE (decl) == 0)
    {
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
    }
  else if (DECL_SIZE_UNIT (decl) == 0)
    DECL_SIZE_UNIT (decl)
      = fold_convert_loc (loc, sizetype,
                          size_binop_loc (loc, CEIL_DIV_EXPR, DECL_SIZE (decl),
                                          bitsize_unit_node));

  if (code != FIELD_DECL)
    /* For non-fields, update the alignment from the type.  */
    do_type_align (type, decl);
  else
    /* For fields, it's a bit more complicated...  */
    {
      bool old_user_align = DECL_USER_ALIGN (decl);
      bool zero_bitfield = false;
      bool packed_p = DECL_PACKED (decl);
      unsigned int mfa;

      if (DECL_BIT_FIELD (decl))
        {
          DECL_BIT_FIELD_TYPE (decl) = type;

          /* A zero-length bit-field affects the alignment of the next
             field.  In essence such bit-fields are not influenced by
             any packing due to #pragma pack or attribute packed.  */
          if (integer_zerop (DECL_SIZE (decl))
              && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
            {
              zero_bitfield = true;
              packed_p = false;
              if (PCC_BITFIELD_TYPE_MATTERS)
                do_type_align (type, decl);
              else
                {
#ifdef EMPTY_FIELD_BOUNDARY
                  if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
                    {
                      DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
                      DECL_USER_ALIGN (decl) = 0;
                    }
#endif
                }
            }

          /* See if we can use an ordinary integer mode for a bit-field.
             Conditions are: a fixed size that is correct for another mode,
             occupying a complete byte or bytes on a proper boundary.  */
          if (TYPE_SIZE (type) != 0
              && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
              && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
            {
              machine_mode xmode
                = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
              unsigned int xalign = GET_MODE_ALIGNMENT (xmode);

              if (xmode != BLKmode
                  && !(xalign > BITS_PER_UNIT && DECL_PACKED (decl))
                  && (known_align == 0 || known_align >= xalign))
                {
                  DECL_ALIGN (decl) = MAX (xalign, DECL_ALIGN (decl));
                  DECL_MODE (decl) = xmode;
                  DECL_BIT_FIELD (decl) = 0;
                }
            }

          /* Turn off DECL_BIT_FIELD if we won't need it set.  */
          if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
              && known_align >= TYPE_ALIGN (type)
              && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
            DECL_BIT_FIELD (decl) = 0;
        }
      else if (packed_p && DECL_USER_ALIGN (decl))
        /* Don't touch DECL_ALIGN.  For other packed fields, go ahead and
           round up; we'll reduce it again below.  We want packing to
           supersede USER_ALIGN inherited from the type, but defer to
           alignment explicitly specified on the field decl.  */;
      else
        do_type_align (type, decl);

      /* If the field is packed and not explicitly aligned, give it the
         minimum alignment.  Note that do_type_align may set
         DECL_USER_ALIGN, so we need to check old_user_align instead.  */
      if (packed_p
          && !old_user_align)
        DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);

      if (! packed_p && ! DECL_USER_ALIGN (decl))
        {
          /* Some targets (e.g. i386, VMS) limit struct field alignment
             to a lower boundary than alignment of variables unless
             it was overridden by attribute aligned.  */
#ifdef BIGGEST_FIELD_ALIGNMENT
          DECL_ALIGN (decl)
            = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
#endif
#ifdef ADJUST_FIELD_ALIGN
          DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
#endif
        }

      if (zero_bitfield)
        mfa = initial_max_fld_align * BITS_PER_UNIT;
      else
        mfa = maximum_field_alignment;
      /* Should this be controlled by DECL_USER_ALIGN, too?  */
      if (mfa != 0)
        DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), mfa);
    }

  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
  if (DECL_SIZE_UNIT (decl) != 0
      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));

  /* If requested, warn about definitions of large data objects.  */
  if (warn_larger_than
      && (code == VAR_DECL || code == PARM_DECL)
      && ! DECL_EXTERNAL (decl))
    {
      tree size = DECL_SIZE_UNIT (decl);

      if (size != 0 && TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, larger_than_size) > 0)
        {
          int size_as_int = TREE_INT_CST_LOW (size);

          if (compare_tree_int (size, size_as_int) == 0)
            warning (OPT_Wlarger_than_, "size of %q+D is %d bytes", decl, size_as_int);
          else
            warning (OPT_Wlarger_than_, "size of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
        }
    }

  /* If the RTL was already set, update its mode and mem attributes.  */
  if (rtl)
    {
      PUT_MODE (rtl, DECL_MODE (decl));
      SET_DECL_RTL (decl, 0);
      if (MEM_P (rtl))
        set_mem_attributes (rtl, decl, 1);
      SET_DECL_RTL (decl, rtl);
    }
}
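
/* Usage sketch: a front end creating an ordinary variable relies on
   layout_decl to copy size, mode and alignment from the type.  */
#if 0
  tree var = build_decl (input_location, VAR_DECL, get_identifier ("x"),
                         integer_type_node);
  layout_decl (var, 0);
  /* Now DECL_SIZE (var) == TYPE_SIZE (integer_type_node)
     and DECL_MODE (var) == TYPE_MODE (integer_type_node).  */
#endif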

/* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of
   a previous call to layout_decl and calls it again.  */

void
relayout_decl (tree decl)
{
  DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
  DECL_MODE (decl) = VOIDmode;
  if (!DECL_USER_ALIGN (decl))
    DECL_ALIGN (decl) = 0;
  SET_DECL_RTL (decl, 0);

  layout_decl (decl, 0);
}
\f
/* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
   QUAL_UNION_TYPE.  Return a pointer to a struct record_layout_info which
   is to be passed to all other layout functions for this record.  It is the
   responsibility of the caller to call `free' for the storage returned.
   Note that garbage collection is not permitted until we finish laying
   out the record.  */

record_layout_info
start_record_layout (tree t)
{
  record_layout_info rli = XNEW (struct record_layout_info_s);

  rli->t = t;

  /* If the type has a minimum specified alignment (via an attribute
     declaration, for example) use it -- otherwise, start with a
     one-byte alignment.  */
  rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
  rli->unpacked_align = rli->record_align;
  rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);

#ifdef STRUCTURE_SIZE_BOUNDARY
  /* Packed structures don't need to have minimum size.  */
  if (! TYPE_PACKED (t))
    {
      unsigned tmp;

      /* #pragma pack overrides STRUCTURE_SIZE_BOUNDARY.  */
      tmp = (unsigned) STRUCTURE_SIZE_BOUNDARY;
      if (maximum_field_alignment != 0)
        tmp = MIN (tmp, maximum_field_alignment);
      rli->record_align = MAX (rli->record_align, tmp);
    }
#endif

  rli->offset = size_zero_node;
  rli->bitpos = bitsize_zero_node;
  rli->prev_field = 0;
  rli->pending_statics = 0;
  rli->packed_maybe_necessary = 0;
  rli->remaining_in_alignment = 0;

  return rli;
}
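
/* Usage sketch of the expected driver loop; finish_record_layout is
   defined elsewhere in this file.  */
#if 0
  record_layout_info rli = start_record_layout (t);
  for (tree field = TYPE_FIELDS (t); field; field = DECL_CHAIN (field))
    place_field (rli, field);
  finish_record_layout (rli, /*free_p=*/true);
#endif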

/* Return the combined bit position for the byte offset OFFSET and the
   bit position BITPOS.

   These functions operate on byte and bit positions present in FIELD_DECLs
   and assume that these expressions result in no (intermediate) overflow.
   This assumption is necessary to fold the expressions as much as possible,
   so as to avoid creating artificially variable-sized types in languages
   supporting variable-sized types like Ada.  */

tree
bit_from_pos (tree offset, tree bitpos)
{
  if (TREE_CODE (offset) == PLUS_EXPR)
    offset = size_binop (PLUS_EXPR,
                         fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
                         fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
  else
    offset = fold_convert (bitsizetype, offset);
  return size_binop (PLUS_EXPR, bitpos,
                     size_binop (MULT_EXPR, offset, bitsize_unit_node));
}

/* Return the combined truncated byte position for the byte offset OFFSET and
   the bit position BITPOS.  */

tree
byte_from_pos (tree offset, tree bitpos)
{
  tree bytepos;
  if (TREE_CODE (bitpos) == MULT_EXPR
      && tree_int_cst_equal (TREE_OPERAND (bitpos, 1), bitsize_unit_node))
    bytepos = TREE_OPERAND (bitpos, 0);
  else
    bytepos = size_binop (TRUNC_DIV_EXPR, bitpos, bitsize_unit_node);
  return size_binop (PLUS_EXPR, offset, fold_convert (sizetype, bytepos));
}
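
/* Worked example with BITS_PER_UNIT == 8: for OFFSET == 2 and
   BITPOS == 5, bit_from_pos yields 2 * 8 + 5 == 21 bits, and
   byte_from_pos truncates 5 / 8 to 0 and yields byte 2.  */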

/* Split the bit position POS into a byte offset *POFFSET and a bit
   position *PBITPOS with the byte offset aligned to OFF_ALIGN bits.  */

void
pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
              tree pos)
{
  tree toff_align = bitsize_int (off_align);
  if (TREE_CODE (pos) == MULT_EXPR
      && tree_int_cst_equal (TREE_OPERAND (pos, 1), toff_align))
    {
      *poffset = size_binop (MULT_EXPR,
                             fold_convert (sizetype, TREE_OPERAND (pos, 0)),
                             size_int (off_align / BITS_PER_UNIT));
      *pbitpos = bitsize_zero_node;
    }
  else
    {
      *poffset = size_binop (MULT_EXPR,
                             fold_convert (sizetype,
                                           size_binop (FLOOR_DIV_EXPR, pos,
                                                       toff_align)),
                             size_int (off_align / BITS_PER_UNIT));
      *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, toff_align);
    }
}

/* Given a pointer to bit and byte offsets and an offset alignment,
   normalize the offsets so they are within the alignment.  */

void
normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
{
  /* If the bit position is now larger than it should be, adjust it
     downwards.  */
  if (compare_tree_int (*pbitpos, off_align) >= 0)
    {
      tree offset, bitpos;
      pos_from_bit (&offset, &bitpos, off_align, *pbitpos);
      *poffset = size_binop (PLUS_EXPR, *poffset, offset);
      *pbitpos = bitpos;
    }
}
02e7a332 | 931 | /* Print debugging information about the information in RLI. */ |
83675f44 | 932 | |
4b987fac | 933 | DEBUG_FUNCTION void |
60b8c5b3 | 934 | debug_rli (record_layout_info rli) |
83675f44 | 935 | { |
02e7a332 | 936 | print_node_brief (stderr, "type", rli->t, 0); |
937 | print_node_brief (stderr, "\noffset", rli->offset, 0); | |
938 | print_node_brief (stderr, " bitpos", rli->bitpos, 0); | |
83675f44 | 939 | |
f5712181 | 940 | fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n", |
941 | rli->record_align, rli->unpacked_align, | |
38ac5984 | 942 | rli->offset_align); |
7bd4091f | 943 | |
944 | /* The ms_struct code is the only that uses this. */ | |
945 | if (targetm.ms_bitfield_layout_p (rli->t)) | |
674b377b | 946 | fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment); |
7bd4091f | 947 | |
02e7a332 | 948 | if (rli->packed_maybe_necessary) |
949 | fprintf (stderr, "packed may be necessary\n"); | |
950 | ||
f1f41a6c | 951 | if (!vec_safe_is_empty (rli->pending_statics)) |
02e7a332 | 952 | { |
953 | fprintf (stderr, "pending statics:\n"); | |
364ba361 | 954 | debug_vec_tree (rli->pending_statics); |
02e7a332 | 955 | } |
956 | } | |
957 | ||
958 | /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and | |
959 | BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */ | |
960 | ||
961 | void | |
60b8c5b3 | 962 | normalize_rli (record_layout_info rli) |
02e7a332 | 963 | { |
6d731e4d | 964 | normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align); |
02e7a332 | 965 | } |
83675f44 | 966 | |
02e7a332 | 967 | /* Returns the size in bytes allocated so far. */ |
968 | ||
969 | tree | |
60b8c5b3 | 970 | rli_size_unit_so_far (record_layout_info rli) |
02e7a332 | 971 | { |
6d731e4d | 972 | return byte_from_pos (rli->offset, rli->bitpos); |
02e7a332 | 973 | } |
974 | ||
975 | /* Returns the size in bits allocated so far. */ | |
976 | ||
977 | tree | |
60b8c5b3 | 978 | rli_size_so_far (record_layout_info rli) |
02e7a332 | 979 | { |
6d731e4d | 980 | return bit_from_pos (rli->offset, rli->bitpos); |
02e7a332 | 981 | } |
982 | ||
4b387c35 | 983 | /* FIELD is about to be added to RLI->T. The alignment (in bits) of |
23325b33 | 984 | the next available location within the record is given by KNOWN_ALIGN. |
985 | Update the variable alignment fields in RLI, and return the alignment | |
986 | to give the FIELD. */ | |
02e7a332 | 987 | |
4ee9c684 | 988 | unsigned int |
60b8c5b3 | 989 | update_alignment_for_field (record_layout_info rli, tree field, |
990 | unsigned int known_align) | |
99f4e085 | 991 | { |
992 | /* The alignment required for FIELD. */ | |
993 | unsigned int desired_align; | |
99f4e085 | 994 | /* The type of this field. */ |
995 | tree type = TREE_TYPE (field); | |
4b387c35 | 996 | /* True if the field was explicitly aligned by the user. */ |
997 | bool user_align; | |
f5712181 | 998 | bool is_bitfield; |
99f4e085 | 999 | |
f6cf83a8 | 1000 | /* Do not attempt to align an ERROR_MARK node */ |
1001 | if (TREE_CODE (type) == ERROR_MARK) | |
1002 | return 0; | |
1003 | ||
f5712181 | 1004 | /* Lay out the field so we know what alignment it needs. */ |
1005 | layout_decl (field, known_align); | |
02e7a332 | 1006 | desired_align = DECL_ALIGN (field); |
aca14577 | 1007 | user_align = DECL_USER_ALIGN (field); |
02e7a332 | 1008 | |
f5712181 | 1009 | is_bitfield = (type != error_mark_node |
1010 | && DECL_BIT_FIELD_TYPE (field) | |
1011 | && ! integer_zerop (TYPE_SIZE (type))); | |
f2cfea4a | 1012 | |
99f4e085 | 1013 | /* Record must have at least as much alignment as any field. |
1014 | Otherwise, the alignment of the field within the record is | |
1015 | meaningless. */ | |
7bd4091f | 1016 | if (targetm.ms_bitfield_layout_p (rli->t)) |
f04f096b | 1017 | { |
8642f3d3 | 1018 | /* Here, the alignment of the underlying type of a bitfield can |
1019 | affect the alignment of a record; even a zero-sized field | |
1020 | can do this. The alignment should be to the alignment of | |
1021 | the type, except that for zero-size bitfields this only | |
f712a0dc | 1022 | applies if there was an immediately prior, nonzero-size |
8642f3d3 | 1023 | bitfield. (That's the way it is, experimentally.) */ |
089ea875 | 1024 | if ((!is_bitfield && !DECL_PACKED (field)) |
a6949f31 | 1025 | || ((DECL_SIZE (field) == NULL_TREE |
1026 | || !integer_zerop (DECL_SIZE (field))) | |
7bd4091f | 1027 | ? !DECL_PACKED (field) |
1028 | : (rli->prev_field | |
1029 | && DECL_BIT_FIELD_TYPE (rli->prev_field) | |
1030 | && ! integer_zerop (DECL_SIZE (rli->prev_field))))) | |
f04f096b | 1031 | { |
8642f3d3 | 1032 | unsigned int type_align = TYPE_ALIGN (type); |
1033 | type_align = MAX (type_align, desired_align); | |
1034 | if (maximum_field_alignment != 0) | |
1035 | type_align = MIN (type_align, maximum_field_alignment); | |
1036 | rli->record_align = MAX (rli->record_align, type_align); | |
f04f096b | 1037 | rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type)); |
1038 | } | |
40734805 | 1039 | } |
f5712181 | 1040 | else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS) |
99f4e085 | 1041 | { |
4975da72 | 1042 | /* Named bit-fields cause the entire structure to have the |
25ba5be6 | 1043 | alignment implied by their type. Some targets also apply the same |
1044 | rules to unnamed bitfields. */ | |
1045 | if (DECL_NAME (field) != 0 | |
1046 | || targetm.align_anon_bitfield ()) | |
f2cfea4a | 1047 | { |
99f4e085 | 1048 | unsigned int type_align = TYPE_ALIGN (type); |
fe352cf1 | 1049 | |
77d0f168 | 1050 | #ifdef ADJUST_FIELD_ALIGN |
1051 | if (! TYPE_USER_ALIGN (type)) | |
1052 | type_align = ADJUST_FIELD_ALIGN (field, type_align); | |
1053 | #endif | |
1054 | ||
7c68c953 | 1055 | /* Targets might chose to handle unnamed and hence possibly |
1056 | zero-width bitfield. Those are not influenced by #pragmas | |
1057 | or packed attributes. */ | |
1058 | if (integer_zerop (DECL_SIZE (field))) | |
1059 | { | |
1060 | if (initial_max_fld_align) | |
1061 | type_align = MIN (type_align, | |
1062 | initial_max_fld_align * BITS_PER_UNIT); | |
1063 | } | |
1064 | else if (maximum_field_alignment != 0) | |
99f4e085 | 1065 | type_align = MIN (type_align, maximum_field_alignment); |
1066 | else if (DECL_PACKED (field)) | |
1067 | type_align = MIN (type_align, BITS_PER_UNIT); | |
87994a83 | 1068 | |
4975da72 | 1069 | /* The alignment of the record is increased to the maximum |
1070 | of the current alignment, the alignment indicated on the | |
1071 | field (i.e., the alignment specified by an __aligned__ | |
1072 | attribute), and the alignment indicated by the type of | |
1073 | the field. */ | |
1074 | rli->record_align = MAX (rli->record_align, desired_align); | |
99f4e085 | 1075 | rli->record_align = MAX (rli->record_align, type_align); |
4975da72 | 1076 | |
fca12917 | 1077 | if (warn_packed) |
38ac5984 | 1078 | rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type)); |
fced8f42 | 1079 | user_align |= TYPE_USER_ALIGN (type); |
fca12917 | 1080 | } |
99f4e085 | 1081 | } |
f5712181 | 1082 | else |
99f4e085 | 1083 | { |
1084 | rli->record_align = MAX (rli->record_align, desired_align); | |
02e7a332 | 1085 | rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type)); |
99f4e085 | 1086 | } |
fca12917 | 1087 | |
4b387c35 | 1088 | TYPE_USER_ALIGN (rli->t) |= user_align; |
1089 | ||
1090 | return desired_align; | |
1091 | } | |
1092 | ||
1093 | /* Called from place_field to handle unions. */ | |
1094 | ||
1095 | static void | |
60b8c5b3 | 1096 | place_union_field (record_layout_info rli, tree field) |
4b387c35 | 1097 | { |
1098 | update_alignment_for_field (rli, field, /*known_align=*/0); | |
1099 | ||
1100 | DECL_FIELD_OFFSET (field) = size_zero_node; | |
1101 | DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node; | |
1102 | SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT); | |
1103 | ||
7bd4091f | 1104 | /* If this is an ERROR_MARK return *after* having set the |
f6cf83a8 | 1105 | field at the start of the union. This helps when parsing |
1106 | invalid fields. */ | |
1107 | if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK) | |
1108 | return; | |
1109 | ||
4b387c35 | 1110 | /* We assume the union's size will be a multiple of a byte so we don't |
1111 | bother with BITPOS. */ | |
1112 | if (TREE_CODE (rli->t) == UNION_TYPE) | |
1113 | rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field)); | |
1114 | else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE) | |
178825bb | 1115 | rli->offset = fold_build3 (COND_EXPR, sizetype, DECL_QUALIFIER (field), |
faa43f85 | 1116 | DECL_SIZE_UNIT (field), rli->offset); |
4b387c35 | 1117 | } |
1118 | ||
805e22b2 | 1119 | /* A bitfield of SIZE with a required access alignment of ALIGN is allocated |
a8b24921 | 1120 | at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more |
805e22b2 | 1121 | units of alignment than the underlying TYPE. */ |
1122 | static int | |
60b8c5b3 | 1123 | excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset, |
1124 | HOST_WIDE_INT size, HOST_WIDE_INT align, tree type) | |
805e22b2 | 1125 | { |
1126 | /* Note that the calculation of OFFSET might overflow; we calculate it so | |
1127 | that we still get the right result as long as ALIGN is a power of two. */ | |
1128 | unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset; | |
1129 | ||
1130 | offset = offset % align; | |
1131 | return ((offset + size + align - 1) / align | |
aa59f000 | 1132 | > tree_to_uhwi (TYPE_SIZE (type)) / align); |
805e22b2 | 1133 | } |
1134 | ||
4b387c35 | 1135 | /* RLI contains information about the layout of a RECORD_TYPE. FIELD |
1136 | is a FIELD_DECL to be added after those fields already present in | |
1137 | T. (FIELD is not actually added to the TYPE_FIELDS list here; | |
1138 | callers that desire that behavior must manually perform that step.) */ | |
1139 | ||
1140 | void | |
60b8c5b3 | 1141 | place_field (record_layout_info rli, tree field) |
4b387c35 | 1142 | { |
1143 | /* The alignment required for FIELD. */ | |
1144 | unsigned int desired_align; | |
1145 | /* The alignment FIELD would have if we just dropped it into the | |
1146 | record as it presently stands. */ | |
1147 | unsigned int known_align; | |
1148 | unsigned int actual_align; | |
1149 | /* The type of this field. */ | |
1150 | tree type = TREE_TYPE (field); | |
1151 | ||
65a7c526 | 1152 | gcc_assert (TREE_CODE (field) != ERROR_MARK); |
4b387c35 | 1153 | |
1154 | /* If FIELD is static, then treat it like a separate variable, not | |
1155 | really like a structure field. If it is a FUNCTION_DECL, it's a | |
1156 | method. In both cases, all we do is lay out the decl, and we do | |
1157 | it *after* the record is laid out. */ | |
1158 | if (TREE_CODE (field) == VAR_DECL) | |
1159 | { | |
f1f41a6c | 1160 | vec_safe_push (rli->pending_statics, field); |
4b387c35 | 1161 | return; |
1162 | } | |
1163 | ||
1164 | /* Enumerators and enum types which are local to this class need not | |
1165 | be laid out. Likewise for initialized constant fields. */ | |
1166 | else if (TREE_CODE (field) != FIELD_DECL) | |
1167 | return; | |
1168 | ||
1169 | /* Unions are laid out very differently than records, so split | |
1170 | that code off to another function. */ | |
1171 | else if (TREE_CODE (rli->t) != RECORD_TYPE) | |
1172 | { | |
1173 | place_union_field (rli, field); | |
1174 | return; | |
1175 | } | |
1176 | ||
7bd4091f | 1177 | else if (TREE_CODE (type) == ERROR_MARK) |
f6cf83a8 | 1178 | { |
1179 | /* Place this field at the current allocation position, so we | |
1180 | maintain monotonicity. */ | |
1181 | DECL_FIELD_OFFSET (field) = rli->offset; | |
1182 | DECL_FIELD_BIT_OFFSET (field) = rli->bitpos; | |
1183 | SET_DECL_OFFSET_ALIGN (field, rli->offset_align); | |
1184 | return; | |
1185 | } | |
1186 | ||
4b387c35 | 1187 | /* Work out the known alignment so far. Note that A & (-A) is the |
1188 | value of the least-significant bit in A that is one. */ | |
1189 | if (! integer_zerop (rli->bitpos)) | |
e913b5cd | 1190 | known_align = (tree_to_uhwi (rli->bitpos) |
1191 | & - tree_to_uhwi (rli->bitpos)); | |
4b387c35 | 1192 | else if (integer_zerop (rli->offset)) |
23325b33 | 1193 | known_align = 0; |
e913b5cd | 1194 | else if (tree_fits_uhwi_p (rli->offset)) |
4b387c35 | 1195 | known_align = (BITS_PER_UNIT |
e913b5cd | 1196 | * (tree_to_uhwi (rli->offset) |
1197 | & - tree_to_uhwi (rli->offset))); | |
4b387c35 | 1198 | else |
1199 | known_align = rli->offset_align; | |
60b8c5b3 | 1200 | |
4b387c35 | 1201 | desired_align = update_alignment_for_field (rli, field, known_align); |
23325b33 | 1202 | if (known_align == 0) |
1203 | known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align); | |
4b387c35 | 1204 | |
99f4e085 | 1205 | if (warn_packed && DECL_PACKED (field)) |
1206 | { | |
f5712181 | 1207 | if (known_align >= TYPE_ALIGN (type)) |
fca12917 | 1208 | { |
99f4e085 | 1209 | if (TYPE_ALIGN (type) > desired_align) |
fca12917 | 1210 | { |
99f4e085 | 1211 | if (STRICT_ALIGNMENT) |
3cf8b391 | 1212 | warning (OPT_Wattributes, "packed attribute causes " |
1213 | "inefficient alignment for %q+D", field); | |
acca8c42 | 1214 | /* Don't warn if DECL_PACKED was set by the type. */ |
1215 | else if (!TYPE_PACKED (rli->t)) | |
3cf8b391 | 1216 | warning (OPT_Wattributes, "packed attribute is " |
1217 | "unnecessary for %q+D", field); | |
fca12917 | 1218 | } |
fca12917 | 1219 | } |
99f4e085 | 1220 | else |
1221 | rli->packed_maybe_necessary = 1; | |
1222 | } | |
f2cfea4a | 1223 | |
99f4e085 | 1224 | /* Does this field automatically have alignment it needs by virtue |
b527cbf0 | 1225 | of the fields that precede it and the record's own alignment? */ |
1226 | if (known_align < desired_align) | |
99f4e085 | 1227 | { |
1228 | /* No, we need to skip space before this field. | |
1229 | Bump the cumulative size to multiple of field alignment. */ | |
f2cfea4a | 1230 | |
b527cbf0 | 1231 | if (!targetm.ms_bitfield_layout_p (rli->t) |
1232 | && DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION) | |
d1251492 | 1233 | warning (OPT_Wpadded, "padding struct to align %q+D", field); |
fca12917 | 1234 | |
02e7a332 | 1235 | /* If the alignment is still within offset_align, just align |
1236 | the bit position. */ | |
1237 | if (desired_align < rli->offset_align) | |
1238 | rli->bitpos = round_up (rli->bitpos, desired_align); | |
99f4e085 | 1239 | else |
1240 | { | |
02e7a332 | 1241 | /* First adjust OFFSET by the partial bits, then align. */ |
1242 | rli->offset | |
1243 | = size_binop (PLUS_EXPR, rli->offset, | |
5d7ed6c7 | 1244 | fold_convert (sizetype, |
1245 | size_binop (CEIL_DIV_EXPR, rli->bitpos, | |
1246 | bitsize_unit_node))); | |
02e7a332 | 1247 | rli->bitpos = bitsize_zero_node; |
1248 | ||
1249 | rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT); | |
f2cfea4a | 1250 | } |
02e7a332 | 1251 | |
53de1faf | 1252 | if (! TREE_CONSTANT (rli->offset)) |
1253 | rli->offset_align = desired_align; | |
b527cbf0 | 1254 | if (targetm.ms_bitfield_layout_p (rli->t)) |
1255 | rli->prev_field = NULL; | |
99f4e085 | 1256 | } |
f2cfea4a | 1257 | |
02e7a332 | 1258 | /* Handle compatibility with PCC. Note that if the record has any |
1259 | variable-sized fields, we need not worry about compatibility. */ | |
99f4e085 | 1260 | if (PCC_BITFIELD_TYPE_MATTERS |
6fb33aa0 | 1261 | && ! targetm.ms_bitfield_layout_p (rli->t) |
99f4e085 | 1262 | && TREE_CODE (field) == FIELD_DECL |
1263 | && type != error_mark_node | |
02e7a332 | 1264 | && DECL_BIT_FIELD (field) |
9fd767c5 | 1265 | && (! DECL_PACKED (field) |
1266 | /* Enter for these packed fields only to issue a warning. */ | |
1267 | || TYPE_ALIGN (type) <= BITS_PER_UNIT) | |
99f4e085 | 1268 | && maximum_field_alignment == 0 |
02e7a332 | 1269 | && ! integer_zerop (DECL_SIZE (field)) |
e913b5cd | 1270 | && tree_fits_uhwi_p (DECL_SIZE (field)) |
e913b5cd | 1271 | && tree_fits_uhwi_p (rli->offset) |
1272 | && tree_fits_uhwi_p (TYPE_SIZE (type))) | |
99f4e085 | 1273 | { |
1274 | unsigned int type_align = TYPE_ALIGN (type); | |
02e7a332 | 1275 | tree dsize = DECL_SIZE (field); |
e913b5cd | 1276 | HOST_WIDE_INT field_size = tree_to_uhwi (dsize); |
69c1cbfa | 1277 | HOST_WIDE_INT offset = tree_to_uhwi (rli->offset); |
e913b5cd | 1278 | HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos); |
99f4e085 | 1279 | |
77d0f168 | 1280 | #ifdef ADJUST_FIELD_ALIGN |
1281 | if (! TYPE_USER_ALIGN (type)) | |
1282 | type_align = ADJUST_FIELD_ALIGN (field, type_align); | |
1283 | #endif | |
1284 | ||
99f4e085 | 1285 | /* A bit field may not span more units of alignment of its type |
1286 | than its type itself. Advance to next boundary if necessary. */ | |
805e22b2 | 1287 | if (excess_unit_span (offset, bit_offset, field_size, type_align, type)) |
9fd767c5 | 1288 | { |
1289 | if (DECL_PACKED (field)) | |
1290 | { | |
7a6a48c9 | 1291 | if (warn_packed_bitfield_compat == 1) |
9fd767c5 | 1292 | inform |
1293 | (input_location, | |
bf776685 | 1294 | "offset of packed bit-field %qD has changed in GCC 4.4", |
9fd767c5 | 1295 | field); |
1296 | } | |
1297 | else | |
178825bb | 1298 | rli->bitpos = round_up (rli->bitpos, type_align); |
9fd767c5 | 1299 | } |
fced8f42 | 1300 | |
9fd767c5 | 1301 | if (! DECL_PACKED (field)) |
1302 | TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type); | |
99f4e085 | 1303 | } |
f2cfea4a | 1304 | |
f2cfea4a | 1305 | #ifdef BITFIELD_NBYTES_LIMITED |
99f4e085 | 1306 | if (BITFIELD_NBYTES_LIMITED |
6fb33aa0 | 1307 | && ! targetm.ms_bitfield_layout_p (rli->t) |
99f4e085 | 1308 | && TREE_CODE (field) == FIELD_DECL |
1309 | && type != error_mark_node | |
1310 | && DECL_BIT_FIELD_TYPE (field) | |
02e7a332 | 1311 | && ! DECL_PACKED (field) |
1312 | && ! integer_zerop (DECL_SIZE (field)) | |
e913b5cd | 1313 | && tree_fits_uhwi_p (DECL_SIZE (field)) |
08f4222b | 1314 | && tree_fits_uhwi_p (rli->offset) |
e913b5cd | 1315 | && tree_fits_uhwi_p (TYPE_SIZE (type))) |
99f4e085 | 1316 | { |
1317 | unsigned int type_align = TYPE_ALIGN (type); | |
02e7a332 | 1318 | tree dsize = DECL_SIZE (field); |
e913b5cd | 1319 | HOST_WIDE_INT field_size = tree_to_uhwi (dsize); |
69c1cbfa | 1320 | HOST_WIDE_INT offset = tree_to_uhwi (rli->offset); |
e913b5cd | 1321 | HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos); |
87994a83 | 1322 | |
77d0f168 | 1323 | #ifdef ADJUST_FIELD_ALIGN |
1324 | if (! TYPE_USER_ALIGN (type)) | |
1325 | type_align = ADJUST_FIELD_ALIGN (field, type_align); | |
1326 | #endif | |
1327 | ||
99f4e085 | 1328 | if (maximum_field_alignment != 0) |
1329 | type_align = MIN (type_align, maximum_field_alignment); | |
1330 | /* ??? This test is the opposite of the test in the containing if
1331 | statement, so this code is unreachable currently. */ | |
1332 | else if (DECL_PACKED (field)) | |
1333 | type_align = MIN (type_align, BITS_PER_UNIT); | |
1334 | ||
1335 | /* A bit field may not span the unit of alignment of its type. | |
1336 | Advance to next boundary if necessary. */ | |
805e22b2 | 1337 | if (excess_unit_span (offset, bit_offset, field_size, type_align, type)) |
02e7a332 | 1338 | rli->bitpos = round_up (rli->bitpos, type_align); |
fced8f42 | 1339 | |
4b387c35 | 1340 | TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type); |
99f4e085 | 1341 | } |
f2cfea4a | 1342 | #endif |
1343 | ||
8642f3d3 | 1344 | /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details. |
1345 | A subtlety: | |
1346 | When a bit field is inserted into a packed record, the whole | |
1347 | size of the underlying type is used by one or more same-size | |
805e22b2 | 1348 | adjacent bitfields. (That is, if it's long:3, 32 bits is
8642f3d3 | 1349 | used in the record, and any additional adjacent long bitfields are |
1350 | packed into the same chunk of 32 bits. However, if the size | |
1351 | changes, a new field of that size is allocated.) In an unpacked | |
de132707 | 1352 | record, this is the same as using alignment, but not equivalent |
805e22b2 | 1353 | when packing. |
8642f3d3 | 1354 | |
de132707 | 1355 | Note: for compatibility, we use the type size, not the type alignment,
8642f3d3 | 1356 | to determine alignment, since that matches the documentation. */
1357 | ||
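/* A sketch of the documented MS rule (assuming 32-bit long and 16-bit
   short): a run of same-size bitfields consumes whole units of the
   underlying type's size, and a size change allocates a new unit.  */
struct ms_bitfield_example
{
  long a : 3;			/* allocates a full 32-bit unit */
  long b : 3;			/* packed into the same 32-bit unit */
  short c : 3;			/* size changed: starts a fresh 16-bit unit */
};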
7bd4091f | 1358 | if (targetm.ms_bitfield_layout_p (rli->t)) |
f04f096b | 1359 | { |
8642f3d3 | 1360 | tree prev_saved = rli->prev_field; |
8aea3a7e | 1361 | tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved) : NULL; |
f04f096b | 1362 | |
7bd4091f | 1363 | /* This is a bitfield if it exists. */ |
1364 | if (rli->prev_field) | |
8642f3d3 | 1365 | { |
1366 | /* If both are bitfields, nonzero, and the same size, this is | |
1367 | the middle of a run. Zero declared size fields are special | |
1368 | and handled as "end of run". (Note: it's nonzero declared | |
1369 | size, but equal type sizes!) (Since we know that both | |
1370 | the current and previous fields are bitfields by the | |
1371 | time we check it, DECL_SIZE must be present for both.) */ | |
1372 | if (DECL_BIT_FIELD_TYPE (field) | |
1373 | && !integer_zerop (DECL_SIZE (field)) | |
1374 | && !integer_zerop (DECL_SIZE (rli->prev_field)) | |
e913b5cd | 1375 | && tree_fits_shwi_p (DECL_SIZE (rli->prev_field)) |
69c1cbfa | 1376 | && tree_fits_uhwi_p (TYPE_SIZE (type)) |
8aea3a7e | 1377 | && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))) |
8642f3d3 | 1378 | { |
1379 | /* We're in the middle of a run of equal type size fields; make | |
1380 | sure we realign if we run out of bits. (Not decl size, | |
1381 | type size!) */ | |
e913b5cd | 1382 | HOST_WIDE_INT bitsize = tree_to_uhwi (DECL_SIZE (field)); |
8642f3d3 | 1383 | |
1384 | if (rli->remaining_in_alignment < bitsize) | |
1385 | { | |
e913b5cd | 1386 | HOST_WIDE_INT typesize = tree_to_uhwi (TYPE_SIZE (type)); |
8aea3a7e | 1387 | |
7bd4091f | 1388 | /* Out of bits; bump up to the next 'word'. */
7bd4091f | 1389 | rli->bitpos |
8aea3a7e | 1390 | = size_binop (PLUS_EXPR, rli->bitpos, |
1391 | bitsize_int (rli->remaining_in_alignment)); | |
7bd4091f | 1392 | rli->prev_field = field; |
8aea3a7e | 1393 | if (typesize < bitsize) |
1394 | rli->remaining_in_alignment = 0; | |
1395 | else | |
1396 | rli->remaining_in_alignment = typesize - bitsize; | |
8642f3d3 | 1397 | } |
8aea3a7e | 1398 | else |
1399 | rli->remaining_in_alignment -= bitsize; | |
8642f3d3 | 1400 | } |
1401 | else | |
1402 | { | |
805e22b2 | 1403 | /* End of a run: if leaving a run of bitfields of the same type |
1404 | size, we have to "use up" the rest of the bits of the type | |
8642f3d3 | 1405 | size. |
1406 | ||
1407 | Compute the new position as the sum of the size for the prior | |
1408 | type and where we first started working on that type. | |
1409 | Note: since the beginning of the field was aligned, the end
1410 | will be too.  No rounding is needed. */
1411 | ||
a6cf93fb | 1412 | if (!integer_zerop (DECL_SIZE (rli->prev_field))) |
8642f3d3 | 1413 | { |
7bd4091f | 1414 | rli->bitpos |
1415 | = size_binop (PLUS_EXPR, rli->bitpos, | |
1416 | bitsize_int (rli->remaining_in_alignment)); | |
8642f3d3 | 1417 | } |
1418 | else | |
5f1e9331 | 1419 | /* We "use up" size zero fields; the code below should behave |
1420 | as if the prior field was not a bitfield. */ | |
1421 | prev_saved = NULL; | |
8642f3d3 | 1422 | |
805e22b2 | 1423 | /* Cause a new bitfield to be captured, either this time (if |
9cb8e99f | 1424 | currently a bitfield) or next time we see one. */ |
9af5ce0c | 1425 | if (!DECL_BIT_FIELD_TYPE (field) |
a6cf93fb | 1426 | || integer_zerop (DECL_SIZE (field))) |
5f1e9331 | 1427 | rli->prev_field = NULL; |
8642f3d3 | 1428 | } |
5f1e9331 | 1429 | |
8642f3d3 | 1430 | normalize_rli (rli); |
1431 | } | |
1432 | ||
3157acc6 | 1433 | /* If we're starting a new run of same-type-size bitfields
8642f3d3 | 1434 | (or a run of non-bitfields), set up the "first of the run" |
805e22b2 | 1435 | fields. |
8642f3d3 | 1436 | |
1437 | That is, if the current field is not a bitfield, or if there
1438 | was a prior bitfield and the type sizes differ, or if there wasn't
1439 | a prior bitfield and the size of the current field is nonzero.
1440 | ||
1441 | Note: we must be sure to test ONLY the type size if there was | |
1442 | a prior bitfield and ONLY for the current field being zero if | |
1443 | there wasn't. */ | |
1444 | ||
1445 | if (!DECL_BIT_FIELD_TYPE (field) | |
a6cf93fb | 1446 | || (prev_saved != NULL |
8aea3a7e | 1447 | ? !simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)) |
5f1e9331 | 1448 | : !integer_zerop (DECL_SIZE (field)) )) |
8642f3d3 | 1449 | { |
5f1e9331 | 1450 | /* Never smaller than a byte for compatibility. */ |
1451 | unsigned int type_align = BITS_PER_UNIT; | |
8642f3d3 | 1452 | |
805e22b2 | 1453 | /* When not a bitfield, we could be seeing a flexible array member
8642f3d3 | 1454 | (with no DECL_SIZE).  Since we won't be using remaining_in_alignment
805e22b2 | 1455 | until we see a bitfield (and come by here again), we just skip
8642f3d3 | 1456 | calculating it. */
5f1e9331 | 1457 | if (DECL_SIZE (field) != NULL |
e913b5cd | 1458 | && tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (field))) |
1459 | && tree_fits_uhwi_p (DECL_SIZE (field))) | |
8aea3a7e | 1460 | { |
c7e8d0da | 1461 | unsigned HOST_WIDE_INT bitsize |
e913b5cd | 1462 | = tree_to_uhwi (DECL_SIZE (field)); |
c7e8d0da | 1463 | unsigned HOST_WIDE_INT typesize |
e913b5cd | 1464 | = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (field))); |
8aea3a7e | 1465 | |
1466 | if (typesize < bitsize) | |
1467 | rli->remaining_in_alignment = 0; | |
1468 | else | |
1469 | rli->remaining_in_alignment = typesize - bitsize; | |
1470 | } | |
8642f3d3 | 1471 | |
9cb8e99f | 1472 | /* Now align (conventionally) for the new type. */ |
7bd4091f | 1473 | type_align = TYPE_ALIGN (TREE_TYPE (field)); |
f04f096b | 1474 | |
8642f3d3 | 1475 | if (maximum_field_alignment != 0) |
1476 | type_align = MIN (type_align, maximum_field_alignment); | |
f04f096b | 1477 | |
178825bb | 1478 | rli->bitpos = round_up (rli->bitpos, type_align); |
5f1e9331 | 1479 | |
8642f3d3 | 1480 | /* If we really aligned, don't allow subsequent bitfields |
9cb8e99f | 1481 | to undo that. */ |
8642f3d3 | 1482 | rli->prev_field = NULL; |
1483 | } | |
f04f096b | 1484 | } |
1485 | ||
02e7a332 | 1486 | /* Offset so far becomes the position of this field after normalizing. */ |
1487 | normalize_rli (rli); | |
1488 | DECL_FIELD_OFFSET (field) = rli->offset; | |
1489 | DECL_FIELD_BIT_OFFSET (field) = rli->bitpos; | |
b4bb829f | 1490 | SET_DECL_OFFSET_ALIGN (field, rli->offset_align); |
02e7a332 | 1491 | |
46515aeb | 1492 | /* Evaluate nonconstant offsets only once, either now or as soon as safe. */ |
1493 | if (TREE_CODE (DECL_FIELD_OFFSET (field)) != INTEGER_CST) | |
1494 | DECL_FIELD_OFFSET (field) = variable_size (DECL_FIELD_OFFSET (field)); | |
1495 | ||
02e7a332 | 1496 | /* If this field ended up more aligned than we thought it would be (we |
1497 | approximate this by seeing if its position changed), lay out the field | |
1498 | again; perhaps we can use an integral mode for it now. */ | |
62d2dc6f | 1499 | if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field))) |
e913b5cd | 1500 | actual_align = (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) |
1501 | & - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))); | |
62d2dc6f | 1502 | else if (integer_zerop (DECL_FIELD_OFFSET (field))) |
23325b33 | 1503 | actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align); |
e913b5cd | 1504 | else if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))) |
02e7a332 | 1505 | actual_align = (BITS_PER_UNIT |
e913b5cd | 1506 | * (tree_to_uhwi (DECL_FIELD_OFFSET (field)) |
1507 | & - tree_to_uhwi (DECL_FIELD_OFFSET (field)))); | |
99f4e085 | 1508 | else |
02e7a332 | 1509 | actual_align = DECL_OFFSET_ALIGN (field); |
23325b33 | 1510 | /* ACTUAL_ALIGN is still the actual alignment *within the record*.
1511 | Store/extract bit-field operations will check the alignment of the
1512 | record against the mode of bit fields. */
02e7a332 | 1513 | |
1514 | if (known_align != actual_align) | |
1515 | layout_decl (field, actual_align); | |
1516 | ||
7bd4091f | 1517 | if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field)) |
1518 | rli->prev_field = field; | |
f04f096b | 1519 | |
02e7a332 | 1520 | /* Now add size of this field to the size of the record. If the size is |
1521 | not constant, treat the field as being a multiple of bytes and just | |
1522 | adjust the offset, resetting the bit position. Otherwise, apportion the | |
1523 | size amongst the bit position and offset. First handle the case of an | |
1524 | unspecified size, which can happen when we have an invalid nested struct | |
1525 | definition, such as struct j { struct j { int i; } }. The error message | |
1526 | is printed in finish_struct. */ | |
1527 | if (DECL_SIZE (field) == 0) | |
1528 | /* Do nothing. */; | |
7e50ecae | 1529 | else if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST |
f96bd2bf | 1530 | || TREE_OVERFLOW (DECL_SIZE (field))) |
99f4e085 | 1531 | { |
02e7a332 | 1532 | rli->offset |
1533 | = size_binop (PLUS_EXPR, rli->offset, | |
5d7ed6c7 | 1534 | fold_convert (sizetype, |
1535 | size_binop (CEIL_DIV_EXPR, rli->bitpos, | |
1536 | bitsize_unit_node))); | |
02e7a332 | 1537 | rli->offset |
1538 | = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field)); | |
1539 | rli->bitpos = bitsize_zero_node; | |
fcf31ac6 | 1540 | rli->offset_align = MIN (rli->offset_align, desired_align); |
99f4e085 | 1541 | } |
7bd4091f | 1542 | else if (targetm.ms_bitfield_layout_p (rli->t)) |
1543 | { | |
1544 | rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field)); | |
1545 | ||
1546 | /* If we ended a bitfield before the full length of the type, then
1547 | pad the struct out to the full length of the last type. */ | |
1767a056 | 1548 | if ((DECL_CHAIN (field) == NULL |
1549 | || TREE_CODE (DECL_CHAIN (field)) != FIELD_DECL) | |
7bd4091f | 1550 | && DECL_BIT_FIELD_TYPE (field) |
1551 | && !integer_zerop (DECL_SIZE (field))) | |
1552 | rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, | |
1553 | bitsize_int (rli->remaining_in_alignment)); | |
1554 | ||
1555 | normalize_rli (rli); | |
1556 | } | |
99f4e085 | 1557 | else |
1558 | { | |
02e7a332 | 1559 | rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field)); |
1560 | normalize_rli (rli); | |
f2cfea4a | 1561 | } |
99f4e085 | 1562 | } |
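/* Usage sketch (hypothetical helper, not part of this file's
   interface): the canonical driver for place_field, mirroring what
   layout_type does below for records and unions.  */
static void
place_all_fields_sketch (tree t)
{
  record_layout_info rli = start_record_layout (t);
  tree f;

  for (f = TYPE_FIELDS (t); f; f = DECL_CHAIN (f))
    place_field (rli, f);

  finish_record_layout (rli, /*free_p=*/true);
}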
f2cfea4a | 1563 | |
99f4e085 | 1564 | /* Assuming that all the fields have been laid out, this function uses |
1565 | RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type | |
de132707 | 1566 | indicated by RLI. */ |
f2cfea4a | 1567 | |
99f4e085 | 1568 | static void |
60b8c5b3 | 1569 | finalize_record_size (record_layout_info rli) |
99f4e085 | 1570 | { |
02e7a332 | 1571 | tree unpadded_size, unpadded_size_unit; |
1572 | ||
9ac9c432 | 1573 | /* Now we want just byte and bit offsets, so set the offset alignment |
1574 | to be a byte and then normalize. */ | |
1575 | rli->offset_align = BITS_PER_UNIT; | |
1576 | normalize_rli (rli); | |
f2cfea4a | 1577 | |
1578 | /* Determine the desired alignment. */ | |
1579 | #ifdef ROUND_TYPE_ALIGN | |
99f4e085 | 1580 | TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), |
fd5279fc | 1581 | rli->record_align); |
f2cfea4a | 1582 | #else |
99f4e085 | 1583 | TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align); |
f2cfea4a | 1584 | #endif |
1585 | ||
9ac9c432 | 1586 | /* Compute the size so far. Be sure to allow for extra bits in the |
1587 | size in bytes. We have guaranteed above that it will be no more | |
1588 | than a single byte. */ | |
1589 | unpadded_size = rli_size_so_far (rli); | |
1590 | unpadded_size_unit = rli_size_unit_so_far (rli); | |
1591 | if (! integer_zerop (rli->bitpos)) | |
1592 | unpadded_size_unit | |
1593 | = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node); | |
02e7a332 | 1594 | |
2358393e | 1595 | /* Round the size up to be a multiple of the required alignment. */ |
178825bb | 1596 | TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t)); |
d37625c0 | 1597 | TYPE_SIZE_UNIT (rli->t) |
178825bb | 1598 | = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t)); |
fe352cf1 | 1599 | |
6bf97f82 | 1600 | if (TREE_CONSTANT (unpadded_size) |
d1251492 | 1601 | && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0 |
1602 | && input_location != BUILTINS_LOCATION) | |
6bf97f82 | 1603 | warning (OPT_Wpadded, "padding struct size to alignment boundary"); |
40734805 | 1604 | |
02e7a332 | 1605 | if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE |
1606 | && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary | |
1607 | && TREE_CONSTANT (unpadded_size)) | |
fca12917 | 1608 | { |
1609 | tree unpacked_size; | |
fe352cf1 | 1610 | |
fca12917 | 1611 | #ifdef ROUND_TYPE_ALIGN |
99f4e085 | 1612 | rli->unpacked_align |
1613 | = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align); | |
fca12917 | 1614 | #else |
99f4e085 | 1615 | rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align); |
fca12917 | 1616 | #endif |
02e7a332 | 1617 | |
178825bb | 1618 | unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align); |
99f4e085 | 1619 | if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t))) |
fca12917 | 1620 | { |
99f4e085 | 1621 | if (TYPE_NAME (rli->t)) |
fca12917 | 1622 | { |
abd3e6b5 | 1623 | tree name; |
fe352cf1 | 1624 | |
99f4e085 | 1625 | if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE) |
abd3e6b5 | 1626 | name = TYPE_NAME (rli->t); |
fca12917 | 1627 | else |
abd3e6b5 | 1628 | name = DECL_NAME (TYPE_NAME (rli->t)); |
02e7a332 | 1629 | |
fca12917 | 1630 | if (STRICT_ALIGNMENT) |
6bf97f82 | 1631 | warning (OPT_Wpacked, "packed attribute causes inefficient " |
abd3e6b5 | 1632 | "alignment for %qE", name); |
fca12917 | 1633 | else |
6bf97f82 | 1634 | warning (OPT_Wpacked, |
abd3e6b5 | 1635 | "packed attribute is unnecessary for %qE", name); |
fca12917 | 1636 | } |
1637 | else | |
1638 | { | |
1639 | if (STRICT_ALIGNMENT) | |
6bf97f82 | 1640 | warning (OPT_Wpacked, |
9b2d6d13 | 1641 | "packed attribute causes inefficient alignment"); |
fca12917 | 1642 | else |
6bf97f82 | 1643 | warning (OPT_Wpacked, "packed attribute is unnecessary"); |
fca12917 | 1644 | } |
1645 | } | |
fca12917 | 1646 | } |
99f4e085 | 1647 | } |
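/* Worked example for the rounding above (a sketch; assumes 32-bit int
   and BITS_PER_UNIT == 8): the unpadded size of the struct below is
   5 bytes, and rounding up to the 32-bit record alignment gives
   TYPE_SIZE_UNIT == 8, the case -Wpadded warns about.  */
struct padded_example
{
  int i;			/* bytes 0..3 */
  char c;			/* byte 4; bytes 5..7 become padding */
};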
1648 | ||
1649 | /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */ | |
f2cfea4a | 1650 | |
9ac9c432 | 1651 | void |
60b8c5b3 | 1652 | compute_record_mode (tree type) |
99f4e085 | 1653 | { |
02e7a332 | 1654 | tree field; |
3754d046 | 1655 | machine_mode mode = VOIDmode; |
02e7a332 | 1656 | |
99f4e085 | 1657 | /* Most RECORD_TYPEs have BLKmode, so we start off assuming that. |
1658 | However, if possible, we use a mode that fits in a register | |
1659 | instead, in order to allow for better optimization down the | |
1660 | line. */ | |
342ad2d6 | 1661 | SET_TYPE_MODE (type, BLKmode); |
99f4e085 | 1662 | |
e913b5cd | 1663 | if (! tree_fits_uhwi_p (TYPE_SIZE (type))) |
02e7a332 | 1664 | return; |
99f4e085 | 1665 | |
02e7a332 | 1666 | /* A record which has any BLKmode members must itself be |
1667 | BLKmode; it can't go in a register. Unless the member is | |
1668 | BLKmode only because it isn't aligned. */ | |
1767a056 | 1669 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
02e7a332 | 1670 | { |
02e7a332 | 1671 | if (TREE_CODE (field) != FIELD_DECL) |
1672 | continue; | |
99f4e085 | 1673 | |
02e7a332 | 1674 | if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK |
1675 | || (TYPE_MODE (TREE_TYPE (field)) == BLKmode | |
0e9fefce | 1676 | && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)) |
1677 | && !(TYPE_SIZE (TREE_TYPE (field)) != 0 | |
1678 | && integer_zerop (TYPE_SIZE (TREE_TYPE (field))))) | |
e913b5cd | 1679 | || ! tree_fits_uhwi_p (bit_position (field)) |
0b241033 | 1680 | || DECL_SIZE (field) == 0 |
e913b5cd | 1681 | || ! tree_fits_uhwi_p (DECL_SIZE (field))) |
02e7a332 | 1682 | return; |
1683 | ||
02e7a332 | 1684 | /* If this field is the whole struct, remember its mode so |
1685 | that, say, we can put a double in a class into a DF | |
b708a05c | 1686 | register instead of forcing it to live in the stack. */ |
1687 | if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field))) | |
02e7a332 | 1688 | mode = DECL_MODE (field); |
99f4e085 | 1689 | |
f91ed644 | 1690 | /* With some targets, it is sub-optimal to access an aligned |
1691 | BLKmode structure as a scalar. */ | |
1692 | if (targetm.member_type_forces_blk (field, mode)) | |
02e7a332 | 1693 | return; |
02e7a332 | 1694 | } |
99f4e085 | 1695 | |
aedd07a7 | 1696 | /* If we only have one real field, use its mode if that mode's size
1697 | matches the type's size.  This only applies to RECORD_TYPE; it
1698 | does not apply to unions. */
1699 | if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode | |
e913b5cd | 1700 | && tree_fits_uhwi_p (TYPE_SIZE (type)) |
1701 | && GET_MODE_BITSIZE (mode) == tree_to_uhwi (TYPE_SIZE (type))) | |
342ad2d6 | 1702 | SET_TYPE_MODE (type, mode); |
c0d93be8 | 1703 | else |
342ad2d6 | 1704 | SET_TYPE_MODE (type, mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1)); |
02e7a332 | 1705 | |
1706 | /* If structure's known alignment is less than what the scalar | |
1707 | mode would need, and it matters, then stick with BLKmode. */ | |
1708 | if (TYPE_MODE (type) != BLKmode | |
1709 | && STRICT_ALIGNMENT | |
1710 | && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT | |
1711 | || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type)))) | |
1712 | { | |
1713 | /* If this is the only reason this type is BLKmode, then | |
1714 | don't force containing types to be BLKmode. */ | |
1715 | TYPE_NO_FORCE_BLK (type) = 1; | |
342ad2d6 | 1716 | SET_TYPE_MODE (type, BLKmode); |
99f4e085 | 1717 | } |
f2cfea4a | 1718 | } |
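/* Illustration (a sketch, not part of this file's interface): on a
   target with a 64-bit DFmode, the first struct below may get
   TYPE_MODE == DFmode because its only field spans the whole record,
   while the second has no such field and instead gets whatever
   mode_for_size_tree returns for 64 bits (e.g. DImode).  */
struct single_double_example { double d; };
struct two_ints_example { int a; int b; };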
99f4e085 | 1719 | |
1720 | /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid | |
1721 | out. */ | |
1722 | ||
1723 | static void | |
60b8c5b3 | 1724 | finalize_type_size (tree type) |
99f4e085 | 1725 | { |
1726 | /* Normally, use the alignment corresponding to the mode chosen. | |
1727 | However, where strict alignment is not required, avoid | |
1728 | over-aligning structures, since most compilers do not do this | |
d1b5d503 | 1729 | alignment. */ |
3d54a731 | 1730 | if (TYPE_MODE (type) != BLKmode |
1731 | && TYPE_MODE (type) != VOIDmode | |
1732 | && (STRICT_ALIGNMENT || !AGGREGATE_TYPE_P (type))) | |
aca14577 | 1733 | { |
d1b5d503 | 1734 | unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type)); |
1735 | ||
1736 | /* Don't override a larger alignment requirement coming from a user | |
1737 | alignment of one of the fields. */ | |
1738 | if (mode_align >= TYPE_ALIGN (type)) | |
1739 | { | |
1740 | TYPE_ALIGN (type) = mode_align; | |
1741 | TYPE_USER_ALIGN (type) = 0; | |
1742 | } | |
aca14577 | 1743 | } |
99f4e085 | 1744 | |
1745 | /* Do machine-dependent extra alignment. */ | |
1746 | #ifdef ROUND_TYPE_ALIGN | |
1747 | TYPE_ALIGN (type) | |
1748 | = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT); | |
1749 | #endif | |
1750 | ||
99f4e085 | 1751 | /* If we failed to find a simple way to calculate the unit size |
02e7a332 | 1752 | of the type, find it by division. */ |
99f4e085 | 1753 | if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0) |
1754 | /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the | |
1755 | result will fit in sizetype. We will get more efficient code using | |
1756 | sizetype, so we force a conversion. */ | |
1757 | TYPE_SIZE_UNIT (type) | |
5d7ed6c7 | 1758 | = fold_convert (sizetype, |
1759 | size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type), | |
1760 | bitsize_unit_node)); | |
99f4e085 | 1761 | |
02e7a332 | 1762 | if (TYPE_SIZE (type) != 0) |
1763 | { | |
178825bb | 1764 | TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type)); |
1765 | TYPE_SIZE_UNIT (type) | |
1766 | = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN_UNIT (type)); | |
02e7a332 | 1767 | } |
1768 | ||
1769 | /* Evaluate nonconstant sizes only once, either now or as soon as safe. */ | |
1770 | if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST) | |
1771 | TYPE_SIZE (type) = variable_size (TYPE_SIZE (type)); | |
99f4e085 | 1772 | if (TYPE_SIZE_UNIT (type) != 0 |
1773 | && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST) | |
1774 | TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type)); | |
1775 | ||
1776 | /* Also layout any other variants of the type. */ | |
1777 | if (TYPE_NEXT_VARIANT (type) | |
1778 | || type != TYPE_MAIN_VARIANT (type)) | |
1779 | { | |
1780 | tree variant; | |
1781 | /* Record layout info of this variant. */ | |
1782 | tree size = TYPE_SIZE (type); | |
1783 | tree size_unit = TYPE_SIZE_UNIT (type); | |
1784 | unsigned int align = TYPE_ALIGN (type); | |
4765975c | 1785 | unsigned int precision = TYPE_PRECISION (type); |
aca14577 | 1786 | unsigned int user_align = TYPE_USER_ALIGN (type); |
3754d046 | 1787 | machine_mode mode = TYPE_MODE (type); |
99f4e085 | 1788 | |
1789 | /* Copy it into all variants. */ | |
1790 | for (variant = TYPE_MAIN_VARIANT (type); | |
1791 | variant != 0; | |
1792 | variant = TYPE_NEXT_VARIANT (variant)) | |
1793 | { | |
1794 | TYPE_SIZE (variant) = size; | |
1795 | TYPE_SIZE_UNIT (variant) = size_unit; | |
8c522687 | 1796 | unsigned valign = align; |
1797 | if (TYPE_USER_ALIGN (variant)) | |
1798 | valign = MAX (valign, TYPE_ALIGN (variant)); | |
1799 | else | |
1800 | TYPE_USER_ALIGN (variant) = user_align; | |
1801 | TYPE_ALIGN (variant) = valign; | |
4765975c | 1802 | TYPE_PRECISION (variant) = precision; |
342ad2d6 | 1803 | SET_TYPE_MODE (variant, mode); |
99f4e085 | 1804 | } |
1805 | } | |
1806 | } | |
1807 | ||
8d8a34f9 | 1808 | /* Return a new underlying object for a bitfield started with FIELD. */ |
1809 | ||
1810 | static tree | |
1811 | start_bitfield_representative (tree field) | |
1812 | { | |
1813 | tree repr = make_node (FIELD_DECL); | |
1814 | DECL_FIELD_OFFSET (repr) = DECL_FIELD_OFFSET (field); | |
1815 | /* Force the representative to begin at a BITS_PER_UNIT aligned | |
1816 | boundary - C++ may use tail-padding of a base object to | |
1817 | continue packing bits so the bitfield region does not start | |
1818 | at bit zero (see g++.dg/abi/bitfield5.C for example). | |
1819 | Unallocated bits may happen for other reasons as well, | |
1820 | for example Ada which allows explicit bit-granular structure layout. */ | |
1821 | DECL_FIELD_BIT_OFFSET (repr) | |
1822 | = size_binop (BIT_AND_EXPR, | |
1823 | DECL_FIELD_BIT_OFFSET (field), | |
1824 | bitsize_int (~(BITS_PER_UNIT - 1))); | |
1825 | SET_DECL_OFFSET_ALIGN (repr, DECL_OFFSET_ALIGN (field)); | |
1826 | DECL_SIZE (repr) = DECL_SIZE (field); | |
1827 | DECL_SIZE_UNIT (repr) = DECL_SIZE_UNIT (field); | |
1828 | DECL_PACKED (repr) = DECL_PACKED (field); | |
1829 | DECL_CONTEXT (repr) = DECL_CONTEXT (field); | |
1830 | return repr; | |
1831 | } | |
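/* A minimal sketch (hypothetical helper, host arithmetic only) of the
   BIT_AND_EXPR rounding above, assuming BITS_PER_UNIT is a power of
   two: the representative starts at the unit boundary at or below the
   first field's bit offset, e.g. bit 11 -> bit 8 for 8-bit units.  */
static unsigned HOST_WIDE_INT
repr_start_bit_sketch (unsigned HOST_WIDE_INT field_bit_offset)
{
  return field_bit_offset & ~(unsigned HOST_WIDE_INT) (BITS_PER_UNIT - 1);
}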
1832 | ||
1833 | /* Finish up a bitfield group that was started by creating the underlying | |
1834 | object REPR with the last field in the bitfield group FIELD. */ | |
1835 | ||
1836 | static void | |
1837 | finish_bitfield_representative (tree repr, tree field) | |
1838 | { | |
1839 | unsigned HOST_WIDE_INT bitsize, maxbitsize; | |
3754d046 | 1840 | machine_mode mode; |
8d8a34f9 | 1841 | tree nextf, size; |
1842 | ||
1843 | size = size_diffop (DECL_FIELD_OFFSET (field), | |
1844 | DECL_FIELD_OFFSET (repr)); | |
129ef22c | 1845 | while (TREE_CODE (size) == COMPOUND_EXPR) |
1846 | size = TREE_OPERAND (size, 1); | |
e913b5cd | 1847 | gcc_assert (tree_fits_uhwi_p (size)); |
1848 | bitsize = (tree_to_uhwi (size) * BITS_PER_UNIT | |
1849 | + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) | |
1850 | - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)) | |
1851 | + tree_to_uhwi (DECL_SIZE (field))); | |
8d8a34f9 | 1852 | |
75188dc6 | 1853 | /* Round up bitsize to multiples of BITS_PER_UNIT. */ |
1854 | bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1); | |
1855 | ||
8d8a34f9 | 1856 | /* Now nothing tells us how to pad out bitsize ... */ |
1857 | nextf = DECL_CHAIN (field); | |
1858 | while (nextf && TREE_CODE (nextf) != FIELD_DECL) | |
1859 | nextf = DECL_CHAIN (nextf); | |
1860 | if (nextf) | |
1861 | { | |
1862 | tree maxsize; | |
9d75589a | 1863 | /* If there was an error, the field may not be laid out
8d8a34f9 | 1864 | correctly. Don't bother to do anything. */ |
1865 | if (TREE_TYPE (nextf) == error_mark_node) | |
1866 | return; | |
1867 | maxsize = size_diffop (DECL_FIELD_OFFSET (nextf), | |
1868 | DECL_FIELD_OFFSET (repr)); | |
e913b5cd | 1869 | if (tree_fits_uhwi_p (maxsize)) |
fa42e1a4 | 1870 | { |
e913b5cd | 1871 | maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT |
1872 | + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (nextf)) | |
1873 | - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))); | |
fa42e1a4 | 1874 | /* If the group ends within a bitfield nextf does not need to be |
1875 | aligned to BITS_PER_UNIT. Thus round up. */ | |
1876 | maxbitsize = (maxbitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1); | |
1877 | } | |
1878 | else | |
1879 | maxbitsize = bitsize; | |
8d8a34f9 | 1880 | } |
1881 | else | |
1882 | { | |
1883 | /* ??? If you consider that tail-padding of this struct might be | |
1884 | re-used when deriving from it, we cannot really do the following
75188dc6 | 1885 | and thus need to set maxsize to bitsize? Also we cannot |
1886 | generally rely on maxsize to fold to an integer constant, so | |
1887 | use bitsize as fallback for this case. */ | |
8d8a34f9 | 1888 | tree maxsize = size_diffop (TYPE_SIZE_UNIT (DECL_CONTEXT (field)), |
1889 | DECL_FIELD_OFFSET (repr)); | |
e913b5cd | 1890 | if (tree_fits_uhwi_p (maxsize)) |
1891 | maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT | |
1892 | - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))); | |
75188dc6 | 1893 | else |
1894 | maxbitsize = bitsize; | |
8d8a34f9 | 1895 | } |
1896 | ||
1897 | /* This holds because we don't artificially break up the representative
1898 | in the middle of a large bitfield with different, possibly
1899 | overlapping representatives, and all representatives start
1900 | at a byte offset. */
1901 | gcc_assert (maxbitsize % BITS_PER_UNIT == 0); | |
1902 | ||
8d8a34f9 | 1903 | /* Find the smallest nice mode to use. */ |
1904 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; | |
1905 | mode = GET_MODE_WIDER_MODE (mode)) | |
1906 | if (GET_MODE_BITSIZE (mode) >= bitsize) | |
1907 | break; | |
1908 | if (mode != VOIDmode | |
1909 | && (GET_MODE_BITSIZE (mode) > maxbitsize | |
1910 | || GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZE)) | |
1911 | mode = VOIDmode; | |
1912 | ||
1913 | if (mode == VOIDmode) | |
1914 | { | |
1915 | /* We really want a BLKmode representative only as a last resort, | |
1916 | considering the member b in | |
1917 | struct { int a : 7; int b : 17; int c; } __attribute__((packed)); | |
1918 | Otherwise we simply want to split the representative up | |
1919 | allowing for overlaps within the bitfield region as required for | |
1920 | struct { int a : 7; int b : 7; | |
1921 | int c : 10; int d; } __attribute__((packed)); | |
1922 | [0, 15] HImode for a and b, [8, 23] HImode for c. */ | |
1923 | DECL_SIZE (repr) = bitsize_int (bitsize); | |
1924 | DECL_SIZE_UNIT (repr) = size_int (bitsize / BITS_PER_UNIT); | |
1925 | DECL_MODE (repr) = BLKmode; | |
1926 | TREE_TYPE (repr) = build_array_type_nelts (unsigned_char_type_node, | |
1927 | bitsize / BITS_PER_UNIT); | |
1928 | } | |
1929 | else | |
1930 | { | |
1931 | unsigned HOST_WIDE_INT modesize = GET_MODE_BITSIZE (mode); | |
1932 | DECL_SIZE (repr) = bitsize_int (modesize); | |
1933 | DECL_SIZE_UNIT (repr) = size_int (modesize / BITS_PER_UNIT); | |
1934 | DECL_MODE (repr) = mode; | |
1935 | TREE_TYPE (repr) = lang_hooks.types.type_for_mode (mode, 1); | |
1936 | } | |
1937 | ||
1938 | /* Remember whether the bitfield group is at the end of the | |
1939 | structure or not. */ | |
1940 | DECL_CHAIN (repr) = nextf; | |
1941 | } | |
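/* A sketch (hypothetical helper, host arithmetic only) of the bitsize
   computation at the top of this function: the bytes between the
   representative and the last field, plus that field's bit offset and
   width, rounded up to a multiple of BITS_PER_UNIT.  */
static unsigned HOST_WIDE_INT
repr_bitsize_sketch (unsigned HOST_WIDE_INT byte_delta,
		     unsigned HOST_WIDE_INT field_bit_offset,
		     unsigned HOST_WIDE_INT repr_bit_offset,
		     unsigned HOST_WIDE_INT field_size)
{
  unsigned HOST_WIDE_INT bits = (byte_delta * BITS_PER_UNIT
				 + field_bit_offset - repr_bit_offset
				 + field_size);

  return ((bits + BITS_PER_UNIT - 1)
	  & ~(unsigned HOST_WIDE_INT) (BITS_PER_UNIT - 1));
}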
1942 | ||
1943 | /* Compute and set FIELD_DECLs for the underlying objects we should | |
ab103e33 | 1944 | use for bitfield access for the structure T. */ |
8d8a34f9 | 1945 | |
ab103e33 | 1946 | void |
1947 | finish_bitfield_layout (tree t) | |
8d8a34f9 | 1948 | { |
1949 | tree field, prev; | |
1950 | tree repr = NULL_TREE; | |
1951 | ||
1952 | /* Unions would be special: for the ease of type-punning optimizations
1953 | we could use the underlying type as a hint for the representative
1954 | if the bitfield would fit and the representative would not exceed
1955 | the union in size. */
ab103e33 | 1956 | if (TREE_CODE (t) != RECORD_TYPE) |
8d8a34f9 | 1957 | return; |
1958 | ||
ab103e33 | 1959 | for (prev = NULL_TREE, field = TYPE_FIELDS (t); |
8d8a34f9 | 1960 | field; field = DECL_CHAIN (field)) |
1961 | { | |
1962 | if (TREE_CODE (field) != FIELD_DECL) | |
1963 | continue; | |
1964 | ||
1965 | /* In the C++ memory model, consecutive bit fields in a structure are | |
1966 | considered one memory location and updating a memory location | |
1967 | may not store into adjacent memory locations. */ | |
1968 | if (!repr | |
1969 | && DECL_BIT_FIELD_TYPE (field)) | |
1970 | { | |
1971 | /* Start new representative. */ | |
1972 | repr = start_bitfield_representative (field); | |
1973 | } | |
1974 | else if (repr | |
1975 | && ! DECL_BIT_FIELD_TYPE (field)) | |
1976 | { | |
1977 | /* Finish off new representative. */ | |
1978 | finish_bitfield_representative (repr, prev); | |
1979 | repr = NULL_TREE; | |
1980 | } | |
1981 | else if (DECL_BIT_FIELD_TYPE (field)) | |
1982 | { | |
fa42e1a4 | 1983 | gcc_assert (repr != NULL_TREE); |
1984 | ||
8d8a34f9 | 1985 | /* Zero-size bitfields finish off a representative and |
1986 | do not have a representative themselves. This is | |
1987 | required by the C++ memory model. */ | |
1988 | if (integer_zerop (DECL_SIZE (field))) | |
1989 | { | |
1990 | finish_bitfield_representative (repr, prev); | |
1991 | repr = NULL_TREE; | |
1992 | } | |
fa42e1a4 | 1993 | |
1994 | /* We assume that either DECL_FIELD_OFFSET of the representative | |
1995 | and each bitfield member is a constant or they are equal. | |
1996 | This is because we need to be able to compute the bit-offset | |
1997 | of each field relative to the representative in get_bit_range | |
1998 | during RTL expansion. | |
1999 | If these constraints are not met, simply force a new | |
2000 | representative to be generated. That will at most | |
2001 | generate worse code but still maintain correctness with | |
2002 | respect to the C++ memory model. */ | |
e913b5cd | 2003 | else if (!((tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)) |
2004 | && tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))) | |
fa42e1a4 | 2005 | || operand_equal_p (DECL_FIELD_OFFSET (repr), |
2006 | DECL_FIELD_OFFSET (field), 0))) | |
2007 | { | |
2008 | finish_bitfield_representative (repr, prev); | |
2009 | repr = start_bitfield_representative (field); | |
2010 | } | |
8d8a34f9 | 2011 | } |
2012 | else | |
2013 | continue; | |
2014 | ||
2015 | if (repr) | |
2016 | DECL_BIT_FIELD_REPRESENTATIVE (field) = repr; | |
2017 | ||
2018 | prev = field; | |
2019 | } | |
2020 | ||
2021 | if (repr) | |
2022 | finish_bitfield_representative (repr, prev); | |
2023 | } | |
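/* Illustration of the grouping rules above (a sketch, not part of
   this file's interface).  */
struct repr_grouping_example
{
  int a : 3;			/* starts representative R1 */
  int b : 5;			/* continues R1 */
  int   : 0;			/* zero-size: finishes R1, gets none itself */
  int c : 4;			/* starts representative R2 */
  int d;			/* not a bitfield: finishes R2, gets none */
};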
2024 | ||
99f4e085 | 2025 | /* Do all of the work required to layout the type indicated by RLI, |
2026 | once the fields have been laid out. This function will call `free' | |
23ed74d8 | 2027 | for RLI, unless FREE_P is false. Passing a value other than false |
2028 | for FREE_P is bad practice; this option only exists to support the | |
2029 | G++ 3.2 ABI. */ | |
99f4e085 | 2030 | |
2031 | void | |
60b8c5b3 | 2032 | finish_record_layout (record_layout_info rli, int free_p) |
99f4e085 | 2033 | { |
2bc7da70 | 2034 | tree variant; |
2035 | ||
02e7a332 | 2036 | /* Compute the final size. */ |
2037 | finalize_record_size (rli); | |
2038 | ||
2039 | /* Compute the TYPE_MODE for the record. */ | |
2040 | compute_record_mode (rli->t); | |
83675f44 | 2041 | |
48fdacd0 | 2042 | /* Perform any last tweaks to the TYPE_SIZE, etc. */ |
2043 | finalize_type_size (rli->t); | |
2044 | ||
8d8a34f9 | 2045 | /* Compute bitfield representatives. */ |
ab103e33 | 2046 | finish_bitfield_layout (rli->t); |
8d8a34f9 | 2047 | |
2bc7da70 | 2048 | /* Propagate TYPE_PACKED to variants. With C++ templates, |
2049 | handle_packed_attribute is too early to do this. */ | |
2050 | for (variant = TYPE_NEXT_VARIANT (rli->t); variant; | |
2051 | variant = TYPE_NEXT_VARIANT (variant)) | |
2052 | TYPE_PACKED (variant) = TYPE_PACKED (rli->t); | |
2053 | ||
99f4e085 | 2054 | /* Lay out any static members. This is done now because their type |
2055 | may use the record's type. */ | |
f1f41a6c | 2056 | while (!vec_safe_is_empty (rli->pending_statics)) |
2057 | layout_decl (rli->pending_statics->pop (), 0); | |
83675f44 | 2058 | |
99f4e085 | 2059 | /* Clean up. */ |
23ed74d8 | 2060 | if (free_p) |
364ba361 | 2061 | { |
f1f41a6c | 2062 | vec_free (rli->pending_statics); |
364ba361 | 2063 | free (rli); |
2064 | } | |
99f4e085 | 2065 | } |
f2cfea4a | 2066 | \f |
805e22b2 | 2067 | |
2068 | /* Finish processing a builtin RECORD_TYPE type TYPE.  Its name is
2069 | NAME, its fields are chained in reverse on FIELDS. | |
2070 | ||
2071 | If ALIGN_TYPE is non-null, it is given the same alignment as | |
2072 | ALIGN_TYPE. */ | |
2073 | ||
2074 | void | |
60b8c5b3 | 2075 | finish_builtin_struct (tree type, const char *name, tree fields, |
2076 | tree align_type) | |
805e22b2 | 2077 | { |
f2332b21 | 2078 | tree tail, next; |
805e22b2 | 2079 | |
2080 | for (tail = NULL_TREE; fields; tail = fields, fields = next) | |
2081 | { | |
2082 | DECL_FIELD_CONTEXT (fields) = type; | |
1767a056 | 2083 | next = DECL_CHAIN (fields); |
2084 | DECL_CHAIN (fields) = tail; | |
805e22b2 | 2085 | } |
2086 | TYPE_FIELDS (type) = tail; | |
2087 | ||
2088 | if (align_type) | |
2089 | { | |
2090 | TYPE_ALIGN (type) = TYPE_ALIGN (align_type); | |
2091 | TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type); | |
2092 | } | |
2093 | ||
2094 | layout_type (type); | |
2095 | #if 0 /* not yet, should get fixed properly later */ | |
2096 | TYPE_NAME (type) = make_type_decl (get_identifier (name), type); | |
2097 | #else | |
e60a6f7b | 2098 | TYPE_NAME (type) = build_decl (BUILTINS_LOCATION, |
2099 | TYPE_DECL, get_identifier (name), type); | |
805e22b2 | 2100 | #endif |
2101 | TYPE_STUB_DECL (type) = TYPE_NAME (type); | |
2102 | layout_decl (TYPE_NAME (type), 0); | |
2103 | } | |
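/* Usage sketch (hypothetical names): build and lay out a builtin
   struct { void *ptr; long len; }.  As documented above, the fields
   are handed over chained in reverse.  */
static tree
build_builtin_pair_sketch (void)
{
  tree t = make_node (RECORD_TYPE);
  tree ptr = build_decl (BUILTINS_LOCATION, FIELD_DECL,
			 get_identifier ("ptr"), ptr_type_node);
  tree len = build_decl (BUILTINS_LOCATION, FIELD_DECL,
			 get_identifier ("len"), long_integer_type_node);

  DECL_CHAIN (len) = ptr;	/* reverse chain: len, then ptr */
  finish_builtin_struct (t, "__builtin_pair", len, NULL_TREE);
  return t;
}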
2104 | ||
f2cfea4a | 2105 | /* Calculate the mode, size, and alignment for TYPE. |
2106 | For an array type, calculate the element separation as well. | |
2107 | Record TYPE on the chain of permanent or temporary types | |
2108 | so that dbxout will find out about it. | |
2109 | ||
2110 | TYPE_SIZE of a type is nonzero if the type has been laid out already. | |
2111 | layout_type does nothing on such a type. | |
2112 | ||
2113 | If the type is incomplete, its TYPE_SIZE remains zero. */ | |
2114 | ||
2115 | void | |
60b8c5b3 | 2116 | layout_type (tree type) |
f2cfea4a | 2117 | { |
04e579b6 | 2118 | gcc_assert (type); |
f2cfea4a | 2119 | |
4ee9c684 | 2120 | if (type == error_mark_node) |
2121 | return; | |
2122 | ||
8c522687 | 2123 | /* We don't want finalize_type_size to copy an alignment attribute to |
2124 | variants that don't have it. */ | |
2125 | type = TYPE_MAIN_VARIANT (type); | |
2126 | ||
f2cfea4a | 2127 | /* Do nothing if type has been laid out before. */ |
2128 | if (TYPE_SIZE (type)) | |
2129 | return; | |
2130 | ||
f2cfea4a | 2131 | switch (TREE_CODE (type)) |
2132 | { | |
2133 | case LANG_TYPE: | |
2134 | /* This kind of type is the responsibility | |
c3418f42 | 2135 | of the language-specific code. */ |
04e579b6 | 2136 | gcc_unreachable (); |
f2cfea4a | 2137 | |
0e3dfadd | 2138 | case BOOLEAN_TYPE: |
f2cfea4a | 2139 | case INTEGER_TYPE: |
2140 | case ENUMERAL_TYPE: | |
342ad2d6 | 2141 | SET_TYPE_MODE (type, |
2142 | smallest_mode_for_size (TYPE_PRECISION (type), MODE_INT)); | |
b278476e | 2143 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); |
4765975c | 2144 | /* Don't set TYPE_PRECISION here, as it may be set by a bitfield. */ |
cec6c892 | 2145 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); |
f2cfea4a | 2146 | break; |
2147 | ||
2148 | case REAL_TYPE: | |
342ad2d6 | 2149 | SET_TYPE_MODE (type, |
2150 | mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0)); | |
b278476e | 2151 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); |
cec6c892 | 2152 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); |
f2cfea4a | 2153 | break; |
2154 | ||
06f0b99c | 2155 | case FIXED_POINT_TYPE: |
2156 | /* TYPE_MODE (type) has been set already. */ | |
2157 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); | |
2158 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); | |
2159 | break; | |
2160 | ||
f2cfea4a | 2161 | case COMPLEX_TYPE: |
78a8ed03 | 2162 | TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type)); |
342ad2d6 | 2163 | SET_TYPE_MODE (type, |
2164 | mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)), | |
2165 | (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE | |
2166 | ? MODE_COMPLEX_FLOAT : MODE_COMPLEX_INT), | |
2167 | 0)); | |
b278476e | 2168 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); |
cec6c892 | 2169 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); |
f2cfea4a | 2170 | break; |
2171 | ||
8a95ab85 | 2172 | case VECTOR_TYPE: |
83e2a11b | 2173 | { |
2174 | int nunits = TYPE_VECTOR_SUBPARTS (type); | |
83e2a11b | 2175 | tree innertype = TREE_TYPE (type); |
2176 | ||
04e579b6 | 2177 | gcc_assert (!(nunits & (nunits - 1))); |
83e2a11b | 2178 | |
2179 | /* Find an appropriate mode for the vector type. */ | |
2180 | if (TYPE_MODE (type) == VOIDmode) | |
c4740c5d | 2181 | SET_TYPE_MODE (type, |
2182 | mode_for_vector (TYPE_MODE (innertype), nunits)); | |
83e2a11b | 2183 | |
06f0b99c | 2184 | TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type)); |
83e2a11b | 2185 | TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type)); |
1f137e6d | 2186 | /* Several boolean vector elements may fit in a single unit. */ |
2187 | if (VECTOR_BOOLEAN_TYPE_P (type)) | |
2188 | TYPE_SIZE_UNIT (type) | |
2189 | = size_int (GET_MODE_SIZE (type->type_common.mode)); | |
2190 | else | |
2191 | TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR, | |
2192 | TYPE_SIZE_UNIT (innertype), | |
2193 | size_int (nunits)); | |
2194 | TYPE_SIZE (type) = int_const_binop (MULT_EXPR, | |
2195 | TYPE_SIZE (innertype), | |
317e2a67 | 2196 | bitsize_int (nunits)); |
b74c835a | 2197 | |
482a44fa | 2198 | /* For vector types, we do not default to the mode's alignment. |
2199 | Instead, query a target hook, defaulting to natural alignment. | |
2200 | This prevents ABI changes depending on whether or not native | |
2201 | vector modes are supported. */ | |
2202 | TYPE_ALIGN (type) = targetm.vector_alignment (type); | |
2203 | ||
2204 | /* However, if the underlying mode requires a bigger alignment than | |
2205 | what the target hook provides, we cannot use the mode. For now, | |
2206 | simply reject that case. */ | |
2207 | gcc_assert (TYPE_ALIGN (type) | |
2208 | >= GET_MODE_ALIGNMENT (TYPE_MODE (type))); | |
83e2a11b | 2209 | break; |
2210 | } | |
8a95ab85 | 2211 | |
f2cfea4a | 2212 | case VOID_TYPE: |
02e7a332 | 2213 | /* This is an incomplete type and so doesn't have a size. */ |
f2cfea4a | 2214 | TYPE_ALIGN (type) = 1; |
aca14577 | 2215 | TYPE_USER_ALIGN (type) = 0; |
342ad2d6 | 2216 | SET_TYPE_MODE (type, VOIDmode); |
f2cfea4a | 2217 | break; |
2218 | ||
058a1b7a | 2219 | case POINTER_BOUNDS_TYPE: |
2220 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); | |
2221 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); | |
2222 | break; | |
2223 | ||
e23958d4 | 2224 | case OFFSET_TYPE: |
b278476e | 2225 | TYPE_SIZE (type) = bitsize_int (POINTER_SIZE); |
4765975c | 2226 | TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE_UNITS); |
2227 | /* A pointer might be MODE_PARTIAL_INT, but ptrdiff_t must be | |
2228 | integral, which may be an __intN. */ | |
342ad2d6 | 2229 | SET_TYPE_MODE (type, mode_for_size (POINTER_SIZE, MODE_INT, 0)); |
98155838 | 2230 | TYPE_PRECISION (type) = POINTER_SIZE; |
e23958d4 | 2231 | break; |
2232 | ||
f2cfea4a | 2233 | case FUNCTION_TYPE: |
2234 | case METHOD_TYPE: | |
4812cab0 | 2235 | /* It's hard to see what the mode and size of a function ought to |
2236 | be, but we do know the alignment is FUNCTION_BOUNDARY, so | |
2237 | make it consistent with that. */ | |
342ad2d6 | 2238 | SET_TYPE_MODE (type, mode_for_size (FUNCTION_BOUNDARY, MODE_INT, 0)); |
4812cab0 | 2239 | TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY); |
2240 | TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT); | |
f2cfea4a | 2241 | break; |
2242 | ||
2243 | case POINTER_TYPE: | |
2244 | case REFERENCE_TYPE: | |
f1986931 | 2245 | { |
3754d046 | 2246 | machine_mode mode = TYPE_MODE (type); |
98155838 | 2247 | if (TREE_CODE (type) == REFERENCE_TYPE && reference_types_internal) |
2248 | { | |
2249 | addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (type)); | |
2250 | mode = targetm.addr_space.address_mode (as); | |
2251 | } | |
805e22b2 | 2252 | |
98155838 | 2253 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode)); |
805e22b2 | 2254 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode)); |
78a8ed03 | 2255 | TYPE_UNSIGNED (type) = 1; |
4765975c | 2256 | TYPE_PRECISION (type) = GET_MODE_PRECISION (mode); |
f1986931 | 2257 | } |
f2cfea4a | 2258 | break; |
2259 | ||
2260 | case ARRAY_TYPE: | |
2261 | { | |
19cb6b50 | 2262 | tree index = TYPE_DOMAIN (type); |
2263 | tree element = TREE_TYPE (type); | |
f2cfea4a | 2264 | |
2265 | build_pointer_type (element); | |
2266 | ||
2267 | /* We need to know both bounds in order to compute the size. */ | |
2268 | if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index) | |
2269 | && TYPE_SIZE (element)) | |
2270 | { | |
bc97b18f | 2271 | tree ub = TYPE_MAX_VALUE (index); |
2272 | tree lb = TYPE_MIN_VALUE (index); | |
f00a8c41 | 2273 | tree element_size = TYPE_SIZE (element); |
41e112e6 | 2274 | tree length; |
2275 | ||
820fcceb | 2276 | /* Make sure that an array of zero-sized element is zero-sized |
2277 | regardless of its extent. */ | |
2278 | if (integer_zerop (element_size)) | |
2279 | length = size_zero_node; | |
2280 | ||
7542c3b4 | 2281 | /* The computation should happen in the original signedness so |
2282 | that (possible) negative values are handled appropriately | |
2283 | when determining overflow. */ | |
820fcceb | 2284 | else |
85d86b55 | 2285 | { |
2286 | /* ??? When it is obvious that the range is signed | |
2287 | represent it using ssizetype. */ | |
2288 | if (TREE_CODE (lb) == INTEGER_CST | |
2289 | && TREE_CODE (ub) == INTEGER_CST | |
2290 | && TYPE_UNSIGNED (TREE_TYPE (lb)) | |
2291 | && tree_int_cst_lt (ub, lb)) | |
2292 | { | |
796b6678 | 2293 | lb = wide_int_to_tree (ssizetype, |
5de9d3ed | 2294 | offset_int::from (lb, SIGNED)); |
796b6678 | 2295 | ub = wide_int_to_tree (ssizetype, |
5de9d3ed | 2296 | offset_int::from (ub, SIGNED)); |
85d86b55 | 2297 | } |
2298 | length | |
2299 | = fold_convert (sizetype, | |
2300 | size_binop (PLUS_EXPR, | |
2301 | build_int_cst (TREE_TYPE (lb), 1), | |
2302 | size_binop (MINUS_EXPR, ub, lb))); | |
2303 | } | |
2304 | ||
97658fc9 | 2305 | /* ??? We have no way to distinguish a null-sized array from an |
2306 | array spanning the whole sizetype range, so we arbitrarily | |
2307 | decide that [0, -1] is the only valid representation. */ | |
85d86b55 | 2308 | if (integer_zerop (length) |
97658fc9 | 2309 | && TREE_OVERFLOW (length) |
2310 | && integer_zerop (lb)) | |
85d86b55 | 2311 | length = size_zero_node; |
f2cfea4a | 2312 | |
902de8ed | 2313 | TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size, |
7bd4091f | 2314 | fold_convert (bitsizetype, |
5d7ed6c7 | 2315 | length)); |
cec6c892 | 2316 | |
f00a8c41 | 2317 | /* If we know the size of the element, calculate the total size |
2318 | directly, rather than do some division thing below. This | |
2319 | optimization helps Fortran assumed-size arrays (where the | |
2320 | size of the array is determined at runtime) substantially. */ | |
2321 | if (TYPE_SIZE_UNIT (element)) | |
083a2b5e | 2322 | TYPE_SIZE_UNIT (type) |
2323 | = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length); | |
f2cfea4a | 2324 | } |
2325 | ||
2326 | /* Now round the alignment and size, | |
2327 | using machine-dependent criteria if any. */ | |
2328 | ||
8c522687 | 2329 | unsigned align = TYPE_ALIGN (element); |
2330 | if (TYPE_USER_ALIGN (type)) | |
2331 | align = MAX (align, TYPE_ALIGN (type)); | |
2332 | else | |
2333 | TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element); | |
f2cfea4a | 2334 | #ifdef ROUND_TYPE_ALIGN |
8c522687 | 2335 | align = ROUND_TYPE_ALIGN (type, align, BITS_PER_UNIT); |
f2cfea4a | 2336 | #else |
8c522687 | 2337 | align = MAX (align, BITS_PER_UNIT); |
f2cfea4a | 2338 | #endif |
8c522687 | 2339 | TYPE_ALIGN (type) = align; |
342ad2d6 | 2340 | SET_TYPE_MODE (type, BLKmode); |
f2cfea4a | 2341 | if (TYPE_SIZE (type) != 0 |
f91ed644 | 2342 | && ! targetm.member_type_forces_blk (type, VOIDmode) |
f2cfea4a | 2343 | /* BLKmode elements force BLKmode aggregate; |
2344 | else extract/store fields may lose. */ | |
2345 | && (TYPE_MODE (TREE_TYPE (type)) != BLKmode | |
2346 | || TYPE_NO_FORCE_BLK (TREE_TYPE (type)))) | |
2347 | { | |
13d3ceb9 | 2348 | SET_TYPE_MODE (type, mode_for_array (TREE_TYPE (type), |
2349 | TYPE_SIZE (type))); | |
0fc6aef1 | 2350 | if (TYPE_MODE (type) != BLKmode |
2351 | && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT | |
a2ee4f78 | 2352 | && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))) |
f2cfea4a | 2353 | { |
2354 | TYPE_NO_FORCE_BLK (type) = 1; | |
342ad2d6 | 2355 | SET_TYPE_MODE (type, BLKmode); |
f2cfea4a | 2356 | } |
f2cfea4a | 2357 | } |
e1b062ae | 2358 | /* When the element size is constant, check that it is at least as |
2359 | large as the element alignment. */ | |
b3bb0d2d | 2360 | if (TYPE_SIZE_UNIT (element) |
2361 | && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST | |
e1b062ae | 2362 | /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than |
2363 | TYPE_ALIGN_UNIT. */ | |
f96bd2bf | 2364 | && !TREE_OVERFLOW (TYPE_SIZE_UNIT (element)) |
b3bb0d2d | 2365 | && !integer_zerop (TYPE_SIZE_UNIT (element)) |
2366 | && compare_tree_int (TYPE_SIZE_UNIT (element), | |
2367 | TYPE_ALIGN_UNIT (element)) < 0) | |
2368 | error ("alignment of array elements is greater than element size"); | |
f2cfea4a | 2369 | break; |
2370 | } | |
2371 | ||
2372 | case RECORD_TYPE: | |
83675f44 | 2373 | case UNION_TYPE: |
2374 | case QUAL_UNION_TYPE: | |
99f4e085 | 2375 | { |
2376 | tree field; | |
2377 | record_layout_info rli; | |
2378 | ||
2379 | /* Initialize the layout information. */ | |
02e7a332 | 2380 | rli = start_record_layout (type); |
2381 | ||
83675f44 | 2382 | /* If this is a QUAL_UNION_TYPE, we want to process the fields |
2383 | in the reverse order in building the COND_EXPR that denotes | |
2384 | its size. We reverse them again later. */ | |
2385 | if (TREE_CODE (type) == QUAL_UNION_TYPE) | |
2386 | TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type)); | |
02e7a332 | 2387 | |
2388 | /* Place all the fields. */ | |
1767a056 | 2389 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
02e7a332 | 2390 | place_field (rli, field); |
2391 | ||
83675f44 | 2392 | if (TREE_CODE (type) == QUAL_UNION_TYPE) |
2393 | TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type)); | |
02e7a332 | 2394 | |
99f4e085 | 2395 | /* Finish laying out the record. */ |
23ed74d8 | 2396 | finish_record_layout (rli, /*free_p=*/true); |
99f4e085 | 2397 | } |
f2cfea4a | 2398 | break; |
2399 | ||
f2cfea4a | 2400 | default: |
04e579b6 | 2401 | gcc_unreachable (); |
fe352cf1 | 2402 | } |
f2cfea4a | 2403 | |
99f4e085 | 2404 | /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For |
83675f44 | 2405 | records and unions, finish_record_layout already called this |
2406 | function. */ | |
3d54a731 | 2407 | if (!RECORD_OR_UNION_TYPE_P (type)) |
99f4e085 | 2408 | finalize_type_size (type); |
f2cfea4a | 2409 | |
b35a8f48 | 2410 | /* We should never see alias sets on incomplete aggregates. And we |
2411 | should not call layout_type on aggregates that are already complete. */
2412 | if (AGGREGATE_TYPE_P (type)) | |
2413 | gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type)); | |
f2cfea4a | 2414 | } |
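/* Usage sketch: most callers reach layout_type indirectly, e.g. via
   the type constructors.  Here build_array_type lays out int[10] as a
   side effect.  */
static tree
layout_int_array_sketch (void)
{
  tree index = build_index_type (size_int (9));	/* domain [0, 9] */
  tree arr = build_array_type (integer_type_node, index);

  /* TYPE_SIZE (arr) is now 10 * TYPE_SIZE (integer_type_node).  */
  return arr;
}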
342ad2d6 | 2415 | |
392dee1e | 2416 | /* Return the least alignment required for type TYPE. */ |
2417 | ||
2418 | unsigned int | |
2419 | min_align_of_type (tree type) | |
2420 | { | |
2421 | unsigned int align = TYPE_ALIGN (type); | |
c30da3c3 | 2422 | if (!TYPE_USER_ALIGN (type)) |
2423 | { | |
6312c69b | 2424 | align = MIN (align, BIGGEST_ALIGNMENT); |
392dee1e | 2425 | #ifdef BIGGEST_FIELD_ALIGNMENT |
c30da3c3 | 2426 | align = MIN (align, BIGGEST_FIELD_ALIGNMENT); |
392dee1e | 2427 | #endif |
c30da3c3 | 2428 | unsigned int field_align = align; |
392dee1e | 2429 | #ifdef ADJUST_FIELD_ALIGN |
c30da3c3 | 2430 | tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, type); |
2431 | field_align = ADJUST_FIELD_ALIGN (field, field_align); | |
2432 | ggc_free (field); | |
392dee1e | 2433 | #endif |
c30da3c3 | 2434 | align = MIN (align, field_align); |
2435 | } | |
392dee1e | 2436 | return align / BITS_PER_UNIT; |
2437 | } | |
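/* Usage sketch: the guaranteed byte alignment of a type when used as
   a field.  On a target whose ADJUST_FIELD_ALIGN lowers double's field
   alignment (classic i386 is assumed here for illustration), this
   returns 4 for double even though TYPE_ALIGN is 64 bits.  */
static unsigned int
min_double_field_align_sketch (void)
{
  return min_align_of_type (double_type_node);
}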
2438 | ||
342ad2d6 | 2439 | /* Vector types need to re-check the target flags each time we report |
2440 | the machine mode. We need to do this because attribute target can | |
2441 | change the result of vector_mode_supported_p and have_regs_of_mode | |
2442 | on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can | |
2443 | change on a per-function basis. */ | |
48e1416a | 2444 | /* ??? Possibly a better solution is to run through all the types |
342ad2d6 | 2445 | referenced by a function and re-compute the TYPE_MODE once, rather |
2446 | than make the TYPE_MODE macro call a function. */ | |
2447 | ||
3754d046 | 2448 | machine_mode |
342ad2d6 | 2449 | vector_type_mode (const_tree t) |
2450 | { | |
3754d046 | 2451 | machine_mode mode; |
342ad2d6 | 2452 | |
2453 | gcc_assert (TREE_CODE (t) == VECTOR_TYPE); | |
2454 | ||
8f2eb9e1 | 2455 | mode = t->type_common.mode; |
342ad2d6 | 2456 | if (VECTOR_MODE_P (mode) |
2457 | && (!targetm.vector_mode_supported_p (mode) | |
2458 | || !have_regs_of_mode[mode])) | |
2459 | { | |
3754d046 | 2460 | machine_mode innermode = TREE_TYPE (t)->type_common.mode; |
342ad2d6 | 2461 | |
2462 | /* For integers, try mapping it to a same-sized scalar mode. */ | |
2463 | if (GET_MODE_CLASS (innermode) == MODE_INT) | |
2464 | { | |
2465 | mode = mode_for_size (TYPE_VECTOR_SUBPARTS (t) | |
2466 | * GET_MODE_BITSIZE (innermode), MODE_INT, 0); | |
2467 | ||
2468 | if (mode != VOIDmode && have_regs_of_mode[mode]) | |
2469 | return mode; | |
2470 | } | |
2471 | ||
2472 | return BLKmode; | |
2473 | } | |
2474 | ||
2475 | return mode; | |
2476 | } | |
f2cfea4a | 2477 | \f |
2478 | /* Create and return a type for signed integers of PRECISION bits. */ | |
2479 | ||
2480 | tree | |
60b8c5b3 | 2481 | make_signed_type (int precision) |
f2cfea4a | 2482 | { |
19cb6b50 | 2483 | tree type = make_node (INTEGER_TYPE); |
f2cfea4a | 2484 | |
2485 | TYPE_PRECISION (type) = precision; | |
2486 | ||
902de8ed | 2487 | fixup_signed_type (type); |
f2cfea4a | 2488 | return type; |
2489 | } | |
2490 | ||
2491 | /* Create and return a type for unsigned integers of PRECISION bits. */ | |
2492 | ||
2493 | tree | |
60b8c5b3 | 2494 | make_unsigned_type (int precision) |
f2cfea4a | 2495 | { |
19cb6b50 | 2496 | tree type = make_node (INTEGER_TYPE); |
f2cfea4a | 2497 | |
2498 | TYPE_PRECISION (type) = precision; | |
2499 | ||
f2cfea4a | 2500 | fixup_unsigned_type (type); |
2501 | return type; | |
2502 | } | |
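/* Usage sketch: the precision need not match a machine mode exactly.
   A front end can ask for a 24-bit signed type, and layout_type picks
   the smallest integer mode that fits (SImode on typical targets)
   while TYPE_PRECISION stays 24.  */
static tree
make_int24_sketch (void)
{
  return make_signed_type (24);
}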
902de8ed | 2503 | \f |
06f0b99c | 2504 | /* Create and return a type for fract of PRECISION bits, UNSIGNEDP, |
2505 | and SATP. */ | |
2506 | ||
2507 | tree | |
2508 | make_fract_type (int precision, int unsignedp, int satp) | |
2509 | { | |
2510 | tree type = make_node (FIXED_POINT_TYPE); | |
2511 | ||
2512 | TYPE_PRECISION (type) = precision; | |
2513 | ||
2514 | if (satp) | |
2515 | TYPE_SATURATING (type) = 1; | |
2516 | ||
2517 | /* Lay out the type: set its alignment, size, etc. */ | |
2518 | if (unsignedp) | |
2519 | { | |
2520 | TYPE_UNSIGNED (type) = 1; | |
342ad2d6 | 2521 | SET_TYPE_MODE (type, mode_for_size (precision, MODE_UFRACT, 0)); |
06f0b99c | 2522 | } |
2523 | else | |
342ad2d6 | 2524 | SET_TYPE_MODE (type, mode_for_size (precision, MODE_FRACT, 0)); |
06f0b99c | 2525 | layout_type (type); |
2526 | ||
2527 | return type; | |
2528 | } | |
2529 | ||
2530 | /* Create and return a type for accum of PRECISION bits, UNSIGNEDP, | |
2531 | and SATP. */ | |
2532 | ||
2533 | tree | |
2534 | make_accum_type (int precision, int unsignedp, int satp) | |
2535 | { | |
2536 | tree type = make_node (FIXED_POINT_TYPE); | |
2537 | ||
2538 | TYPE_PRECISION (type) = precision; | |
2539 | ||
2540 | if (satp) | |
2541 | TYPE_SATURATING (type) = 1; | |
2542 | ||
2543 | /* Lay out the type: set its alignment, size, etc. */ | |
2544 | if (unsignedp) | |
2545 | { | |
2546 | TYPE_UNSIGNED (type) = 1; | |
342ad2d6 | 2547 | SET_TYPE_MODE (type, mode_for_size (precision, MODE_UACCUM, 0)); |
06f0b99c | 2548 | } |
2549 | else | |
342ad2d6 | 2550 | SET_TYPE_MODE (type, mode_for_size (precision, MODE_ACCUM, 0)); |
06f0b99c | 2551 | layout_type (type); |
2552 | ||
2553 | return type; | |
2554 | } | |

/* Initialize sizetype and bitsizetype so layout_type can use them.  */

void
initialize_sizetypes (void)
{
  int precision, bprecision;

  /* Get sizetype's precision from the SIZETYPE target macro.  */
  if (strcmp (SIZETYPE, "unsigned int") == 0)
    precision = INT_TYPE_SIZE;
  else if (strcmp (SIZETYPE, "long unsigned int") == 0)
    precision = LONG_TYPE_SIZE;
  else if (strcmp (SIZETYPE, "long long unsigned int") == 0)
    precision = LONG_LONG_TYPE_SIZE;
  else if (strcmp (SIZETYPE, "short unsigned int") == 0)
    precision = SHORT_TYPE_SIZE;
  else
    {
      int i;

      precision = -1;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZETYPE) == 0)
	      precision = int_n_data[i].bitsize;
	  }
      if (precision == -1)
	gcc_unreachable ();
    }

  /* bitsizetype must be able to count individual bits, so allow for
     BITS_PER_UNIT_LOG extra bits (plus one), rounded up to a mode with
     full integer precision and capped at the widest fixed-size mode.  */
  bprecision
    = MIN (precision + BITS_PER_UNIT_LOG + 1, MAX_FIXED_MODE_SIZE);
  bprecision
    = GET_MODE_PRECISION (smallest_mode_for_size (bprecision, MODE_INT));
  if (bprecision > HOST_BITS_PER_DOUBLE_INT)
    bprecision = HOST_BITS_PER_DOUBLE_INT;

  /* Create stubs for sizetype and bitsizetype so we can create constants.  */
  sizetype = make_node (INTEGER_TYPE);
  TYPE_NAME (sizetype) = get_identifier ("sizetype");
  TYPE_PRECISION (sizetype) = precision;
  TYPE_UNSIGNED (sizetype) = 1;
  bitsizetype = make_node (INTEGER_TYPE);
  TYPE_NAME (bitsizetype) = get_identifier ("bitsizetype");
  TYPE_PRECISION (bitsizetype) = bprecision;
  TYPE_UNSIGNED (bitsizetype) = 1;

  /* Now layout both types manually.  */
  SET_TYPE_MODE (sizetype, smallest_mode_for_size (precision, MODE_INT));
  TYPE_ALIGN (sizetype) = GET_MODE_ALIGNMENT (TYPE_MODE (sizetype));
  TYPE_SIZE (sizetype) = bitsize_int (precision);
  TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (TYPE_MODE (sizetype)));
  set_min_and_max_values_for_integral_type (sizetype, precision, UNSIGNED);

  SET_TYPE_MODE (bitsizetype, smallest_mode_for_size (bprecision, MODE_INT));
  TYPE_ALIGN (bitsizetype) = GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype));
  TYPE_SIZE (bitsizetype) = bitsize_int (bprecision);
  TYPE_SIZE_UNIT (bitsizetype)
    = size_int (GET_MODE_SIZE (TYPE_MODE (bitsizetype)));
  set_min_and_max_values_for_integral_type (bitsizetype, bprecision, UNSIGNED);

  /* Create the signed variants of *sizetype.  */
  ssizetype = make_signed_type (TYPE_PRECISION (sizetype));
  TYPE_NAME (ssizetype) = get_identifier ("ssizetype");
  sbitsizetype = make_signed_type (TYPE_PRECISION (bitsizetype));
  TYPE_NAME (sbitsizetype) = get_identifier ("sbitsizetype");
}
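/* Worked example (illustrative, assuming an LP64 target such as
   x86_64-linux-gnu, where MAX_FIXED_MODE_SIZE is 128): SIZETYPE is
   "long unsigned int", so precision = LONG_TYPE_SIZE = 64.  Then
   bprecision = MIN (64 + 3 + 1, 128) = 68, which
   smallest_mode_for_size rounds up to the 128-bit integer mode, and
   HOST_BITS_PER_DOUBLE_INT caps at 128.  sizetype thus ends up as a
   64-bit unsigned type and bitsizetype as a 128-bit one, so counting
   in bits cannot overflow for any object a 64-bit size_t can
   describe.  */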
\f
/* TYPE is an integral type, i.e., an INTEGER_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE.  Set TYPE_MIN_VALUE and TYPE_MAX_VALUE for TYPE,
   based on PRECISION and the signedness SGN.  PRECISION need not
   correspond to a width supported natively by the hardware; for
   example, on a machine with 8-bit, 16-bit, and 32-bit register
   modes, PRECISION might be 7, 23, or 61.  */

void
set_min_and_max_values_for_integral_type (tree type,
					  int precision,
					  signop sgn)
{
  /* For bitfields with zero width we end up creating integer types
     with zero precision.  Don't assign any minimum/maximum values
     to those types, they don't have any valid value.  */
  if (precision < 1)
    return;

  TYPE_MIN_VALUE (type)
    = wide_int_to_tree (type, wi::min_value (precision, sgn));
  TYPE_MAX_VALUE (type)
    = wide_int_to_tree (type, wi::max_value (precision, sgn));
}
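/* Example (illustrative only): for PRECISION 7, wi::min_value and
   wi::max_value yield -64 and 63 with SIGNED, or 0 and 127 with
   UNSIGNED, even though no 7-bit register mode exists; the extremes
   are simply 7-bit values represented in TYPE's (wider) mode.  */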

/* Set the extreme values of TYPE based on its precision in bits,
   then lay it out.  Used when make_signed_type won't do
   because the tree code is not INTEGER_TYPE.
   E.g. for Pascal, when the -fsigned-char option is given.  */

void
fixup_signed_type (tree type)
{
  int precision = TYPE_PRECISION (type);

  set_min_and_max_values_for_integral_type (type, precision, SIGNED);

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}

/* Set the extreme values of TYPE based on its precision in bits,
   then lay it out.  This is used both in `make_unsigned_type'
   and for enumeral types.  */

void
fixup_unsigned_type (tree type)
{
  int precision = TYPE_PRECISION (type);

  TYPE_UNSIGNED (type) = 1;

  set_min_and_max_values_for_integral_type (type, precision, UNSIGNED);

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}
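/* Illustrative sketch (an assumption, not original source): this is
   the enumeral-type path mentioned above.  A front end that has
   narrowed "enum e { A = 0, B = 200 };" to its smallest usable width
   (e.g. under -fshort-enums) might finish the type with:

     TYPE_PRECISION (enumtype) = 8;
     fixup_unsigned_type (enumtype);

   which installs the 0..255 bounds and lays the type out.  */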
\f
/* Construct an iterator for a bitfield that spans BITSIZE bits,
   starting at BITPOS.

   BITREGION_START is the bit position of the first bit in this
   sequence of bit fields.  BITREGION_END is the last bit in this
   sequence.  If these two fields are non-zero, we should restrict the
   memory access to that range.  Otherwise, we are allowed to touch
   any adjacent non bit-fields.

   ALIGN is the alignment of the underlying object in bits.
   VOLATILEP says whether the bitfield is volatile.  */

bit_field_mode_iterator
::bit_field_mode_iterator (HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
			   HOST_WIDE_INT bitregion_start,
			   HOST_WIDE_INT bitregion_end,
			   unsigned int align, bool volatilep)
: m_mode (GET_CLASS_NARROWEST_MODE (MODE_INT)), m_bitsize (bitsize),
  m_bitpos (bitpos), m_bitregion_start (bitregion_start),
  m_bitregion_end (bitregion_end), m_align (align),
  m_volatilep (volatilep), m_count (0)
{
  if (!m_bitregion_end)
    {
      /* We can assume that any aligned chunk of ALIGN bits that overlaps
	 the bitfield is mapped and won't trap, provided that ALIGN isn't
	 too large.  The cap is the biggest required alignment for data,
	 or at least the word size.  And force one such chunk at least.  */
      unsigned HOST_WIDE_INT units
	= MIN (align, MAX (BIGGEST_ALIGNMENT, BITS_PER_WORD));
      if (bitsize <= 0)
	bitsize = 1;
      m_bitregion_end = bitpos + bitsize + units - 1;
      m_bitregion_end -= m_bitregion_end % units + 1;
    }
}
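/* Worked example (illustrative, assuming BIGGEST_ALIGNMENT >= 32):
   for bitpos 17, bitsize 3 and align 32, units is 32, so
   m_bitregion_end = 17 + 3 + 32 - 1 = 51, and then
   51 - (51 % 32 + 1) = 51 - 20 = 31.  The inferred region ends at
   bit 31, the last bit of the aligned 32-bit chunk containing the
   field.  */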

/* Calls to this function return successively larger modes that can be used
   to represent the bitfield.  Return true if another bitfield mode is
   available, storing it in *OUT_MODE if so.  */

bool
bit_field_mode_iterator::next_mode (machine_mode *out_mode)
{
  for (; m_mode != VOIDmode; m_mode = GET_MODE_WIDER_MODE (m_mode))
    {
      unsigned int unit = GET_MODE_BITSIZE (m_mode);

      /* Skip modes that don't have full precision.  */
      if (unit != GET_MODE_PRECISION (m_mode))
	continue;

      /* Stop if the mode is too wide to handle efficiently.  */
      if (unit > MAX_FIXED_MODE_SIZE)
	break;

      /* Don't deliver more than one multiword mode; the smallest one
	 should be used.  */
      if (m_count > 0 && unit > BITS_PER_WORD)
	break;

      /* Skip modes that are too small.  */
      unsigned HOST_WIDE_INT substart = (unsigned HOST_WIDE_INT) m_bitpos % unit;
      unsigned HOST_WIDE_INT subend = substart + m_bitsize;
      if (subend > unit)
	continue;

      /* Stop if the mode goes outside the bitregion.  */
      HOST_WIDE_INT start = m_bitpos - substart;
      if (m_bitregion_start && start < m_bitregion_start)
	break;
      HOST_WIDE_INT end = start + unit;
      if (end > m_bitregion_end + 1)
	break;

      /* Stop if the mode requires too much alignment.  */
      if (GET_MODE_ALIGNMENT (m_mode) > m_align
	  && SLOW_UNALIGNED_ACCESS (m_mode, m_align))
	break;

      *out_mode = m_mode;
      m_mode = GET_MODE_WIDER_MODE (m_mode);
      m_count++;
      return true;
    }
  return false;
}
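/* Illustrative walk-through (not original source): for a field at
   bitpos 6 with bitsize 5, QImode is skipped because the field
   straddles a byte (substart = 6 % 8 = 6, subend = 11 > 8).  HImode
   is the first mode delivered (substart = 6, subend = 11 <= 16),
   followed by successively wider modes until one overruns the
   bitregion, needs more alignment than M_ALIGN permits, or runs into
   the one-multiword-mode limit above.  */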

/* Return true if smaller modes are generally preferred for this kind
   of bitfield.  */

bool
bit_field_mode_iterator::prefer_smaller_modes ()
{
  return (m_volatilep
	  ? targetm.narrow_volatile_bitfield ()
	  : !SLOW_BYTE_ACCESS);
}

/* Find the best machine mode to use when referencing a bit field of length
   BITSIZE bits starting at BITPOS.

   BITREGION_START is the bit position of the first bit in this
   sequence of bit fields.  BITREGION_END is the last bit in this
   sequence.  If these two fields are non-zero, we should restrict the
   memory access to that range.  Otherwise, we are allowed to touch
   any adjacent non bit-fields.

   The underlying object is known to be aligned to a boundary of ALIGN bits.
   If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
   larger than LARGEST_MODE (usually SImode).

   If no mode meets all these conditions, we return VOIDmode.

   If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the
   smallest mode meeting these conditions.

   If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the
   largest mode (but a mode no wider than UNITS_PER_WORD) that meets
   all the conditions.

   If VOLATILEP is true, the narrow_volatile_bitfield target hook is used
   to decide which of the above modes should be used.  */

machine_mode
get_best_mode (int bitsize, int bitpos,
	       unsigned HOST_WIDE_INT bitregion_start,
	       unsigned HOST_WIDE_INT bitregion_end,
	       unsigned int align,
	       machine_mode largest_mode, bool volatilep)
{
  bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start,
				bitregion_end, align, volatilep);
  machine_mode widest_mode = VOIDmode;
  machine_mode mode;
  while (iter.next_mode (&mode)
	 /* ??? For historical reasons, reject modes that would normally
	    receive greater alignment, even if unaligned accesses are
	    acceptable.  This has both advantages and disadvantages.
	    Removing this check means that something like:

	      struct s { unsigned int x; unsigned int y; };
	      int f (struct s *s) { return s->x == 0 && s->y == 0; }

	    can be implemented using a single load and compare on
	    64-bit machines that have no alignment restrictions.
	    For example, on powerpc64-linux-gnu, we would generate:

		ld 3,0(3)
		cntlzd 3,3
		srdi 3,3,6
		blr

	    rather than:

		lwz 9,0(3)
		cmpwi 7,9,0
		bne 7,.L3
		lwz 3,4(3)
		cntlzw 3,3
		srwi 3,3,5
		extsw 3,3
		blr
		.p2align 4,,15
	    .L3:
		li 3,0
		blr

	    However, accessing more than one field can make life harder
	    for the gimple optimizers.  For example, gcc.dg/vect/bb-slp-5.c
	    has a series of unsigned short copies followed by a series of
	    unsigned short comparisons.  With this check, both the copies
	    and comparisons remain 16-bit accesses and FRE is able
	    to eliminate the latter.  Without the check, the comparisons
	    can be done using 2 64-bit operations, which FRE isn't able
	    to handle in the same way.

	    Either way, it would probably be worth disabling this check
	    during expand.  One particular example where removing the
	    check would help is the get_best_mode call in store_bit_field.
	    If we are given a memory bitregion of 128 bits that is aligned
	    to a 64-bit boundary, and the bitfield we want to modify is
	    in the second half of the bitregion, this check causes
	    store_bit_field to turn the memory into a 64-bit reference
	    to the _first_ half of the region.  We later use
	    adjust_bitfield_address to get a reference to the correct half,
	    but doing so looks to adjust_bitfield_address as though we are
	    moving past the end of the original object, so it drops the
	    associated MEM_EXPR and MEM_OFFSET.  Removing the check
	    causes store_bit_field to keep a 128-bit memory reference,
	    so that the final bitfield reference still has a MEM_EXPR
	    and MEM_OFFSET.  */
	 && GET_MODE_ALIGNMENT (mode) <= align
	 && (largest_mode == VOIDmode
	     || GET_MODE_SIZE (mode) <= GET_MODE_SIZE (largest_mode)))
    {
      widest_mode = mode;
      if (iter.prefer_smaller_modes ())
	break;
    }
  return widest_mode;
}
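/* Illustrative usage (a sketch, not a call site from this file): to
   pick a mode for reading a 3-bit field at bit offset 37 of a
   32-bit-aligned object, with no enclosing bitregion and no width
   cap:

     machine_mode m = get_best_mode (3, 37, 0, 0, 32, VOIDmode, false);

   On a typical target with !SLOW_BYTE_ACCESS this returns QImode,
   since the field fits entirely within the byte at bits 32..39 and
   smaller modes are preferred.  */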

/* Get the minimal and maximal values representable in MODE (signed or
   unsigned depending on SIGN).  The returned constants are made to be
   usable in TARGET_MODE.  */

void
get_mode_bounds (machine_mode mode, int sign,
		 machine_mode target_mode,
		 rtx *mmin, rtx *mmax)
{
  unsigned size = GET_MODE_PRECISION (mode);
  unsigned HOST_WIDE_INT min_val, max_val;

  gcc_assert (size <= HOST_BITS_PER_WIDE_INT);

  /* Special case BImode, which has values 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    {
      if (STORE_FLAG_VALUE < 0)
	{
	  min_val = STORE_FLAG_VALUE;
	  max_val = 0;
	}
      else
	{
	  min_val = 0;
	  max_val = STORE_FLAG_VALUE;
	}
    }
  else if (sign)
    {
      min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1));
      max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1;
    }
  else
    {
      min_val = 0;
      max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1;
    }

  *mmin = gen_int_mode (min_val, target_mode);
  *mmax = gen_int_mode (max_val, target_mode);
}
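/* Example (illustrative): get_mode_bounds (QImode, 1, SImode, &lo, &hi)
   sets *mmin and *mmax to CONST_INTs for -128 and 127, adjusted for
   SImode by gen_int_mode; with SIGN == 0 the bounds are 0 and 255.
   Note the unsigned maximum is computed with two shifts so that
   shifting by the full width of HOST_WIDE_INT is never attempted.  */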

#include "gt-stor-layout.h"