]>
Commit | Line | Data |
---|---|---|
7306ed3f | 1 | /* C-compiler utilities for types and variables storage layout |
06ceef4e | 2 | Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1996, 1998, |
f44157f8 JJ |
3 | 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, |
4 | 2011 Free Software Foundation, Inc. | |
7306ed3f | 5 | |
1322177d | 6 | This file is part of GCC. |
7306ed3f | 7 | |
1322177d LB |
8 | GCC is free software; you can redistribute it and/or modify it under |
9 | the terms of the GNU General Public License as published by the Free | |
9dcd6f09 | 10 | Software Foundation; either version 3, or (at your option) any later |
1322177d | 11 | version. |
7306ed3f | 12 | |
1322177d LB |
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
7306ed3f JW |
17 | |
18 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ | |
7306ed3f JW |
21 | |
22 | ||
23 | #include "config.h" | |
670ee920 | 24 | #include "system.h" |
4977bab6 ZW |
25 | #include "coretypes.h" |
26 | #include "tm.h" | |
7306ed3f | 27 | #include "tree.h" |
d05a5492 | 28 | #include "rtl.h" |
6baf1cc8 | 29 | #include "tm_p.h" |
566cdc73 | 30 | #include "flags.h" |
7306ed3f | 31 | #include "function.h" |
234042f4 | 32 | #include "expr.h" |
ea40ba9c | 33 | #include "output.h" |
718f9c0f | 34 | #include "diagnostic-core.h" |
d7db6646 | 35 | #include "ggc.h" |
f913c102 | 36 | #include "target.h" |
43577e6b | 37 | #include "langhooks.h" |
26277d41 | 38 | #include "regs.h" |
89b0433e | 39 | #include "params.h" |
f82a627c EB |
40 | #include "cgraph.h" |
41 | #include "tree-inline.h" | |
42 | #include "tree-dump.h" | |
43 | #include "gimple.h" | |
7306ed3f | 44 | |
7306ed3f | 45 | /* Data type for the expressions representing sizes of data types. |
896cced4 | 46 | It is the first integer type laid out. */ |
18dae016 | 47 | tree sizetype_tab[(int) stk_type_kind_last]; |
7306ed3f | 48 | |
d4c40650 RS |
49 | /* If nonzero, this is an upper limit on alignment of structure fields. |
50 | The value is measured in bits. */ | |
467cecf3 | 51 | unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT; |
d4c40650 | 52 | |
d4ebfa65 BE |
53 | /* Nonzero if all REFERENCE_TYPEs are internal and hence should be allocated |
54 | in the address spaces' address_mode, not pointer_mode. Set only by | |
55 | internal_reference_types called only by a front end. */ | |
b5d6a2ff RK |
56 | static int reference_types_internal = 0; |
57 | ||
f82a627c | 58 | static tree self_referential_size (tree); |
46c5ad27 AJ |
59 | static void finalize_record_size (record_layout_info); |
60 | static void finalize_type_size (tree); | |
61 | static void place_union_field (record_layout_info, tree); | |
b8089d8d | 62 | #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED) |
46c5ad27 AJ |
63 | static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT, |
64 | HOST_WIDE_INT, tree); | |
b8089d8d | 65 | #endif |
46c5ad27 | 66 | extern void debug_rli (record_layout_info); |
7306ed3f | 67 | \f |
d4ebfa65 BE |
68 | /* Show that REFERENCE_TYPES are internal and should use address_mode. |
69 | Called only by front end. */ | |
b5d6a2ff RK |
70 | |
71 | void | |
46c5ad27 | 72 | internal_reference_types (void) |
b5d6a2ff RK |
73 | { |
74 | reference_types_internal = 1; | |
75 | } | |
76 | ||
76ffb3a0 | 77 | /* Given a size SIZE that may not be a constant, return a SAVE_EXPR |
7306ed3f JW |
78 | to serve as the actual size-expression for a type or decl. */ |
79 | ||
4e4b555d | 80 | tree |
46c5ad27 | 81 | variable_size (tree size) |
7306ed3f | 82 | { |
f82a627c EB |
83 | /* Obviously. */ |
84 | if (TREE_CONSTANT (size)) | |
85 | return size; | |
86 | ||
87 | /* If the size is self-referential, we can't make a SAVE_EXPR (see | |
88 | save_expr for the rationale). But we can do something else. */ | |
89 | if (CONTAINS_PLACEHOLDER_P (size)) | |
90 | return self_referential_size (size); | |
91 | ||
c99c0026 EB |
92 | /* If we are in the global binding level, we can't make a SAVE_EXPR |
93 | since it may end up being shared across functions, so it is up | |
94 | to the front-end to deal with this case. */ | |
95 | if (lang_hooks.decls.global_bindings_p ()) | |
5e9bec99 RK |
96 | return size; |
97 | ||
907a08d9 | 98 | return save_expr (size); |
7306ed3f | 99 | } |
f82a627c EB |
100 | |
101 | /* An array of functions used for self-referential size computation. */ | |
102 | static GTY(()) VEC (tree, gc) *size_functions; | |
103 | ||
0d475ac5 EB |
104 | /* Look inside EXPR into simple arithmetic operations involving constants. |
105 | Return the outermost non-arithmetic or non-constant node. */ | |
106 | ||
107 | static tree | |
108 | skip_simple_constant_arithmetic (tree expr) | |
109 | { | |
110 | while (true) | |
111 | { | |
112 | if (UNARY_CLASS_P (expr)) | |
113 | expr = TREE_OPERAND (expr, 0); | |
114 | else if (BINARY_CLASS_P (expr)) | |
115 | { | |
116 | if (TREE_CONSTANT (TREE_OPERAND (expr, 1))) | |
117 | expr = TREE_OPERAND (expr, 0); | |
118 | else if (TREE_CONSTANT (TREE_OPERAND (expr, 0))) | |
119 | expr = TREE_OPERAND (expr, 1); | |
120 | else | |
121 | break; | |
122 | } | |
123 | else | |
124 | break; | |
125 | } | |
126 | ||
127 | return expr; | |
128 | } | |
129 | ||
f82a627c EB |
130 | /* Similar to copy_tree_r but do not copy component references involving |
131 | PLACEHOLDER_EXPRs. These nodes are spotted in find_placeholder_in_expr | |
132 | and substituted in substitute_in_expr. */ | |
133 | ||
134 | static tree | |
135 | copy_self_referential_tree_r (tree *tp, int *walk_subtrees, void *data) | |
136 | { | |
137 | enum tree_code code = TREE_CODE (*tp); | |
138 | ||
139 | /* Stop at types, decls, constants like copy_tree_r. */ | |
140 | if (TREE_CODE_CLASS (code) == tcc_type | |
141 | || TREE_CODE_CLASS (code) == tcc_declaration | |
142 | || TREE_CODE_CLASS (code) == tcc_constant) | |
143 | { | |
144 | *walk_subtrees = 0; | |
145 | return NULL_TREE; | |
146 | } | |
147 | ||
148 | /* This is the pattern built in ada/make_aligning_type. */ | |
149 | else if (code == ADDR_EXPR | |
150 | && TREE_CODE (TREE_OPERAND (*tp, 0)) == PLACEHOLDER_EXPR) | |
151 | { | |
152 | *walk_subtrees = 0; | |
153 | return NULL_TREE; | |
154 | } | |
155 | ||
156 | /* Default case: the component reference. */ | |
157 | else if (code == COMPONENT_REF) | |
158 | { | |
159 | tree inner; | |
160 | for (inner = TREE_OPERAND (*tp, 0); | |
161 | REFERENCE_CLASS_P (inner); | |
162 | inner = TREE_OPERAND (inner, 0)) | |
163 | ; | |
164 | ||
165 | if (TREE_CODE (inner) == PLACEHOLDER_EXPR) | |
166 | { | |
167 | *walk_subtrees = 0; | |
168 | return NULL_TREE; | |
169 | } | |
170 | } | |
171 | ||
172 | /* We're not supposed to have them in self-referential size trees | |
173 | because we wouldn't properly control when they are evaluated. | |
174 | However, not creating superfluous SAVE_EXPRs requires accurate | |
175 | tracking of readonly-ness all the way down to here, which we | |
176 | cannot always guarantee in practice. So punt in this case. */ | |
177 | else if (code == SAVE_EXPR) | |
178 | return error_mark_node; | |
179 | ||
deb5046b JM |
180 | else if (code == STATEMENT_LIST) |
181 | gcc_unreachable (); | |
182 | ||
f82a627c EB |
183 | return copy_tree_r (tp, walk_subtrees, data); |
184 | } | |
185 | ||
186 | /* Given a SIZE expression that is self-referential, return an equivalent | |
187 | expression to serve as the actual size expression for a type. */ | |
188 | ||
189 | static tree | |
190 | self_referential_size (tree size) | |
191 | { | |
192 | static unsigned HOST_WIDE_INT fnno = 0; | |
193 | VEC (tree, heap) *self_refs = NULL; | |
3bb06db4 | 194 | tree param_type_list = NULL, param_decl_list = NULL; |
f82a627c EB |
195 | tree t, ref, return_type, fntype, fnname, fndecl; |
196 | unsigned int i; | |
197 | char buf[128]; | |
3bb06db4 | 198 | VEC(tree,gc) *args = NULL; |
f82a627c EB |
199 | |
200 | /* Do not factor out simple operations. */ | |
0d475ac5 | 201 | t = skip_simple_constant_arithmetic (size); |
f82a627c EB |
202 | if (TREE_CODE (t) == CALL_EXPR) |
203 | return size; | |
204 | ||
205 | /* Collect the list of self-references in the expression. */ | |
206 | find_placeholder_in_expr (size, &self_refs); | |
207 | gcc_assert (VEC_length (tree, self_refs) > 0); | |
208 | ||
209 | /* Obtain a private copy of the expression. */ | |
210 | t = size; | |
211 | if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL) != NULL_TREE) | |
212 | return size; | |
213 | size = t; | |
214 | ||
215 | /* Build the parameter and argument lists in parallel; also | |
216 | substitute the former for the latter in the expression. */ | |
3bb06db4 | 217 | args = VEC_alloc (tree, gc, VEC_length (tree, self_refs)); |
ac47786e | 218 | FOR_EACH_VEC_ELT (tree, self_refs, i, ref) |
f82a627c EB |
219 | { |
220 | tree subst, param_name, param_type, param_decl; | |
221 | ||
222 | if (DECL_P (ref)) | |
223 | { | |
224 | /* We shouldn't have true variables here. */ | |
225 | gcc_assert (TREE_READONLY (ref)); | |
226 | subst = ref; | |
227 | } | |
228 | /* This is the pattern built in ada/make_aligning_type. */ | |
229 | else if (TREE_CODE (ref) == ADDR_EXPR) | |
230 | subst = ref; | |
231 | /* Default case: the component reference. */ | |
232 | else | |
233 | subst = TREE_OPERAND (ref, 1); | |
234 | ||
235 | sprintf (buf, "p%d", i); | |
236 | param_name = get_identifier (buf); | |
237 | param_type = TREE_TYPE (ref); | |
238 | param_decl | |
239 | = build_decl (input_location, PARM_DECL, param_name, param_type); | |
240 | if (targetm.calls.promote_prototypes (NULL_TREE) | |
241 | && INTEGRAL_TYPE_P (param_type) | |
242 | && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node)) | |
243 | DECL_ARG_TYPE (param_decl) = integer_type_node; | |
244 | else | |
245 | DECL_ARG_TYPE (param_decl) = param_type; | |
246 | DECL_ARTIFICIAL (param_decl) = 1; | |
247 | TREE_READONLY (param_decl) = 1; | |
248 | ||
249 | size = substitute_in_expr (size, subst, param_decl); | |
250 | ||
251 | param_type_list = tree_cons (NULL_TREE, param_type, param_type_list); | |
252 | param_decl_list = chainon (param_decl, param_decl_list); | |
3bb06db4 | 253 | VEC_quick_push (tree, args, ref); |
f82a627c EB |
254 | } |
255 | ||
256 | VEC_free (tree, heap, self_refs); | |
257 | ||
258 | /* Append 'void' to indicate that the number of parameters is fixed. */ | |
259 | param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list); | |
260 | ||
261 | /* The 3 lists have been created in reverse order. */ | |
262 | param_type_list = nreverse (param_type_list); | |
263 | param_decl_list = nreverse (param_decl_list); | |
f82a627c EB |
264 | |
265 | /* Build the function type. */ | |
266 | return_type = TREE_TYPE (size); | |
267 | fntype = build_function_type (return_type, param_type_list); | |
268 | ||
269 | /* Build the function declaration. */ | |
270 | sprintf (buf, "SZ"HOST_WIDE_INT_PRINT_UNSIGNED, fnno++); | |
271 | fnname = get_file_function_name (buf); | |
272 | fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype); | |
910ad8de | 273 | for (t = param_decl_list; t; t = DECL_CHAIN (t)) |
f82a627c EB |
274 | DECL_CONTEXT (t) = fndecl; |
275 | DECL_ARGUMENTS (fndecl) = param_decl_list; | |
276 | DECL_RESULT (fndecl) | |
277 | = build_decl (input_location, RESULT_DECL, 0, return_type); | |
278 | DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl; | |
279 | ||
280 | /* The function has been created by the compiler and we don't | |
281 | want to emit debug info for it. */ | |
282 | DECL_ARTIFICIAL (fndecl) = 1; | |
283 | DECL_IGNORED_P (fndecl) = 1; | |
284 | ||
285 | /* It is supposed to be "const" and never throw. */ | |
286 | TREE_READONLY (fndecl) = 1; | |
287 | TREE_NOTHROW (fndecl) = 1; | |
288 | ||
289 | /* We want it to be inlined when this is deemed profitable, as | |
290 | well as discarded if every call has been integrated. */ | |
291 | DECL_DECLARED_INLINE_P (fndecl) = 1; | |
292 | ||
293 | /* It is made up of a unique return statement. */ | |
294 | DECL_INITIAL (fndecl) = make_node (BLOCK); | |
295 | BLOCK_SUPERCONTEXT (DECL_INITIAL (fndecl)) = fndecl; | |
296 | t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl), size); | |
297 | DECL_SAVED_TREE (fndecl) = build1 (RETURN_EXPR, void_type_node, t); | |
298 | TREE_STATIC (fndecl) = 1; | |
299 | ||
300 | /* Put it onto the list of size functions. */ | |
301 | VEC_safe_push (tree, gc, size_functions, fndecl); | |
302 | ||
303 | /* Replace the original expression with a call to the size function. */ | |
c5911a55 | 304 | return build_call_expr_loc_vec (UNKNOWN_LOCATION, fndecl, args); |
f82a627c EB |
305 | } |
306 | ||
307 | /* Take, queue and compile all the size functions. It is essential that | |
308 | the size functions be gimplified at the very end of the compilation | |
309 | in order to guarantee transparent handling of self-referential sizes. | |
310 | Otherwise the GENERIC inliner would not be able to inline them back | |
311 | at each of their call sites, thus creating artificial non-constant | |
312 | size expressions which would trigger nasty problems later on. */ | |
313 | ||
314 | void | |
315 | finalize_size_functions (void) | |
316 | { | |
317 | unsigned int i; | |
318 | tree fndecl; | |
319 | ||
320 | for (i = 0; VEC_iterate(tree, size_functions, i, fndecl); i++) | |
321 | { | |
322 | dump_function (TDI_original, fndecl); | |
323 | gimplify_function_tree (fndecl); | |
324 | dump_function (TDI_generic, fndecl); | |
325 | cgraph_finalize_function (fndecl, false); | |
326 | } | |
327 | ||
328 | VEC_free (tree, gc, size_functions); | |
329 | } | |
7306ed3f | 330 | \f |
37783865 | 331 | /* Return the machine mode to use for a nonscalar of SIZE bits. The |
55d796da | 332 | mode must be in class MCLASS, and have exactly that many value bits; |
37783865 ZW |
333 | it may have padding as well. If LIMIT is nonzero, modes of wider |
334 | than MAX_FIXED_MODE_SIZE will not be used. */ | |
7306ed3f JW |
335 | |
336 | enum machine_mode | |
55d796da | 337 | mode_for_size (unsigned int size, enum mode_class mclass, int limit) |
7306ed3f | 338 | { |
b3694847 | 339 | enum machine_mode mode; |
7306ed3f | 340 | |
72c602fc | 341 | if (limit && size > MAX_FIXED_MODE_SIZE) |
7306ed3f JW |
342 | return BLKmode; |
343 | ||
5e9bec99 | 344 | /* Get the first mode which has this size, in the specified class. */ |
55d796da | 345 | for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode; |
7306ed3f | 346 | mode = GET_MODE_WIDER_MODE (mode)) |
37783865 | 347 | if (GET_MODE_PRECISION (mode) == size) |
7306ed3f JW |
348 | return mode; |
349 | ||
350 | return BLKmode; | |
351 | } | |
352 | ||
72c602fc RK |
353 | /* Similar, except passed a tree node. */ |
354 | ||
355 | enum machine_mode | |
55d796da | 356 | mode_for_size_tree (const_tree size, enum mode_class mclass, int limit) |
72c602fc | 357 | { |
a6a12bb9 RS |
358 | unsigned HOST_WIDE_INT uhwi; |
359 | unsigned int ui; | |
360 | ||
361 | if (!host_integerp (size, 1)) | |
72c602fc | 362 | return BLKmode; |
a6a12bb9 RS |
363 | uhwi = tree_low_cst (size, 1); |
364 | ui = uhwi; | |
365 | if (uhwi != ui) | |
366 | return BLKmode; | |
55d796da | 367 | return mode_for_size (ui, mclass, limit); |
72c602fc RK |
368 | } |
369 | ||
5e9bec99 | 370 | /* Similar, but never return BLKmode; return the narrowest mode that |
37783865 | 371 | contains at least the requested number of value bits. */ |
5e9bec99 | 372 | |
27922c13 | 373 | enum machine_mode |
55d796da | 374 | smallest_mode_for_size (unsigned int size, enum mode_class mclass) |
5e9bec99 | 375 | { |
b3694847 | 376 | enum machine_mode mode; |
5e9bec99 RK |
377 | |
378 | /* Get the first mode which has at least this size, in the | |
379 | specified class. */ | |
55d796da | 380 | for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode; |
5e9bec99 | 381 | mode = GET_MODE_WIDER_MODE (mode)) |
37783865 | 382 | if (GET_MODE_PRECISION (mode) >= size) |
5e9bec99 RK |
383 | return mode; |
384 | ||
41374e13 | 385 | gcc_unreachable (); |
5e9bec99 RK |
386 | } |
387 | ||
/* Find an integer mode of the exact same size, or BLKmode on failure.  */

enum machine_mode
int_mode_for_mode (enum machine_mode mode)
{
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
      /* Already an integer mode; return it unchanged.  */
      break;

    case MODE_COMPLEX_INT:
    case MODE_COMPLEX_FLOAT:
    case MODE_FLOAT:
    case MODE_DECIMAL_FLOAT:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_FRACT:
    case MODE_ACCUM:
    case MODE_UFRACT:
    case MODE_UACCUM:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_UACCUM:
      /* Ask for an integer mode of exactly the same bit size; this may
	 yield BLKmode if no such integer mode exists.  */
      mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
      break;

    case MODE_RANDOM:
      /* BLKmode maps to itself; anything else in MODE_RANDOM (i.e.
	 VOIDmode) is a caller error.  */
      if (mode == BLKmode)
	break;

      /* ... fall through ...  */

    case MODE_CC:
    default:
      gcc_unreachable ();
    }

  return mode;
}
429 | ||
bb67d9c7 RG |
430 | /* Find a mode that is suitable for representing a vector with |
431 | NUNITS elements of mode INNERMODE. Returns BLKmode if there | |
432 | is no suitable mode. */ | |
433 | ||
434 | enum machine_mode | |
435 | mode_for_vector (enum machine_mode innermode, unsigned nunits) | |
436 | { | |
437 | enum machine_mode mode; | |
438 | ||
439 | /* First, look for a supported vector type. */ | |
440 | if (SCALAR_FLOAT_MODE_P (innermode)) | |
441 | mode = MIN_MODE_VECTOR_FLOAT; | |
442 | else if (SCALAR_FRACT_MODE_P (innermode)) | |
443 | mode = MIN_MODE_VECTOR_FRACT; | |
444 | else if (SCALAR_UFRACT_MODE_P (innermode)) | |
445 | mode = MIN_MODE_VECTOR_UFRACT; | |
446 | else if (SCALAR_ACCUM_MODE_P (innermode)) | |
447 | mode = MIN_MODE_VECTOR_ACCUM; | |
448 | else if (SCALAR_UACCUM_MODE_P (innermode)) | |
449 | mode = MIN_MODE_VECTOR_UACCUM; | |
450 | else | |
451 | mode = MIN_MODE_VECTOR_INT; | |
452 | ||
453 | /* Do not check vector_mode_supported_p here. We'll do that | |
454 | later in vector_type_mode. */ | |
455 | for (; mode != VOIDmode ; mode = GET_MODE_WIDER_MODE (mode)) | |
456 | if (GET_MODE_NUNITS (mode) == nunits | |
457 | && GET_MODE_INNER (mode) == innermode) | |
458 | break; | |
459 | ||
460 | /* For integers, try mapping it to a same-sized scalar mode. */ | |
461 | if (mode == VOIDmode | |
462 | && GET_MODE_CLASS (innermode) == MODE_INT) | |
463 | mode = mode_for_size (nunits * GET_MODE_BITSIZE (innermode), | |
464 | MODE_INT, 0); | |
465 | ||
466 | if (mode == VOIDmode | |
467 | || (GET_MODE_CLASS (mode) == MODE_INT | |
468 | && !have_regs_of_mode[mode])) | |
469 | return BLKmode; | |
470 | ||
471 | return mode; | |
472 | } | |
473 | ||
187515f5 AO |
474 | /* Return the alignment of MODE. This will be bounded by 1 and |
475 | BIGGEST_ALIGNMENT. */ | |
476 | ||
477 | unsigned int | |
46c5ad27 | 478 | get_mode_alignment (enum machine_mode mode) |
187515f5 | 479 | { |
0974c7d7 | 480 | return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT)); |
187515f5 AO |
481 | } |
482 | ||
0f6d54f7 RS |
483 | /* Return the natural mode of an array, given that it is SIZE bytes in |
484 | total and has elements of type ELEM_TYPE. */ | |
485 | ||
486 | static enum machine_mode | |
487 | mode_for_array (tree elem_type, tree size) | |
488 | { | |
489 | tree elem_size; | |
490 | unsigned HOST_WIDE_INT int_size, int_elem_size; | |
491 | bool limit_p; | |
492 | ||
493 | /* One-element arrays get the component type's mode. */ | |
494 | elem_size = TYPE_SIZE (elem_type); | |
495 | if (simple_cst_equal (size, elem_size)) | |
496 | return TYPE_MODE (elem_type); | |
497 | ||
498 | limit_p = true; | |
499 | if (host_integerp (size, 1) && host_integerp (elem_size, 1)) | |
500 | { | |
501 | int_size = tree_low_cst (size, 1); | |
502 | int_elem_size = tree_low_cst (elem_size, 1); | |
503 | if (int_elem_size > 0 | |
504 | && int_size % int_elem_size == 0 | |
505 | && targetm.array_mode_supported_p (TYPE_MODE (elem_type), | |
506 | int_size / int_elem_size)) | |
507 | limit_p = false; | |
508 | } | |
509 | return mode_for_size_tree (size, MODE_INT, limit_p); | |
510 | } | |
7306ed3f | 511 | \f |
78d55cc8 JM |
512 | /* Subroutine of layout_decl: Force alignment required for the data type. |
513 | But if the decl itself wants greater alignment, don't override that. */ | |
514 | ||
515 | static inline void | |
516 | do_type_align (tree type, tree decl) | |
517 | { | |
518 | if (TYPE_ALIGN (type) > DECL_ALIGN (decl)) | |
519 | { | |
520 | DECL_ALIGN (decl) = TYPE_ALIGN (type); | |
3acef2ae JM |
521 | if (TREE_CODE (decl) == FIELD_DECL) |
522 | DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type); | |
78d55cc8 JM |
523 | } |
524 | } | |
525 | ||
/* Set the size, mode and alignment of a ..._DECL node.
   TYPE_DECL does need this for C++.
   Note that LABEL_DECL and CONST_DECL nodes do not need this,
   and FUNCTION_DECL nodes have them set up in a special (and simple) way.
   Don't call layout_decl for them.

   KNOWN_ALIGN is the amount of alignment we can assume this
   decl has with no special effort.  It is relevant only for FIELD_DECLs
   and depends on the previous fields.
   All that matters about KNOWN_ALIGN is which powers of 2 divide it.
   If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
   the record will be aligned to suit.  */

void
layout_decl (tree decl, unsigned int known_align)
{
  tree type = TREE_TYPE (decl);
  enum tree_code code = TREE_CODE (decl);
  rtx rtl = NULL_RTX;
  location_t loc = DECL_SOURCE_LOCATION (decl);

  if (code == CONST_DECL)
    return;

  gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
	      || code == TYPE_DECL ||code == FIELD_DECL);

  /* Remember any RTL already assigned so its attributes can be
     refreshed at the end once size/mode/alignment are final.  */
  rtl = DECL_RTL_IF_SET (decl);

  if (type == error_mark_node)
    type = void_type_node;

  /* Usually the size and mode come from the data type without change,
     however, the front-end may set the explicit width of the field, so its
     size may not be the same as the size of its type.  This happens with
     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
     also happens with other fields.  For example, the C++ front-end creates
     zero-sized fields corresponding to empty base classes, and depends on
     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
     size in bytes from the size in bits.  If we have already set the mode,
     don't set it again since we can be called twice for FIELD_DECLs.  */

  DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
  if (DECL_MODE (decl) == VOIDmode)
    DECL_MODE (decl) = TYPE_MODE (type);

  if (DECL_SIZE (decl) == 0)
    {
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
    }
  else if (DECL_SIZE_UNIT (decl) == 0)
    /* Derive the byte size from the bit size, rounding up.  */
    DECL_SIZE_UNIT (decl)
      = fold_convert_loc (loc, sizetype,
			  size_binop_loc (loc, CEIL_DIV_EXPR, DECL_SIZE (decl),
					  bitsize_unit_node));

  if (code != FIELD_DECL)
    /* For non-fields, update the alignment from the type.  */
    do_type_align (type, decl);
  else
    /* For fields, it's a bit more complicated...  */
    {
      bool old_user_align = DECL_USER_ALIGN (decl);
      bool zero_bitfield = false;
      bool packed_p = DECL_PACKED (decl);
      unsigned int mfa;

      if (DECL_BIT_FIELD (decl))
	{
	  DECL_BIT_FIELD_TYPE (decl) = type;

	  /* A zero-length bit-field affects the alignment of the next
	     field.  In essence such bit-fields are not influenced by
	     any packing due to #pragma pack or attribute packed.  */
	  if (integer_zerop (DECL_SIZE (decl))
	      && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
	    {
	      zero_bitfield = true;
	      packed_p = false;
#ifdef PCC_BITFIELD_TYPE_MATTERS
	      if (PCC_BITFIELD_TYPE_MATTERS)
		do_type_align (type, decl);
	      else
#endif
		{
#ifdef EMPTY_FIELD_BOUNDARY
		  if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
		    {
		      DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
		      DECL_USER_ALIGN (decl) = 0;
		    }
#endif
		}
	    }

	  /* See if we can use an ordinary integer mode for a bit-field.
	     Conditions are: a fixed size that is correct for another mode,
	     occupying a complete byte or bytes on proper boundary,
	     and not -fstrict-volatile-bitfields.  If the latter is set,
	     we unfortunately can't check TREE_THIS_VOLATILE, as a cast
	     may make a volatile object later.  */
	  if (TYPE_SIZE (type) != 0
	      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	      && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
	      && flag_strict_volatile_bitfields <= 0)
	    {
	      enum machine_mode xmode
		= mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
	      unsigned int xalign = GET_MODE_ALIGNMENT (xmode);

	      if (xmode != BLKmode
		  && !(xalign > BITS_PER_UNIT && DECL_PACKED (decl))
		  && (known_align == 0 || known_align >= xalign))
		{
		  DECL_ALIGN (decl) = MAX (xalign, DECL_ALIGN (decl));
		  DECL_MODE (decl) = xmode;
		  /* The field now behaves like an ordinary scalar.  */
		  DECL_BIT_FIELD (decl) = 0;
		}
	    }

	  /* Turn off DECL_BIT_FIELD if we won't need it set.  */
	  if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
	      && known_align >= TYPE_ALIGN (type)
	      && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
	    DECL_BIT_FIELD (decl) = 0;
	}
      else if (packed_p && DECL_USER_ALIGN (decl))
	/* Don't touch DECL_ALIGN.  For other packed fields, go ahead and
	   round up; we'll reduce it again below.  We want packing to
	   supersede USER_ALIGN inherited from the type, but defer to
	   alignment explicitly specified on the field decl.  */;
      else
	do_type_align (type, decl);

      /* If the field is packed and not explicitly aligned, give it the
	 minimum alignment.  Note that do_type_align may set
	 DECL_USER_ALIGN, so we need to check old_user_align instead.  */
      if (packed_p
	  && !old_user_align)
	DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);

      if (! packed_p && ! DECL_USER_ALIGN (decl))
	{
	  /* Some targets (i.e. i386, VMS) limit struct field alignment
	     to a lower boundary than alignment of variables unless
	     it was overridden by attribute aligned.  */
#ifdef BIGGEST_FIELD_ALIGNMENT
	  DECL_ALIGN (decl)
	    = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
#endif
#ifdef ADJUST_FIELD_ALIGN
	  DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
#endif
	}

      /* A zero-width bitfield caps alignment at the initial #pragma pack
	 value, not the current one.  */
      if (zero_bitfield)
	mfa = initial_max_fld_align * BITS_PER_UNIT;
      else
	mfa = maximum_field_alignment;
      /* Should this be controlled by DECL_USER_ALIGN, too?  */
      if (mfa != 0)
	DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), mfa);
    }

  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
  if (DECL_SIZE_UNIT (decl) != 0
      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));

  /* If requested, warn about definitions of large data objects.  */
  if (warn_larger_than
      && (code == VAR_DECL || code == PARM_DECL)
      && ! DECL_EXTERNAL (decl))
    {
      tree size = DECL_SIZE_UNIT (decl);

      if (size != 0 && TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, larger_than_size) > 0)
	{
	  int size_as_int = TREE_INT_CST_LOW (size);

	  /* Only print the exact byte count when it fits in an int.  */
	  if (compare_tree_int (size, size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of %q+D is %d bytes", decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of %q+D is larger than %wd bytes",
		     decl, larger_than_size);
	}
    }

  /* If the RTL was already set, update its mode and mem attributes.  */
  if (rtl)
    {
      PUT_MODE (rtl, DECL_MODE (decl));
      SET_DECL_RTL (decl, 0);
      set_mem_attributes (rtl, decl, 1);
      SET_DECL_RTL (decl, rtl);
    }
}
d8472c75 JM |
727 | |
728 | /* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of | |
729 | a previous call to layout_decl and calls it again. */ | |
730 | ||
731 | void | |
732 | relayout_decl (tree decl) | |
733 | { | |
734 | DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0; | |
735 | DECL_MODE (decl) = VOIDmode; | |
c9eee440 AP |
736 | if (!DECL_USER_ALIGN (decl)) |
737 | DECL_ALIGN (decl) = 0; | |
d8472c75 JM |
738 | SET_DECL_RTL (decl, 0); |
739 | ||
740 | layout_decl (decl, 0); | |
741 | } | |
7306ed3f | 742 | \f |
/* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
   QUAL_UNION_TYPE.  Return a pointer to a struct record_layout_info which
   is to be passed to all other layout functions for this record.  It is the
   responsibility of the caller to call `free' for the storage returned.
   Note that garbage collection is not permitted until we finish laying
   out the record.  */

record_layout_info
start_record_layout (tree t)
{
  record_layout_info rli = XNEW (struct record_layout_info_s);

  rli->t = t;

  /* If the type has a minimum specified alignment (via an attribute
     declaration, for example) use it -- otherwise, start with a
     one-byte alignment.  */
  rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
  rli->unpacked_align = rli->record_align;
  rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);

#ifdef STRUCTURE_SIZE_BOUNDARY
  /* Packed structures don't need to have minimum size.  */
  if (! TYPE_PACKED (t))
    {
      unsigned tmp;

      /* #pragma pack overrides STRUCTURE_SIZE_BOUNDARY.  */
      tmp = (unsigned) STRUCTURE_SIZE_BOUNDARY;
      if (maximum_field_alignment != 0)
	tmp = MIN (tmp, maximum_field_alignment);
      rli->record_align = MAX (rli->record_align, tmp);
    }
#endif

  /* Start at offset zero; no fields placed yet.  */
  rli->offset = size_zero_node;
  rli->bitpos = bitsize_zero_node;
  rli->prev_field = 0;
  rli->pending_statics = NULL;
  rli->packed_maybe_necessary = 0;
  rli->remaining_in_alignment = 0;

  return rli;
}
7306ed3f | 787 | |
f2704b9f RK |
788 | /* These four routines perform computations that convert between |
789 | the offset/bitpos forms and byte and bit offsets. */ | |
790 | ||
791 | tree | |
46c5ad27 | 792 | bit_from_pos (tree offset, tree bitpos) |
f2704b9f RK |
793 | { |
794 | return size_binop (PLUS_EXPR, bitpos, | |
0ac11108 | 795 | size_binop (MULT_EXPR, |
455f19cb | 796 | fold_convert (bitsizetype, offset), |
f2704b9f RK |
797 | bitsize_unit_node)); |
798 | } | |
799 | ||
800 | tree | |
46c5ad27 | 801 | byte_from_pos (tree offset, tree bitpos) |
f2704b9f RK |
802 | { |
803 | return size_binop (PLUS_EXPR, offset, | |
455f19cb MM |
804 | fold_convert (sizetype, |
805 | size_binop (TRUNC_DIV_EXPR, bitpos, | |
806 | bitsize_unit_node))); | |
f2704b9f RK |
807 | } |
808 | ||
f2704b9f | 809 | void |
46c5ad27 AJ |
810 | pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align, |
811 | tree pos) | |
f2704b9f RK |
812 | { |
813 | *poffset = size_binop (MULT_EXPR, | |
455f19cb MM |
814 | fold_convert (sizetype, |
815 | size_binop (FLOOR_DIV_EXPR, pos, | |
816 | bitsize_int (off_align))), | |
f2704b9f RK |
817 | size_int (off_align / BITS_PER_UNIT)); |
818 | *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align)); | |
819 | } | |
820 | ||
821 | /* Given a pointer to bit and byte offsets and an offset alignment, | |
822 | normalize the offsets so they are within the alignment. */ | |
823 | ||
824 | void | |
46c5ad27 | 825 | normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align) |
f2704b9f RK |
826 | { |
827 | /* If the bit position is now larger than it should be, adjust it | |
828 | downwards. */ | |
829 | if (compare_tree_int (*pbitpos, off_align) >= 0) | |
830 | { | |
831 | tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos, | |
832 | bitsize_int (off_align)); | |
833 | ||
834 | *poffset | |
835 | = size_binop (PLUS_EXPR, *poffset, | |
0ac11108 | 836 | size_binop (MULT_EXPR, |
455f19cb | 837 | fold_convert (sizetype, extra_aligns), |
f2704b9f | 838 | size_int (off_align / BITS_PER_UNIT))); |
786de7eb | 839 | |
f2704b9f RK |
840 | *pbitpos |
841 | = size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align)); | |
842 | } | |
843 | } | |
844 | ||
/* Print debugging information about the information in RLI.
   Dumps the type being laid out, the current offset/bitpos pair,
   the three tracked alignments, and any deferred static members.  */

DEBUG_FUNCTION void
debug_rli (record_layout_info rli)
{
  print_node_brief (stderr, "type", rli->t, 0);
  print_node_brief (stderr, "\noffset", rli->offset, 0);
  print_node_brief (stderr, " bitpos", rli->bitpos, 0);

  fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
	   rli->record_align, rli->unpacked_align,
	   rli->offset_align);

  /* The ms_struct code is the only that uses this.  */
  if (targetm.ms_bitfield_layout_p (rli->t))
    fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment);

  if (rli->packed_maybe_necessary)
    fprintf (stderr, "packed may be necessary\n");

  /* Static members encountered while laying out the record are queued
     in pending_statics and laid out after the record itself.  */
  if (!VEC_empty (tree, rli->pending_statics))
    {
      fprintf (stderr, "pending statics:\n");
      debug_vec_tree (rli->pending_statics);
    }
}
871 | ||
/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
   BITPOS if necessary to keep BITPOS below OFFSET_ALIGN.  */

void
normalize_rli (record_layout_info rli)
{
  /* Delegate to the generic offset/bitpos normalizer.  */
  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
}
cc9d4a85 | 880 | |
770ae6cc RK |
881 | /* Returns the size in bytes allocated so far. */ |
882 | ||
883 | tree | |
46c5ad27 | 884 | rli_size_unit_so_far (record_layout_info rli) |
770ae6cc | 885 | { |
f2704b9f | 886 | return byte_from_pos (rli->offset, rli->bitpos); |
770ae6cc RK |
887 | } |
888 | ||
889 | /* Returns the size in bits allocated so far. */ | |
890 | ||
891 | tree | |
46c5ad27 | 892 | rli_size_so_far (record_layout_info rli) |
770ae6cc | 893 | { |
f2704b9f | 894 | return bit_from_pos (rli->offset, rli->bitpos); |
770ae6cc RK |
895 | } |
896 | ||
/* FIELD is about to be added to RLI->T.  The alignment (in bits) of
   the next available location within the record is given by KNOWN_ALIGN.
   Update the variable alignment fields in RLI, and return the alignment
   to give the FIELD.  */

unsigned int
update_alignment_for_field (record_layout_info rli, tree field,
			    unsigned int known_align)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);
  /* True if the field was explicitly aligned by the user.  */
  bool user_align;
  bool is_bitfield;

  /* Do not attempt to align an ERROR_MARK node */
  if (TREE_CODE (type) == ERROR_MARK)
    return 0;

  /* Lay out the field so we know what alignment it needs.  */
  layout_decl (field, known_align);
  desired_align = DECL_ALIGN (field);
  user_align = DECL_USER_ALIGN (field);

  is_bitfield = (type != error_mark_node
		 && DECL_BIT_FIELD_TYPE (field)
		 && ! integer_zerop (TYPE_SIZE (type)));

  /* Record must have at least as much alignment as any field.
     Otherwise, the alignment of the field within the record is
     meaningless.  */
  if (targetm.ms_bitfield_layout_p (rli->t))
    {
      /* MS-compatible bitfield layout (target hook).
	 Here, the alignment of the underlying type of a bitfield can
	 affect the alignment of a record; even a zero-sized field
	 can do this.  The alignment should be to the alignment of
	 the type, except that for zero-size bitfields this only
	 applies if there was an immediately prior, nonzero-size
	 bitfield.  (That's the way it is, experimentally.) */
      if ((!is_bitfield && !DECL_PACKED (field))
	  || ((DECL_SIZE (field) == NULL_TREE
	       || !integer_zerop (DECL_SIZE (field)))
	      ? !DECL_PACKED (field)
	      : (rli->prev_field
		 && DECL_BIT_FIELD_TYPE (rli->prev_field)
		 && ! integer_zerop (DECL_SIZE (rli->prev_field)))))
	{
	  unsigned int type_align = TYPE_ALIGN (type);
	  type_align = MAX (type_align, desired_align);
	  if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);
	  rli->record_align = MAX (rli->record_align, type_align);
	  rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
	}
    }
#ifdef PCC_BITFIELD_TYPE_MATTERS
  else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
    {
      /* Named bit-fields cause the entire structure to have the
	 alignment implied by their type.  Some targets also apply the same
	 rules to unnamed bitfields.  */
      if (DECL_NAME (field) != 0
	  || targetm.align_anon_bitfield ())
	{
	  unsigned int type_align = TYPE_ALIGN (type);

#ifdef ADJUST_FIELD_ALIGN
	  if (! TYPE_USER_ALIGN (type))
	    type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

	  /* Targets might chose to handle unnamed and hence possibly
	     zero-width bitfield.  Those are not influenced by #pragmas
	     or packed attributes.  */
	  if (integer_zerop (DECL_SIZE (field)))
	    {
	      if (initial_max_fld_align)
		type_align = MIN (type_align,
				  initial_max_fld_align * BITS_PER_UNIT);
	    }
	  else if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);
	  else if (DECL_PACKED (field))
	    type_align = MIN (type_align, BITS_PER_UNIT);

	  /* The alignment of the record is increased to the maximum
	     of the current alignment, the alignment indicated on the
	     field (i.e., the alignment specified by an __aligned__
	     attribute), and the alignment indicated by the type of
	     the field.  */
	  rli->record_align = MAX (rli->record_align, desired_align);
	  rli->record_align = MAX (rli->record_align, type_align);

	  /* Track the alignment the record would have had without any
	     packing, so -Wpacked can report unnecessary attributes.  */
	  if (warn_packed)
	    rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
	  user_align |= TYPE_USER_ALIGN (type);
	}
    }
#endif
  else
    {
      rli->record_align = MAX (rli->record_align, desired_align);
      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
    }

  /* Propagate explicit user alignment from the field to the record.  */
  TYPE_USER_ALIGN (rli->t) |= user_align;

  return desired_align;
}
1008 | ||
1009 | /* Called from place_field to handle unions. */ | |
1010 | ||
1011 | static void | |
46c5ad27 | 1012 | place_union_field (record_layout_info rli, tree field) |
0645ba8f MM |
1013 | { |
1014 | update_alignment_for_field (rli, field, /*known_align=*/0); | |
1015 | ||
1016 | DECL_FIELD_OFFSET (field) = size_zero_node; | |
1017 | DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node; | |
1018 | SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT); | |
1019 | ||
0ac11108 | 1020 | /* If this is an ERROR_MARK return *after* having set the |
9dfb66b9 CD |
1021 | field at the start of the union. This helps when parsing |
1022 | invalid fields. */ | |
1023 | if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK) | |
1024 | return; | |
1025 | ||
0645ba8f MM |
1026 | /* We assume the union's size will be a multiple of a byte so we don't |
1027 | bother with BITPOS. */ | |
1028 | if (TREE_CODE (rli->t) == UNION_TYPE) | |
1029 | rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field)); | |
1030 | else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE) | |
985c87c9 | 1031 | rli->offset = fold_build3 (COND_EXPR, sizetype, DECL_QUALIFIER (field), |
4845b383 | 1032 | DECL_SIZE_UNIT (field), rli->offset); |
0645ba8f MM |
1033 | } |
1034 | ||
#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
/* A bitfield of SIZE with a required access alignment of ALIGN is allocated
   at BYTE_OFFSET / BIT_OFFSET.  Return nonzero if the field would span more
   units of alignment than the underlying TYPE.  */
static int
excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
		  HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
{
  /* Note that the calculation of OFFSET might overflow; we calculate it so
     that we still get the right result as long as ALIGN is a power of two.  */
  unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;
  unsigned HOST_WIDE_INT field_units;
  unsigned HOST_WIDE_INT type_units;

  /* Alignment units the field occupies, starting from its position
     within one ALIGN unit.  */
  offset %= align;
  field_units = (offset + size + align - 1) / align;

  /* Alignment units the underlying type itself occupies.  */
  type_units = ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1)
		/ align);

  return field_units > type_units;
}
#endif
4977bab6 | 1053 | |
0645ba8f MM |
/* RLI contains information about the layout of a RECORD_TYPE.  FIELD
   is a FIELD_DECL to be added after those fields already present in
   T.  (FIELD is not actually added to the TYPE_FIELDS list here;
   callers that desire that behavior must manually perform that step.)  */

void
place_field (record_layout_info rli, tree field)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The alignment FIELD would have if we just dropped it into the
     record as it presently stands.  */
  unsigned int known_align;
  unsigned int actual_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);

  gcc_assert (TREE_CODE (field) != ERROR_MARK);

  /* If FIELD is static, then treat it like a separate variable, not
     really like a structure field.  If it is a FUNCTION_DECL, it's a
     method.  In both cases, all we do is lay out the decl, and we do
     it *after* the record is laid out.  */
  if (TREE_CODE (field) == VAR_DECL)
    {
      VEC_safe_push (tree, gc, rli->pending_statics, field);
      return;
    }

  /* Enumerators and enum types which are local to this class need not
     be laid out.  Likewise for initialized constant fields.  */
  else if (TREE_CODE (field) != FIELD_DECL)
    return;

  /* Unions are laid out very differently than records, so split
     that code off to another function.  */
  else if (TREE_CODE (rli->t) != RECORD_TYPE)
    {
      place_union_field (rli, field);
      return;
    }

  else if (TREE_CODE (type) == ERROR_MARK)
    {
      /* Place this field at the current allocation position, so we
	 maintain monotonicity.  */
      DECL_FIELD_OFFSET (field) = rli->offset;
      DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
      SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
      return;
    }

  /* Work out the known alignment so far.  Note that A & (-A) is the
     value of the least-significant bit in A that is one.  */
  if (! integer_zerop (rli->bitpos))
    known_align = (tree_low_cst (rli->bitpos, 1)
		   & - tree_low_cst (rli->bitpos, 1));
  else if (integer_zerop (rli->offset))
    known_align = 0;
  else if (host_integerp (rli->offset, 1))
    known_align = (BITS_PER_UNIT
		   * (tree_low_cst (rli->offset, 1)
		      & - tree_low_cst (rli->offset, 1)));
  else
    known_align = rli->offset_align;

  desired_align = update_alignment_for_field (rli, field, known_align);
  /* KNOWN_ALIGN == 0 meant "at the very start of the record", which is
     as aligned as the record itself will ever be.  */
  if (known_align == 0)
    known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);

  if (warn_packed && DECL_PACKED (field))
    {
      if (known_align >= TYPE_ALIGN (type))
	{
	  if (TYPE_ALIGN (type) > desired_align)
	    {
	      if (STRICT_ALIGNMENT)
		warning (OPT_Wattributes, "packed attribute causes "
			 "inefficient alignment for %q+D", field);
	      /* Don't warn if DECL_PACKED was set by the type.  */
	      else if (!TYPE_PACKED (rli->t))
		warning (OPT_Wattributes, "packed attribute is "
			 "unnecessary for %q+D", field);
	    }
	}
      else
	rli->packed_maybe_necessary = 1;
    }

  /* Does this field automatically have alignment it needs by virtue
     of the fields that precede it and the record's own alignment?  */
  if (known_align < desired_align)
    {
      /* No, we need to skip space before this field.
	 Bump the cumulative size to multiple of field alignment.  */

      if (!targetm.ms_bitfield_layout_p (rli->t)
	  && DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION)
	warning (OPT_Wpadded, "padding struct to align %q+D", field);

      /* If the alignment is still within offset_align, just align
	 the bit position.  */
      if (desired_align < rli->offset_align)
	rli->bitpos = round_up (rli->bitpos, desired_align);
      else
	{
	  /* First adjust OFFSET by the partial bits, then align.  */
	  rli->offset
	    = size_binop (PLUS_EXPR, rli->offset,
			  fold_convert (sizetype,
					size_binop (CEIL_DIV_EXPR, rli->bitpos,
						    bitsize_unit_node)));
	  rli->bitpos = bitsize_zero_node;

	  rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
	}

      if (! TREE_CONSTANT (rli->offset))
	rli->offset_align = desired_align;
      /* Padding breaks any run of ms_struct bitfields.  */
      if (targetm.ms_bitfield_layout_p (rli->t))
	rli->prev_field = NULL;
    }

  /* Handle compatibility with PCC.  Note that if the record has any
     variable-sized fields, we need not worry about compatibility.  */
#ifdef PCC_BITFIELD_TYPE_MATTERS
  if (PCC_BITFIELD_TYPE_MATTERS
      && ! targetm.ms_bitfield_layout_p (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && DECL_BIT_FIELD (field)
      && (! DECL_PACKED (field)
	  /* Enter for these packed fields only to issue a warning.  */
	  || TYPE_ALIGN (type) <= BITS_PER_UNIT)
      && maximum_field_alignment == 0
      && ! integer_zerop (DECL_SIZE (field))
      && host_integerp (DECL_SIZE (field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      tree dsize = DECL_SIZE (field);
      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);

#ifdef ADJUST_FIELD_ALIGN
      if (! TYPE_USER_ALIGN (type))
	type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

      /* A bit field may not span more units of alignment of its type
	 than its type itself.  Advance to next boundary if necessary.  */
      if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
	{
	  if (DECL_PACKED (field))
	    {
	      if (warn_packed_bitfield_compat == 1)
		inform
		  (input_location,
		   "offset of packed bit-field %qD has changed in GCC 4.4",
		   field);
	    }
	  else
	    rli->bitpos = round_up (rli->bitpos, type_align);
	}

      if (! DECL_PACKED (field))
	TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
    }
#endif

#ifdef BITFIELD_NBYTES_LIMITED
  if (BITFIELD_NBYTES_LIMITED
      && ! targetm.ms_bitfield_layout_p (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && DECL_BIT_FIELD_TYPE (field)
      && ! DECL_PACKED (field)
      && ! integer_zerop (DECL_SIZE (field))
      && host_integerp (DECL_SIZE (field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      tree dsize = DECL_SIZE (field);
      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);

#ifdef ADJUST_FIELD_ALIGN
      if (! TYPE_USER_ALIGN (type))
	type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

      if (maximum_field_alignment != 0)
	type_align = MIN (type_align, maximum_field_alignment);
      /* ??? This test is opposite the test in the containing if
	 statement, so this code is unreachable currently.  */
      else if (DECL_PACKED (field))
	type_align = MIN (type_align, BITS_PER_UNIT);

      /* A bit field may not span the unit of alignment of its type.
	 Advance to next boundary if necessary.  */
      if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
	rli->bitpos = round_up (rli->bitpos, type_align);

      TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
    }
#endif

  /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
     A subtlety:
	When a bit field is inserted into a packed record, the whole
	size of the underlying type is used by one or more same-size
	adjacent bitfields.  (That is, if its long:3, 32 bits is
	used in the record, and any additional adjacent long bitfields are
	packed into the same chunk of 32 bits.  However, if the size
	changes, a new field of that size is allocated.)  In an unpacked
	record, this is the same as using alignment, but not equivalent
	when packing.

     Note: for compatibility, we use the type size, not the type alignment
     to determine alignment, since that matches the documentation */

  if (targetm.ms_bitfield_layout_p (rli->t))
    {
      tree prev_saved = rli->prev_field;
      tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved) : NULL;

      /* This is a bitfield if it exists.  */
      if (rli->prev_field)
	{
	  /* If both are bitfields, nonzero, and the same size, this is
	     the middle of a run.  Zero declared size fields are special
	     and handled as "end of run".  (Note: it's nonzero declared
	     size, but equal type sizes!) (Since we know that both
	     the current and previous fields are bitfields by the
	     time we check it, DECL_SIZE must be present for both.) */
	  if (DECL_BIT_FIELD_TYPE (field)
	      && !integer_zerop (DECL_SIZE (field))
	      && !integer_zerop (DECL_SIZE (rli->prev_field))
	      && host_integerp (DECL_SIZE (rli->prev_field), 0)
	      && host_integerp (TYPE_SIZE (type), 0)
	      && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)))
	    {
	      /* We're in the middle of a run of equal type size fields; make
		 sure we realign if we run out of bits.  (Not decl size,
		 type size!) */
	      HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 1);

	      if (rli->remaining_in_alignment < bitsize)
		{
		  HOST_WIDE_INT typesize = tree_low_cst (TYPE_SIZE (type), 1);

		  /* out of bits; bump up to next 'word'.  */
		  rli->bitpos
		    = size_binop (PLUS_EXPR, rli->bitpos,
				  bitsize_int (rli->remaining_in_alignment));
		  rli->prev_field = field;
		  if (typesize < bitsize)
		    rli->remaining_in_alignment = 0;
		  else
		    rli->remaining_in_alignment = typesize - bitsize;
		}
	      else
		rli->remaining_in_alignment -= bitsize;
	    }
	  else
	    {
	      /* End of a run: if leaving a run of bitfields of the same type
		 size, we have to "use up" the rest of the bits of the type
		 size.

		 Compute the new position as the sum of the size for the prior
		 type and where we first started working on that type.
		 Note: since the beginning of the field was aligned then
		 of course the end will be too.  No round needed.  */

	      if (!integer_zerop (DECL_SIZE (rli->prev_field)))
		{
		  rli->bitpos
		    = size_binop (PLUS_EXPR, rli->bitpos,
				  bitsize_int (rli->remaining_in_alignment));
		}
	      else
		/* We "use up" size zero fields; the code below should behave
		   as if the prior field was not a bitfield.  */
		prev_saved = NULL;

	      /* Cause a new bitfield to be captured, either this time (if
		 currently a bitfield) or next time we see one.  */
	      if (!DECL_BIT_FIELD_TYPE(field)
		  || integer_zerop (DECL_SIZE (field)))
		rli->prev_field = NULL;
	    }

	  normalize_rli (rli);
	}

      /* If we're starting a new run of same size type bitfields
	 (or a run of non-bitfields), set up the "first of the run"
	 fields.

	 That is, if the current field is not a bitfield, or if there
	 was a prior bitfield the type sizes differ, or if there wasn't
	 a prior bitfield the size of the current field is nonzero.

	 Note: we must be sure to test ONLY the type size if there was
	 a prior bitfield and ONLY for the current field being zero if
	 there wasn't.  */

      if (!DECL_BIT_FIELD_TYPE (field)
	  || (prev_saved != NULL
	      ? !simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))
	      : !integer_zerop (DECL_SIZE (field)) ))
	{
	  /* Never smaller than a byte for compatibility.  */
	  unsigned int type_align = BITS_PER_UNIT;

	  /* (When not a bitfield), we could be seeing a flex array (with
	     no DECL_SIZE).  Since we won't be using remaining_in_alignment
	     until we see a bitfield (and come by here again) we just skip
	     calculating it.  */
	  if (DECL_SIZE (field) != NULL
	      && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 1)
	      && host_integerp (DECL_SIZE (field), 1))
	    {
	      unsigned HOST_WIDE_INT bitsize
		= tree_low_cst (DECL_SIZE (field), 1);
	      unsigned HOST_WIDE_INT typesize
		= tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 1);

	      if (typesize < bitsize)
		rli->remaining_in_alignment = 0;
	      else
		rli->remaining_in_alignment = typesize - bitsize;
	    }

	  /* Now align (conventionally) for the new type.  */
	  type_align = TYPE_ALIGN (TREE_TYPE (field));

	  if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);

	  rli->bitpos = round_up (rli->bitpos, type_align);

	  /* If we really aligned, don't allow subsequent bitfields
	     to undo that.  */
	  rli->prev_field = NULL;
	}
    }

  /* Offset so far becomes the position of this field after normalizing.  */
  normalize_rli (rli);
  DECL_FIELD_OFFSET (field) = rli->offset;
  DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
  SET_DECL_OFFSET_ALIGN (field, rli->offset_align);

  /* If this field ended up more aligned than we thought it would be (we
     approximate this by seeing if its position changed), lay out the field
     again; perhaps we can use an integral mode for it now.  */
  if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
    actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
		    & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
  else if (integer_zerop (DECL_FIELD_OFFSET (field)))
    actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
  else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
    actual_align = (BITS_PER_UNIT
		    * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
		       & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
  else
    actual_align = DECL_OFFSET_ALIGN (field);
  /* ACTUAL_ALIGN is still the actual alignment *within the record* .
     store / extract bit field operations will check the alignment of the
     record against the mode of bit fields.  */

  if (known_align != actual_align)
    layout_decl (field, actual_align);

  /* Remember the first bitfield we see so a following run can be
     detected (ms_struct handling above reads prev_field).  */
  if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
    rli->prev_field = field;

  /* Now add size of this field to the size of the record.  If the size is
     not constant, treat the field as being a multiple of bytes and just
     adjust the offset, resetting the bit position.  Otherwise, apportion the
     size amongst the bit position and offset.  First handle the case of an
     unspecified size, which can happen when we have an invalid nested struct
     definition, such as struct j { struct j { int i; } }.  The error message
     is printed in finish_struct.  */
  if (DECL_SIZE (field) == 0)
    /* Do nothing.  */;
  else if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	   || TREE_OVERFLOW (DECL_SIZE (field)))
    {
      rli->offset
	= size_binop (PLUS_EXPR, rli->offset,
		      fold_convert (sizetype,
				    size_binop (CEIL_DIV_EXPR, rli->bitpos,
						bitsize_unit_node)));
      rli->offset
	= size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
      rli->bitpos = bitsize_zero_node;
      rli->offset_align = MIN (rli->offset_align, desired_align);
    }
  else if (targetm.ms_bitfield_layout_p (rli->t))
    {
      rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));

      /* If we ended a bitfield before the full length of the type then
	 pad the struct out to the full length of the last type.  */
      if ((DECL_CHAIN (field) == NULL
	   || TREE_CODE (DECL_CHAIN (field)) != FIELD_DECL)
	  && DECL_BIT_FIELD_TYPE (field)
	  && !integer_zerop (DECL_SIZE (field)))
	rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos,
				  bitsize_int (rli->remaining_in_alignment));

      normalize_rli (rli);
    }
  else
    {
      rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
      normalize_rli (rli);
    }
}
7306ed3f | 1480 | |
/* Assuming that all the fields have been laid out, this function uses
   RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
   indicated by RLI.  Also emits -Wpadded / -Wpacked diagnostics when
   the final layout shows padding was added or packing was useless.  */

static void
finalize_record_size (record_layout_info rli)
{
  tree unpadded_size, unpadded_size_unit;

  /* Now we want just byte and bit offsets, so set the offset alignment
     to be a byte and then normalize.  */
  rli->offset_align = BITS_PER_UNIT;
  normalize_rli (rli);

  /* Determine the desired alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
					  rli->record_align);
#else
  TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
#endif

  /* Compute the size so far.  Be sure to allow for extra bits in the
     size in bytes.  We have guaranteed above that it will be no more
     than a single byte.  */
  unpadded_size = rli_size_so_far (rli);
  unpadded_size_unit = rli_size_unit_so_far (rli);
  if (! integer_zerop (rli->bitpos))
    /* A partial byte at the end still occupies a whole byte.  */
    unpadded_size_unit
      = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);

  /* Round the size up to be a multiple of the required alignment.  */
  TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t)
    = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));

  /* Warn if rounding to alignment actually grew the structure; suppress
     the warning for compiler-built types (BUILTINS_LOCATION).  */
  if (TREE_CONSTANT (unpadded_size)
      && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0
      && input_location != BUILTINS_LOCATION)
    warning (OPT_Wpadded, "padding struct size to alignment boundary");

  /* For a packed RECORD_TYPE whose size is constant, check whether the
     unpacked layout would have had the same size: if so, the packed
     attribute bought nothing and may even hurt on strict-alignment
     targets.  */
  if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
      && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
      && TREE_CONSTANT (unpadded_size))
    {
      tree unpacked_size;

#ifdef ROUND_TYPE_ALIGN
      rli->unpacked_align
	= ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
#else
      rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif

      unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
      if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
	{
	  if (TYPE_NAME (rli->t))
	    {
	      tree name;

	      /* TYPE_NAME may be either a bare identifier or a
		 TYPE_DECL wrapping one.  */
	      if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
		name = TYPE_NAME (rli->t);
	      else
		name = DECL_NAME (TYPE_NAME (rli->t));

	      if (STRICT_ALIGNMENT)
		warning (OPT_Wpacked, "packed attribute causes inefficient "
			 "alignment for %qE", name);
	      else
		warning (OPT_Wpacked,
			 "packed attribute is unnecessary for %qE", name);
	    }
	  else
	    {
	      if (STRICT_ALIGNMENT)
		warning (OPT_Wpacked,
			 "packed attribute causes inefficient alignment");
	      else
		warning (OPT_Wpacked, "packed attribute is unnecessary");
	    }
	}
    }
}
1565 | ||
/* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE).
   The mode stays BLKmode unless the whole record can safely live in a
   scalar register-sized mode.  */

void
compute_record_mode (tree type)
{
  tree field;
  enum machine_mode mode = VOIDmode;

  /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
     However, if possible, we use a mode that fits in a register
     instead, in order to allow for better optimization down the
     line.  */
  SET_TYPE_MODE (type, BLKmode);

  /* A record of non-constant (or huge) size can never get a scalar mode.  */
  if (! host_integerp (TYPE_SIZE (type), 1))
    return;

  /* A record which has any BLKmode members must itself be
     BLKmode; it can't go in a register.  Unless the member is
     BLKmode only because it isn't aligned.  */
  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      if (TREE_CODE (field) != FIELD_DECL)
	continue;

      /* Bail out (leaving BLKmode) for erroneous fields, genuine BLKmode
	 members (zero-sized ones excepted), and fields whose position or
	 size is not a known host integer.  */
      if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
	  || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
	      && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
	      && !(TYPE_SIZE (TREE_TYPE (field)) != 0
		   && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
	  || ! host_integerp (bit_position (field), 1)
	  || DECL_SIZE (field) == 0
	  || ! host_integerp (DECL_SIZE (field), 1))
	return;

      /* If this field is the whole struct, remember its mode so
	 that, say, we can put a double in a class into a DF
	 register instead of forcing it to live in the stack.  */
      if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
	mode = DECL_MODE (field);

#ifdef MEMBER_TYPE_FORCES_BLK
      /* With some targets, eg. c4x, it is sub-optimal
	 to access an aligned BLKmode structure as a scalar.  */

      if (MEMBER_TYPE_FORCES_BLK (field, mode))
	return;
#endif /* MEMBER_TYPE_FORCES_BLK */
    }

  /* If we only have one real field; use its mode if that mode's size
     matches the type's size.  This only applies to RECORD_TYPE.  This
     does not apply to unions.  */
  if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode
      && host_integerp (TYPE_SIZE (type), 1)
      && GET_MODE_BITSIZE (mode) == TREE_INT_CST_LOW (TYPE_SIZE (type)))
    SET_TYPE_MODE (type, mode);
  else
    /* Otherwise look for an integer mode of exactly the record's size
       (may yield BLKmode if none fits).  */
    SET_TYPE_MODE (type, mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1));

  /* If structure's known alignment is less than what the scalar
     mode would need, and it matters, then stick with BLKmode.  */
  if (TYPE_MODE (type) != BLKmode
      && STRICT_ALIGNMENT
      && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
	    || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
    {
      /* If this is the only reason this type is BLKmode, then
	 don't force containing types to be BLKmode.  */
      TYPE_NO_FORCE_BLK (type) = 1;
      SET_TYPE_MODE (type, BLKmode);
    }
}
9328904c MM |
1639 | |
/* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
   out.  Rounds the size up to the final alignment, defers evaluation
   of non-constant sizes, and copies the layout into all variants.  */

static void
finalize_type_size (tree type)
{
  /* Normally, use the alignment corresponding to the mode chosen.
     However, where strict alignment is not required, avoid
     over-aligning structures, since most compilers do not do this
     alignment.  */

  if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
      && (STRICT_ALIGNMENT
	  || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
	      && TREE_CODE (type) != QUAL_UNION_TYPE
	      && TREE_CODE (type) != ARRAY_TYPE)))
    {
      unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type));

      /* Don't override a larger alignment requirement coming from a user
	 alignment of one of the fields.  */
      if (mode_align >= TYPE_ALIGN (type))
	{
	  TYPE_ALIGN (type) = mode_align;
	  TYPE_USER_ALIGN (type) = 0;
	}
    }

  /* Do machine-dependent extra alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (type)
    = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
#endif

  /* If we failed to find a simple way to calculate the unit size
     of the type, find it by division.  */
  if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
    /* TYPE_SIZE (type) is computed in bitsizetype.  After the division, the
       result will fit in sizetype.  We will get more efficient code using
       sizetype, so we force a conversion.  */
    TYPE_SIZE_UNIT (type)
      = fold_convert (sizetype,
		      size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
				  bitsize_unit_node));

  /* Round both forms of the size up to the alignment just chosen.  */
  if (TYPE_SIZE (type) != 0)
    {
      TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
      TYPE_SIZE_UNIT (type)
	= round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN_UNIT (type));
    }

  /* Evaluate nonconstant sizes only once, either now or as soon as safe.  */
  if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
  if (TYPE_SIZE_UNIT (type) != 0
      && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
    TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));

  /* Also layout any other variants of the type.  */
  if (TYPE_NEXT_VARIANT (type)
      || type != TYPE_MAIN_VARIANT (type))
    {
      tree variant;
      /* Record layout info of this variant.  */
      tree size = TYPE_SIZE (type);
      tree size_unit = TYPE_SIZE_UNIT (type);
      unsigned int align = TYPE_ALIGN (type);
      unsigned int user_align = TYPE_USER_ALIGN (type);
      enum machine_mode mode = TYPE_MODE (type);

      /* Copy it into all variants.  */
      for (variant = TYPE_MAIN_VARIANT (type);
	   variant != 0;
	   variant = TYPE_NEXT_VARIANT (variant))
	{
	  TYPE_SIZE (variant) = size;
	  TYPE_SIZE_UNIT (variant) = size_unit;
	  TYPE_ALIGN (variant) = align;
	  TYPE_USER_ALIGN (variant) = user_align;
	  SET_TYPE_MODE (variant, mode);
	}
    }
}
1724 | ||
26c71b93 RG |
1725 | /* Return a new underlying object for a bitfield started with FIELD. */ |
1726 | ||
1727 | static tree | |
1728 | start_bitfield_representative (tree field) | |
1729 | { | |
1730 | tree repr = make_node (FIELD_DECL); | |
1731 | DECL_FIELD_OFFSET (repr) = DECL_FIELD_OFFSET (field); | |
1732 | /* Force the representative to begin at a BITS_PER_UNIT aligned | |
1733 | boundary - C++ may use tail-padding of a base object to | |
1734 | continue packing bits so the bitfield region does not start | |
1735 | at bit zero (see g++.dg/abi/bitfield5.C for example). | |
1736 | Unallocated bits may happen for other reasons as well, | |
1737 | for example Ada which allows explicit bit-granular structure layout. */ | |
1738 | DECL_FIELD_BIT_OFFSET (repr) | |
1739 | = size_binop (BIT_AND_EXPR, | |
1740 | DECL_FIELD_BIT_OFFSET (field), | |
1741 | bitsize_int (~(BITS_PER_UNIT - 1))); | |
1742 | SET_DECL_OFFSET_ALIGN (repr, DECL_OFFSET_ALIGN (field)); | |
1743 | DECL_SIZE (repr) = DECL_SIZE (field); | |
1744 | DECL_SIZE_UNIT (repr) = DECL_SIZE_UNIT (field); | |
1745 | DECL_PACKED (repr) = DECL_PACKED (field); | |
1746 | DECL_CONTEXT (repr) = DECL_CONTEXT (field); | |
1747 | return repr; | |
1748 | } | |
1749 | ||
/* Finish up a bitfield group that was started by creating the underlying
   object REPR with the last field in the bitfield group FIELD.
   Computes the group's extent, picks the smallest usable integer mode
   (or falls back to BLKmode), and sets REPR's size, mode and type.  */

static void
finish_bitfield_representative (tree repr, tree field)
{
  unsigned HOST_WIDE_INT bitsize, maxbitsize;
  enum machine_mode mode;
  tree nextf, size;

  /* BITSIZE is the span in bits from the start of REPR to the end of
     FIELD (byte-offset delta scaled up, plus bit offsets and size).  */
  size = size_diffop (DECL_FIELD_OFFSET (field),
		      DECL_FIELD_OFFSET (repr));
  gcc_assert (host_integerp (size, 1));
  bitsize = (tree_low_cst (size, 1) * BITS_PER_UNIT
	     + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
	     - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1)
	     + tree_low_cst (DECL_SIZE (field), 1));

  /* Round up bitsize to multiples of BITS_PER_UNIT.  */
  bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);

  /* Now nothing tells us how to pad out bitsize ...  */
  nextf = DECL_CHAIN (field);
  /* Skip non-FIELD_DECL chain entries to find the next real field.  */
  while (nextf && TREE_CODE (nextf) != FIELD_DECL)
    nextf = DECL_CHAIN (nextf);
  if (nextf)
    {
      tree maxsize;
      /* If there was an error, the field may be not layed out
	 correctly.  Don't bother to do anything.  */
      if (TREE_TYPE (nextf) == error_mark_node)
	return;
      /* MAXBITSIZE is how far the representative may extend: up to the
	 start of the next field.  */
      maxsize = size_diffop (DECL_FIELD_OFFSET (nextf),
			     DECL_FIELD_OFFSET (repr));
      if (host_integerp (maxsize, 1))
	{
	  maxbitsize = (tree_low_cst (maxsize, 1) * BITS_PER_UNIT
			+ tree_low_cst (DECL_FIELD_BIT_OFFSET (nextf), 1)
			- tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
	  /* If the group ends within a bitfield nextf does not need to be
	     aligned to BITS_PER_UNIT.  Thus round up.  */
	  maxbitsize = (maxbitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1);
	}
      else
	maxbitsize = bitsize;
    }
  else
    {
      /* ??? If you consider that tail-padding of this struct might be
	 re-used when deriving from it we cannot really do the following
	 and thus need to set maxsize to bitsize?  Also we cannot
	 generally rely on maxsize to fold to an integer constant, so
	 use bitsize as fallback for this case.  */
      tree maxsize = size_diffop (TYPE_SIZE_UNIT (DECL_CONTEXT (field)),
				  DECL_FIELD_OFFSET (repr));
      if (host_integerp (maxsize, 1))
	maxbitsize = (tree_low_cst (maxsize, 1) * BITS_PER_UNIT
		      - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
      else
	maxbitsize = bitsize;
    }

  /* Only if we don't artificially break up the representative in
     the middle of a large bitfield with different possibly
     overlapping representatives.  And all representatives start
     at byte offset.  */
  gcc_assert (maxbitsize % BITS_PER_UNIT == 0);

  /* Find the smallest nice mode to use.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_BITSIZE (mode) >= bitsize)
      break;
  /* Reject a mode that would overlap the following field or exceed the
     target's maximum fixed mode size.  */
  if (mode != VOIDmode
      && (GET_MODE_BITSIZE (mode) > maxbitsize
	  || GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZE))
    mode = VOIDmode;

  if (mode == VOIDmode)
    {
      /* We really want a BLKmode representative only as a last resort,
	 considering the member b in
	   struct { int a : 7; int b : 17; int c; } __attribute__((packed));
	 Otherwise we simply want to split the representative up
	 allowing for overlaps within the bitfield region as required for
	   struct { int a : 7; int b : 7;
		    int c : 10; int d; } __attribute__((packed));
	 [0, 15] HImode for a and b, [8, 23] HImode for c.  */
      DECL_SIZE (repr) = bitsize_int (bitsize);
      DECL_SIZE_UNIT (repr) = size_int (bitsize / BITS_PER_UNIT);
      DECL_MODE (repr) = BLKmode;
      TREE_TYPE (repr) = build_array_type_nelts (unsigned_char_type_node,
						 bitsize / BITS_PER_UNIT);
    }
  else
    {
      /* A mode was found: the representative covers exactly that mode's
	 extent and is typed as the corresponding unsigned integer.  */
      unsigned HOST_WIDE_INT modesize = GET_MODE_BITSIZE (mode);
      DECL_SIZE (repr) = bitsize_int (modesize);
      DECL_SIZE_UNIT (repr) = size_int (modesize / BITS_PER_UNIT);
      DECL_MODE (repr) = mode;
      TREE_TYPE (repr) = lang_hooks.types.type_for_mode (mode, 1);
    }

  /* Remember whether the bitfield group is at the end of the
     structure or not.  */
  DECL_CHAIN (repr) = nextf;
}
1857 | ||
/* Compute and set FIELD_DECLs for the underlying objects we should
   use for bitfield access for the structure layed out with RLI.
   Walks the field chain grouping adjacent bitfields and assigns each
   one a DECL_BIT_FIELD_REPRESENTATIVE.  */

static void
finish_bitfield_layout (record_layout_info rli)
{
  tree field, prev;
  tree repr = NULL_TREE;

  /* Unions would be special, for the ease of type-punning optimizations
     we could use the underlying type as hint for the representative
     if the bitfield would fit and the representative would not exceed
     the union in size.  */
  if (TREE_CODE (rli->t) != RECORD_TYPE)
    return;

  for (prev = NULL_TREE, field = TYPE_FIELDS (rli->t);
       field; field = DECL_CHAIN (field))
    {
      if (TREE_CODE (field) != FIELD_DECL)
	continue;

      /* In the C++ memory model, consecutive bit fields in a structure are
	 considered one memory location and updating a memory location
	 may not store into adjacent memory locations.  */
      if (!repr
	  && DECL_BIT_FIELD_TYPE (field))
	{
	  /* Start new representative.  */
	  repr = start_bitfield_representative (field);
	}
      else if (repr
	       && ! DECL_BIT_FIELD_TYPE (field))
	{
	  /* Finish off new representative.  */
	  finish_bitfield_representative (repr, prev);
	  repr = NULL_TREE;
	}
      else if (DECL_BIT_FIELD_TYPE (field))
	{
	  /* A group is open and FIELD is another bitfield.  */
	  gcc_assert (repr != NULL_TREE);

	  /* Zero-size bitfields finish off a representative and
	     do not have a representative themselves.  This is
	     required by the C++ memory model.  */
	  if (integer_zerop (DECL_SIZE (field)))
	    {
	      finish_bitfield_representative (repr, prev);
	      repr = NULL_TREE;
	    }

	  /* We assume that either DECL_FIELD_OFFSET of the representative
	     and each bitfield member is a constant or they are equal.
	     This is because we need to be able to compute the bit-offset
	     of each field relative to the representative in get_bit_range
	     during RTL expansion.
	     If these constraints are not met, simply force a new
	     representative to be generated.  That will at most
	     generate worse code but still maintain correctness with
	     respect to the C++ memory model.  */
	  else if (!((host_integerp (DECL_FIELD_OFFSET (repr), 1)
		      && host_integerp (DECL_FIELD_OFFSET (field), 1))
		     || operand_equal_p (DECL_FIELD_OFFSET (repr),
					 DECL_FIELD_OFFSET (field), 0)))
	    {
	      finish_bitfield_representative (repr, prev);
	      repr = start_bitfield_representative (field);
	    }
	}
      else
	/* Non-bitfield with no group open: nothing to record; note that
	   PREV is deliberately not advanced for skipped fields.  */
	continue;

      if (repr)
	DECL_BIT_FIELD_REPRESENTATIVE (field) = repr;

      prev = field;
    }

  /* Close a group that runs to the end of the structure.  */
  if (repr)
    finish_bitfield_representative (repr, prev);
}
1939 | ||
9328904c MM |
/* Do all of the work required to layout the type indicated by RLI,
   once the fields have been laid out.  This function will call `free'
   for RLI, unless FREE_P is false.  Passing a value other than false
   for FREE_P is bad practice; this option only exists to support the
   G++ 3.2 ABI.  */

void
finish_record_layout (record_layout_info rli, int free_p)
{
  tree variant;

  /* Compute the final size.  */
  finalize_record_size (rli);

  /* Compute the TYPE_MODE for the record.  */
  compute_record_mode (rli->t);

  /* Perform any last tweaks to the TYPE_SIZE, etc.  */
  finalize_type_size (rli->t);

  /* Compute bitfield representatives.  */
  finish_bitfield_layout (rli);

  /* Propagate TYPE_PACKED to variants.  With C++ templates,
     handle_packed_attribute is too early to do this.  */
  for (variant = TYPE_NEXT_VARIANT (rli->t); variant;
       variant = TYPE_NEXT_VARIANT (variant))
    TYPE_PACKED (variant) = TYPE_PACKED (rli->t);

  /* Lay out any static members.  This is done now because their type
     may use the record's type.  */
  while (!VEC_empty (tree, rli->pending_statics))
    layout_decl (VEC_pop (tree, rli->pending_statics), 0);

  /* Clean up.  */
  if (free_p)
    {
      VEC_free (tree, gc, rli->pending_statics);
      free (rli);
    }
}
7306ed3f | 1981 | \f |
4977bab6 ZW |
1982 | |
1983 | /* Finish processing a builtin RECORD_TYPE type TYPE. It's name is | |
1984 | NAME, its fields are chained in reverse on FIELDS. | |
1985 | ||
1986 | If ALIGN_TYPE is non-null, it is given the same alignment as | |
1987 | ALIGN_TYPE. */ | |
1988 | ||
1989 | void | |
46c5ad27 AJ |
1990 | finish_builtin_struct (tree type, const char *name, tree fields, |
1991 | tree align_type) | |
4977bab6 ZW |
1992 | { |
1993 | tree tail, next; | |
1994 | ||
1995 | for (tail = NULL_TREE; fields; tail = fields, fields = next) | |
1996 | { | |
1997 | DECL_FIELD_CONTEXT (fields) = type; | |
910ad8de NF |
1998 | next = DECL_CHAIN (fields); |
1999 | DECL_CHAIN (fields) = tail; | |
4977bab6 ZW |
2000 | } |
2001 | TYPE_FIELDS (type) = tail; | |
2002 | ||
2003 | if (align_type) | |
2004 | { | |
2005 | TYPE_ALIGN (type) = TYPE_ALIGN (align_type); | |
2006 | TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type); | |
2007 | } | |
2008 | ||
2009 | layout_type (type); | |
2010 | #if 0 /* not yet, should get fixed properly later */ | |
2011 | TYPE_NAME (type) = make_type_decl (get_identifier (name), type); | |
2012 | #else | |
c2255bc4 AH |
2013 | TYPE_NAME (type) = build_decl (BUILTINS_LOCATION, |
2014 | TYPE_DECL, get_identifier (name), type); | |
4977bab6 ZW |
2015 | #endif |
2016 | TYPE_STUB_DECL (type) = TYPE_NAME (type); | |
2017 | layout_decl (TYPE_NAME (type), 0); | |
2018 | } | |
2019 | ||
7306ed3f JW |
2020 | /* Calculate the mode, size, and alignment for TYPE. |
2021 | For an array type, calculate the element separation as well. | |
2022 | Record TYPE on the chain of permanent or temporary types | |
2023 | so that dbxout will find out about it. | |
2024 | ||
2025 | TYPE_SIZE of a type is nonzero if the type has been laid out already. | |
2026 | layout_type does nothing on such a type. | |
2027 | ||
2028 | If the type is incomplete, its TYPE_SIZE remains zero. */ | |
2029 | ||
2030 | void | |
46c5ad27 | 2031 | layout_type (tree type) |
7306ed3f | 2032 | { |
41374e13 | 2033 | gcc_assert (type); |
7306ed3f | 2034 | |
6de9cd9a DN |
2035 | if (type == error_mark_node) |
2036 | return; | |
2037 | ||
7306ed3f JW |
2038 | /* Do nothing if type has been laid out before. */ |
2039 | if (TYPE_SIZE (type)) | |
2040 | return; | |
2041 | ||
7306ed3f JW |
2042 | switch (TREE_CODE (type)) |
2043 | { | |
2044 | case LANG_TYPE: | |
2045 | /* This kind of type is the responsibility | |
9faa82d8 | 2046 | of the language-specific code. */ |
41374e13 | 2047 | gcc_unreachable (); |
7306ed3f | 2048 | |
2d76cb1a | 2049 | case BOOLEAN_TYPE: /* Used for Java, Pascal, and Chill. */ |
e9a25f70 | 2050 | if (TYPE_PRECISION (type) == 0) |
2d76cb1a | 2051 | TYPE_PRECISION (type) = 1; /* default to one byte/boolean. */ |
d4b60170 | 2052 | |
2d76cb1a | 2053 | /* ... fall through ... */ |
e9a25f70 | 2054 | |
7306ed3f JW |
2055 | case INTEGER_TYPE: |
2056 | case ENUMERAL_TYPE: | |
e2a77f99 RK |
2057 | if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST |
2058 | && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0) | |
8df83eae | 2059 | TYPE_UNSIGNED (type) = 1; |
7306ed3f | 2060 | |
179d2f74 RH |
2061 | SET_TYPE_MODE (type, |
2062 | smallest_mode_for_size (TYPE_PRECISION (type), MODE_INT)); | |
06ceef4e | 2063 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); |
ead17059 | 2064 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); |
7306ed3f JW |
2065 | break; |
2066 | ||
2067 | case REAL_TYPE: | |
179d2f74 RH |
2068 | SET_TYPE_MODE (type, |
2069 | mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0)); | |
06ceef4e | 2070 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); |
ead17059 | 2071 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); |
7306ed3f JW |
2072 | break; |
2073 | ||
325217ed CF |
2074 | case FIXED_POINT_TYPE: |
2075 | /* TYPE_MODE (type) has been set already. */ | |
2076 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); | |
2077 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); | |
2078 | break; | |
2079 | ||
7306ed3f | 2080 | case COMPLEX_TYPE: |
8df83eae | 2081 | TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type)); |
179d2f74 RH |
2082 | SET_TYPE_MODE (type, |
2083 | mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)), | |
2084 | (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE | |
2085 | ? MODE_COMPLEX_FLOAT : MODE_COMPLEX_INT), | |
2086 | 0)); | |
06ceef4e | 2087 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); |
ead17059 | 2088 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); |
7306ed3f JW |
2089 | break; |
2090 | ||
0b4565c9 | 2091 | case VECTOR_TYPE: |
26277d41 PB |
2092 | { |
2093 | int nunits = TYPE_VECTOR_SUBPARTS (type); | |
26277d41 PB |
2094 | tree innertype = TREE_TYPE (type); |
2095 | ||
41374e13 | 2096 | gcc_assert (!(nunits & (nunits - 1))); |
26277d41 PB |
2097 | |
2098 | /* Find an appropriate mode for the vector type. */ | |
2099 | if (TYPE_MODE (type) == VOIDmode) | |
bb67d9c7 RG |
2100 | SET_TYPE_MODE (type, |
2101 | mode_for_vector (TYPE_MODE (innertype), nunits)); | |
26277d41 | 2102 | |
325217ed | 2103 | TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type)); |
26277d41 PB |
2104 | TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type)); |
2105 | TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR, | |
2106 | TYPE_SIZE_UNIT (innertype), | |
d35936ab | 2107 | size_int (nunits)); |
26277d41 | 2108 | TYPE_SIZE (type) = int_const_binop (MULT_EXPR, TYPE_SIZE (innertype), |
d35936ab | 2109 | bitsize_int (nunits)); |
e4ca3dc3 | 2110 | |
34bc6352 | 2111 | /* Always naturally align vectors. This prevents ABI changes |
e4ca3dc3 RH |
2112 | depending on whether or not native vector modes are supported. */ |
2113 | TYPE_ALIGN (type) = tree_low_cst (TYPE_SIZE (type), 0); | |
26277d41 PB |
2114 | break; |
2115 | } | |
0b4565c9 | 2116 | |
7306ed3f | 2117 | case VOID_TYPE: |
770ae6cc | 2118 | /* This is an incomplete type and so doesn't have a size. */ |
7306ed3f | 2119 | TYPE_ALIGN (type) = 1; |
11cf4d18 | 2120 | TYPE_USER_ALIGN (type) = 0; |
179d2f74 | 2121 | SET_TYPE_MODE (type, VOIDmode); |
7306ed3f JW |
2122 | break; |
2123 | ||
321cb743 | 2124 | case OFFSET_TYPE: |
06ceef4e | 2125 | TYPE_SIZE (type) = bitsize_int (POINTER_SIZE); |
ead17059 | 2126 | TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT); |
25caaba8 R |
2127 | /* A pointer might be MODE_PARTIAL_INT, |
2128 | but ptrdiff_t must be integral. */ | |
179d2f74 | 2129 | SET_TYPE_MODE (type, mode_for_size (POINTER_SIZE, MODE_INT, 0)); |
d4ebfa65 | 2130 | TYPE_PRECISION (type) = POINTER_SIZE; |
321cb743 MT |
2131 | break; |
2132 | ||
7306ed3f JW |
2133 | case FUNCTION_TYPE: |
2134 | case METHOD_TYPE: | |
019dd4ec RK |
2135 | /* It's hard to see what the mode and size of a function ought to |
2136 | be, but we do know the alignment is FUNCTION_BOUNDARY, so | |
2137 | make it consistent with that. */ | |
179d2f74 | 2138 | SET_TYPE_MODE (type, mode_for_size (FUNCTION_BOUNDARY, MODE_INT, 0)); |
019dd4ec RK |
2139 | TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY); |
2140 | TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT); | |
7306ed3f JW |
2141 | break; |
2142 | ||
2143 | case POINTER_TYPE: | |
2144 | case REFERENCE_TYPE: | |
b5d6a2ff | 2145 | { |
d4ebfa65 BE |
2146 | enum machine_mode mode = TYPE_MODE (type); |
2147 | if (TREE_CODE (type) == REFERENCE_TYPE && reference_types_internal) | |
2148 | { | |
2149 | addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (type)); | |
2150 | mode = targetm.addr_space.address_mode (as); | |
2151 | } | |
4977bab6 | 2152 | |
d4ebfa65 | 2153 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode)); |
4977bab6 | 2154 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode)); |
8df83eae | 2155 | TYPE_UNSIGNED (type) = 1; |
d4ebfa65 | 2156 | TYPE_PRECISION (type) = GET_MODE_BITSIZE (mode); |
b5d6a2ff | 2157 | } |
7306ed3f JW |
2158 | break; |
2159 | ||
2160 | case ARRAY_TYPE: | |
2161 | { | |
b3694847 SS |
2162 | tree index = TYPE_DOMAIN (type); |
2163 | tree element = TREE_TYPE (type); | |
7306ed3f JW |
2164 | |
2165 | build_pointer_type (element); | |
2166 | ||
2167 | /* We need to know both bounds in order to compute the size. */ | |
2168 | if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index) | |
2169 | && TYPE_SIZE (element)) | |
2170 | { | |
ad50bc8d RH |
2171 | tree ub = TYPE_MAX_VALUE (index); |
2172 | tree lb = TYPE_MIN_VALUE (index); | |
473ebbc5 | 2173 | tree element_size = TYPE_SIZE (element); |
e24ff973 RK |
2174 | tree length; |
2175 | ||
c2ce8cdc EB |
2176 | /* Make sure that an array of zero-sized element is zero-sized |
2177 | regardless of its extent. */ | |
2178 | if (integer_zerop (element_size)) | |
2179 | length = size_zero_node; | |
2180 | ||
830c740f RG |
2181 | /* The computation should happen in the original signedness so |
2182 | that (possible) negative values are handled appropriately | |
2183 | when determining overflow. */ | |
c2ce8cdc | 2184 | else |
56099f00 RG |
2185 | { |
2186 | /* ??? When it is obvious that the range is signed | |
2187 | represent it using ssizetype. */ | |
2188 | if (TREE_CODE (lb) == INTEGER_CST | |
2189 | && TREE_CODE (ub) == INTEGER_CST | |
2190 | && TYPE_UNSIGNED (TREE_TYPE (lb)) | |
2191 | && tree_int_cst_lt (ub, lb)) | |
2192 | { | |
2193 | lb = double_int_to_tree | |
2194 | (ssizetype, | |
2195 | double_int_sext (tree_to_double_int (lb), | |
2196 | TYPE_PRECISION (TREE_TYPE (lb)))); | |
2197 | ub = double_int_to_tree | |
2198 | (ssizetype, | |
2199 | double_int_sext (tree_to_double_int (ub), | |
2200 | TYPE_PRECISION (TREE_TYPE (ub)))); | |
2201 | } | |
2202 | length | |
2203 | = fold_convert (sizetype, | |
2204 | size_binop (PLUS_EXPR, | |
2205 | build_int_cst (TREE_TYPE (lb), 1), | |
2206 | size_binop (MINUS_EXPR, ub, lb))); | |
2207 | } | |
2208 | ||
2209 | /* If we arrived at a length of zero ignore any overflow | |
2210 | that occured as part of the calculation. There exists | |
2211 | an association of the plus one where that overflow would | |
2212 | not happen. */ | |
2213 | if (integer_zerop (length) | |
2214 | && TREE_OVERFLOW (length)) | |
2215 | length = size_zero_node; | |
7306ed3f | 2216 | |
fed3cef0 | 2217 | TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size, |
0ac11108 | 2218 | fold_convert (bitsizetype, |
455f19cb | 2219 | length)); |
ead17059 | 2220 | |
473ebbc5 EB |
2221 | /* If we know the size of the element, calculate the total size |
2222 | directly, rather than do some division thing below. This | |
2223 | optimization helps Fortran assumed-size arrays (where the | |
2224 | size of the array is determined at runtime) substantially. */ | |
2225 | if (TYPE_SIZE_UNIT (element)) | |
d4b60170 RK |
2226 | TYPE_SIZE_UNIT (type) |
2227 | = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length); | |
7306ed3f JW |
2228 | } |
2229 | ||
2230 | /* Now round the alignment and size, | |
2231 | using machine-dependent criteria if any. */ | |
2232 | ||
2233 | #ifdef ROUND_TYPE_ALIGN | |
2234 | TYPE_ALIGN (type) | |
2235 | = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT); | |
2236 | #else | |
2237 | TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT); | |
2238 | #endif | |
c163d21d | 2239 | TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element); |
179d2f74 | 2240 | SET_TYPE_MODE (type, BLKmode); |
7306ed3f | 2241 | if (TYPE_SIZE (type) != 0 |
31a02448 | 2242 | #ifdef MEMBER_TYPE_FORCES_BLK |
182e515e | 2243 | && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode) |
31a02448 | 2244 | #endif |
7306ed3f JW |
2245 | /* BLKmode elements force BLKmode aggregate; |
2246 | else extract/store fields may lose. */ | |
2247 | && (TYPE_MODE (TREE_TYPE (type)) != BLKmode | |
2248 | || TYPE_NO_FORCE_BLK (TREE_TYPE (type)))) | |
2249 | { | |
0f6d54f7 RS |
2250 | SET_TYPE_MODE (type, mode_for_array (TREE_TYPE (type), |
2251 | TYPE_SIZE (type))); | |
72c602fc RK |
2252 | if (TYPE_MODE (type) != BLKmode |
2253 | && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT | |
b9d49351 | 2254 | && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))) |
7306ed3f JW |
2255 | { |
2256 | TYPE_NO_FORCE_BLK (type) = 1; | |
179d2f74 | 2257 | SET_TYPE_MODE (type, BLKmode); |
7306ed3f | 2258 | } |
7306ed3f | 2259 | } |
b606b65c OH |
2260 | /* When the element size is constant, check that it is at least as |
2261 | large as the element alignment. */ | |
002a9071 SE |
2262 | if (TYPE_SIZE_UNIT (element) |
2263 | && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST | |
b606b65c OH |
2264 | /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than |
2265 | TYPE_ALIGN_UNIT. */ | |
455f14dd | 2266 | && !TREE_OVERFLOW (TYPE_SIZE_UNIT (element)) |
002a9071 SE |
2267 | && !integer_zerop (TYPE_SIZE_UNIT (element)) |
2268 | && compare_tree_int (TYPE_SIZE_UNIT (element), | |
2269 | TYPE_ALIGN_UNIT (element)) < 0) | |
2270 | error ("alignment of array elements is greater than element size"); | |
7306ed3f JW |
2271 | break; |
2272 | } | |
2273 | ||
2274 | case RECORD_TYPE: | |
cc9d4a85 MM |
2275 | case UNION_TYPE: |
2276 | case QUAL_UNION_TYPE: | |
9328904c MM |
2277 | { |
2278 | tree field; | |
2279 | record_layout_info rli; | |
2280 | ||
2281 | /* Initialize the layout information. */ | |
770ae6cc RK |
2282 | rli = start_record_layout (type); |
2283 | ||
cc9d4a85 MM |
2284 | /* If this is a QUAL_UNION_TYPE, we want to process the fields |
2285 | in the reverse order in building the COND_EXPR that denotes | |
2286 | its size. We reverse them again later. */ | |
2287 | if (TREE_CODE (type) == QUAL_UNION_TYPE) | |
2288 | TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type)); | |
770ae6cc RK |
2289 | |
2290 | /* Place all the fields. */ | |
910ad8de | 2291 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
770ae6cc RK |
2292 | place_field (rli, field); |
2293 | ||
cc9d4a85 MM |
2294 | if (TREE_CODE (type) == QUAL_UNION_TYPE) |
2295 | TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type)); | |
770ae6cc | 2296 | |
9328904c | 2297 | /* Finish laying out the record. */ |
17bbb839 | 2298 | finish_record_layout (rli, /*free_p=*/true); |
9328904c | 2299 | } |
7306ed3f JW |
2300 | break; |
2301 | ||
7306ed3f | 2302 | default: |
41374e13 | 2303 | gcc_unreachable (); |
729a2125 | 2304 | } |
7306ed3f | 2305 | |
9328904c | 2306 | /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For |
cc9d4a85 MM |
2307 | records and unions, finish_record_layout already called this |
2308 | function. */ | |
786de7eb | 2309 | if (TREE_CODE (type) != RECORD_TYPE |
cc9d4a85 MM |
2310 | && TREE_CODE (type) != UNION_TYPE |
2311 | && TREE_CODE (type) != QUAL_UNION_TYPE) | |
9328904c | 2312 | finalize_type_size (type); |
7306ed3f | 2313 | |
36784d0e RG |
2314 | /* We should never see alias sets on incomplete aggregates. And we |
2315 | should not call layout_type on not incomplete aggregates. */ | |
2316 | if (AGGREGATE_TYPE_P (type)) | |
2317 | gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type)); | |
7306ed3f | 2318 | } |
179d2f74 RH |
2319 | |
2320 | /* Vector types need to re-check the target flags each time we report | |
2321 | the machine mode. We need to do this because attribute target can | |
2322 | change the result of vector_mode_supported_p and have_regs_of_mode | |
2323 | on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can | |
2324 | change on a per-function basis. */ | |
b8698a0f | 2325 | /* ??? Possibly a better solution is to run through all the types |
179d2f74 RH |
2326 | referenced by a function and re-compute the TYPE_MODE once, rather |
2327 | than make the TYPE_MODE macro call a function. */ | |
2328 | ||
2329 | enum machine_mode | |
2330 | vector_type_mode (const_tree t) | |
2331 | { | |
2332 | enum machine_mode mode; | |
2333 | ||
2334 | gcc_assert (TREE_CODE (t) == VECTOR_TYPE); | |
2335 | ||
51545682 | 2336 | mode = t->type_common.mode; |
179d2f74 RH |
2337 | if (VECTOR_MODE_P (mode) |
2338 | && (!targetm.vector_mode_supported_p (mode) | |
2339 | || !have_regs_of_mode[mode])) | |
2340 | { | |
51545682 | 2341 | enum machine_mode innermode = TREE_TYPE (t)->type_common.mode; |
179d2f74 RH |
2342 | |
2343 | /* For integers, try mapping it to a same-sized scalar mode. */ | |
2344 | if (GET_MODE_CLASS (innermode) == MODE_INT) | |
2345 | { | |
2346 | mode = mode_for_size (TYPE_VECTOR_SUBPARTS (t) | |
2347 | * GET_MODE_BITSIZE (innermode), MODE_INT, 0); | |
2348 | ||
2349 | if (mode != VOIDmode && have_regs_of_mode[mode]) | |
2350 | return mode; | |
2351 | } | |
2352 | ||
2353 | return BLKmode; | |
2354 | } | |
2355 | ||
2356 | return mode; | |
2357 | } | |
7306ed3f JW |
2358 | \f |
2359 | /* Create and return a type for signed integers of PRECISION bits. */ | |
2360 | ||
2361 | tree | |
46c5ad27 | 2362 | make_signed_type (int precision) |
7306ed3f | 2363 | { |
b3694847 | 2364 | tree type = make_node (INTEGER_TYPE); |
7306ed3f JW |
2365 | |
2366 | TYPE_PRECISION (type) = precision; | |
2367 | ||
fed3cef0 | 2368 | fixup_signed_type (type); |
7306ed3f JW |
2369 | return type; |
2370 | } | |
2371 | ||
2372 | /* Create and return a type for unsigned integers of PRECISION bits. */ | |
2373 | ||
2374 | tree | |
46c5ad27 | 2375 | make_unsigned_type (int precision) |
7306ed3f | 2376 | { |
b3694847 | 2377 | tree type = make_node (INTEGER_TYPE); |
7306ed3f JW |
2378 | |
2379 | TYPE_PRECISION (type) = precision; | |
2380 | ||
7306ed3f JW |
2381 | fixup_unsigned_type (type); |
2382 | return type; | |
2383 | } | |
fed3cef0 | 2384 | \f |
325217ed CF |
2385 | /* Create and return a type for fract of PRECISION bits, UNSIGNEDP, |
2386 | and SATP. */ | |
2387 | ||
2388 | tree | |
2389 | make_fract_type (int precision, int unsignedp, int satp) | |
2390 | { | |
2391 | tree type = make_node (FIXED_POINT_TYPE); | |
2392 | ||
2393 | TYPE_PRECISION (type) = precision; | |
2394 | ||
2395 | if (satp) | |
2396 | TYPE_SATURATING (type) = 1; | |
2397 | ||
2398 | /* Lay out the type: set its alignment, size, etc. */ | |
2399 | if (unsignedp) | |
2400 | { | |
2401 | TYPE_UNSIGNED (type) = 1; | |
179d2f74 | 2402 | SET_TYPE_MODE (type, mode_for_size (precision, MODE_UFRACT, 0)); |
325217ed CF |
2403 | } |
2404 | else | |
179d2f74 | 2405 | SET_TYPE_MODE (type, mode_for_size (precision, MODE_FRACT, 0)); |
325217ed CF |
2406 | layout_type (type); |
2407 | ||
2408 | return type; | |
2409 | } | |
2410 | ||
2411 | /* Create and return a type for accum of PRECISION bits, UNSIGNEDP, | |
2412 | and SATP. */ | |
2413 | ||
2414 | tree | |
2415 | make_accum_type (int precision, int unsignedp, int satp) | |
2416 | { | |
2417 | tree type = make_node (FIXED_POINT_TYPE); | |
2418 | ||
2419 | TYPE_PRECISION (type) = precision; | |
2420 | ||
2421 | if (satp) | |
2422 | TYPE_SATURATING (type) = 1; | |
2423 | ||
2424 | /* Lay out the type: set its alignment, size, etc. */ | |
2425 | if (unsignedp) | |
2426 | { | |
2427 | TYPE_UNSIGNED (type) = 1; | |
179d2f74 | 2428 | SET_TYPE_MODE (type, mode_for_size (precision, MODE_UACCUM, 0)); |
325217ed CF |
2429 | } |
2430 | else | |
179d2f74 | 2431 | SET_TYPE_MODE (type, mode_for_size (precision, MODE_ACCUM, 0)); |
325217ed CF |
2432 | layout_type (type); |
2433 | ||
2434 | return type; | |
2435 | } | |
2436 | ||
/* Initialize sizetypes so layout_type can use them.  Must run before any
   size expression is built: sizetype/bitsizetype are created as bare stub
   nodes first so that the constants used to lay them out can themselves
   be constructed.  */

void
initialize_sizetypes (void)
{
  int precision, bprecision;

  /* Get sizetypes precision from the SIZE_TYPE target macro.  Only the
     four standard unsigned C types are accepted as SIZETYPE spellings.  */
  if (strcmp (SIZETYPE, "unsigned int") == 0)
    precision = INT_TYPE_SIZE;
  else if (strcmp (SIZETYPE, "long unsigned int") == 0)
    precision = LONG_TYPE_SIZE;
  else if (strcmp (SIZETYPE, "long long unsigned int") == 0)
    precision = LONG_LONG_TYPE_SIZE;
  else if (strcmp (SIZETYPE, "short unsigned int") == 0)
    precision = SHORT_TYPE_SIZE;
  else
    gcc_unreachable ();

  /* bitsizetype counts bits, so it needs BITS_PER_UNIT_LOG + 1 more bits
     than sizetype (which counts bytes); round that up to the precision
     of the smallest integer mode that can hold it, then clamp to what a
     double_int (2 * HOST_BITS_PER_WIDE_INT) can represent.  */
  bprecision
    = MIN (precision + BITS_PER_UNIT_LOG + 1, MAX_FIXED_MODE_SIZE);
  bprecision
    = GET_MODE_PRECISION (smallest_mode_for_size (bprecision, MODE_INT));
  if (bprecision > HOST_BITS_PER_WIDE_INT * 2)
    bprecision = HOST_BITS_PER_WIDE_INT * 2;

  /* Create stubs for sizetype and bitsizetype so we can create constants.  */
  sizetype = make_node (INTEGER_TYPE);
  TYPE_NAME (sizetype) = get_identifier ("sizetype");
  TYPE_PRECISION (sizetype) = precision;
  TYPE_UNSIGNED (sizetype) = 1;
  TYPE_IS_SIZETYPE (sizetype) = 1;
  bitsizetype = make_node (INTEGER_TYPE);
  TYPE_NAME (bitsizetype) = get_identifier ("bitsizetype");
  TYPE_PRECISION (bitsizetype) = bprecision;
  TYPE_UNSIGNED (bitsizetype) = 1;
  TYPE_IS_SIZETYPE (bitsizetype) = 1;

  /* Now layout both types manually.  We cannot call layout_type here
     because it would itself need the size types being built.  */
  SET_TYPE_MODE (sizetype, smallest_mode_for_size (precision, MODE_INT));
  TYPE_ALIGN (sizetype) = GET_MODE_ALIGNMENT (TYPE_MODE (sizetype));
  TYPE_SIZE (sizetype) = bitsize_int (precision);
  TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (TYPE_MODE (sizetype)));
  set_min_and_max_values_for_integral_type (sizetype, precision,
					    /*is_unsigned=*/true);

  SET_TYPE_MODE (bitsizetype, smallest_mode_for_size (bprecision, MODE_INT));
  TYPE_ALIGN (bitsizetype) = GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype));
  TYPE_SIZE (bitsizetype) = bitsize_int (bprecision);
  TYPE_SIZE_UNIT (bitsizetype)
    = size_int (GET_MODE_SIZE (TYPE_MODE (bitsizetype)));
  set_min_and_max_values_for_integral_type (bitsizetype, bprecision,
					    /*is_unsigned=*/true);

  /* Create the signed variants of *sizetype.  These can use the normal
     path since sizetype/bitsizetype now exist.  */
  ssizetype = make_signed_type (TYPE_PRECISION (sizetype));
  TYPE_NAME (ssizetype) = get_identifier ("ssizetype");
  TYPE_IS_SIZETYPE (ssizetype) = 1;
  sbitsizetype = make_signed_type (TYPE_PRECISION (bitsizetype));
  TYPE_NAME (sbitsizetype) = get_identifier ("sbitsizetype");
  TYPE_IS_SIZETYPE (sbitsizetype) = 1;
}
2499 | \f | |
71d59383 RS |
/* TYPE is an integral type, i.e., an INTEGRAL_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE.  Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
   for TYPE, based on the PRECISION and whether or not the TYPE
   IS_UNSIGNED.  PRECISION need not correspond to a width supported
   natively by the hardware; for example, on a machine with 8-bit,
   16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
   61.

   The constants are built with build_int_cst_wide, whose second and
   third arguments are the low and high HOST_WIDE_INT words of the
   value; each conditional below picks the word contents according to
   whether PRECISION fits in a single HOST_WIDE_INT.  */

void
set_min_and_max_values_for_integral_type (tree type,
					  int precision,
					  bool is_unsigned)
{
  tree min_value;
  tree max_value;

  if (is_unsigned)
    {
      /* Unsigned minimum is always 0.  */
      min_value = build_int_cst (type, 0);
      /* Unsigned maximum is 2**PRECISION - 1: all-ones low word when
	 PRECISION >= HOST_BITS_PER_WIDE_INT, otherwise a low-word mask
	 of PRECISION bits; the high word holds the remaining
	 PRECISION - HOST_BITS_PER_WIDE_INT one bits, if any.  */
      max_value
	= build_int_cst_wide (type, precision - HOST_BITS_PER_WIDE_INT >= 0
			      ? -1
			      : ((HOST_WIDE_INT) 1 << precision) - 1,
			      precision - HOST_BITS_PER_WIDE_INT > 0
			      ? ((unsigned HOST_WIDE_INT) ~0
				 >> (HOST_BITS_PER_WIDE_INT
				     - (precision - HOST_BITS_PER_WIDE_INT)))
			      : 0);
    }
  else
    {
      /* Signed minimum is -2**(PRECISION-1): when the value fits in the
	 low word, set bits PRECISION-1 and up there and sign-extend into
	 the high word; otherwise the low word is 0 and the high word
	 carries the sign bits.  */
      min_value
	= build_int_cst_wide (type,
			      (precision - HOST_BITS_PER_WIDE_INT > 0
			       ? 0
			       : (HOST_WIDE_INT) (-1) << (precision - 1)),
			      (((HOST_WIDE_INT) (-1)
				<< (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
				    ? precision - HOST_BITS_PER_WIDE_INT - 1
				    : 0))));
      /* Signed maximum is 2**(PRECISION-1) - 1, split across the two
	 words in the same fashion.  */
      max_value
	= build_int_cst_wide (type,
			      (precision - HOST_BITS_PER_WIDE_INT > 0
			       ? -1
			       : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
			      (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
			       ? (((HOST_WIDE_INT) 1
				   << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
			       : 0));
    }

  TYPE_MIN_VALUE (type) = min_value;
  TYPE_MAX_VALUE (type) = max_value;
}
2554 | ||
4cc89e53 | 2555 | /* Set the extreme values of TYPE based on its precision in bits, |
13756074 | 2556 | then lay it out. Used when make_signed_type won't do |
4cc89e53 RS |
2557 | because the tree code is not INTEGER_TYPE. |
2558 | E.g. for Pascal, when the -fsigned-char option is given. */ | |
2559 | ||
2560 | void | |
46c5ad27 | 2561 | fixup_signed_type (tree type) |
4cc89e53 | 2562 | { |
b3694847 | 2563 | int precision = TYPE_PRECISION (type); |
4cc89e53 | 2564 | |
9cd56be1 JH |
2565 | /* We can not represent properly constants greater then |
2566 | 2 * HOST_BITS_PER_WIDE_INT, still we need the types | |
2567 | as they are used by i386 vector extensions and friends. */ | |
2568 | if (precision > HOST_BITS_PER_WIDE_INT * 2) | |
2569 | precision = HOST_BITS_PER_WIDE_INT * 2; | |
2570 | ||
f676971a | 2571 | set_min_and_max_values_for_integral_type (type, precision, |
7b6d72fc | 2572 | /*is_unsigned=*/false); |
4cc89e53 RS |
2573 | |
2574 | /* Lay out the type: set its alignment, size, etc. */ | |
4cc89e53 RS |
2575 | layout_type (type); |
2576 | } | |
2577 | ||
7306ed3f | 2578 | /* Set the extreme values of TYPE based on its precision in bits, |
13756074 | 2579 | then lay it out. This is used both in `make_unsigned_type' |
7306ed3f JW |
2580 | and for enumeral types. */ |
2581 | ||
2582 | void | |
46c5ad27 | 2583 | fixup_unsigned_type (tree type) |
7306ed3f | 2584 | { |
b3694847 | 2585 | int precision = TYPE_PRECISION (type); |
7306ed3f | 2586 | |
9cd56be1 JH |
2587 | /* We can not represent properly constants greater then |
2588 | 2 * HOST_BITS_PER_WIDE_INT, still we need the types | |
2589 | as they are used by i386 vector extensions and friends. */ | |
2590 | if (precision > HOST_BITS_PER_WIDE_INT * 2) | |
2591 | precision = HOST_BITS_PER_WIDE_INT * 2; | |
2592 | ||
89b0433e | 2593 | TYPE_UNSIGNED (type) = 1; |
f676971a EC |
2594 | |
2595 | set_min_and_max_values_for_integral_type (type, precision, | |
7b6d72fc | 2596 | /*is_unsigned=*/true); |
7306ed3f JW |
2597 | |
2598 | /* Lay out the type: set its alignment, size, etc. */ | |
7306ed3f JW |
2599 | layout_type (type); |
2600 | } | |
2601 | \f | |
/* Find the best machine mode to use when referencing a bit field of length
   BITSIZE bits starting at BITPOS.

   BITREGION_START is the bit position of the first bit in this
   sequence of bit fields.  BITREGION_END is the last bit in this
   sequence.  If these two fields are non-zero, we should restrict the
   memory access to a maximum sized chunk of
   BITREGION_END - BITREGION_START + 1.  Otherwise, we are allowed to touch
   any adjacent non bit-fields.

   The underlying object is known to be aligned to a boundary of ALIGN bits.
   If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
   larger than LARGEST_MODE (usually SImode).

   If no mode meets all these conditions, we return VOIDmode.

   If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the
   smallest mode meeting these conditions.

   If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the
   largest mode (but a mode no wider than UNITS_PER_WORD) that meets
   all the conditions.

   If VOLATILEP is true the narrow_volatile_bitfields target hook is used to
   decide which of the above modes should be used.  */

enum machine_mode
get_best_mode (int bitsize, int bitpos,
	       unsigned HOST_WIDE_INT bitregion_start,
	       unsigned HOST_WIDE_INT bitregion_end,
	       unsigned int align,
	       enum machine_mode largest_mode, int volatilep)
{
  enum machine_mode mode;
  unsigned int unit = 0;
  unsigned HOST_WIDE_INT maxbits;

  /* If unset, no restriction.  */
  if (!bitregion_end)
    maxbits = MAX_FIXED_MODE_SIZE;
  else
    maxbits = bitregion_end - bitregion_start + 1;

  /* Find the narrowest integer mode that contains the bit field.
     Modes with padding bits (precision < bitsize) are skipped.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      unit = GET_MODE_BITSIZE (mode);
      if (unit == GET_MODE_PRECISION (mode)
	  && (bitpos % unit) + bitsize <= unit)
	break;
    }

  /* Reject the narrowest mode if it is too wide for the alignment, the
     caller's LARGEST_MODE cap, or the permitted bit region.  */
  if (mode == VOIDmode
      /* It is tempting to omit the following line
	 if STRICT_ALIGNMENT is true.
	 But that is incorrect, since if the bitfield uses part of 3 bytes
	 and we use a 4-byte mode, we could get a spurious segv
	 if the extra 4th byte is past the end of memory.
	 (Though at least one Unix compiler ignores this problem:
	 that on the Sequent 386 machine.  */
      || MIN (unit, BIGGEST_ALIGNMENT) > align
      || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode))
      || unit > maxbits
      || (bitregion_end
	  && bitpos - (bitpos % unit) + unit > bitregion_end + 1))
    return VOIDmode;

  /* When wide accesses are preferred (slow byte access, or a volatile
     field the target does not want narrowed), scan all integer modes
     and remember the widest one that still satisfies every constraint;
     the loop runs narrowest-to-widest so the last match wins.  */
  if ((SLOW_BYTE_ACCESS && ! volatilep)
      || (volatilep && !targetm.narrow_volatile_bitfield ()))
    {
      enum machine_mode wide_mode = VOIDmode, tmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
	   tmode = GET_MODE_WIDER_MODE (tmode))
	{
	  unit = GET_MODE_BITSIZE (tmode);
	  if (unit == GET_MODE_PRECISION (tmode)
	      && bitpos / unit == (bitpos + bitsize - 1) / unit
	      && unit <= BITS_PER_WORD
	      && unit <= MIN (align, BIGGEST_ALIGNMENT)
	      && unit <= maxbits
	      && (largest_mode == VOIDmode
		  || unit <= GET_MODE_BITSIZE (largest_mode))
	      && (bitregion_end == 0
		  || bitpos - (bitpos % unit) + unit <= bitregion_end + 1))
	    wide_mode = tmode;
	}

      if (wide_mode != VOIDmode)
	return wide_mode;
    }

  /* Otherwise return the narrowest acceptable mode found above.  */
  return mode;
}
d7db6646 | 2697 | |
50654f6c | 2698 | /* Gets minimal and maximal values for MODE (signed or unsigned depending on |
0aea6467 | 2699 | SIGN). The returned constants are made to be usable in TARGET_MODE. */ |
50654f6c ZD |
2700 | |
2701 | void | |
0aea6467 ZD |
2702 | get_mode_bounds (enum machine_mode mode, int sign, |
2703 | enum machine_mode target_mode, | |
2704 | rtx *mmin, rtx *mmax) | |
50654f6c | 2705 | { |
0aea6467 ZD |
2706 | unsigned size = GET_MODE_BITSIZE (mode); |
2707 | unsigned HOST_WIDE_INT min_val, max_val; | |
50654f6c | 2708 | |
41374e13 | 2709 | gcc_assert (size <= HOST_BITS_PER_WIDE_INT); |
50654f6c ZD |
2710 | |
2711 | if (sign) | |
2712 | { | |
0aea6467 ZD |
2713 | min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1)); |
2714 | max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1; | |
50654f6c ZD |
2715 | } |
2716 | else | |
2717 | { | |
0aea6467 ZD |
2718 | min_val = 0; |
2719 | max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1; | |
50654f6c | 2720 | } |
0aea6467 | 2721 | |
bb80db7b KH |
2722 | *mmin = gen_int_mode (min_val, target_mode); |
2723 | *mmax = gen_int_mode (max_val, target_mode); | |
50654f6c ZD |
2724 | } |
2725 | ||
e2500fed | 2726 | #include "gt-stor-layout.h" |