/* C-compiler utilities for types and variables storage layout
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


21 | #include "config.h" | |
670ee920 | 22 | #include "system.h" |
4977bab6 ZW |
23 | #include "coretypes.h" |
24 | #include "tm.h" | |
7306ed3f | 25 | #include "tree.h" |
d8a2d370 DN |
26 | #include "stor-layout.h" |
27 | #include "stringpool.h" | |
28 | #include "varasm.h" | |
29 | #include "print-tree.h" | |
d05a5492 | 30 | #include "rtl.h" |
6baf1cc8 | 31 | #include "tm_p.h" |
566cdc73 | 32 | #include "flags.h" |
83685514 AM |
33 | #include "hashtab.h" |
34 | #include "hash-set.h" | |
35 | #include "vec.h" | |
36 | #include "machmode.h" | |
37 | #include "hard-reg-set.h" | |
38 | #include "input.h" | |
7306ed3f | 39 | #include "function.h" |
234042f4 | 40 | #include "expr.h" |
718f9c0f | 41 | #include "diagnostic-core.h" |
f913c102 | 42 | #include "target.h" |
43577e6b | 43 | #include "langhooks.h" |
26277d41 | 44 | #include "regs.h" |
89b0433e | 45 | #include "params.h" |
c582198b AM |
46 | #include "hash-map.h" |
47 | #include "is-a.h" | |
48 | #include "plugin-api.h" | |
49 | #include "ipa-ref.h" | |
f82a627c EB |
50 | #include "cgraph.h" |
51 | #include "tree-inline.h" | |
52 | #include "tree-dump.h" | |
45b0be94 | 53 | #include "gimplify.h" |
7306ed3f | 54 | |
/* Data type for the expressions representing sizes of data types.
   It is the first integer type laid out.  */
tree sizetype_tab[(int) stk_type_kind_last];

/* If nonzero, this is an upper limit on alignment of structure fields.
   The value is measured in bits.  */
unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;

/* Nonzero if all REFERENCE_TYPEs are internal and hence should be allocated
   in the address spaces' address_mode, not pointer_mode.  Set only by
   internal_reference_types called only by a front end.  */
static int reference_types_internal = 0;

static tree self_referential_size (tree);
static void finalize_record_size (record_layout_info);
static void finalize_type_size (tree);
static void place_union_field (record_layout_info, tree);
#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
                             HOST_WIDE_INT, tree);
#endif
extern void debug_rli (record_layout_info);
\f
/* Show that REFERENCE_TYPES are internal and should use address_mode.
   Called only by front end.  */

void
internal_reference_types (void)
{
  reference_types_internal = 1;
}

/* Given a size SIZE that may not be a constant, return a SAVE_EXPR
   to serve as the actual size-expression for a type or decl.  */

tree
variable_size (tree size)
{
  /* Obviously.  */
  if (TREE_CONSTANT (size))
    return size;

  /* If the size is self-referential, we can't make a SAVE_EXPR (see
     save_expr for the rationale).  But we can do something else.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    return self_referential_size (size);

  /* If we are in the global binding level, we can't make a SAVE_EXPR
     since it may end up being shared across functions, so it is up
     to the front-end to deal with this case.  */
  if (lang_hooks.decls.global_bindings_p ())
    return size;

  return save_expr (size);
}
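
/* Illustrative note (an addition, not part of the original sources): for a
   C variable-length array such as `char buf[2 * n]', the non-constant size
   expression reaching variable_size above is wrapped in a SAVE_EXPR so that
   it is evaluated only once, whereas a size that refers to the object itself
   through a PLACEHOLDER_EXPR (as some Ada types do) is routed through
   self_referential_size instead.  */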

/* An array of functions used for self-referential size computation.  */
static GTY(()) vec<tree, va_gc> *size_functions;

/* Similar to copy_tree_r but do not copy component references involving
   PLACEHOLDER_EXPRs.  These nodes are spotted in find_placeholder_in_expr
   and substituted in substitute_in_expr.  */

static tree
copy_self_referential_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  enum tree_code code = TREE_CODE (*tp);

  /* Stop at types, decls, constants like copy_tree_r.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* This is the pattern built in ada/make_aligning_type.  */
  else if (code == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (*tp, 0)) == PLACEHOLDER_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* Default case: the component reference.  */
  else if (code == COMPONENT_REF)
    {
      tree inner;
      for (inner = TREE_OPERAND (*tp, 0);
           REFERENCE_CLASS_P (inner);
           inner = TREE_OPERAND (inner, 0))
        ;

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* We're not supposed to have them in self-referential size trees
     because we wouldn't properly control when they are evaluated.
     However, not creating superfluous SAVE_EXPRs requires accurate
     tracking of readonly-ness all the way down to here, which we
     cannot always guarantee in practice.  So punt in this case.  */
  else if (code == SAVE_EXPR)
    return error_mark_node;

  else if (code == STATEMENT_LIST)
    gcc_unreachable ();

  return copy_tree_r (tp, walk_subtrees, data);
}

/* Given a SIZE expression that is self-referential, return an equivalent
   expression to serve as the actual size expression for a type.  */

static tree
self_referential_size (tree size)
{
  static unsigned HOST_WIDE_INT fnno = 0;
  vec<tree> self_refs = vNULL;
  tree param_type_list = NULL, param_decl_list = NULL;
  tree t, ref, return_type, fntype, fnname, fndecl;
  unsigned int i;
  char buf[128];
  vec<tree, va_gc> *args = NULL;

  /* Do not factor out simple operations.  */
  t = skip_simple_constant_arithmetic (size);
  if (TREE_CODE (t) == CALL_EXPR)
    return size;

  /* Collect the list of self-references in the expression.  */
  find_placeholder_in_expr (size, &self_refs);
  gcc_assert (self_refs.length () > 0);

  /* Obtain a private copy of the expression.  */
  t = size;
  if (walk_tree (&t, copy_self_referential_tree_r, NULL, NULL) != NULL_TREE)
    return size;
  size = t;

  /* Build the parameter and argument lists in parallel; also
     substitute the former for the latter in the expression.  */
  vec_alloc (args, self_refs.length ());
  FOR_EACH_VEC_ELT (self_refs, i, ref)
    {
      tree subst, param_name, param_type, param_decl;

      if (DECL_P (ref))
        {
          /* We shouldn't have true variables here.  */
          gcc_assert (TREE_READONLY (ref));
          subst = ref;
        }
      /* This is the pattern built in ada/make_aligning_type.  */
      else if (TREE_CODE (ref) == ADDR_EXPR)
        subst = ref;
      /* Default case: the component reference.  */
      else
        subst = TREE_OPERAND (ref, 1);

      sprintf (buf, "p%d", i);
      param_name = get_identifier (buf);
      param_type = TREE_TYPE (ref);
      param_decl
        = build_decl (input_location, PARM_DECL, param_name, param_type);
      DECL_ARG_TYPE (param_decl) = param_type;
      DECL_ARTIFICIAL (param_decl) = 1;
      TREE_READONLY (param_decl) = 1;

      size = substitute_in_expr (size, subst, param_decl);

      param_type_list = tree_cons (NULL_TREE, param_type, param_type_list);
      param_decl_list = chainon (param_decl, param_decl_list);
      args->quick_push (ref);
    }

  self_refs.release ();

  /* Append 'void' to indicate that the number of parameters is fixed.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The 3 lists have been created in reverse order.  */
  param_type_list = nreverse (param_type_list);
  param_decl_list = nreverse (param_decl_list);

  /* Build the function type.  */
  return_type = TREE_TYPE (size);
  fntype = build_function_type (return_type, param_type_list);

  /* Build the function declaration.  */
  sprintf (buf, "SZ" HOST_WIDE_INT_PRINT_UNSIGNED, fnno++);
  fnname = get_file_function_name (buf);
  fndecl = build_decl (input_location, FUNCTION_DECL, fnname, fntype);
  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = fndecl;
  DECL_ARGUMENTS (fndecl) = param_decl_list;
  DECL_RESULT (fndecl)
    = build_decl (input_location, RESULT_DECL, 0, return_type);
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The function has been created by the compiler and we don't
     want to emit debug info for it.  */
  DECL_ARTIFICIAL (fndecl) = 1;
  DECL_IGNORED_P (fndecl) = 1;

  /* It is supposed to be "const" and never throw.  */
  TREE_READONLY (fndecl) = 1;
  TREE_NOTHROW (fndecl) = 1;

  /* We want it to be inlined when this is deemed profitable, as
     well as discarded if every call has been integrated.  */
  DECL_DECLARED_INLINE_P (fndecl) = 1;

  /* It is made up of a unique return statement.  */
  DECL_INITIAL (fndecl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (fndecl)) = fndecl;
  t = build2 (MODIFY_EXPR, return_type, DECL_RESULT (fndecl), size);
  DECL_SAVED_TREE (fndecl) = build1 (RETURN_EXPR, void_type_node, t);
  TREE_STATIC (fndecl) = 1;

  /* Put it onto the list of size functions.  */
  vec_safe_push (size_functions, fndecl);

  /* Replace the original expression with a call to the size function.  */
  return build_call_expr_loc_vec (UNKNOWN_LOCATION, fndecl, args);
}

/* Take, queue and compile all the size functions.  It is essential that
   the size functions be gimplified at the very end of the compilation
   in order to guarantee transparent handling of self-referential sizes.
   Otherwise the GENERIC inliner would not be able to inline them back
   at each of their call sites, thus creating artificial non-constant
   size expressions which would trigger nasty problems later on.  */

void
finalize_size_functions (void)
{
  unsigned int i;
  tree fndecl;

  for (i = 0; size_functions && size_functions->iterate (i, &fndecl); i++)
    {
      allocate_struct_function (fndecl, false);
      set_cfun (NULL);
      dump_function (TDI_original, fndecl);
      gimplify_function_tree (fndecl);
      dump_function (TDI_generic, fndecl);
      cgraph_node::finalize_function (fndecl, false);
    }

  vec_free (size_functions);
}
\f
/* Return the machine mode to use for a nonscalar of SIZE bits.  The
   mode must be in class MCLASS, and have exactly that many value bits;
   it may have padding as well.  If LIMIT is nonzero, modes wider
   than MAX_FIXED_MODE_SIZE will not be used.  */

machine_mode
mode_for_size (unsigned int size, enum mode_class mclass, int limit)
{
  machine_mode mode;
  int i;

  if (limit && size > MAX_FIXED_MODE_SIZE)
    return BLKmode;

  /* Get the first mode which has this size, in the specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_PRECISION (mode) == size)
      return mode;

  if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
    for (i = 0; i < NUM_INT_N_ENTS; i++)
      if (int_n_data[i].bitsize == size
          && int_n_enabled_p[i])
        return int_n_data[i].m;

  return BLKmode;
}
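
/* Illustrative usage note (an addition, not part of the original sources):
   on a typical target where SImode is the 32-bit integer mode,

     mode_for_size (32, MODE_INT, 0)

   returns SImode, while an odd width such as 24 bits yields BLKmode unless
   the target provides a matching partial-integer or enabled __intN mode.
   Exact results depend on the target's mode definitions.  */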

/* Similar, except passed a tree node.  */

machine_mode
mode_for_size_tree (const_tree size, enum mode_class mclass, int limit)
{
  unsigned HOST_WIDE_INT uhwi;
  unsigned int ui;

  if (!tree_fits_uhwi_p (size))
    return BLKmode;
  uhwi = tree_to_uhwi (size);
  ui = uhwi;
  if (uhwi != ui)
    return BLKmode;
  return mode_for_size (ui, mclass, limit);
}

/* Similar, but never return BLKmode; return the narrowest mode that
   contains at least the requested number of value bits.  */

machine_mode
smallest_mode_for_size (unsigned int size, enum mode_class mclass)
{
  machine_mode mode = VOIDmode;
  int i;

  /* Get the first mode which has at least this size, in the
     specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (mclass); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_PRECISION (mode) >= size)
      break;

  if (mclass == MODE_INT || mclass == MODE_PARTIAL_INT)
    for (i = 0; i < NUM_INT_N_ENTS; i++)
      if (int_n_data[i].bitsize >= size
          && int_n_data[i].bitsize < GET_MODE_PRECISION (mode)
          && int_n_enabled_p[i])
        mode = int_n_data[i].m;

  if (mode == VOIDmode)
    gcc_unreachable ();

  return mode;
}

/* Find an integer mode of the exact same size, or BLKmode on failure.  */

machine_mode
int_mode_for_mode (machine_mode mode)
{
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
      break;

    case MODE_COMPLEX_INT:
    case MODE_COMPLEX_FLOAT:
    case MODE_FLOAT:
    case MODE_DECIMAL_FLOAT:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_FRACT:
    case MODE_ACCUM:
    case MODE_UFRACT:
    case MODE_UACCUM:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_UACCUM:
    case MODE_POINTER_BOUNDS:
      mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
      break;

    case MODE_RANDOM:
      if (mode == BLKmode)
        break;

      /* ... fall through ...  */

    case MODE_CC:
    default:
      gcc_unreachable ();
    }

  return mode;
}

/* Find a mode that can be used for efficient bitwise operations on MODE.
   Return BLKmode if no such mode exists.  */

machine_mode
bitwise_mode_for_mode (machine_mode mode)
{
  /* Quick exit if we already have a suitable mode.  */
  unsigned int bitsize = GET_MODE_BITSIZE (mode);
  if (SCALAR_INT_MODE_P (mode) && bitsize <= MAX_FIXED_MODE_SIZE)
    return mode;

  /* Reuse the sanity checks from int_mode_for_mode.  */
  gcc_checking_assert ((int_mode_for_mode (mode), true));

  /* Try to replace complex modes with complex modes.  In general we
     expect both components to be processed independently, so we only
     care whether there is a register for the inner mode.  */
  if (COMPLEX_MODE_P (mode))
    {
      machine_mode trial = mode;
      if (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT)
        trial = mode_for_size (bitsize, MODE_COMPLEX_INT, false);
      if (trial != BLKmode
          && have_regs_of_mode[GET_MODE_INNER (trial)])
        return trial;
    }

  /* Try to replace vector modes with vector modes.  Also try using vector
     modes if an integer mode would be too big.  */
  if (VECTOR_MODE_P (mode) || bitsize > MAX_FIXED_MODE_SIZE)
    {
      machine_mode trial = mode;
      if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
        trial = mode_for_size (bitsize, MODE_VECTOR_INT, 0);
      if (trial != BLKmode
          && have_regs_of_mode[trial]
          && targetm.vector_mode_supported_p (trial))
        return trial;
    }

  /* Otherwise fall back on integers while honoring MAX_FIXED_MODE_SIZE.  */
  return mode_for_size (bitsize, MODE_INT, true);
}

/* Find a type that can be used for efficient bitwise operations on MODE.
   Return null if no such mode exists.  */

tree
bitwise_type_for_mode (machine_mode mode)
{
  mode = bitwise_mode_for_mode (mode);
  if (mode == BLKmode)
    return NULL_TREE;

  unsigned int inner_size = GET_MODE_UNIT_BITSIZE (mode);
  tree inner_type = build_nonstandard_integer_type (inner_size, true);

  if (VECTOR_MODE_P (mode))
    return build_vector_type_for_mode (inner_type, mode);

  if (COMPLEX_MODE_P (mode))
    return build_complex_type (inner_type);

  gcc_checking_assert (GET_MODE_INNER (mode) == VOIDmode);
  return inner_type;
}

/* Find a mode that is suitable for representing a vector with
   NUNITS elements of mode INNERMODE.  Returns BLKmode if there
   is no suitable mode.  */

machine_mode
mode_for_vector (machine_mode innermode, unsigned nunits)
{
  machine_mode mode;

  /* First, look for a supported vector type.  */
  if (SCALAR_FLOAT_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_FLOAT;
  else if (SCALAR_FRACT_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_FRACT;
  else if (SCALAR_UFRACT_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_UFRACT;
  else if (SCALAR_ACCUM_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_ACCUM;
  else if (SCALAR_UACCUM_MODE_P (innermode))
    mode = MIN_MODE_VECTOR_UACCUM;
  else
    mode = MIN_MODE_VECTOR_INT;

  /* Do not check vector_mode_supported_p here.  We'll do that
     later in vector_type_mode.  */
  for (; mode != VOIDmode; mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_NUNITS (mode) == nunits
        && GET_MODE_INNER (mode) == innermode)
      break;

  /* For integers, try mapping it to a same-sized scalar mode.  */
  if (mode == VOIDmode
      && GET_MODE_CLASS (innermode) == MODE_INT)
    mode = mode_for_size (nunits * GET_MODE_BITSIZE (innermode),
                          MODE_INT, 0);

  if (mode == VOIDmode
      || (GET_MODE_CLASS (mode) == MODE_INT
          && !have_regs_of_mode[mode]))
    return BLKmode;

  return mode;
}

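/* Illustrative usage note (an addition, not part of the original sources):
   on targets that define a V4SImode vector mode, mode_for_vector (SImode, 4)
   returns V4SImode; if no matching vector mode exists, the MODE_INT fallback
   above tries a same-sized integer mode and, failing that (or if that mode
   has no registers), the function returns BLKmode.  */
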
/* Return the alignment of MODE.  This will be bounded by 1 and
   BIGGEST_ALIGNMENT.  */

unsigned int
get_mode_alignment (machine_mode mode)
{
  return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode] * BITS_PER_UNIT));
}

/* Return the precision of the mode, or for a complex or vector mode the
   precision of the mode of its elements.  */

unsigned int
element_precision (machine_mode mode)
{
  if (COMPLEX_MODE_P (mode) || VECTOR_MODE_P (mode))
    mode = GET_MODE_INNER (mode);

  return GET_MODE_PRECISION (mode);
}

/* Return the natural mode of an array, given that it is SIZE bytes in
   total and has elements of type ELEM_TYPE.  */

static machine_mode
mode_for_array (tree elem_type, tree size)
{
  tree elem_size;
  unsigned HOST_WIDE_INT int_size, int_elem_size;
  bool limit_p;

  /* One-element arrays get the component type's mode.  */
  elem_size = TYPE_SIZE (elem_type);
  if (simple_cst_equal (size, elem_size))
    return TYPE_MODE (elem_type);

  limit_p = true;
  if (tree_fits_uhwi_p (size) && tree_fits_uhwi_p (elem_size))
    {
      int_size = tree_to_uhwi (size);
      int_elem_size = tree_to_uhwi (elem_size);
      if (int_elem_size > 0
          && int_size % int_elem_size == 0
          && targetm.array_mode_supported_p (TYPE_MODE (elem_type),
                                             int_size / int_elem_size))
        limit_p = false;
    }
  return mode_for_size_tree (size, MODE_INT, limit_p);
}
\f
/* Subroutine of layout_decl: Force alignment required for the data type.
   But if the decl itself wants greater alignment, don't override that.  */

static inline void
do_type_align (tree type, tree decl)
{
  if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
    {
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      if (TREE_CODE (decl) == FIELD_DECL)
        DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
    }
}

605 | /* Set the size, mode and alignment of a ..._DECL node. |
606 | TYPE_DECL does need this for C++. | |
607 | Note that LABEL_DECL and CONST_DECL nodes do not need this, | |
608 | and FUNCTION_DECL nodes have them set up in a special (and simple) way. | |
609 | Don't call layout_decl for them. | |
610 | ||
611 | KNOWN_ALIGN is the amount of alignment we can assume this | |
612 | decl has with no special effort. It is relevant only for FIELD_DECLs | |
613 | and depends on the previous fields. | |
614 | All that matters about KNOWN_ALIGN is which powers of 2 divide it. | |
615 | If KNOWN_ALIGN is 0, it means, "as much alignment as you like": | |
616 | the record will be aligned to suit. */ | |
617 | ||
618 | void | |
46c5ad27 | 619 | layout_decl (tree decl, unsigned int known_align) |
7306ed3f | 620 | { |
b3694847 SS |
621 | tree type = TREE_TYPE (decl); |
622 | enum tree_code code = TREE_CODE (decl); | |
a46666a9 | 623 | rtx rtl = NULL_RTX; |
db3927fb | 624 | location_t loc = DECL_SOURCE_LOCATION (decl); |
7306ed3f JW |
625 | |
626 | if (code == CONST_DECL) | |
627 | return; | |
0ac11108 | 628 | |
41374e13 NS |
629 | gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL |
630 | || code == TYPE_DECL ||code == FIELD_DECL); | |
0ac11108 | 631 | |
a46666a9 RH |
632 | rtl = DECL_RTL_IF_SET (decl); |
633 | ||
7306ed3f | 634 | if (type == error_mark_node) |
33433751 | 635 | type = void_type_node; |
7306ed3f | 636 | |
770ae6cc RK |
637 | /* Usually the size and mode come from the data type without change, |
638 | however, the front-end may set the explicit width of the field, so its | |
639 | size may not be the same as the size of its type. This happens with | |
640 | bitfields, of course (an `int' bitfield may be only 2 bits, say), but it | |
641 | also happens with other fields. For example, the C++ front-end creates | |
642 | zero-sized fields corresponding to empty base classes, and depends on | |
643 | layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the | |
4b6bf620 RK |
644 | size in bytes from the size in bits. If we have already set the mode, |
645 | don't set it again since we can be called twice for FIELD_DECLs. */ | |
770ae6cc | 646 | |
a150de29 | 647 | DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type); |
4b6bf620 RK |
648 | if (DECL_MODE (decl) == VOIDmode) |
649 | DECL_MODE (decl) = TYPE_MODE (type); | |
770ae6cc | 650 | |
5e9bec99 | 651 | if (DECL_SIZE (decl) == 0) |
06ceef4e | 652 | { |
ad50bc8d RH |
653 | DECL_SIZE (decl) = TYPE_SIZE (type); |
654 | DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type); | |
06ceef4e | 655 | } |
1a96dc46 | 656 | else if (DECL_SIZE_UNIT (decl) == 0) |
770ae6cc | 657 | DECL_SIZE_UNIT (decl) |
db3927fb AH |
658 | = fold_convert_loc (loc, sizetype, |
659 | size_binop_loc (loc, CEIL_DIV_EXPR, DECL_SIZE (decl), | |
660 | bitsize_unit_node)); | |
06ceef4e | 661 | |
78d55cc8 JM |
662 | if (code != FIELD_DECL) |
663 | /* For non-fields, update the alignment from the type. */ | |
664 | do_type_align (type, decl); | |
665 | else | |
666 | /* For fields, it's a bit more complicated... */ | |
786de7eb | 667 | { |
40aae178 | 668 | bool old_user_align = DECL_USER_ALIGN (decl); |
d1a701eb MM |
669 | bool zero_bitfield = false; |
670 | bool packed_p = DECL_PACKED (decl); | |
671 | unsigned int mfa; | |
40aae178 | 672 | |
78d55cc8 JM |
673 | if (DECL_BIT_FIELD (decl)) |
674 | { | |
675 | DECL_BIT_FIELD_TYPE (decl) = type; | |
7306ed3f | 676 | |
78d55cc8 | 677 | /* A zero-length bit-field affects the alignment of the next |
d1a701eb MM |
678 | field. In essence such bit-fields are not influenced by |
679 | any packing due to #pragma pack or attribute packed. */ | |
78d55cc8 | 680 | if (integer_zerop (DECL_SIZE (decl)) |
5fd9b178 | 681 | && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl))) |
78d55cc8 | 682 | { |
d1a701eb MM |
683 | zero_bitfield = true; |
684 | packed_p = false; | |
78d55cc8 JM |
685 | #ifdef PCC_BITFIELD_TYPE_MATTERS |
686 | if (PCC_BITFIELD_TYPE_MATTERS) | |
687 | do_type_align (type, decl); | |
688 | else | |
689 | #endif | |
ad3f5759 | 690 | { |
78d55cc8 | 691 | #ifdef EMPTY_FIELD_BOUNDARY |
ad3f5759 AS |
692 | if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl)) |
693 | { | |
694 | DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY; | |
695 | DECL_USER_ALIGN (decl) = 0; | |
696 | } | |
78d55cc8 | 697 | #endif |
ad3f5759 | 698 | } |
78d55cc8 JM |
699 | } |
700 | ||
701 | /* See if we can use an ordinary integer mode for a bit-field. | |
f5c88dbf | 702 | Conditions are: a fixed size that is correct for another mode, |
5dfd32cd | 703 | occupying a complete byte or bytes on proper boundary. */ |
78d55cc8 JM |
704 | if (TYPE_SIZE (type) != 0 |
705 | && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST | |
5dfd32cd | 706 | && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT) |
78d55cc8 | 707 | { |
ef4bddc2 | 708 | machine_mode xmode |
78d55cc8 | 709 | = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1); |
d4cba6d4 | 710 | unsigned int xalign = GET_MODE_ALIGNMENT (xmode); |
78d55cc8 | 711 | |
f676971a | 712 | if (xmode != BLKmode |
d4cba6d4 EB |
713 | && !(xalign > BITS_PER_UNIT && DECL_PACKED (decl)) |
714 | && (known_align == 0 || known_align >= xalign)) | |
78d55cc8 | 715 | { |
d4cba6d4 | 716 | DECL_ALIGN (decl) = MAX (xalign, DECL_ALIGN (decl)); |
78d55cc8 JM |
717 | DECL_MODE (decl) = xmode; |
718 | DECL_BIT_FIELD (decl) = 0; | |
719 | } | |
720 | } | |
721 | ||
722 | /* Turn off DECL_BIT_FIELD if we won't need it set. */ | |
723 | if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode | |
724 | && known_align >= TYPE_ALIGN (type) | |
725 | && DECL_ALIGN (decl) >= TYPE_ALIGN (type)) | |
726 | DECL_BIT_FIELD (decl) = 0; | |
727 | } | |
d1a701eb | 728 | else if (packed_p && DECL_USER_ALIGN (decl)) |
78d55cc8 | 729 | /* Don't touch DECL_ALIGN. For other packed fields, go ahead and |
2038bd69 | 730 | round up; we'll reduce it again below. We want packing to |
ba228239 | 731 | supersede USER_ALIGN inherited from the type, but defer to |
2038bd69 | 732 | alignment explicitly specified on the field decl. */; |
78d55cc8 | 733 | else |
40aae178 JM |
734 | do_type_align (type, decl); |
735 | ||
7e4aeb32 JM |
736 | /* If the field is packed and not explicitly aligned, give it the |
737 | minimum alignment. Note that do_type_align may set | |
738 | DECL_USER_ALIGN, so we need to check old_user_align instead. */ | |
d1a701eb | 739 | if (packed_p |
7e4aeb32 | 740 | && !old_user_align) |
40aae178 | 741 | DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT); |
78d55cc8 | 742 | |
d1a701eb | 743 | if (! packed_p && ! DECL_USER_ALIGN (decl)) |
7306ed3f | 744 | { |
78d55cc8 JM |
745 | /* Some targets (i.e. i386, VMS) limit struct field alignment |
746 | to a lower boundary than alignment of variables unless | |
747 | it was overridden by attribute aligned. */ | |
748 | #ifdef BIGGEST_FIELD_ALIGNMENT | |
749 | DECL_ALIGN (decl) | |
750 | = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT); | |
751 | #endif | |
752 | #ifdef ADJUST_FIELD_ALIGN | |
753 | DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl)); | |
754 | #endif | |
7306ed3f | 755 | } |
9ca75f15 | 756 | |
d1a701eb MM |
757 | if (zero_bitfield) |
758 | mfa = initial_max_fld_align * BITS_PER_UNIT; | |
759 | else | |
760 | mfa = maximum_field_alignment; | |
9ca75f15 | 761 | /* Should this be controlled by DECL_USER_ALIGN, too? */ |
d1a701eb MM |
762 | if (mfa != 0) |
763 | DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), mfa); | |
7306ed3f JW |
764 | } |
765 | ||
766 | /* Evaluate nonconstant size only once, either now or as soon as safe. */ | |
767 | if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) | |
768 | DECL_SIZE (decl) = variable_size (DECL_SIZE (decl)); | |
06ceef4e RK |
769 | if (DECL_SIZE_UNIT (decl) != 0 |
770 | && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST) | |
771 | DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl)); | |
772 | ||
773 | /* If requested, warn about definitions of large data objects. */ | |
774 | if (warn_larger_than | |
17aec3eb | 775 | && (code == VAR_DECL || code == PARM_DECL) |
06ceef4e RK |
776 | && ! DECL_EXTERNAL (decl)) |
777 | { | |
778 | tree size = DECL_SIZE_UNIT (decl); | |
779 | ||
780 | if (size != 0 && TREE_CODE (size) == INTEGER_CST | |
05bccae2 | 781 | && compare_tree_int (size, larger_than_size) > 0) |
06ceef4e | 782 | { |
0384674e | 783 | int size_as_int = TREE_INT_CST_LOW (size); |
06ceef4e | 784 | |
05bccae2 | 785 | if (compare_tree_int (size, size_as_int) == 0) |
5de8299c | 786 | warning (OPT_Wlarger_than_, "size of %q+D is %d bytes", decl, size_as_int); |
06ceef4e | 787 | else |
5de8299c | 788 | warning (OPT_Wlarger_than_, "size of %q+D is larger than %wd bytes", |
dee15844 | 789 | decl, larger_than_size); |
06ceef4e RK |
790 | } |
791 | } | |
a46666a9 RH |
792 | |
793 | /* If the RTL was already set, update its mode and mem attributes. */ | |
794 | if (rtl) | |
795 | { | |
796 | PUT_MODE (rtl, DECL_MODE (decl)); | |
797 | SET_DECL_RTL (decl, 0); | |
798 | set_mem_attributes (rtl, decl, 1); | |
799 | SET_DECL_RTL (decl, rtl); | |
800 | } | |
7306ed3f | 801 | } |

/* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of
   a previous call to layout_decl and calls it again.  */

void
relayout_decl (tree decl)
{
  DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
  DECL_MODE (decl) = VOIDmode;
  if (!DECL_USER_ALIGN (decl))
    DECL_ALIGN (decl) = 0;
  SET_DECL_RTL (decl, 0);

  layout_decl (decl, 0);
}
\f
/* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
   QUAL_UNION_TYPE.  Return a pointer to a struct record_layout_info which
   is to be passed to all other layout functions for this record.  It is the
   responsibility of the caller to call `free' for the storage returned.
   Note that garbage collection is not permitted until we finish laying
   out the record.  */

record_layout_info
start_record_layout (tree t)
{
  record_layout_info rli = XNEW (struct record_layout_info_s);

  rli->t = t;

  /* If the type has a minimum specified alignment (via an attribute
     declaration, for example) use it -- otherwise, start with a
     one-byte alignment.  */
  rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
  rli->unpacked_align = rli->record_align;
  rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);

#ifdef STRUCTURE_SIZE_BOUNDARY
  /* Packed structures don't need to have minimum size.  */
  if (! TYPE_PACKED (t))
    {
      unsigned tmp;

      /* #pragma pack overrides STRUCTURE_SIZE_BOUNDARY.  */
      tmp = (unsigned) STRUCTURE_SIZE_BOUNDARY;
      if (maximum_field_alignment != 0)
        tmp = MIN (tmp, maximum_field_alignment);
      rli->record_align = MAX (rli->record_align, tmp);
    }
#endif

  rli->offset = size_zero_node;
  rli->bitpos = bitsize_zero_node;
  rli->prev_field = 0;
  rli->pending_statics = 0;
  rli->packed_maybe_necessary = 0;
  rli->remaining_in_alignment = 0;

  return rli;
}

/* Return the combined bit position for the byte offset OFFSET and the
   bit position BITPOS.

   These functions operate on byte and bit positions present in FIELD_DECLs
   and assume that these expressions result in no (intermediate) overflow.
   This assumption is necessary to fold the expressions as much as possible,
   so as to avoid creating artificially variable-sized types in languages
   supporting variable-sized types like Ada.  */

tree
bit_from_pos (tree offset, tree bitpos)
{
  if (TREE_CODE (offset) == PLUS_EXPR)
    offset = size_binop (PLUS_EXPR,
                         fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
                         fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
  else
    offset = fold_convert (bitsizetype, offset);
  return size_binop (PLUS_EXPR, bitpos,
                     size_binop (MULT_EXPR, offset, bitsize_unit_node));
}

/* Return the combined truncated byte position for the byte offset OFFSET and
   the bit position BITPOS.  */

tree
byte_from_pos (tree offset, tree bitpos)
{
  tree bytepos;
  if (TREE_CODE (bitpos) == MULT_EXPR
      && tree_int_cst_equal (TREE_OPERAND (bitpos, 1), bitsize_unit_node))
    bytepos = TREE_OPERAND (bitpos, 0);
  else
    bytepos = size_binop (TRUNC_DIV_EXPR, bitpos, bitsize_unit_node);
  return size_binop (PLUS_EXPR, offset, fold_convert (sizetype, bytepos));
}

/* Split the bit position POS into a byte offset *POFFSET and a bit
   position *PBITPOS with the byte offset aligned to OFF_ALIGN bits.  */

void
pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
              tree pos)
{
  tree toff_align = bitsize_int (off_align);
  if (TREE_CODE (pos) == MULT_EXPR
      && tree_int_cst_equal (TREE_OPERAND (pos, 1), toff_align))
    {
      *poffset = size_binop (MULT_EXPR,
                             fold_convert (sizetype, TREE_OPERAND (pos, 0)),
                             size_int (off_align / BITS_PER_UNIT));
      *pbitpos = bitsize_zero_node;
    }
  else
    {
      *poffset = size_binop (MULT_EXPR,
                             fold_convert (sizetype,
                                           size_binop (FLOOR_DIV_EXPR, pos,
                                                       toff_align)),
                             size_int (off_align / BITS_PER_UNIT));
      *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, toff_align);
    }
}

/* Given a pointer to bit and byte offsets and an offset alignment,
   normalize the offsets so they are within the alignment.  */

void
normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
{
  /* If the bit position is now larger than it should be, adjust it
     downwards.  */
  if (compare_tree_int (*pbitpos, off_align) >= 0)
    {
      tree offset, bitpos;
      pos_from_bit (&offset, &bitpos, off_align, *pbitpos);
      *poffset = size_binop (PLUS_EXPR, *poffset, offset);
      *pbitpos = bitpos;
    }
}
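
/* Worked example (an addition, not part of the original sources): with
   OFF_ALIGN == 32 and a constant bit position of 70, pos_from_bit yields a
   byte offset of 8 (FLOOR_DIV (70, 32) = 2 units of 4 bytes) and a residual
   bit position of 6 (FLOOR_MOD (70, 32)); normalize_offset then folds those
   8 bytes into *POFFSET and leaves the 6 bits in *PBITPOS.  */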

/* Print debugging information about the information in RLI.  */

DEBUG_FUNCTION void
debug_rli (record_layout_info rli)
{
  print_node_brief (stderr, "type", rli->t, 0);
  print_node_brief (stderr, "\noffset", rli->offset, 0);
  print_node_brief (stderr, " bitpos", rli->bitpos, 0);

  fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
           rli->record_align, rli->unpacked_align,
           rli->offset_align);

  /* The ms_struct code is the only code that uses this.  */
  if (targetm.ms_bitfield_layout_p (rli->t))
    fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment);

  if (rli->packed_maybe_necessary)
    fprintf (stderr, "packed may be necessary\n");

  if (!vec_safe_is_empty (rli->pending_statics))
    {
      fprintf (stderr, "pending statics:\n");
      debug_vec_tree (rli->pending_statics);
    }
}

/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
   BITPOS if necessary to keep BITPOS below OFFSET_ALIGN.  */

void
normalize_rli (record_layout_info rli)
{
  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
}

/* Returns the size in bytes allocated so far.  */

tree
rli_size_unit_so_far (record_layout_info rli)
{
  return byte_from_pos (rli->offset, rli->bitpos);
}

/* Returns the size in bits allocated so far.  */

tree
rli_size_so_far (record_layout_info rli)
{
  return bit_from_pos (rli->offset, rli->bitpos);
}

/* FIELD is about to be added to RLI->T.  The alignment (in bits) of
   the next available location within the record is given by KNOWN_ALIGN.
   Update the variable alignment fields in RLI, and return the alignment
   to give the FIELD.  */

unsigned int
update_alignment_for_field (record_layout_info rli, tree field,
                            unsigned int known_align)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);
  /* True if the field was explicitly aligned by the user.  */
  bool user_align;
  bool is_bitfield;

  /* Do not attempt to align an ERROR_MARK node.  */
  if (TREE_CODE (type) == ERROR_MARK)
    return 0;

  /* Lay out the field so we know what alignment it needs.  */
  layout_decl (field, known_align);
  desired_align = DECL_ALIGN (field);
  user_align = DECL_USER_ALIGN (field);

  is_bitfield = (type != error_mark_node
                 && DECL_BIT_FIELD_TYPE (field)
                 && ! integer_zerop (TYPE_SIZE (type)));

  /* Record must have at least as much alignment as any field.
     Otherwise, the alignment of the field within the record is
     meaningless.  */
  if (targetm.ms_bitfield_layout_p (rli->t))
    {
      /* Here, the alignment of the underlying type of a bitfield can
         affect the alignment of a record; even a zero-sized field
         can do this.  The alignment should be to the alignment of
         the type, except that for zero-size bitfields this only
         applies if there was an immediately prior, nonzero-size
         bitfield.  (That's the way it is, experimentally.)  */
      if ((!is_bitfield && !DECL_PACKED (field))
          || ((DECL_SIZE (field) == NULL_TREE
               || !integer_zerop (DECL_SIZE (field)))
              ? !DECL_PACKED (field)
              : (rli->prev_field
                 && DECL_BIT_FIELD_TYPE (rli->prev_field)
                 && ! integer_zerop (DECL_SIZE (rli->prev_field)))))
        {
          unsigned int type_align = TYPE_ALIGN (type);
          type_align = MAX (type_align, desired_align);
          if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);
          rli->record_align = MAX (rli->record_align, type_align);
          rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
        }
    }
#ifdef PCC_BITFIELD_TYPE_MATTERS
  else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
    {
      /* Named bit-fields cause the entire structure to have the
         alignment implied by their type.  Some targets also apply the same
         rules to unnamed bitfields.  */
      if (DECL_NAME (field) != 0
          || targetm.align_anon_bitfield ())
        {
          unsigned int type_align = TYPE_ALIGN (type);

#ifdef ADJUST_FIELD_ALIGN
          if (! TYPE_USER_ALIGN (type))
            type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

          /* Targets might choose to handle unnamed and hence possibly
             zero-width bitfields.  Those are not influenced by #pragmas
             or packed attributes.  */
          if (integer_zerop (DECL_SIZE (field)))
            {
              if (initial_max_fld_align)
                type_align = MIN (type_align,
                                  initial_max_fld_align * BITS_PER_UNIT);
            }
          else if (maximum_field_alignment != 0)
            type_align = MIN (type_align, maximum_field_alignment);
          else if (DECL_PACKED (field))
            type_align = MIN (type_align, BITS_PER_UNIT);

          /* The alignment of the record is increased to the maximum
             of the current alignment, the alignment indicated on the
             field (i.e., the alignment specified by an __aligned__
             attribute), and the alignment indicated by the type of
             the field.  */
          rli->record_align = MAX (rli->record_align, desired_align);
          rli->record_align = MAX (rli->record_align, type_align);

          if (warn_packed)
            rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
          user_align |= TYPE_USER_ALIGN (type);
        }
    }
#endif
  else
    {
      rli->record_align = MAX (rli->record_align, desired_align);
      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
    }

  TYPE_USER_ALIGN (rli->t) |= user_align;

  return desired_align;
}

/* Called from place_field to handle unions.  */

static void
place_union_field (record_layout_info rli, tree field)
{
  update_alignment_for_field (rli, field, /*known_align=*/0);

  DECL_FIELD_OFFSET (field) = size_zero_node;
  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
  SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);

  /* If this is an ERROR_MARK return *after* having set the
     field at the start of the union.  This helps when parsing
     invalid fields.  */
  if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
    return;

  /* We assume the union's size will be a multiple of a byte so we don't
     bother with BITPOS.  */
  if (TREE_CODE (rli->t) == UNION_TYPE)
    rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
  else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
    rli->offset = fold_build3 (COND_EXPR, sizetype, DECL_QUALIFIER (field),
                               DECL_SIZE_UNIT (field), rli->offset);
}

#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
/* A bitfield of SIZE with a required access alignment of ALIGN is allocated
   at BYTE_OFFSET / BIT_OFFSET.  Return nonzero if the field would span more
   units of alignment than the underlying TYPE.  */
static int
excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
                  HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
{
  /* Note that the calculation of OFFSET might overflow; we calculate it so
     that we still get the right result as long as ALIGN is a power of two.  */
  unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;

  offset = offset % align;
  return ((offset + size + align - 1) / align
          > tree_to_uhwi (TYPE_SIZE (type)) / align);
}
#endif

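/* Worked example (an addition, not part of the original sources): for a type
   whose size and alignment are 32 bits, a 16-bit bit-field placed at
   byte_offset 0 / bit_offset 24 would occupy bits 24..39, i.e. parts of two
   32-bit alignment units, so excess_unit_span returns nonzero and the
   callers below advance to the next boundary.  */
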
/* RLI contains information about the layout of a RECORD_TYPE.  FIELD
   is a FIELD_DECL to be added after those fields already present in
   T.  (FIELD is not actually added to the TYPE_FIELDS list here;
   callers that desire that behavior must manually perform that step.)  */

void
place_field (record_layout_info rli, tree field)
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The alignment FIELD would have if we just dropped it into the
     record as it presently stands.  */
  unsigned int known_align;
  unsigned int actual_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);

  gcc_assert (TREE_CODE (field) != ERROR_MARK);

  /* If FIELD is static, then treat it like a separate variable, not
     really like a structure field.  If it is a FUNCTION_DECL, it's a
     method.  In both cases, all we do is lay out the decl, and we do
     it *after* the record is laid out.  */
  if (TREE_CODE (field) == VAR_DECL)
    {
      vec_safe_push (rli->pending_statics, field);
      return;
    }

  /* Enumerators and enum types which are local to this class need not
     be laid out.  Likewise for initialized constant fields.  */
  else if (TREE_CODE (field) != FIELD_DECL)
    return;

  /* Unions are laid out very differently than records, so split
     that code off to another function.  */
  else if (TREE_CODE (rli->t) != RECORD_TYPE)
    {
      place_union_field (rli, field);
      return;
    }

  else if (TREE_CODE (type) == ERROR_MARK)
    {
      /* Place this field at the current allocation position, so we
         maintain monotonicity.  */
      DECL_FIELD_OFFSET (field) = rli->offset;
      DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
      SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
      return;
    }

1203 | ||
0645ba8f MM |
1204 | /* Work out the known alignment so far. Note that A & (-A) is the |
1205 | value of the least-significant bit in A that is one. */ | |
1206 | if (! integer_zerop (rli->bitpos)) | |
ae7e9ddd RS |
1207 | known_align = (tree_to_uhwi (rli->bitpos) |
1208 | & - tree_to_uhwi (rli->bitpos)); | |
0645ba8f | 1209 | else if (integer_zerop (rli->offset)) |
cbbaf4ae | 1210 | known_align = 0; |
cc269bb6 | 1211 | else if (tree_fits_uhwi_p (rli->offset)) |
0645ba8f | 1212 | known_align = (BITS_PER_UNIT |
ae7e9ddd RS |
1213 | * (tree_to_uhwi (rli->offset) |
1214 | & - tree_to_uhwi (rli->offset))); | |
0645ba8f MM |
1215 | else |
1216 | known_align = rli->offset_align; | |
46c5ad27 | 1217 | |
0645ba8f | 1218 | desired_align = update_alignment_for_field (rli, field, known_align); |
cbbaf4ae R |
1219 | if (known_align == 0) |
1220 | known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align); | |
0645ba8f | 1221 | |
9328904c MM |
1222 | if (warn_packed && DECL_PACKED (field)) |
1223 | { | |
78d55cc8 | 1224 | if (known_align >= TYPE_ALIGN (type)) |
3c12fcc2 | 1225 | { |
9328904c | 1226 | if (TYPE_ALIGN (type) > desired_align) |
3c12fcc2 | 1227 | { |
9328904c | 1228 | if (STRICT_ALIGNMENT) |
dee15844 JM |
1229 | warning (OPT_Wattributes, "packed attribute causes " |
1230 | "inefficient alignment for %q+D", field); | |
a3b20e90 JM |
1231 | /* Don't warn if DECL_PACKED was set by the type. */ |
1232 | else if (!TYPE_PACKED (rli->t)) | |
dee15844 JM |
1233 | warning (OPT_Wattributes, "packed attribute is " |
1234 | "unnecessary for %q+D", field); | |
3c12fcc2 | 1235 | } |
3c12fcc2 | 1236 | } |
9328904c MM |
1237 | else |
1238 | rli->packed_maybe_necessary = 1; | |
1239 | } | |
7306ed3f | 1240 | |
9328904c | 1241 | /* Does this field automatically have alignment it needs by virtue |
9954e17f KT |
1242 | of the fields that precede it and the record's own alignment? */ |
1243 | if (known_align < desired_align) | |
9328904c MM |
1244 | { |
1245 | /* No, we need to skip space before this field. | |
1246 | Bump the cumulative size to multiple of field alignment. */ | |
7306ed3f | 1247 | |
9954e17f KT |
1248 | if (!targetm.ms_bitfield_layout_p (rli->t) |
1249 | && DECL_SOURCE_LOCATION (field) != BUILTINS_LOCATION) | |
4c0a0455 | 1250 | warning (OPT_Wpadded, "padding struct to align %q+D", field); |
3c12fcc2 | 1251 | |
770ae6cc RK |
1252 | /* If the alignment is still within offset_align, just align |
1253 | the bit position. */ | |
1254 | if (desired_align < rli->offset_align) | |
1255 | rli->bitpos = round_up (rli->bitpos, desired_align); | |
9328904c MM |
1256 | else |
1257 | { | |
770ae6cc RK |
1258 | /* First adjust OFFSET by the partial bits, then align. */ |
1259 | rli->offset | |
1260 | = size_binop (PLUS_EXPR, rli->offset, | |
455f19cb MM |
1261 | fold_convert (sizetype, |
1262 | size_binop (CEIL_DIV_EXPR, rli->bitpos, | |
1263 | bitsize_unit_node))); | |
770ae6cc RK |
1264 | rli->bitpos = bitsize_zero_node; |
1265 | ||
1266 | rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT); | |
7306ed3f | 1267 | } |
770ae6cc | 1268 | |
b1254b72 RK |
1269 | if (! TREE_CONSTANT (rli->offset)) |
1270 | rli->offset_align = desired_align; | |
9954e17f KT |
1271 | if (targetm.ms_bitfield_layout_p (rli->t)) |
1272 | rli->prev_field = NULL; | |
9328904c | 1273 | } |
7306ed3f | 1274 | |
770ae6cc RK |
1275 | /* Handle compatibility with PCC. Note that if the record has any |
1276 | variable-sized fields, we need not worry about compatibility. */ | |
7306ed3f | 1277 | #ifdef PCC_BITFIELD_TYPE_MATTERS |
9328904c | 1278 | if (PCC_BITFIELD_TYPE_MATTERS |
245f1bfa | 1279 | && ! targetm.ms_bitfield_layout_p (rli->t) |
9328904c MM |
1280 | && TREE_CODE (field) == FIELD_DECL |
1281 | && type != error_mark_node | |
770ae6cc | 1282 | && DECL_BIT_FIELD (field) |
2cd36c22 AN |
1283 | && (! DECL_PACKED (field) |
1284 | /* Enter for these packed fields only to issue a warning. */ | |
1285 | || TYPE_ALIGN (type) <= BITS_PER_UNIT) | |
9328904c | 1286 | && maximum_field_alignment == 0 |
770ae6cc | 1287 | && ! integer_zerop (DECL_SIZE (field)) |
cc269bb6 RS |
1288 | && tree_fits_uhwi_p (DECL_SIZE (field)) |
1289 | && tree_fits_uhwi_p (rli->offset) | |
1290 | && tree_fits_uhwi_p (TYPE_SIZE (type))) | |
9328904c MM |
1291 | { |
1292 | unsigned int type_align = TYPE_ALIGN (type); | |
770ae6cc | 1293 | tree dsize = DECL_SIZE (field); |
ae7e9ddd | 1294 | HOST_WIDE_INT field_size = tree_to_uhwi (dsize); |
905b0e07 | 1295 | HOST_WIDE_INT offset = tree_to_uhwi (rli->offset); |
9439e9a1 | 1296 | HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos); |
9328904c | 1297 | |
ad9335eb JJ |
1298 | #ifdef ADJUST_FIELD_ALIGN |
1299 | if (! TYPE_USER_ALIGN (type)) | |
1300 | type_align = ADJUST_FIELD_ALIGN (field, type_align); | |
1301 | #endif | |
1302 | ||
9328904c MM |
1303 | /* A bit field may not span more units of alignment of its type |
1304 | than its type itself. Advance to next boundary if necessary. */ | |
4977bab6 | 1305 | if (excess_unit_span (offset, bit_offset, field_size, type_align, type)) |
2cd36c22 AN |
1306 | { |
1307 | if (DECL_PACKED (field)) | |
1308 | { | |
bee6fa6d | 1309 | if (warn_packed_bitfield_compat == 1) |
2cd36c22 AN |
1310 | inform |
1311 | (input_location, | |
d8a07487 | 1312 | "offset of packed bit-field %qD has changed in GCC 4.4", |
2cd36c22 AN |
1313 | field); |
1314 | } | |
1315 | else | |
985c87c9 | 1316 | rli->bitpos = round_up (rli->bitpos, type_align); |
2cd36c22 | 1317 | } |
daf06049 | 1318 | |
2cd36c22 AN |
1319 | if (! DECL_PACKED (field)) |
1320 | TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type); | |
9328904c | 1321 | } |
7306ed3f JW |
1322 | #endif |
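A user-level illustration of the PCC_BITFIELD_TYPE_MATTERS rule enforced above, assuming a typical ELF target where int is 4 bytes with 4-byte alignment (this snippet is illustrative only and not part of stor-layout.c):

/* Placed right after `a' (bit 8), `b' would straddle two 4-byte
   alignment units of its type, which an int itself never does, so the
   code above advances it to the next boundary (bit 32).  */
struct pcc_example
{
  char a;
  int  b : 30;   /* expect sizeof (struct pcc_example) == 8 here */
};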
1323 | ||
7306ed3f | 1324 | #ifdef BITFIELD_NBYTES_LIMITED |
9328904c | 1325 | if (BITFIELD_NBYTES_LIMITED |
245f1bfa | 1326 | && ! targetm.ms_bitfield_layout_p (rli->t) |
9328904c MM |
1327 | && TREE_CODE (field) == FIELD_DECL |
1328 | && type != error_mark_node | |
1329 | && DECL_BIT_FIELD_TYPE (field) | |
770ae6cc RK |
1330 | && ! DECL_PACKED (field) |
1331 | && ! integer_zerop (DECL_SIZE (field)) | |
cc269bb6 RS |
1332 | && tree_fits_uhwi_p (DECL_SIZE (field)) |
1333 | && tree_fits_uhwi_p (rli->offset) | |
1334 | && tree_fits_uhwi_p (TYPE_SIZE (type))) | |
9328904c MM |
1335 | { |
1336 | unsigned int type_align = TYPE_ALIGN (type); | |
770ae6cc | 1337 | tree dsize = DECL_SIZE (field); |
ae7e9ddd | 1338 | HOST_WIDE_INT field_size = tree_to_uhwi (dsize); |
905b0e07 | 1339 | HOST_WIDE_INT offset = tree_to_uhwi (rli->offset); |
9439e9a1 | 1340 | HOST_WIDE_INT bit_offset = tree_to_shwi (rli->bitpos); |
e2301a83 | 1341 | |
ad9335eb JJ |
1342 | #ifdef ADJUST_FIELD_ALIGN |
1343 | if (! TYPE_USER_ALIGN (type)) | |
1344 | type_align = ADJUST_FIELD_ALIGN (field, type_align); | |
1345 | #endif | |
1346 | ||
9328904c MM |
1347 | if (maximum_field_alignment != 0) |
1348 | type_align = MIN (type_align, maximum_field_alignment); | |
1349 | /* ??? This test is opposite the test in the containing if | |
1350 | statement, so this code is unreachable currently. */ | |
1351 | else if (DECL_PACKED (field)) | |
1352 | type_align = MIN (type_align, BITS_PER_UNIT); | |
1353 | ||
1354 | /* A bit field may not span the unit of alignment of its type. | |
1355 | Advance to next boundary if necessary. */ | |
4977bab6 | 1356 | if (excess_unit_span (offset, bit_offset, field_size, type_align, type)) |
770ae6cc | 1357 | rli->bitpos = round_up (rli->bitpos, type_align); |
daf06049 | 1358 | |
0645ba8f | 1359 | TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type); |
9328904c | 1360 | } |
7306ed3f JW |
1361 | #endif |
1362 | ||
e4850f36 DR |
1363 | /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details. |
1364 | A subtlety: | |
1365 | When a bit field is inserted into a packed record, the whole | |
1366 | size of the underlying type is used by one or more same-size | |
4977bab6 | 1367 | adjacent bitfields. (That is, if it's long:3, 32 bits is |
e4850f36 DR |
1368 | used in the record, and any additional adjacent long bitfields are |
1369 | packed into the same chunk of 32 bits. However, if the size | |
1370 | changes, a new field of that size is allocated.) In an unpacked | |
14b493d6 | 1371 | record, this is the same as using alignment, but not equivalent |
4977bab6 | 1372 | when packing. |
e4850f36 | 1373 | |
14b493d6 | 1374 | Note: for compatibility, we use the type size, not the type alignment |
e4850f36 DR |
1375 | to determine alignment, since that matches the documentation. */ |
1376 | ||
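A hedged sketch of that behaviour using GCC's ms_struct attribute, for targets that honour it; the exact byte counts depend on the target's type sizes:

struct __attribute__ ((ms_struct)) ms_example
{
  int       a : 3;   /* opens a 32-bit allocation unit                */
  int       b : 3;   /* same type size: packed into the same 32 bits  */
  long long c : 3;   /* different type size: a new unit is started    */
                     /* rather than sharing the one used by a and b   */
};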
0ac11108 | 1377 | if (targetm.ms_bitfield_layout_p (rli->t)) |
f913c102 | 1378 | { |
e4850f36 | 1379 | tree prev_saved = rli->prev_field; |
72aeff7c | 1380 | tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved) : NULL; |
f913c102 | 1381 | |
0ac11108 EC |
1382 | /* The previous field, if any, is known to be a bitfield. */ |
1383 | if (rli->prev_field) | |
e4850f36 DR |
1384 | { |
1385 | /* If both are bitfields, nonzero, and the same size, this is | |
1386 | the middle of a run. Zero declared size fields are special | |
1387 | and handled as "end of run". (Note: it's nonzero declared | |
1388 | size, but equal type sizes!) (Since we know that both | |
1389 | the current and previous fields are bitfields by the | |
1390 | time we check it, DECL_SIZE must be present for both.) */ | |
1391 | if (DECL_BIT_FIELD_TYPE (field) | |
1392 | && !integer_zerop (DECL_SIZE (field)) | |
1393 | && !integer_zerop (DECL_SIZE (rli->prev_field)) | |
9541ffee | 1394 | && tree_fits_shwi_p (DECL_SIZE (rli->prev_field)) |
905b0e07 | 1395 | && tree_fits_uhwi_p (TYPE_SIZE (type)) |
72aeff7c | 1396 | && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))) |
e4850f36 DR |
1397 | { |
1398 | /* We're in the middle of a run of equal type size fields; make | |
1399 | sure we realign if we run out of bits. (Not decl size, | |
1400 | type size!) */ | |
ae7e9ddd | 1401 | HOST_WIDE_INT bitsize = tree_to_uhwi (DECL_SIZE (field)); |
e4850f36 DR |
1402 | |
1403 | if (rli->remaining_in_alignment < bitsize) | |
1404 | { | |
ae7e9ddd | 1405 | HOST_WIDE_INT typesize = tree_to_uhwi (TYPE_SIZE (type)); |
72aeff7c | 1406 | |
0ac11108 | 1407 | /* Out of bits; bump up to the next 'word'. */ |
0ac11108 | 1408 | rli->bitpos |
72aeff7c KK |
1409 | = size_binop (PLUS_EXPR, rli->bitpos, |
1410 | bitsize_int (rli->remaining_in_alignment)); | |
0ac11108 | 1411 | rli->prev_field = field; |
72aeff7c KK |
1412 | if (typesize < bitsize) |
1413 | rli->remaining_in_alignment = 0; | |
1414 | else | |
1415 | rli->remaining_in_alignment = typesize - bitsize; | |
e4850f36 | 1416 | } |
72aeff7c KK |
1417 | else |
1418 | rli->remaining_in_alignment -= bitsize; | |
e4850f36 DR |
1419 | } |
1420 | else | |
1421 | { | |
4977bab6 ZW |
1422 | /* End of a run: if leaving a run of bitfields of the same type |
1423 | size, we have to "use up" the rest of the bits of the type | |
e4850f36 DR |
1424 | size. |
1425 | ||
1426 | Compute the new position as the sum of the size for the prior | |
1427 | type and where we first started working on that type. | |
1428 | Note: since the beginning of the field was aligned then | |
1429 | of course the end will be too. No round needed. */ | |
1430 | ||
f7eb0dcd | 1431 | if (!integer_zerop (DECL_SIZE (rli->prev_field))) |
e4850f36 | 1432 | { |
0ac11108 EC |
1433 | rli->bitpos |
1434 | = size_binop (PLUS_EXPR, rli->bitpos, | |
1435 | bitsize_int (rli->remaining_in_alignment)); | |
e4850f36 DR |
1436 | } |
1437 | else | |
0384674e RK |
1438 | /* We "use up" size zero fields; the code below should behave |
1439 | as if the prior field was not a bitfield. */ | |
1440 | prev_saved = NULL; | |
e4850f36 | 1441 | |
4977bab6 | 1442 | /* Cause a new bitfield to be captured, either this time (if |
991b6592 | 1443 | currently a bitfield) or next time we see one. */ |
c3284718 | 1444 | if (!DECL_BIT_FIELD_TYPE (field) |
f7eb0dcd | 1445 | || integer_zerop (DECL_SIZE (field))) |
0384674e | 1446 | rli->prev_field = NULL; |
e4850f36 | 1447 | } |
0384674e | 1448 | |
e4850f36 DR |
1449 | normalize_rli (rli); |
1450 | } | |
1451 | ||
67ae67ec | 1452 | /* If we're starting a new run of same type size bitfields |
e4850f36 | 1453 | (or a run of non-bitfields), set up the "first of the run" |
4977bab6 | 1454 | fields. |
e4850f36 DR |
1455 | |
1456 | That is, if the current field is not a bitfield, or if there | |
1457 | was a prior bitfield the type sizes differ, or if there wasn't | |
1458 | a prior bitfield the size of the current field is nonzero. | |
1459 | ||
1460 | Note: we must be sure to test ONLY the type size if there was | |
1461 | a prior bitfield and ONLY for the current field being zero if | |
1462 | there wasn't. */ | |
1463 | ||
1464 | if (!DECL_BIT_FIELD_TYPE (field) | |
f7eb0dcd | 1465 | || (prev_saved != NULL |
72aeff7c | 1466 | ? !simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)) |
0384674e | 1467 | : !integer_zerop (DECL_SIZE (field)) )) |
e4850f36 | 1468 | { |
0384674e RK |
1469 | /* Never smaller than a byte for compatibility. */ |
1470 | unsigned int type_align = BITS_PER_UNIT; | |
e4850f36 | 1471 | |
4977bab6 | 1472 | /* When not a bitfield, we could be seeing a flex array (with |
e4850f36 | 1473 | no DECL_SIZE). Since we won't be using remaining_in_alignment |
4977bab6 | 1474 | until we see a bitfield (and come by here again) we just skip |
e4850f36 | 1475 | calculating it. */ |
0384674e | 1476 | if (DECL_SIZE (field) != NULL |
cc269bb6 RS |
1477 | && tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (field))) |
1478 | && tree_fits_uhwi_p (DECL_SIZE (field))) | |
72aeff7c | 1479 | { |
fb6807b8 | 1480 | unsigned HOST_WIDE_INT bitsize |
ae7e9ddd | 1481 | = tree_to_uhwi (DECL_SIZE (field)); |
fb6807b8 | 1482 | unsigned HOST_WIDE_INT typesize |
ae7e9ddd | 1483 | = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (field))); |
72aeff7c KK |
1484 | |
1485 | if (typesize < bitsize) | |
1486 | rli->remaining_in_alignment = 0; | |
1487 | else | |
1488 | rli->remaining_in_alignment = typesize - bitsize; | |
1489 | } | |
e4850f36 | 1490 | |
991b6592 | 1491 | /* Now align (conventionally) for the new type. */ |
0ac11108 | 1492 | type_align = TYPE_ALIGN (TREE_TYPE (field)); |
f913c102 | 1493 | |
e4850f36 DR |
1494 | if (maximum_field_alignment != 0) |
1495 | type_align = MIN (type_align, maximum_field_alignment); | |
f913c102 | 1496 | |
985c87c9 | 1497 | rli->bitpos = round_up (rli->bitpos, type_align); |
0384674e | 1498 | |
e4850f36 | 1499 | /* If we really aligned, don't allow subsequent bitfields |
991b6592 | 1500 | to undo that. */ |
e4850f36 DR |
1501 | rli->prev_field = NULL; |
1502 | } | |
f913c102 AO |
1503 | } |
1504 | ||
770ae6cc RK |
1505 | /* Offset so far becomes the position of this field after normalizing. */ |
1506 | normalize_rli (rli); | |
1507 | DECL_FIELD_OFFSET (field) = rli->offset; | |
1508 | DECL_FIELD_BIT_OFFSET (field) = rli->bitpos; | |
2f5c7f45 | 1509 | SET_DECL_OFFSET_ALIGN (field, rli->offset_align); |
770ae6cc | 1510 | |
cb27986c EB |
1511 | /* Evaluate nonconstant offsets only once, either now or as soon as safe. */ |
1512 | if (TREE_CODE (DECL_FIELD_OFFSET (field)) != INTEGER_CST) | |
1513 | DECL_FIELD_OFFSET (field) = variable_size (DECL_FIELD_OFFSET (field)); | |
1514 | ||
770ae6cc RK |
1515 | /* If this field ended up more aligned than we thought it would be (we |
1516 | approximate this by seeing if its position changed), lay out the field | |
1517 | again; perhaps we can use an integral mode for it now. */ | |
4b6bf620 | 1518 | if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field))) |
ae7e9ddd RS |
1519 | actual_align = (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) |
1520 | & - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))); | |
4b6bf620 | 1521 | else if (integer_zerop (DECL_FIELD_OFFSET (field))) |
cbbaf4ae | 1522 | actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align); |
cc269bb6 | 1523 | else if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))) |
770ae6cc | 1524 | actual_align = (BITS_PER_UNIT |
ae7e9ddd RS |
1525 | * (tree_to_uhwi (DECL_FIELD_OFFSET (field)) |
1526 | & - tree_to_uhwi (DECL_FIELD_OFFSET (field)))); | |
9328904c | 1527 | else |
770ae6cc | 1528 | actual_align = DECL_OFFSET_ALIGN (field); |
cbbaf4ae R |
1529 | /* ACTUAL_ALIGN is still the actual alignment *within the record*. |
1530 | Store / extract bit field operations will check the alignment of the |
1531 | record against the mode of bit fields. */ | |
770ae6cc RK |
1532 | |
1533 | if (known_align != actual_align) | |
1534 | layout_decl (field, actual_align); | |
1535 | ||
0ac11108 EC |
1536 | if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field)) |
1537 | rli->prev_field = field; | |
f913c102 | 1538 | |
770ae6cc RK |
1539 | /* Now add size of this field to the size of the record. If the size is |
1540 | not constant, treat the field as being a multiple of bytes and just | |
1541 | adjust the offset, resetting the bit position. Otherwise, apportion the | |
1542 | size amongst the bit position and offset. First handle the case of an | |
1543 | unspecified size, which can happen when we have an invalid nested struct | |
1544 | definition, such as struct j { struct j { int i; } }. The error message | |
1545 | is printed in finish_struct. */ | |
1546 | if (DECL_SIZE (field) == 0) | |
1547 | /* Do nothing. */; | |
292f30c5 | 1548 | else if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST |
455f14dd | 1549 | || TREE_OVERFLOW (DECL_SIZE (field))) |
9328904c | 1550 | { |
770ae6cc RK |
1551 | rli->offset |
1552 | = size_binop (PLUS_EXPR, rli->offset, | |
455f19cb MM |
1553 | fold_convert (sizetype, |
1554 | size_binop (CEIL_DIV_EXPR, rli->bitpos, | |
1555 | bitsize_unit_node))); | |
770ae6cc RK |
1556 | rli->offset |
1557 | = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field)); | |
1558 | rli->bitpos = bitsize_zero_node; | |
3923e410 | 1559 | rli->offset_align = MIN (rli->offset_align, desired_align); |
9328904c | 1560 | } |
0ac11108 EC |
1561 | else if (targetm.ms_bitfield_layout_p (rli->t)) |
1562 | { | |
1563 | rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field)); | |
1564 | ||
1565 | /* If we ended a bitfield before the full length of the type then | |
1566 | pad the struct out to the full length of the last type. */ | |
910ad8de NF |
1567 | if ((DECL_CHAIN (field) == NULL |
1568 | || TREE_CODE (DECL_CHAIN (field)) != FIELD_DECL) | |
0ac11108 EC |
1569 | && DECL_BIT_FIELD_TYPE (field) |
1570 | && !integer_zerop (DECL_SIZE (field))) | |
1571 | rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, | |
1572 | bitsize_int (rli->remaining_in_alignment)); | |
1573 | ||
1574 | normalize_rli (rli); | |
1575 | } | |
9328904c MM |
1576 | else |
1577 | { | |
770ae6cc RK |
1578 | rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field)); |
1579 | normalize_rli (rli); | |
7306ed3f | 1580 | } |
9328904c | 1581 | } |
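To see place_field's decisions from the source-language side, here is a small runnable example (illustrative only; the offsets assume a typical target where int requires 4-byte alignment):

#include <stdio.h>
#include <stddef.h>

struct plain  { char c; int i; };                          /* i is placed at offset 4 */
struct packed { char c; int i; } __attribute__ ((packed)); /* no padding before i     */

int
main (void)
{
  printf ("plain:  offsetof (i) = %zu, sizeof = %zu\n",
          offsetof (struct plain, i), sizeof (struct plain));
  printf ("packed: offsetof (i) = %zu, sizeof = %zu\n",
          offsetof (struct packed, i), sizeof (struct packed));
  return 0;
}

Compiling with -Wpadded reports the bytes inserted before `i' in the plain variant; applying the packed attribute where it changes nothing is what draws the "packed attribute is unnecessary" warning issued above.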
7306ed3f | 1582 | |
9328904c MM |
1583 | /* Assuming that all the fields have been laid out, this function uses |
1584 | RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type | |
14b493d6 | 1585 | indicated by RLI. */ |
7306ed3f | 1586 | |
9328904c | 1587 | static void |
46c5ad27 | 1588 | finalize_record_size (record_layout_info rli) |
9328904c | 1589 | { |
770ae6cc RK |
1590 | tree unpadded_size, unpadded_size_unit; |
1591 | ||
65e14bf5 RK |
1592 | /* Now we want just byte and bit offsets, so set the offset alignment |
1593 | to be a byte and then normalize. */ | |
1594 | rli->offset_align = BITS_PER_UNIT; | |
1595 | normalize_rli (rli); | |
7306ed3f JW |
1596 | |
1597 | /* Determine the desired alignment. */ | |
1598 | #ifdef ROUND_TYPE_ALIGN | |
9328904c | 1599 | TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), |
b451555a | 1600 | rli->record_align); |
7306ed3f | 1601 | #else |
9328904c | 1602 | TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align); |
7306ed3f JW |
1603 | #endif |
1604 | ||
65e14bf5 RK |
1605 | /* Compute the size so far. Be sure to allow for extra bits in the |
1606 | size in bytes. We have guaranteed above that it will be no more | |
1607 | than a single byte. */ | |
1608 | unpadded_size = rli_size_so_far (rli); | |
1609 | unpadded_size_unit = rli_size_unit_so_far (rli); | |
1610 | if (! integer_zerop (rli->bitpos)) | |
1611 | unpadded_size_unit | |
1612 | = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node); | |
770ae6cc | 1613 | |
605f12f4 MP |
1614 | if (TREE_CODE (unpadded_size_unit) == INTEGER_CST |
1615 | && !TREE_OVERFLOW (unpadded_size_unit) | |
1616 | && !valid_constant_size_p (unpadded_size_unit)) | |
1617 | error ("type %qT is too large", rli->t); | |
1618 | ||
f9da5064 | 1619 | /* Round the size up to be a multiple of the required alignment. */ |
985c87c9 | 1620 | TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t)); |
a4e9ffe5 | 1621 | TYPE_SIZE_UNIT (rli->t) |
985c87c9 | 1622 | = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t)); |
729a2125 | 1623 | |
3176a0c2 | 1624 | if (TREE_CONSTANT (unpadded_size) |
4c0a0455 JJ |
1625 | && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0 |
1626 | && input_location != BUILTINS_LOCATION) | |
3176a0c2 | 1627 | warning (OPT_Wpadded, "padding struct size to alignment boundary"); |
786de7eb | 1628 | |
770ae6cc RK |
1629 | if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE |
1630 | && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary | |
1631 | && TREE_CONSTANT (unpadded_size)) | |
3c12fcc2 GM |
1632 | { |
1633 | tree unpacked_size; | |
729a2125 | 1634 | |
3c12fcc2 | 1635 | #ifdef ROUND_TYPE_ALIGN |
9328904c MM |
1636 | rli->unpacked_align |
1637 | = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align); | |
3c12fcc2 | 1638 | #else |
9328904c | 1639 | rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align); |
3c12fcc2 | 1640 | #endif |
770ae6cc | 1641 | |
985c87c9 | 1642 | unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align); |
9328904c | 1643 | if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t))) |
3c12fcc2 | 1644 | { |
9328904c | 1645 | if (TYPE_NAME (rli->t)) |
3c12fcc2 | 1646 | { |
4f1e4960 | 1647 | tree name; |
729a2125 | 1648 | |
9328904c | 1649 | if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE) |
4f1e4960 | 1650 | name = TYPE_NAME (rli->t); |
3c12fcc2 | 1651 | else |
4f1e4960 | 1652 | name = DECL_NAME (TYPE_NAME (rli->t)); |
770ae6cc | 1653 | |
3c12fcc2 | 1654 | if (STRICT_ALIGNMENT) |
3176a0c2 | 1655 | warning (OPT_Wpacked, "packed attribute causes inefficient " |
4f1e4960 | 1656 | "alignment for %qE", name); |
3c12fcc2 | 1657 | else |
3176a0c2 | 1658 | warning (OPT_Wpacked, |
4f1e4960 | 1659 | "packed attribute is unnecessary for %qE", name); |
3c12fcc2 GM |
1660 | } |
1661 | else | |
1662 | { | |
1663 | if (STRICT_ALIGNMENT) | |
3176a0c2 | 1664 | warning (OPT_Wpacked, |
5c498b10 | 1665 | "packed attribute causes inefficient alignment"); |
3c12fcc2 | 1666 | else |
3176a0c2 | 1667 | warning (OPT_Wpacked, "packed attribute is unnecessary"); |
3c12fcc2 GM |
1668 | } |
1669 | } | |
3c12fcc2 | 1670 | } |
9328904c MM |
1671 | } |
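For instance, the "padding struct size to alignment boundary" warning above fires for layouts like the following (illustrative only, assuming int needs 4-byte alignment on the target):

/* The unpadded size is 5 bytes; TYPE_SIZE is rounded up to the record's
   4-byte alignment, so sizeof yields 8 and -Wpadded reports the tail pad.  */
struct tail_padded
{
  int  i;
  char c;
};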
1672 | ||
1673 | /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */ | |
7306ed3f | 1674 | |
65e14bf5 | 1675 | void |
46c5ad27 | 1676 | compute_record_mode (tree type) |
9328904c | 1677 | { |
770ae6cc | 1678 | tree field; |
ef4bddc2 | 1679 | machine_mode mode = VOIDmode; |
770ae6cc | 1680 | |
9328904c MM |
1681 | /* Most RECORD_TYPEs have BLKmode, so we start off assuming that. |
1682 | However, if possible, we use a mode that fits in a register | |
1683 | instead, in order to allow for better optimization down the | |
1684 | line. */ | |
179d2f74 | 1685 | SET_TYPE_MODE (type, BLKmode); |
9328904c | 1686 | |
cc269bb6 | 1687 | if (! tree_fits_uhwi_p (TYPE_SIZE (type))) |
770ae6cc | 1688 | return; |
9328904c | 1689 | |
770ae6cc RK |
1690 | /* A record which has any BLKmode members must itself be |
1691 | BLKmode; it can't go in a register. Unless the member is | |
1692 | BLKmode only because it isn't aligned. */ | |
910ad8de | 1693 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
770ae6cc | 1694 | { |
770ae6cc RK |
1695 | if (TREE_CODE (field) != FIELD_DECL) |
1696 | continue; | |
9328904c | 1697 | |
770ae6cc RK |
1698 | if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK |
1699 | || (TYPE_MODE (TREE_TYPE (field)) == BLKmode | |
7a06d606 RK |
1700 | && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)) |
1701 | && !(TYPE_SIZE (TREE_TYPE (field)) != 0 | |
1702 | && integer_zerop (TYPE_SIZE (TREE_TYPE (field))))) | |
cc269bb6 | 1703 | || ! tree_fits_uhwi_p (bit_position (field)) |
6a9f6727 | 1704 | || DECL_SIZE (field) == 0 |
cc269bb6 | 1705 | || ! tree_fits_uhwi_p (DECL_SIZE (field))) |
770ae6cc RK |
1706 | return; |
1707 | ||
770ae6cc RK |
1708 | /* If this field is the whole struct, remember its mode so |
1709 | that, say, we can put a double in a class into a DF | |
a8ca7756 JW |
1710 | register instead of forcing it to live in the stack. */ |
1711 | if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field))) | |
770ae6cc | 1712 | mode = DECL_MODE (field); |
9328904c | 1713 | |
d9886a9e L |
1714 | /* With some targets, it is sub-optimal to access an aligned |
1715 | BLKmode structure as a scalar. */ | |
1716 | if (targetm.member_type_forces_blk (field, mode)) | |
770ae6cc | 1717 | return; |
770ae6cc | 1718 | } |
9328904c | 1719 | |
897f610b RS |
1720 | /* If we only have one real field, use its mode if that mode's size |
1721 | matches the type's size. This only applies to RECORD_TYPE. This | |
1722 | does not apply to unions. */ | |
1723 | if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode | |
cc269bb6 | 1724 | && tree_fits_uhwi_p (TYPE_SIZE (type)) |
eb1ce453 | 1725 | && GET_MODE_BITSIZE (mode) == tree_to_uhwi (TYPE_SIZE (type))) |
179d2f74 | 1726 | SET_TYPE_MODE (type, mode); |
f439f9a5 | 1727 | else |
179d2f74 | 1728 | SET_TYPE_MODE (type, mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1)); |
770ae6cc RK |
1729 | |
1730 | /* If structure's known alignment is less than what the scalar | |
1731 | mode would need, and it matters, then stick with BLKmode. */ | |
1732 | if (TYPE_MODE (type) != BLKmode | |
1733 | && STRICT_ALIGNMENT | |
1734 | && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT | |
1735 | || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type)))) | |
1736 | { | |
1737 | /* If this is the only reason this type is BLKmode, then | |
1738 | don't force containing types to be BLKmode. */ | |
1739 | TYPE_NO_FORCE_BLK (type) = 1; | |
179d2f74 | 1740 | SET_TYPE_MODE (type, BLKmode); |
9328904c | 1741 | } |
7306ed3f | 1742 | } |
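A sketch of why choosing a scalar mode matters (illustrative only; whether the aggregate really stays in a register depends on the target ABI):

/* The record's size equals its single field, so compute_record_mode can
   give it DFmode instead of BLKmode; the aggregate can then be passed,
   returned and manipulated in a floating-point register rather than
   being forced onto the stack.  */
struct boxed_double { double d; };

struct boxed_double
scale (struct boxed_double x)
{
  x.d *= 2.0;
  return x;
}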
9328904c MM |
1743 | |
1744 | /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid | |
1745 | out. */ | |
1746 | ||
1747 | static void | |
46c5ad27 | 1748 | finalize_type_size (tree type) |
9328904c MM |
1749 | { |
1750 | /* Normally, use the alignment corresponding to the mode chosen. | |
1751 | However, where strict alignment is not required, avoid | |
1752 | over-aligning structures, since most compilers do not do this | |
490272b4 | 1753 | alignment. */ |
9328904c MM |
1754 | |
1755 | if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode | |
490272b4 | 1756 | && (STRICT_ALIGNMENT |
9328904c MM |
1757 | || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE |
1758 | && TREE_CODE (type) != QUAL_UNION_TYPE | |
1759 | && TREE_CODE (type) != ARRAY_TYPE))) | |
11cf4d18 | 1760 | { |
490272b4 RH |
1761 | unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type)); |
1762 | ||
1763 | /* Don't override a larger alignment requirement coming from a user | |
1764 | alignment of one of the fields. */ | |
1765 | if (mode_align >= TYPE_ALIGN (type)) | |
1766 | { | |
1767 | TYPE_ALIGN (type) = mode_align; | |
1768 | TYPE_USER_ALIGN (type) = 0; | |
1769 | } | |
11cf4d18 | 1770 | } |
9328904c MM |
1771 | |
1772 | /* Do machine-dependent extra alignment. */ | |
1773 | #ifdef ROUND_TYPE_ALIGN | |
1774 | TYPE_ALIGN (type) | |
1775 | = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT); | |
1776 | #endif | |
1777 | ||
9328904c | 1778 | /* If we failed to find a simple way to calculate the unit size |
770ae6cc | 1779 | of the type, find it by division. */ |
9328904c MM |
1780 | if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0) |
1781 | /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the | |
1782 | result will fit in sizetype. We will get more efficient code using | |
1783 | sizetype, so we force a conversion. */ | |
1784 | TYPE_SIZE_UNIT (type) | |
455f19cb MM |
1785 | = fold_convert (sizetype, |
1786 | size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type), | |
1787 | bitsize_unit_node)); | |
9328904c | 1788 | |
770ae6cc RK |
1789 | if (TYPE_SIZE (type) != 0) |
1790 | { | |
985c87c9 EB |
1791 | TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type)); |
1792 | TYPE_SIZE_UNIT (type) | |
1793 | = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN_UNIT (type)); | |
770ae6cc RK |
1794 | } |
1795 | ||
1796 | /* Evaluate nonconstant sizes only once, either now or as soon as safe. */ | |
1797 | if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST) | |
1798 | TYPE_SIZE (type) = variable_size (TYPE_SIZE (type)); | |
9328904c MM |
1799 | if (TYPE_SIZE_UNIT (type) != 0 |
1800 | && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST) | |
1801 | TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type)); | |
1802 | ||
1803 | /* Also layout any other variants of the type. */ | |
1804 | if (TYPE_NEXT_VARIANT (type) | |
1805 | || type != TYPE_MAIN_VARIANT (type)) | |
1806 | { | |
1807 | tree variant; | |
1808 | /* Record layout info of this variant. */ | |
1809 | tree size = TYPE_SIZE (type); | |
1810 | tree size_unit = TYPE_SIZE_UNIT (type); | |
1811 | unsigned int align = TYPE_ALIGN (type); | |
50b6ee8b | 1812 | unsigned int precision = TYPE_PRECISION (type); |
11cf4d18 | 1813 | unsigned int user_align = TYPE_USER_ALIGN (type); |
ef4bddc2 | 1814 | machine_mode mode = TYPE_MODE (type); |
9328904c MM |
1815 | |
1816 | /* Copy it into all variants. */ | |
1817 | for (variant = TYPE_MAIN_VARIANT (type); | |
1818 | variant != 0; | |
1819 | variant = TYPE_NEXT_VARIANT (variant)) | |
1820 | { | |
1821 | TYPE_SIZE (variant) = size; | |
1822 | TYPE_SIZE_UNIT (variant) = size_unit; | |
1823 | TYPE_ALIGN (variant) = align; | |
50b6ee8b | 1824 | TYPE_PRECISION (variant) = precision; |
11cf4d18 | 1825 | TYPE_USER_ALIGN (variant) = user_align; |
179d2f74 | 1826 | SET_TYPE_MODE (variant, mode); |
9328904c MM |
1827 | } |
1828 | } | |
1829 | } | |
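The variant-copying loop above is what guarantees that qualified versions of a type always agree with the main variant on layout; a minimal C11 check (illustrative only, not part of this file):

struct S { int i; double d; };

_Static_assert (sizeof (struct S) == sizeof (const struct S),
                "variants share TYPE_SIZE");
_Static_assert (_Alignof (struct S) == _Alignof (volatile struct S),
                "variants share TYPE_ALIGN");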
1830 | ||
26c71b93 RG |
1831 | /* Return a new underlying object for a bitfield started with FIELD. */ |
1832 | ||
1833 | static tree | |
1834 | start_bitfield_representative (tree field) | |
1835 | { | |
1836 | tree repr = make_node (FIELD_DECL); | |
1837 | DECL_FIELD_OFFSET (repr) = DECL_FIELD_OFFSET (field); | |
1838 | /* Force the representative to begin at a BITS_PER_UNIT aligned | |
1839 | boundary - C++ may use tail-padding of a base object to | |
1840 | continue packing bits so the bitfield region does not start | |
1841 | at bit zero (see g++.dg/abi/bitfield5.C for example). | |
1842 | Unallocated bits may happen for other reasons as well, | |
1843 | for example Ada which allows explicit bit-granular structure layout. */ | |
1844 | DECL_FIELD_BIT_OFFSET (repr) | |
1845 | = size_binop (BIT_AND_EXPR, | |
1846 | DECL_FIELD_BIT_OFFSET (field), | |
1847 | bitsize_int (~(BITS_PER_UNIT - 1))); | |
1848 | SET_DECL_OFFSET_ALIGN (repr, DECL_OFFSET_ALIGN (field)); | |
1849 | DECL_SIZE (repr) = DECL_SIZE (field); | |
1850 | DECL_SIZE_UNIT (repr) = DECL_SIZE_UNIT (field); | |
1851 | DECL_PACKED (repr) = DECL_PACKED (field); | |
1852 | DECL_CONTEXT (repr) = DECL_CONTEXT (field); | |
1853 | return repr; | |
1854 | } | |
1855 | ||
1856 | /* Finish up a bitfield group that was started by creating the underlying | |
1857 | object REPR with the last field in the bitfield group FIELD. */ | |
1858 | ||
1859 | static void | |
1860 | finish_bitfield_representative (tree repr, tree field) | |
1861 | { | |
1862 | unsigned HOST_WIDE_INT bitsize, maxbitsize; | |
ef4bddc2 | 1863 | machine_mode mode; |
26c71b93 RG |
1864 | tree nextf, size; |
1865 | ||
1866 | size = size_diffop (DECL_FIELD_OFFSET (field), | |
1867 | DECL_FIELD_OFFSET (repr)); | |
4d885a63 RB |
1868 | while (TREE_CODE (size) == COMPOUND_EXPR) |
1869 | size = TREE_OPERAND (size, 1); | |
cc269bb6 | 1870 | gcc_assert (tree_fits_uhwi_p (size)); |
ae7e9ddd RS |
1871 | bitsize = (tree_to_uhwi (size) * BITS_PER_UNIT |
1872 | + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) | |
1873 | - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)) | |
1874 | + tree_to_uhwi (DECL_SIZE (field))); | |
26c71b93 | 1875 | |
2447776c RG |
1876 | /* Round up bitsize to multiples of BITS_PER_UNIT. */ |
1877 | bitsize = (bitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1); | |
1878 | ||
26c71b93 RG |
1879 | /* Now nothing tells us how to pad out bitsize ... */ |
1880 | nextf = DECL_CHAIN (field); | |
1881 | while (nextf && TREE_CODE (nextf) != FIELD_DECL) | |
1882 | nextf = DECL_CHAIN (nextf); | |
1883 | if (nextf) | |
1884 | { | |
1885 | tree maxsize; | |
073a8998 | 1886 | /* If there was an error, the field may not be laid out |
26c71b93 RG |
1887 | correctly. Don't bother to do anything. */ |
1888 | if (TREE_TYPE (nextf) == error_mark_node) | |
1889 | return; | |
1890 | maxsize = size_diffop (DECL_FIELD_OFFSET (nextf), | |
1891 | DECL_FIELD_OFFSET (repr)); | |
cc269bb6 | 1892 | if (tree_fits_uhwi_p (maxsize)) |
7ebf9677 | 1893 | { |
ae7e9ddd RS |
1894 | maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT |
1895 | + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (nextf)) | |
1896 | - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))); | |
7ebf9677 RG |
1897 | /* If the group ends within a bitfield nextf does not need to be |
1898 | aligned to BITS_PER_UNIT. Thus round up. */ | |
1899 | maxbitsize = (maxbitsize + BITS_PER_UNIT - 1) & ~(BITS_PER_UNIT - 1); | |
1900 | } | |
1901 | else | |
1902 | maxbitsize = bitsize; | |
26c71b93 RG |
1903 | } |
1904 | else | |
1905 | { | |
1906 | /* ??? If you consider that tail-padding of this struct might be | |
1907 | re-used when deriving from it we cannot really do the following | |
2447776c RG |
1908 | and thus need to set maxsize to bitsize? Also we cannot |
1909 | generally rely on maxsize to fold to an integer constant, so | |
1910 | use bitsize as fallback for this case. */ | |
26c71b93 RG |
1911 | tree maxsize = size_diffop (TYPE_SIZE_UNIT (DECL_CONTEXT (field)), |
1912 | DECL_FIELD_OFFSET (repr)); | |
cc269bb6 | 1913 | if (tree_fits_uhwi_p (maxsize)) |
ae7e9ddd RS |
1914 | maxbitsize = (tree_to_uhwi (maxsize) * BITS_PER_UNIT |
1915 | - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr))); | |
2447776c RG |
1916 | else |
1917 | maxbitsize = bitsize; | |
26c71b93 RG |
1918 | } |
1919 | ||
1920 | /* Only if we don't artificially break up the representative in | |
1921 | the middle of a large bitfield with different possibly | |
1922 | overlapping representatives. And all representatives start | |
1923 | at byte offset. */ | |
1924 | gcc_assert (maxbitsize % BITS_PER_UNIT == 0); | |
1925 | ||
26c71b93 RG |
1926 | /* Find the smallest nice mode to use. */ |
1927 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode; | |
1928 | mode = GET_MODE_WIDER_MODE (mode)) | |
1929 | if (GET_MODE_BITSIZE (mode) >= bitsize) | |
1930 | break; | |
1931 | if (mode != VOIDmode | |
1932 | && (GET_MODE_BITSIZE (mode) > maxbitsize | |
1933 | || GET_MODE_BITSIZE (mode) > MAX_FIXED_MODE_SIZE)) | |
1934 | mode = VOIDmode; | |
1935 | ||
1936 | if (mode == VOIDmode) | |
1937 | { | |
1938 | /* We really want a BLKmode representative only as a last resort, | |
1939 | considering the member b in | |
1940 | struct { int a : 7; int b : 17; int c; } __attribute__((packed)); | |
1941 | Otherwise we simply want to split the representative up | |
1942 | allowing for overlaps within the bitfield region as required for | |
1943 | struct { int a : 7; int b : 7; | |
1944 | int c : 10; int d; } __attribute__((packed)); | |
1945 | [0, 15] HImode for a and b, [8, 23] HImode for c. */ | |
1946 | DECL_SIZE (repr) = bitsize_int (bitsize); | |
1947 | DECL_SIZE_UNIT (repr) = size_int (bitsize / BITS_PER_UNIT); | |
1948 | DECL_MODE (repr) = BLKmode; | |
1949 | TREE_TYPE (repr) = build_array_type_nelts (unsigned_char_type_node, | |
1950 | bitsize / BITS_PER_UNIT); | |
1951 | } | |
1952 | else | |
1953 | { | |
1954 | unsigned HOST_WIDE_INT modesize = GET_MODE_BITSIZE (mode); | |
1955 | DECL_SIZE (repr) = bitsize_int (modesize); | |
1956 | DECL_SIZE_UNIT (repr) = size_int (modesize / BITS_PER_UNIT); | |
1957 | DECL_MODE (repr) = mode; | |
1958 | TREE_TYPE (repr) = lang_hooks.types.type_for_mode (mode, 1); | |
1959 | } | |
1960 | ||
1961 | /* Remember whether the bitfield group is at the end of the | |
1962 | structure or not. */ | |
1963 | DECL_CHAIN (repr) = nextf; | |
1964 | } | |
1965 | ||
1966 | /* Compute and set FIELD_DECLs for the underlying objects we should | |
ddc8de03 | 1967 | use for bitfield access for the structure T. */ |
26c71b93 | 1968 | |
ddc8de03 PM |
1969 | void |
1970 | finish_bitfield_layout (tree t) | |
26c71b93 RG |
1971 | { |
1972 | tree field, prev; | |
1973 | tree repr = NULL_TREE; | |
1974 | ||
1975 | /* Unions would be special: for the ease of type-punning optimizations | |
1976 | we could use the underlying type as hint for the representative | |
1977 | if the bitfield would fit and the representative would not exceed | |
1978 | the union in size. */ | |
ddc8de03 | 1979 | if (TREE_CODE (t) != RECORD_TYPE) |
26c71b93 RG |
1980 | return; |
1981 | ||
ddc8de03 | 1982 | for (prev = NULL_TREE, field = TYPE_FIELDS (t); |
26c71b93 RG |
1983 | field; field = DECL_CHAIN (field)) |
1984 | { | |
1985 | if (TREE_CODE (field) != FIELD_DECL) | |
1986 | continue; | |
1987 | ||
1988 | /* In the C++ memory model, consecutive bit fields in a structure are | |
1989 | considered one memory location and updating a memory location | |
1990 | may not store into adjacent memory locations. */ | |
1991 | if (!repr | |
1992 | && DECL_BIT_FIELD_TYPE (field)) | |
1993 | { | |
1994 | /* Start new representative. */ | |
1995 | repr = start_bitfield_representative (field); | |
1996 | } | |
1997 | else if (repr | |
1998 | && ! DECL_BIT_FIELD_TYPE (field)) | |
1999 | { | |
2000 | /* Finish off new representative. */ | |
2001 | finish_bitfield_representative (repr, prev); | |
2002 | repr = NULL_TREE; | |
2003 | } | |
2004 | else if (DECL_BIT_FIELD_TYPE (field)) | |
2005 | { | |
7ebf9677 RG |
2006 | gcc_assert (repr != NULL_TREE); |
2007 | ||
26c71b93 RG |
2008 | /* Zero-size bitfields finish off a representative and |
2009 | do not have a representative themselves. This is | |
2010 | required by the C++ memory model. */ | |
2011 | if (integer_zerop (DECL_SIZE (field))) | |
2012 | { | |
2013 | finish_bitfield_representative (repr, prev); | |
2014 | repr = NULL_TREE; | |
2015 | } | |
7ebf9677 RG |
2016 | |
2017 | /* We assume that either DECL_FIELD_OFFSET of the representative | |
2018 | and each bitfield member is a constant or they are equal. | |
2019 | This is because we need to be able to compute the bit-offset | |
2020 | of each field relative to the representative in get_bit_range | |
2021 | during RTL expansion. | |
2022 | If these constraints are not met, simply force a new | |
2023 | representative to be generated. That will at most | |
2024 | generate worse code but still maintain correctness with | |
2025 | respect to the C++ memory model. */ | |
cc269bb6 RS |
2026 | else if (!((tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)) |
2027 | && tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))) | |
7ebf9677 RG |
2028 | || operand_equal_p (DECL_FIELD_OFFSET (repr), |
2029 | DECL_FIELD_OFFSET (field), 0))) | |
2030 | { | |
2031 | finish_bitfield_representative (repr, prev); | |
2032 | repr = start_bitfield_representative (field); | |
2033 | } | |
26c71b93 RG |
2034 | } |
2035 | else | |
2036 | continue; | |
2037 | ||
2038 | if (repr) | |
2039 | DECL_BIT_FIELD_REPRESENTATIVE (field) = repr; | |
2040 | ||
2041 | prev = field; | |
2042 | } | |
2043 | ||
2044 | if (repr) | |
2045 | finish_bitfield_representative (repr, prev); | |
2046 | } | |
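A source-level picture of the representatives this computes (illustrative only):

struct memory_locations
{
  int a : 7;   /* a and b share one representative, hence one         */
  int b : 9;   /* memory location under the C++11 memory model        */
  int   : 0;   /* zero-width bitfield: ends the run, as handled above */
  int c : 5;   /* c gets a fresh representative and may be updated    */
               /* concurrently with a and b                           */
};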
2047 | ||
9328904c MM |
2048 | /* Do all of the work required to layout the type indicated by RLI, |
2049 | once the fields have been laid out. This function will call `free' | |
17bbb839 MM |
2050 | for RLI, unless FREE_P is false. Passing a value other than false |
2051 | for FREE_P is bad practice; this option only exists to support the | |
2052 | G++ 3.2 ABI. */ | |
9328904c MM |
2053 | |
2054 | void | |
46c5ad27 | 2055 | finish_record_layout (record_layout_info rli, int free_p) |
9328904c | 2056 | { |
1937f939 JM |
2057 | tree variant; |
2058 | ||
770ae6cc RK |
2059 | /* Compute the final size. */ |
2060 | finalize_record_size (rli); | |
2061 | ||
2062 | /* Compute the TYPE_MODE for the record. */ | |
2063 | compute_record_mode (rli->t); | |
cc9d4a85 | 2064 | |
8d8238b6 JM |
2065 | /* Perform any last tweaks to the TYPE_SIZE, etc. */ |
2066 | finalize_type_size (rli->t); | |
2067 | ||
26c71b93 | 2068 | /* Compute bitfield representatives. */ |
ddc8de03 | 2069 | finish_bitfield_layout (rli->t); |
26c71b93 | 2070 | |
1937f939 JM |
2071 | /* Propagate TYPE_PACKED to variants. With C++ templates, |
2072 | handle_packed_attribute is too early to do this. */ | |
2073 | for (variant = TYPE_NEXT_VARIANT (rli->t); variant; | |
2074 | variant = TYPE_NEXT_VARIANT (variant)) | |
2075 | TYPE_PACKED (variant) = TYPE_PACKED (rli->t); | |
2076 | ||
9328904c MM |
2077 | /* Lay out any static members. This is done now because their type |
2078 | may use the record's type. */ | |
9771b263 DN |
2079 | while (!vec_safe_is_empty (rli->pending_statics)) |
2080 | layout_decl (rli->pending_statics->pop (), 0); | |
cc9d4a85 | 2081 | |
9328904c | 2082 | /* Clean up. */ |
17bbb839 | 2083 | if (free_p) |
76d971cc | 2084 | { |
9771b263 | 2085 | vec_free (rli->pending_statics); |
76d971cc NF |
2086 | free (rli); |
2087 | } | |
9328904c | 2088 | } |
7306ed3f | 2089 | \f |
4977bab6 ZW |
2090 | |
2091 | /* Finish processing a builtin RECORD_TYPE type TYPE. Its name is |
2092 | NAME, its fields are chained in reverse on FIELDS. | |
2093 | ||
2094 | If ALIGN_TYPE is non-null, it is given the same alignment as | |
2095 | ALIGN_TYPE. */ | |
2096 | ||
2097 | void | |
46c5ad27 AJ |
2098 | finish_builtin_struct (tree type, const char *name, tree fields, |
2099 | tree align_type) | |
4977bab6 | 2100 | { |
1469344a | 2101 | tree tail, next; |
4977bab6 ZW |
2102 | |
2103 | for (tail = NULL_TREE; fields; tail = fields, fields = next) | |
2104 | { | |
2105 | DECL_FIELD_CONTEXT (fields) = type; | |
910ad8de NF |
2106 | next = DECL_CHAIN (fields); |
2107 | DECL_CHAIN (fields) = tail; | |
4977bab6 ZW |
2108 | } |
2109 | TYPE_FIELDS (type) = tail; | |
2110 | ||
2111 | if (align_type) | |
2112 | { | |
2113 | TYPE_ALIGN (type) = TYPE_ALIGN (align_type); | |
2114 | TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type); | |
2115 | } | |
2116 | ||
2117 | layout_type (type); | |
2118 | #if 0 /* not yet, should get fixed properly later */ | |
2119 | TYPE_NAME (type) = make_type_decl (get_identifier (name), type); | |
2120 | #else | |
c2255bc4 AH |
2121 | TYPE_NAME (type) = build_decl (BUILTINS_LOCATION, |
2122 | TYPE_DECL, get_identifier (name), type); | |
4977bab6 ZW |
2123 | #endif |
2124 | TYPE_STUB_DECL (type) = TYPE_NAME (type); | |
2125 | layout_decl (TYPE_NAME (type), 0); | |
2126 | } | |
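A hedged sketch of how a front end or target typically calls this function; the record and field names here are hypothetical, but the pattern of chaining the FIELD_DECLs in reverse before handing them over matches the loop above:

static tree
make_hypothetical_va_list (void)
{
  tree record = lang_hooks.types.make_type (RECORD_TYPE);
  tree f_ptr = build_decl (BUILTINS_LOCATION, FIELD_DECL,
                           get_identifier ("__ptr"), ptr_type_node);
  tree f_off = build_decl (BUILTINS_LOCATION, FIELD_DECL,
                           get_identifier ("__offset"), integer_type_node);

  DECL_CHAIN (f_off) = f_ptr;   /* fields are chained in reverse */
  finish_builtin_struct (record, "__hypothetical_va_list", f_off, NULL_TREE);
  return record;
}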
2127 | ||
7306ed3f JW |
2128 | /* Calculate the mode, size, and alignment for TYPE. |
2129 | For an array type, calculate the element separation as well. | |
2130 | Record TYPE on the chain of permanent or temporary types | |
2131 | so that dbxout will find out about it. | |
2132 | ||
2133 | TYPE_SIZE of a type is nonzero if the type has been laid out already. | |
2134 | layout_type does nothing on such a type. | |
2135 | ||
2136 | If the type is incomplete, its TYPE_SIZE remains zero. */ | |
2137 | ||
2138 | void | |
46c5ad27 | 2139 | layout_type (tree type) |
7306ed3f | 2140 | { |
41374e13 | 2141 | gcc_assert (type); |
7306ed3f | 2142 | |
6de9cd9a DN |
2143 | if (type == error_mark_node) |
2144 | return; | |
2145 | ||
7306ed3f JW |
2146 | /* Do nothing if type has been laid out before. */ |
2147 | if (TYPE_SIZE (type)) | |
2148 | return; | |
2149 | ||
7306ed3f JW |
2150 | switch (TREE_CODE (type)) |
2151 | { | |
2152 | case LANG_TYPE: | |
2153 | /* This kind of type is the responsibility | |
9faa82d8 | 2154 | of the language-specific code. */ |
41374e13 | 2155 | gcc_unreachable (); |
7306ed3f | 2156 | |
c0e081a9 | 2157 | case BOOLEAN_TYPE: |
7306ed3f JW |
2158 | case INTEGER_TYPE: |
2159 | case ENUMERAL_TYPE: | |
179d2f74 RH |
2160 | SET_TYPE_MODE (type, |
2161 | smallest_mode_for_size (TYPE_PRECISION (type), MODE_INT)); | |
06ceef4e | 2162 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); |
50b6ee8b | 2163 | /* Don't set TYPE_PRECISION here, as it may be set by a bitfield. */ |
ead17059 | 2164 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); |
7306ed3f JW |
2165 | break; |
2166 | ||
2167 | case REAL_TYPE: | |
179d2f74 RH |
2168 | SET_TYPE_MODE (type, |
2169 | mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0)); | |
06ceef4e | 2170 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); |
ead17059 | 2171 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); |
7306ed3f JW |
2172 | break; |
2173 | ||
325217ed CF |
2174 | case FIXED_POINT_TYPE: |
2175 | /* TYPE_MODE (type) has been set already. */ | |
2176 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); | |
2177 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); | |
2178 | break; | |
2179 | ||
7306ed3f | 2180 | case COMPLEX_TYPE: |
8df83eae | 2181 | TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type)); |
179d2f74 RH |
2182 | SET_TYPE_MODE (type, |
2183 | mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)), | |
2184 | (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE | |
2185 | ? MODE_COMPLEX_FLOAT : MODE_COMPLEX_INT), | |
2186 | 0)); | |
06ceef4e | 2187 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); |
ead17059 | 2188 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); |
7306ed3f JW |
2189 | break; |
2190 | ||
0b4565c9 | 2191 | case VECTOR_TYPE: |
26277d41 PB |
2192 | { |
2193 | int nunits = TYPE_VECTOR_SUBPARTS (type); | |
26277d41 PB |
2194 | tree innertype = TREE_TYPE (type); |
2195 | ||
41374e13 | 2196 | gcc_assert (!(nunits & (nunits - 1))); |
26277d41 PB |
2197 | |
2198 | /* Find an appropriate mode for the vector type. */ | |
2199 | if (TYPE_MODE (type) == VOIDmode) | |
bb67d9c7 RG |
2200 | SET_TYPE_MODE (type, |
2201 | mode_for_vector (TYPE_MODE (innertype), nunits)); | |
26277d41 | 2202 | |
325217ed | 2203 | TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type)); |
26277d41 PB |
2204 | TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type)); |
2205 | TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR, | |
2206 | TYPE_SIZE_UNIT (innertype), | |
d35936ab | 2207 | size_int (nunits)); |
26277d41 | 2208 | TYPE_SIZE (type) = int_const_binop (MULT_EXPR, TYPE_SIZE (innertype), |
d35936ab | 2209 | bitsize_int (nunits)); |
e4ca3dc3 | 2210 | |
5aea1e76 UW |
2211 | /* For vector types, we do not default to the mode's alignment. |
2212 | Instead, query a target hook, defaulting to natural alignment. | |
2213 | This prevents ABI changes depending on whether or not native | |
2214 | vector modes are supported. */ | |
2215 | TYPE_ALIGN (type) = targetm.vector_alignment (type); | |
2216 | ||
2217 | /* However, if the underlying mode requires a bigger alignment than | |
2218 | what the target hook provides, we cannot use the mode. For now, | |
2219 | simply reject that case. */ | |
2220 | gcc_assert (TYPE_ALIGN (type) | |
2221 | >= GET_MODE_ALIGNMENT (TYPE_MODE (type))); | |
26277d41 PB |
2222 | break; |
2223 | } | |
0b4565c9 | 2224 | |
7306ed3f | 2225 | case VOID_TYPE: |
770ae6cc | 2226 | /* This is an incomplete type and so doesn't have a size. */ |
7306ed3f | 2227 | TYPE_ALIGN (type) = 1; |
11cf4d18 | 2228 | TYPE_USER_ALIGN (type) = 0; |
179d2f74 | 2229 | SET_TYPE_MODE (type, VOIDmode); |
7306ed3f JW |
2230 | break; |
2231 | ||
d5e254e1 IE |
2232 | case POINTER_BOUNDS_TYPE: |
2233 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type))); | |
2234 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type))); | |
2235 | break; | |
2236 | ||
321cb743 | 2237 | case OFFSET_TYPE: |
06ceef4e | 2238 | TYPE_SIZE (type) = bitsize_int (POINTER_SIZE); |
50b6ee8b DD |
2239 | TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE_UNITS); |
2240 | /* A pointer might be MODE_PARTIAL_INT, but ptrdiff_t must be | |
2241 | integral, which may be an __intN. */ | |
179d2f74 | 2242 | SET_TYPE_MODE (type, mode_for_size (POINTER_SIZE, MODE_INT, 0)); |
d4ebfa65 | 2243 | TYPE_PRECISION (type) = POINTER_SIZE; |
321cb743 MT |
2244 | break; |
2245 | ||
7306ed3f JW |
2246 | case FUNCTION_TYPE: |
2247 | case METHOD_TYPE: | |
019dd4ec RK |
2248 | /* It's hard to see what the mode and size of a function ought to |
2249 | be, but we do know the alignment is FUNCTION_BOUNDARY, so | |
2250 | make it consistent with that. */ | |
179d2f74 | 2251 | SET_TYPE_MODE (type, mode_for_size (FUNCTION_BOUNDARY, MODE_INT, 0)); |
019dd4ec RK |
2252 | TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY); |
2253 | TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT); | |
7306ed3f JW |
2254 | break; |
2255 | ||
2256 | case POINTER_TYPE: | |
2257 | case REFERENCE_TYPE: | |
b5d6a2ff | 2258 | { |
ef4bddc2 | 2259 | machine_mode mode = TYPE_MODE (type); |
d4ebfa65 BE |
2260 | if (TREE_CODE (type) == REFERENCE_TYPE && reference_types_internal) |
2261 | { | |
2262 | addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (type)); | |
2263 | mode = targetm.addr_space.address_mode (as); | |
2264 | } | |
4977bab6 | 2265 | |
d4ebfa65 | 2266 | TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode)); |
4977bab6 | 2267 | TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode)); |
8df83eae | 2268 | TYPE_UNSIGNED (type) = 1; |
50b6ee8b | 2269 | TYPE_PRECISION (type) = GET_MODE_PRECISION (mode); |
b5d6a2ff | 2270 | } |
7306ed3f JW |
2271 | break; |
2272 | ||
2273 | case ARRAY_TYPE: | |
2274 | { | |
b3694847 SS |
2275 | tree index = TYPE_DOMAIN (type); |
2276 | tree element = TREE_TYPE (type); | |
7306ed3f JW |
2277 | |
2278 | build_pointer_type (element); | |
2279 | ||
2280 | /* We need to know both bounds in order to compute the size. */ | |
2281 | if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index) | |
2282 | && TYPE_SIZE (element)) | |
2283 | { | |
ad50bc8d RH |
2284 | tree ub = TYPE_MAX_VALUE (index); |
2285 | tree lb = TYPE_MIN_VALUE (index); | |
473ebbc5 | 2286 | tree element_size = TYPE_SIZE (element); |
e24ff973 RK |
2287 | tree length; |
2288 | ||
c2ce8cdc EB |
2289 | /* Make sure that an array of zero-sized element is zero-sized |
2290 | regardless of its extent. */ | |
2291 | if (integer_zerop (element_size)) | |
2292 | length = size_zero_node; | |
2293 | ||
830c740f RG |
2294 | /* The computation should happen in the original signedness so |
2295 | that (possible) negative values are handled appropriately | |
2296 | when determining overflow. */ | |
c2ce8cdc | 2297 | else |
56099f00 RG |
2298 | { |
2299 | /* ??? When it is obvious that the range is signed | |
2300 | represent it using ssizetype. */ | |
2301 | if (TREE_CODE (lb) == INTEGER_CST | |
2302 | && TREE_CODE (ub) == INTEGER_CST | |
2303 | && TYPE_UNSIGNED (TREE_TYPE (lb)) | |
2304 | && tree_int_cst_lt (ub, lb)) | |
2305 | { | |
807e902e KZ |
2306 | lb = wide_int_to_tree (ssizetype, |
2307 | offset_int::from (lb, SIGNED)); | |
2308 | ub = wide_int_to_tree (ssizetype, | |
2309 | offset_int::from (ub, SIGNED)); | |
56099f00 RG |
2310 | } |
2311 | length | |
2312 | = fold_convert (sizetype, | |
2313 | size_binop (PLUS_EXPR, | |
2314 | build_int_cst (TREE_TYPE (lb), 1), | |
2315 | size_binop (MINUS_EXPR, ub, lb))); | |
2316 | } | |
2317 | ||
ce3da0d0 EB |
2318 | /* ??? We have no way to distinguish a null-sized array from an |
2319 | array spanning the whole sizetype range, so we arbitrarily | |
2320 | decide that [0, -1] is the only valid representation. */ | |
56099f00 | 2321 | if (integer_zerop (length) |
ce3da0d0 EB |
2322 | && TREE_OVERFLOW (length) |
2323 | && integer_zerop (lb)) | |
56099f00 | 2324 | length = size_zero_node; |
7306ed3f | 2325 | |
fed3cef0 | 2326 | TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size, |
0ac11108 | 2327 | fold_convert (bitsizetype, |
455f19cb | 2328 | length)); |
ead17059 | 2329 | |
473ebbc5 EB |
2330 | /* If we know the size of the element, calculate the total size |
2331 | directly, rather than do some division thing below. This | |
2332 | optimization helps Fortran assumed-size arrays (where the | |
2333 | size of the array is determined at runtime) substantially. */ | |
2334 | if (TYPE_SIZE_UNIT (element)) | |
d4b60170 RK |
2335 | TYPE_SIZE_UNIT (type) |
2336 | = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length); | |
7306ed3f JW |
2337 | } |
2338 | ||
2339 | /* Now round the alignment and size, | |
2340 | using machine-dependent criteria if any. */ | |
2341 | ||
2342 | #ifdef ROUND_TYPE_ALIGN | |
2343 | TYPE_ALIGN (type) | |
2344 | = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT); | |
2345 | #else | |
2346 | TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT); | |
2347 | #endif | |
c163d21d | 2348 | TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element); |
179d2f74 | 2349 | SET_TYPE_MODE (type, BLKmode); |
7306ed3f | 2350 | if (TYPE_SIZE (type) != 0 |
d9886a9e | 2351 | && ! targetm.member_type_forces_blk (type, VOIDmode) |
7306ed3f JW |
2352 | /* BLKmode elements force BLKmode aggregate; |
2353 | else extract/store fields may lose. */ | |
2354 | && (TYPE_MODE (TREE_TYPE (type)) != BLKmode | |
2355 | || TYPE_NO_FORCE_BLK (TREE_TYPE (type)))) | |
2356 | { | |
0f6d54f7 RS |
2357 | SET_TYPE_MODE (type, mode_for_array (TREE_TYPE (type), |
2358 | TYPE_SIZE (type))); | |
72c602fc RK |
2359 | if (TYPE_MODE (type) != BLKmode |
2360 | && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT | |
b9d49351 | 2361 | && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))) |
7306ed3f JW |
2362 | { |
2363 | TYPE_NO_FORCE_BLK (type) = 1; | |
179d2f74 | 2364 | SET_TYPE_MODE (type, BLKmode); |
7306ed3f | 2365 | } |
7306ed3f | 2366 | } |
b606b65c OH |
2367 | /* When the element size is constant, check that it is at least as |
2368 | large as the element alignment. */ | |
002a9071 SE |
2369 | if (TYPE_SIZE_UNIT (element) |
2370 | && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST | |
b606b65c OH |
2371 | /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than |
2372 | TYPE_ALIGN_UNIT. */ | |
455f14dd | 2373 | && !TREE_OVERFLOW (TYPE_SIZE_UNIT (element)) |
002a9071 SE |
2374 | && !integer_zerop (TYPE_SIZE_UNIT (element)) |
2375 | && compare_tree_int (TYPE_SIZE_UNIT (element), | |
2376 | TYPE_ALIGN_UNIT (element)) < 0) | |
2377 | error ("alignment of array elements is greater than element size"); | |
7306ed3f JW |
2378 | break; |
2379 | } | |
2380 | ||
2381 | case RECORD_TYPE: | |
cc9d4a85 MM |
2382 | case UNION_TYPE: |
2383 | case QUAL_UNION_TYPE: | |
9328904c MM |
2384 | { |
2385 | tree field; | |
2386 | record_layout_info rli; | |
2387 | ||
2388 | /* Initialize the layout information. */ | |
770ae6cc RK |
2389 | rli = start_record_layout (type); |
2390 | ||
cc9d4a85 MM |
2391 | /* If this is a QUAL_UNION_TYPE, we want to process the fields |
2392 | in the reverse order in building the COND_EXPR that denotes | |
2393 | its size. We reverse them again later. */ | |
2394 | if (TREE_CODE (type) == QUAL_UNION_TYPE) | |
2395 | TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type)); | |
770ae6cc RK |
2396 | |
2397 | /* Place all the fields. */ | |
910ad8de | 2398 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
770ae6cc RK |
2399 | place_field (rli, field); |
2400 | ||
cc9d4a85 MM |
2401 | if (TREE_CODE (type) == QUAL_UNION_TYPE) |
2402 | TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type)); | |
770ae6cc | 2403 | |
9328904c | 2404 | /* Finish laying out the record. */ |
17bbb839 | 2405 | finish_record_layout (rli, /*free_p=*/true); |
9328904c | 2406 | } |
7306ed3f JW |
2407 | break; |
2408 | ||
7306ed3f | 2409 | default: |
41374e13 | 2410 | gcc_unreachable (); |
729a2125 | 2411 | } |
7306ed3f | 2412 | |
9328904c | 2413 | /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For |
cc9d4a85 MM |
2414 | records and unions, finish_record_layout already called this |
2415 | function. */ | |
786de7eb | 2416 | if (TREE_CODE (type) != RECORD_TYPE |
cc9d4a85 MM |
2417 | && TREE_CODE (type) != UNION_TYPE |
2418 | && TREE_CODE (type) != QUAL_UNION_TYPE) | |
9328904c | 2419 | finalize_type_size (type); |
7306ed3f | 2420 | |
36784d0e RG |
2421 | /* We should never see alias sets on incomplete aggregates. And we |
2422 | should not call layout_type on aggregates that are already complete. */ | |
2423 | if (AGGREGATE_TYPE_P (type)) | |
2424 | gcc_assert (!TYPE_ALIAS_SET_KNOWN_P (type)); | |
7306ed3f | 2425 | } |
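Two array consequences of the ARRAY_TYPE case above, written as GNU C (the zero-sized struct is a GNU extension; this snippet is illustrative only):

/* Domain [0, 7]: length = 7 - 0 + 1 = 8 elements, so TYPE_SIZE is
   8 * sizeof (int) * BITS_PER_UNIT.  */
typedef int row[8];

/* A zero-sized element type makes the whole array zero-sized regardless
   of its extent, per the integer_zerop (element_size) check above.  */
struct empty {};
typedef struct empty zero_row[100];
_Static_assert (sizeof (zero_row) == 0, "zero-sized elements, zero-sized array");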
179d2f74 | 2426 | |
944fa280 JJ |
2427 | /* Return the least alignment required for type TYPE. */ |
2428 | ||
2429 | unsigned int | |
2430 | min_align_of_type (tree type) | |
2431 | { | |
2432 | unsigned int align = TYPE_ALIGN (type); | |
2433 | align = MIN (align, BIGGEST_ALIGNMENT); | |
1740f8a1 JJ |
2434 | if (!TYPE_USER_ALIGN (type)) |
2435 | { | |
944fa280 | 2436 | #ifdef BIGGEST_FIELD_ALIGNMENT |
1740f8a1 | 2437 | align = MIN (align, BIGGEST_FIELD_ALIGNMENT); |
944fa280 | 2438 | #endif |
1740f8a1 | 2439 | unsigned int field_align = align; |
944fa280 | 2440 | #ifdef ADJUST_FIELD_ALIGN |
1740f8a1 JJ |
2441 | tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, type); |
2442 | field_align = ADJUST_FIELD_ALIGN (field, field_align); | |
2443 | ggc_free (field); | |
944fa280 | 2444 | #endif |
1740f8a1 JJ |
2445 | align = MIN (align, field_align); |
2446 | } | |
944fa280 JJ |
2447 | return align / BITS_PER_UNIT; |
2448 | } | |
2449 | ||
179d2f74 RH |
2450 | /* Vector types need to re-check the target flags each time we report |
2451 | the machine mode. We need to do this because attribute target can | |
2452 | change the result of vector_mode_supported_p and have_regs_of_mode | |
2453 | on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can | |
2454 | change on a per-function basis. */ | |
b8698a0f | 2455 | /* ??? Possibly a better solution is to run through all the types |
179d2f74 RH |
2456 | referenced by a function and re-compute the TYPE_MODE once, rather |
2457 | than make the TYPE_MODE macro call a function. */ | |
2458 | ||
ef4bddc2 | 2459 | machine_mode |
179d2f74 RH |
2460 | vector_type_mode (const_tree t) |
2461 | { | |
ef4bddc2 | 2462 | machine_mode mode; |
179d2f74 RH |
2463 | |
2464 | gcc_assert (TREE_CODE (t) == VECTOR_TYPE); | |
2465 | ||
51545682 | 2466 | mode = t->type_common.mode; |
179d2f74 RH |
2467 | if (VECTOR_MODE_P (mode) |
2468 | && (!targetm.vector_mode_supported_p (mode) | |
2469 | || !have_regs_of_mode[mode])) | |
2470 | { | |
ef4bddc2 | 2471 | machine_mode innermode = TREE_TYPE (t)->type_common.mode; |
179d2f74 RH |
2472 | |
2473 | /* For integers, try mapping it to a same-sized scalar mode. */ | |
2474 | if (GET_MODE_CLASS (innermode) == MODE_INT) | |
2475 | { | |
2476 | mode = mode_for_size (TYPE_VECTOR_SUBPARTS (t) | |
2477 | * GET_MODE_BITSIZE (innermode), MODE_INT, 0); | |
2478 | ||
2479 | if (mode != VOIDmode && have_regs_of_mode[mode]) | |
2480 | return mode; | |
2481 | } | |
2482 | ||
2483 | return BLKmode; | |
2484 | } | |
2485 | ||
2486 | return mode; | |
2487 | } | |
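An x86-flavoured illustration of why this re-check is needed (hedged; the attribute and ISA names assume an x86 target):

typedef int v8si __attribute__ ((vector_size (32)));

__attribute__ ((target ("avx2")))
v8si add_avx2 (v8si a, v8si b)
{
  return a + b;   /* V8SImode has registers in this function */
}

v8si add_generic (v8si a, v8si b)
{
  return a + b;   /* without AVX enabled, the same type falls back as above */
}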
7306ed3f JW |
2488 | \f |
2489 | /* Create and return a type for signed integers of PRECISION bits. */ | |
2490 | ||
2491 | tree | |
46c5ad27 | 2492 | make_signed_type (int precision) |
7306ed3f | 2493 | { |
b3694847 | 2494 | tree type = make_node (INTEGER_TYPE); |
7306ed3f JW |
2495 | |
2496 | TYPE_PRECISION (type) = precision; | |
2497 | ||
fed3cef0 | 2498 | fixup_signed_type (type); |
7306ed3f JW |
2499 | return type; |
2500 | } | |
2501 | ||
2502 | /* Create and return a type for unsigned integers of PRECISION bits. */ | |
2503 | ||
2504 | tree | |
46c5ad27 | 2505 | make_unsigned_type (int precision) |
7306ed3f | 2506 | { |
b3694847 | 2507 | tree type = make_node (INTEGER_TYPE); |
7306ed3f JW |
2508 | |
2509 | TYPE_PRECISION (type) = precision; | |
2510 | ||
7306ed3f JW |
2511 | fixup_unsigned_type (type); |
2512 | return type; | |
2513 | } | |
fed3cef0 | 2514 | \f |
325217ed CF |
2515 | /* Create and return a type for fract of PRECISION bits, UNSIGNEDP, |
2516 | and SATP. */ | |
2517 | ||
2518 | tree | |
2519 | make_fract_type (int precision, int unsignedp, int satp) | |
2520 | { | |
2521 | tree type = make_node (FIXED_POINT_TYPE); | |
2522 | ||
2523 | TYPE_PRECISION (type) = precision; | |
2524 | ||
2525 | if (satp) | |
2526 | TYPE_SATURATING (type) = 1; | |
2527 | ||
2528 | /* Lay out the type: set its alignment, size, etc. */ | |
2529 | if (unsignedp) | |
2530 | { | |
2531 | TYPE_UNSIGNED (type) = 1; | |
179d2f74 | 2532 | SET_TYPE_MODE (type, mode_for_size (precision, MODE_UFRACT, 0)); |
325217ed CF |
2533 | } |
2534 | else | |
179d2f74 | 2535 | SET_TYPE_MODE (type, mode_for_size (precision, MODE_FRACT, 0)); |
325217ed CF |
2536 | layout_type (type); |
2537 | ||
2538 | return type; | |
2539 | } | |
2540 | ||
2541 | /* Create and return a type for accum of PRECISION bits, UNSIGNEDP, | |
2542 | and SATP. */ | |
2543 | ||
2544 | tree | |
2545 | make_accum_type (int precision, int unsignedp, int satp) | |
2546 | { | |
2547 | tree type = make_node (FIXED_POINT_TYPE); | |
2548 | ||
2549 | TYPE_PRECISION (type) = precision; | |
2550 | ||
2551 | if (satp) | |
2552 | TYPE_SATURATING (type) = 1; | |
2553 | ||
2554 | /* Lay out the type: set its alignment, size, etc. */ | |
2555 | if (unsignedp) | |
2556 | { | |
2557 | TYPE_UNSIGNED (type) = 1; | |
179d2f74 | 2558 | SET_TYPE_MODE (type, mode_for_size (precision, MODE_UACCUM, 0)); |
325217ed CF |
2559 | } |
2560 | else | |
179d2f74 | 2561 | SET_TYPE_MODE (type, mode_for_size (precision, MODE_ACCUM, 0)); |
325217ed CF |
2562 | layout_type (type); |
2563 | ||
2564 | return type; | |
2565 | } | |
2566 | ||
67b88453 | 2567 | /* Initialize sizetypes so layout_type can use them. */ |
fed3cef0 RK |
2568 | |
2569 | void | |
3b9e5d95 | 2570 | initialize_sizetypes (void) |
fed3cef0 | 2571 | { |
67b88453 RG |
2572 | int precision, bprecision; |
2573 | ||
2574 | /* Get sizetypes precision from the SIZE_TYPE target macro. */ | |
18dae016 | 2575 | if (strcmp (SIZETYPE, "unsigned int") == 0) |
67b88453 | 2576 | precision = INT_TYPE_SIZE; |
18dae016 | 2577 | else if (strcmp (SIZETYPE, "long unsigned int") == 0) |
67b88453 | 2578 | precision = LONG_TYPE_SIZE; |
18dae016 | 2579 | else if (strcmp (SIZETYPE, "long long unsigned int") == 0) |
67b88453 | 2580 | precision = LONG_LONG_TYPE_SIZE; |
18dae016 | 2581 | else if (strcmp (SIZETYPE, "short unsigned int") == 0) |
b87ac615 | 2582 | precision = SHORT_TYPE_SIZE; |
67b88453 | 2583 | else |
78a7c317 DD |
2584 | { |
2585 | int i; | |
2586 | ||
2587 | precision = -1; | |
2588 | for (i = 0; i < NUM_INT_N_ENTS; i++) | |
2589 | if (int_n_enabled_p[i]) | |
2590 | { | |
2591 | char name[50]; | |
2592 | sprintf (name, "__int%d unsigned", int_n_data[i].bitsize); | |
2593 | ||
2594 | if (strcmp (name, SIZETYPE) == 0) | |
2595 | { | |
2596 | precision = int_n_data[i].bitsize; | |
2597 | } | |
2598 | } | |
2599 | if (precision == -1) | |
2600 | gcc_unreachable (); | |
2601 | } | |
0ac11108 | 2602 | |
67b88453 RG |
2603 | bprecision |
2604 | = MIN (precision + BITS_PER_UNIT_LOG + 1, MAX_FIXED_MODE_SIZE); | |
2605 | bprecision | |
2606 | = GET_MODE_PRECISION (smallest_mode_for_size (bprecision, MODE_INT)); | |
49ab6098 KZ |
2607 | if (bprecision > HOST_BITS_PER_DOUBLE_INT) |
2608 | bprecision = HOST_BITS_PER_DOUBLE_INT; | |
67b88453 RG |
2609 | |
2610 | /* Create stubs for sizetype and bitsizetype so we can create constants. */ | |
2611 | sizetype = make_node (INTEGER_TYPE); | |
f93fe5a0 | 2612 | TYPE_NAME (sizetype) = get_identifier ("sizetype"); |
67b88453 RG |
2613 | TYPE_PRECISION (sizetype) = precision; |
2614 | TYPE_UNSIGNED (sizetype) = 1; | |
67b88453 RG |
2615 | bitsizetype = make_node (INTEGER_TYPE); |
2616 | TYPE_NAME (bitsizetype) = get_identifier ("bitsizetype"); | |
2617 | TYPE_PRECISION (bitsizetype) = bprecision; | |
2618 | TYPE_UNSIGNED (bitsizetype) = 1; | |
67b88453 RG |
2619 | |
2620 | /* Now layout both types manually. */ | |
2621 | SET_TYPE_MODE (sizetype, smallest_mode_for_size (precision, MODE_INT)); | |
2622 | TYPE_ALIGN (sizetype) = GET_MODE_ALIGNMENT (TYPE_MODE (sizetype)); | |
2623 | TYPE_SIZE (sizetype) = bitsize_int (precision); | |
2624 | TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (TYPE_MODE (sizetype))); | |
807e902e | 2625 | set_min_and_max_values_for_integral_type (sizetype, precision, UNSIGNED); |
67b88453 RG |
2626 | |
2627 | SET_TYPE_MODE (bitsizetype, smallest_mode_for_size (bprecision, MODE_INT)); | |
2628 | TYPE_ALIGN (bitsizetype) = GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype)); | |
2629 | TYPE_SIZE (bitsizetype) = bitsize_int (bprecision); | |
2630 | TYPE_SIZE_UNIT (bitsizetype) | |
2631 | = size_int (GET_MODE_SIZE (TYPE_MODE (bitsizetype))); | |
807e902e | 2632 | set_min_and_max_values_for_integral_type (bitsizetype, bprecision, UNSIGNED); |
7f18f917 | 2633 | |
3b9e5d95 | 2634 | /* Create the signed variants of *sizetype. */ |
67b88453 | 2635 | ssizetype = make_signed_type (TYPE_PRECISION (sizetype)); |
f93fe5a0 | 2636 | TYPE_NAME (ssizetype) = get_identifier ("ssizetype"); |
67b88453 | 2637 | sbitsizetype = make_signed_type (TYPE_PRECISION (bitsizetype)); |
f93fe5a0 | 2638 | TYPE_NAME (sbitsizetype) = get_identifier ("sbitsizetype"); |
fed3cef0 RK |
2639 | } |
2640 | \f | |
71d59383 RS |
2641 | /* TYPE is an integral type, i.e., an INTEGER_TYPE, ENUMERAL_TYPE
2642 | or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE | |
7b6d72fc MM |
2643 | for TYPE, based on PRECISION and the signedness SGN.
2644 | PRECISION need not correspond to a width supported | |
2645 | natively by the hardware; for example, on a machine with 8-bit, | |
2646 | 16-bit, and 32-bit register modes, PRECISION might be 7, 23, or | |
2647 | 61. */ | |
2648 | ||
2649 | void | |
2650 | set_min_and_max_values_for_integral_type (tree type, | |
2651 | int precision, | |
807e902e | 2652 | signop sgn) |
7b6d72fc | 2653 | { |
c0e081a9 RB |
2654 | /* For bitfields with zero width we end up creating integer types |
2655 | with zero precision. Don't assign any minimum/maximum values | |
2656 | to those types, they don't have any valid value. */ | |
2657 | if (precision < 1) | |
2658 | return; | |
2659 | ||
807e902e KZ |
2660 | TYPE_MIN_VALUE (type) |
2661 | = wide_int_to_tree (type, wi::min_value (precision, sgn)); | |
2662 | TYPE_MAX_VALUE (type) | |
2663 | = wide_int_to_tree (type, wi::max_value (precision, sgn)); | |
7b6d72fc MM |
2664 | } |
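/* Illustrative, standalone sketch (not part of stor-layout.c): for
   precisions below 64 bits, the bounds installed above are the usual
   two's-complement extremes.  Plain C arithmetic stands in for wide_int.  */
#include <stdint.h>
#include <stdio.h>

static void
print_integral_bounds (int precision, int is_signed)
{
  if (is_signed)
    {
      int64_t lo = -((int64_t) 1 << (precision - 1));
      int64_t hi = ((int64_t) 1 << (precision - 1)) - 1;
      printf ("signed, %d bits:   [%lld, %lld]\n",
	      precision, (long long) lo, (long long) hi);
    }
  else
    {
      uint64_t hi = ((uint64_t) 1 << precision) - 1;
      printf ("unsigned, %d bits: [0, %llu]\n",
	      precision, (unsigned long long) hi);
    }
}

int
main (void)
{
  /* PRECISION need not match a register width; 7 and 23 are fine.  */
  print_integral_bounds (7, 1);    /* [-64, 63] */
  print_integral_bounds (23, 0);   /* [0, 8388607] */
  return 0;
}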
2665 | ||
4cc89e53 | 2666 | /* Set the extreme values of TYPE based on its precision in bits, |
13756074 | 2667 | then lay it out. Used when make_signed_type won't do |
4cc89e53 RS |
2668 | because the tree code is not INTEGER_TYPE. |
2669 | E.g. for Pascal, when the -fsigned-char option is given. */ | |
2670 | ||
2671 | void | |
46c5ad27 | 2672 | fixup_signed_type (tree type) |
4cc89e53 | 2673 | { |
b3694847 | 2674 | int precision = TYPE_PRECISION (type); |
4cc89e53 | 2675 | |
807e902e | 2676 | set_min_and_max_values_for_integral_type (type, precision, SIGNED); |
4cc89e53 RS |
2677 | |
2678 | /* Lay out the type: set its alignment, size, etc. */ | |
4cc89e53 RS |
2679 | layout_type (type); |
2680 | } | |
2681 | ||
7306ed3f | 2682 | /* Set the extreme values of TYPE based on its precision in bits, |
13756074 | 2683 | then lay it out. This is used both in `make_unsigned_type' |
7306ed3f JW |
2684 | and for enumeral types. */ |
2685 | ||
2686 | void | |
46c5ad27 | 2687 | fixup_unsigned_type (tree type) |
7306ed3f | 2688 | { |
b3694847 | 2689 | int precision = TYPE_PRECISION (type); |
7306ed3f | 2690 | |
89b0433e | 2691 | TYPE_UNSIGNED (type) = 1; |
f676971a | 2692 | |
807e902e | 2693 | set_min_and_max_values_for_integral_type (type, precision, UNSIGNED); |
7306ed3f JW |
2694 | |
2695 | /* Lay out the type: set its alignment, size, etc. */ | |
7306ed3f JW |
2696 | layout_type (type); |
2697 | } | |
2698 | \f | |
073a544d RS |
2699 | /* Construct an iterator for a bitfield that spans BITSIZE bits, |
2700 | starting at BITPOS. | |
2701 | ||
2702 | BITREGION_START is the bit position of the first bit in this | |
2703 | sequence of bit fields. BITREGION_END is the last bit in this | |
2704 | sequence. If these two fields are non-zero, we should restrict the | |
2705 | memory access to that range. Otherwise, we are allowed to touch | |
2706 | any adjacent non bit-fields. | |
2707 | ||
2708 | ALIGN is the alignment of the underlying object in bits. | |
2709 | VOLATILEP says whether the bitfield is volatile. */ | |
2710 | ||
2711 | bit_field_mode_iterator | |
2712 | ::bit_field_mode_iterator (HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, | |
2713 | HOST_WIDE_INT bitregion_start, | |
2714 | HOST_WIDE_INT bitregion_end, | |
2715 | unsigned int align, bool volatilep) | |
65d3284b RS |
2716 | : m_mode (GET_CLASS_NARROWEST_MODE (MODE_INT)), m_bitsize (bitsize), |
2717 | m_bitpos (bitpos), m_bitregion_start (bitregion_start), | |
2718 | m_bitregion_end (bitregion_end), m_align (align), | |
2719 | m_volatilep (volatilep), m_count (0) | |
073a544d | 2720 | { |
65d3284b | 2721 | if (!m_bitregion_end) |
8b7d5dab | 2722 | { |
a37d67b6 EB |
2723 | /* We can assume that any aligned chunk of ALIGN bits that overlaps |
2724 | the bitfield is mapped and won't trap, provided that ALIGN isn't | |
2725 | too large. The cap is the biggest required alignment for data, | |
2726 | or at least the word size. And force one such chunk at least. */ | |
2727 | unsigned HOST_WIDE_INT units | |
2728 | = MIN (align, MAX (BIGGEST_ALIGNMENT, BITS_PER_WORD)); | |
2729 | if (bitsize <= 0) | |
2730 | bitsize = 1; | |
65d3284b RS |
2731 | m_bitregion_end = bitpos + bitsize + units - 1; |
2732 | m_bitregion_end -= m_bitregion_end % units + 1; | |
8b7d5dab | 2733 | } |
073a544d RS |
2734 | } |
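/* Illustrative, standalone arithmetic (not part of stor-layout.c): when no
   bit region is given, the constructor above extends the region to the end
   of the aligned chunk of UNITS bits that holds the last bit of the field.
   The numbers below are an assumed example.  */
#include <stdio.h>

int
main (void)
{
  unsigned int units = 32;                 /* assumed chunk size in bits */
  unsigned int bitpos = 30, bitsize = 5;   /* field spans bits 30..34 */
  unsigned int end = bitpos + bitsize + units - 1;   /* 66 */
  end -= end % units + 1;                            /* 63 */
  printf ("default bitregion_end = %u\n", end);  /* last bit of chunk 32..63 */
  return 0;
}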
2735 | ||
2736 | /* Calls to this function return successively larger modes that can be used | |
2737 | to represent the bitfield. Return true if another bitfield mode is | |
2738 | available, storing it in *OUT_MODE if so. */ | |
2739 | ||
2740 | bool | |
ef4bddc2 | 2741 | bit_field_mode_iterator::next_mode (machine_mode *out_mode) |
073a544d | 2742 | { |
65d3284b | 2743 | for (; m_mode != VOIDmode; m_mode = GET_MODE_WIDER_MODE (m_mode)) |
073a544d | 2744 | { |
65d3284b | 2745 | unsigned int unit = GET_MODE_BITSIZE (m_mode); |
073a544d RS |
2746 | |
2747 | /* Skip modes that don't have full precision. */ | |
65d3284b | 2748 | if (unit != GET_MODE_PRECISION (m_mode)) |
073a544d RS |
2749 | continue; |
2750 | ||
073a544d RS |
2751 | /* Stop if the mode is too wide to handle efficiently. */ |
2752 | if (unit > MAX_FIXED_MODE_SIZE) | |
2753 | break; | |
2754 | ||
2755 | /* Don't deliver more than one multiword mode; the smallest one | |
2756 | should be used. */ | |
65d3284b | 2757 | if (m_count > 0 && unit > BITS_PER_WORD) |
073a544d RS |
2758 | break; |
2759 | ||
ec593a8f | 2760 | /* Skip modes that are too small. */ |
65d3284b RS |
2761 | unsigned HOST_WIDE_INT substart = (unsigned HOST_WIDE_INT) m_bitpos % unit; |
2762 | unsigned HOST_WIDE_INT subend = substart + m_bitsize; | |
ec593a8f RS |
2763 | if (subend > unit) |
2764 | continue; | |
2765 | ||
073a544d | 2766 | /* Stop if the mode goes outside the bitregion. */ |
65d3284b RS |
2767 | HOST_WIDE_INT start = m_bitpos - substart; |
2768 | if (m_bitregion_start && start < m_bitregion_start) | |
073a544d | 2769 | break; |
ec593a8f | 2770 | HOST_WIDE_INT end = start + unit; |
65d3284b | 2771 | if (end > m_bitregion_end + 1) |
8b7d5dab RS |
2772 | break; |
2773 | ||
2774 | /* Stop if the mode requires too much alignment. */ | |
65d3284b RS |
2775 | if (GET_MODE_ALIGNMENT (m_mode) > m_align |
2776 | && SLOW_UNALIGNED_ACCESS (m_mode, m_align)) | |
073a544d RS |
2777 | break; |
2778 | ||
65d3284b RS |
2779 | *out_mode = m_mode; |
2780 | m_mode = GET_MODE_WIDER_MODE (m_mode); | |
2781 | m_count++; | |
073a544d RS |
2782 | return true; |
2783 | } | |
2784 | return false; | |
2785 | } | |
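/* Illustrative, standalone sketch (not part of stor-layout.c): the core
   filter above accepts a candidate width UNIT only if the field fits inside
   one UNIT-aligned chunk and the chunk does not demand more alignment than
   the object provides.  The helper below is hypothetical and ignores the
   bitregion, precision, and multiword limits (and treats any over-aligned
   unit as rejected, rather than consulting SLOW_UNALIGNED_ACCESS).  */
#include <stdio.h>

static int
unit_fits_p (unsigned int unit, unsigned int bitpos, unsigned int bitsize,
	     unsigned int align)
{
  unsigned int substart = bitpos % unit;   /* offset inside the chunk */
  if (substart + bitsize > unit)           /* field straddles two chunks */
    return 0;
  if (unit > align)                        /* chunk needs more alignment */
    return 0;
  return 1;
}

int
main (void)
{
  /* A 3-bit field at bit 14 in a 32-bit aligned object: the 8- and 16-bit
     units fail because bits 14..16 straddle a chunk boundary; 32 succeeds;
     64 exceeds the known alignment.  */
  unsigned int widths[] = { 8, 16, 32, 64 };
  for (unsigned int i = 0; i < 4; i++)
    printf ("%2u-bit unit: %s\n", widths[i],
	    unit_fits_p (widths[i], 14, 3, 32) ? "usable" : "rejected");
  return 0;
}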
2786 | ||
2787 | /* Return true if smaller modes are generally preferred for this kind | |
2788 | of bitfield. */ | |
2789 | ||
2790 | bool | |
2791 | bit_field_mode_iterator::prefer_smaller_modes () | |
2792 | { | |
65d3284b | 2793 | return (m_volatilep |
073a544d RS |
2794 | ? targetm.narrow_volatile_bitfield () |
2795 | : !SLOW_BYTE_ACCESS); | |
2796 | } | |
2797 | ||
7306ed3f JW |
2798 | /* Find the best machine mode to use when referencing a bit field of length |
2799 | BITSIZE bits starting at BITPOS. | |
2800 | ||
1169e45d AH |
2801 | BITREGION_START is the bit position of the first bit in this |
2802 | sequence of bit fields. BITREGION_END is the last bit in this | |
2803 | sequence. If these two fields are non-zero, we should restrict the | |
073a544d | 2804 | memory access to that range. Otherwise, we are allowed to touch |
1169e45d AH |
2805 | any adjacent non bit-fields. |
2806 | ||
7306ed3f JW |
2807 | The underlying object is known to be aligned to a boundary of ALIGN bits. |
2808 | If LARGEST_MODE is not VOIDmode, it means that we should not use a mode | |
2809 | larger than LARGEST_MODE (usually SImode). | |
2810 | ||
c2a64439 | 2811 | If no mode meets all these conditions, we return VOIDmode. |
0ac11108 | 2812 | |
c2a64439 PB |
2813 | If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the |
2814 | smallest mode meeting these conditions. | |
2815 | ||
2816 | If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the | |
2817 | largest mode (but a mode no wider than UNITS_PER_WORD) that meets | |
2818 | all the conditions. | |
0ac11108 | 2819 | |
c2a64439 PB |
2820 | If VOLATILEP is true the narrow_volatile_bitfields target hook is used to |
2821 | decide which of the above modes should be used. */ | |
7306ed3f | 2822 | |
ef4bddc2 | 2823 | machine_mode |
1169e45d AH |
2824 | get_best_mode (int bitsize, int bitpos, |
2825 | unsigned HOST_WIDE_INT bitregion_start, | |
2826 | unsigned HOST_WIDE_INT bitregion_end, | |
2827 | unsigned int align, | |
ef4bddc2 | 2828 | machine_mode largest_mode, bool volatilep) |
7306ed3f | 2829 | { |
073a544d RS |
2830 | bit_field_mode_iterator iter (bitsize, bitpos, bitregion_start, |
2831 | bitregion_end, align, volatilep); | |
ef4bddc2 RS |
2832 | machine_mode widest_mode = VOIDmode; |
2833 | machine_mode mode; | |
073a544d | 2834 | while (iter.next_mode (&mode) |
00efe3ea RS |
2835 | /* ??? For historical reasons, reject modes that would normally |
2836 | receive greater alignment, even if unaligned accesses are | |
2837 | acceptable. This has both advantages and disadvantages. | |
8b7d5dab RS |
2838 | Removing this check means that something like: |
2839 | ||
2840 | struct s { unsigned int x; unsigned int y; }; | |
2841 | int f (struct s *s) { return s->x == 0 && s->y == 0; } | |
2842 | ||
2843 | can be implemented using a single load and compare on | |
2844 | 64-bit machines that have no alignment restrictions. | |
2845 | For example, on powerpc64-linux-gnu, we would generate: | |
2846 | ||
2847 | ld 3,0(3) | |
2848 | cntlzd 3,3 | |
2849 | srdi 3,3,6 | |
2850 | blr | |
2851 | ||
2852 | rather than: | |
2853 | ||
2854 | lwz 9,0(3) | |
2855 | cmpwi 7,9,0 | |
2856 | bne 7,.L3 | |
2857 | lwz 3,4(3) | |
2858 | cntlzw 3,3 | |
2859 | srwi 3,3,5 | |
2860 | extsw 3,3 | |
2861 | blr | |
2862 | .p2align 4,,15 | |
2863 | .L3: | |
2864 | li 3,0 | |
2865 | blr | |
2866 | ||
2867 | However, accessing more than one field can make life harder | |
2868 | for the gimple optimizers. For example, gcc.dg/vect/bb-slp-5.c | |
2869 | has a series of unsigned short copies followed by a series of | |
2870 | unsigned short comparisons. With this check, both the copies | |
2871 | and comparisons remain 16-bit accesses and FRE is able | |
2872 | to eliminate the latter. Without the check, the comparisons | |
2873 | can be done using 2 64-bit operations, which FRE isn't able | |
2874 | to handle in the same way. | |
2875 | ||
2876 | Either way, it would probably be worth disabling this check | |
2877 | during expand. One particular example where removing the | |
2878 | check would help is the get_best_mode call in store_bit_field. | |
2879 | If we are given a memory bitregion of 128 bits that is aligned | |
2880 | to a 64-bit boundary, and the bitfield we want to modify is | |
2881 | in the second half of the bitregion, this check causes | |
2882 | store_bitfield to turn the memory into a 64-bit reference | |
2883 | to the _first_ half of the region. We later use | |
2884 | adjust_bitfield_address to get a reference to the correct half, | |
2885 | but doing so looks to adjust_bitfield_address as though we are | |
2886 | moving past the end of the original object, so it drops the | |
2887 | associated MEM_EXPR and MEM_OFFSET. Removing the check | |
2888 | causes store_bit_field to keep a 128-bit memory reference, | |
2889 | so that the final bitfield reference still has a MEM_EXPR | |
2890 | and MEM_OFFSET. */ | |
00efe3ea | 2891 | && GET_MODE_ALIGNMENT (mode) <= align |
073a544d RS |
2892 | && (largest_mode == VOIDmode |
2893 | || GET_MODE_SIZE (mode) <= GET_MODE_SIZE (largest_mode))) | |
7306ed3f | 2894 | { |
073a544d RS |
2895 | widest_mode = mode; |
2896 | if (iter.prefer_smaller_modes ()) | |
7306ed3f JW |
2897 | break; |
2898 | } | |
073a544d | 2899 | return widest_mode; |
7306ed3f | 2900 | } |
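/* Illustrative, standalone sketch (not part of stor-layout.c): given the
   ascending candidate widths the iterator yields, the loop above keeps the
   last acceptable one, but stops after the first when smaller modes are
   preferred (the non-volatile, !SLOW_BYTE_ACCESS case).  Names below are
   hypothetical.  */
#include <stdio.h>

static unsigned int
pick_width (const unsigned int *candidates, int n, int prefer_smaller)
{
  unsigned int best = 0;                /* 0 plays the role of VOIDmode */
  for (int i = 0; i < n; i++)
    {
      best = candidates[i];
      if (prefer_smaller)
	break;                          /* smallest acceptable width wins */
    }
  return best;                          /* otherwise the widest one wins */
}

int
main (void)
{
  unsigned int ok[] = { 16, 32, 64 };   /* widths that passed all checks */
  printf ("prefer smaller: %u\n", pick_width (ok, 3, 1));  /* 16 */
  printf ("prefer wider:   %u\n", pick_width (ok, 3, 0));  /* 64 */
  return 0;
}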
d7db6646 | 2901 | |
50654f6c | 2902 | /* Gets minimal and maximal values for MODE (signed or unsigned depending on |
0aea6467 | 2903 | SIGN). The returned constants are made to be usable in TARGET_MODE. */ |
50654f6c ZD |
2904 | |
2905 | void | |
ef4bddc2 RS |
2906 | get_mode_bounds (machine_mode mode, int sign, |
2907 | machine_mode target_mode, | |
0aea6467 | 2908 | rtx *mmin, rtx *mmax) |
50654f6c | 2909 | { |
e1a55837 | 2910 | unsigned size = GET_MODE_PRECISION (mode); |
0aea6467 | 2911 | unsigned HOST_WIDE_INT min_val, max_val; |
50654f6c | 2912 | |
41374e13 | 2913 | gcc_assert (size <= HOST_BITS_PER_WIDE_INT); |
50654f6c | 2914 | |
c15677b6 JJ |
2915 | /* Special case BImode, which has values 0 and STORE_FLAG_VALUE. */ |
2916 | if (mode == BImode) | |
2917 | { | |
2918 | if (STORE_FLAG_VALUE < 0) | |
2919 | { | |
2920 | min_val = STORE_FLAG_VALUE; | |
2921 | max_val = 0; | |
2922 | } | |
2923 | else | |
2924 | { | |
2925 | min_val = 0; | |
2926 | max_val = STORE_FLAG_VALUE; | |
2927 | } | |
2928 | } | |
2929 | else if (sign) | |
50654f6c | 2930 | { |
0aea6467 ZD |
2931 | min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1)); |
2932 | max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1; | |
50654f6c ZD |
2933 | } |
2934 | else | |
2935 | { | |
0aea6467 ZD |
2936 | min_val = 0; |
2937 | max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1; | |
50654f6c | 2938 | } |
0aea6467 | 2939 | |
bb80db7b KH |
2940 | *mmin = gen_int_mode (min_val, target_mode); |
2941 | *mmax = gen_int_mode (max_val, target_mode); | |
50654f6c ZD |
2942 | } |
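/* Illustrative, standalone note (not part of stor-layout.c): the unsigned
   maximum above is built with two shifts, "1 << (size - 1) << 1", so that
   SIZE == HOST_BITS_PER_WIDE_INT never shifts a value by its full width,
   which C leaves undefined.  Unsigned wrap-around makes the result exact.  */
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  unsigned int size = 64;
  uint64_t max_val = ((uint64_t) 1 << (size - 1) << 1) - 1;   /* 2^64 - 1 */
  printf ("unsigned max for %u bits: %llu\n",
	  size, (unsigned long long) max_val);
  return 0;
}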
2943 | ||
e2500fed | 2944 | #include "gt-stor-layout.h" |