/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "tree-pretty-print.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "tree-inline.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL.  */
#include "insn-config.h"
#include "rtl.h"
#include "recog.h"
#include "expr.h"
#include "ggc.h"
#include "target.h"

/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out of the
   back end, because the expander in expr.c validates the address.  However,
   it would be nice to improve the handling here in order to produce more
   precise results.  */

/* A "template" for a memory address, used to determine whether the address is
   valid for a given mode.  */

typedef struct GTY (()) mem_addr_template {
  rtx ref;                      /* The template.  */
  rtx * GTY ((skip)) step_p;    /* The point in template where the step should be
                                   filled in.  */
  rtx * GTY ((skip)) off_p;     /* The point in template where the offset should
                                   be filled in.  */
} mem_addr_template;

DEF_VEC_O (mem_addr_template);
DEF_VEC_ALLOC_O (mem_addr_template, gc);

/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) VEC (mem_addr_template, gc) *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
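
/* For instance, in address space 0 a reference with a base, an index and a
   nonzero offset, but no symbol and no step, gets the index
   TEMPL_IDX (0, NULL, base, index, NULL, offset)
     == (1 << 3) | (1 << 2) | 1 == 13.  */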

/* Stores in *ADDR the address for a memory reference with parameters SYMBOL,
   BASE, INDEX, STEP and OFFSET, using address mode ADDRESS_MODE.  Stores
   pointers to the locations of the step and the offset within the address
   to *STEP_P and *OFFSET_P.  */

static void
gen_addr_rtx (enum machine_mode address_mode,
              rtx symbol, rtx base, rtx index, rtx step, rtx offset,
              rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
        {
          act_elem = gen_rtx_MULT (address_mode, act_elem, step);

          if (step_p)
            *step_p = &XEXP (act_elem, 1);
        }

      *addr = act_elem;
    }

  if (base)
    {
      if (*addr)
        *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
        *addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
        {
          act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

          if (offset_p)
            *offset_p = &XEXP (act_elem, 1);

          if (GET_CODE (symbol) == SYMBOL_REF
              || GET_CODE (symbol) == LABEL_REF
              || GET_CODE (symbol) == CONST)
            act_elem = gen_rtx_CONST (address_mode, act_elem);
        }

      if (*addr)
        *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
        *addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
        {
          *addr = gen_rtx_PLUS (address_mode, *addr, offset);
          if (offset_p)
            *offset_p = &XEXP (*addr, 1);
        }
      else
        {
          *addr = offset;
          if (offset_p)
            *offset_p = addr;
        }
    }

  if (!*addr)
    *addr = const0_rtx;
}
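
/* With all components present, gen_addr_rtx thus produces an address of
   roughly the shape
     (plus (plus BASE (mult INDEX STEP)) (const (plus SYMBOL OFFSET)))
   (modulo simplification of the BASE addition), with *STEP_P and *OFFSET_P
   pointing at the STEP and OFFSET operands so that they can be patched
   later.  */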

/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
                  bool really_expand)
{
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_double_int_const (tree_to_double_int (addr->step), address_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_int_const
            (double_int_sext (tree_to_double_int (addr->offset),
                              TYPE_PRECISION (TREE_TYPE (addr->offset))),
             address_mode);
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
        = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index
          >= VEC_length (mem_addr_template, mem_addr_template_list))
        VEC_safe_grow_cleared (mem_addr_template, gc, mem_addr_template_list,
                               templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = VEC_index (mem_addr_template, mem_addr_template_list, templ_index);
      if (!templ->ref)
        {
          sym = (addr->symbol
                 ? gen_rtx_SYMBOL_REF (address_mode, ggc_strdup ("test_symbol"))
                 : NULL_RTX);
          bse = (addr->base
                 ? gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1)
                 : NULL_RTX);
          idx = (addr->index
                 ? gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2)
                 : NULL_RTX);

          gen_addr_rtx (address_mode, sym, bse, idx,
                        st ? const0_rtx : NULL_RTX,
                        off ? const0_rtx : NULL_RTX,
                        &templ->ref,
                        &templ->step_p,
                        &templ->off_p);
        }

      if (st)
        *templ->step_p = st;
      if (off)
        *templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
         ? expand_expr (build_addr (addr->symbol, current_function_decl),
                        NULL_RTX, address_mode, EXPAND_NORMAL)
         : NULL_RTX);
  bse = (addr->base
         ? expand_expr (addr->base, NULL_RTX, address_mode, EXPAND_NORMAL)
         : NULL_RTX);
  idx = (addr->index
         ? expand_expr (addr->index, NULL_RTX, address_mode, EXPAND_NORMAL)
         : NULL_RTX);

  gen_addr_rtx (address_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  return address;
}
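
/* When REALLY_EXPAND is false, the address built from a cached template is
   used to ask the target whether such an address would be legitimate (this
   is how valid_mem_ref_p below queries memory_address_addr_space_p); no new
   pseudo registers are created and no trees are expanded on that path.  */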

/* Returns the address of TARGET_MEM_REF MEM_REF, as a tree of type TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree sym = TMR_SYMBOL (mem_ref), base = TMR_BASE (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  if (sym)
    addr_base = fold_convert (type, build_addr (sym, current_function_decl));
  else if (base && POINTER_TYPE_P (TREE_TYPE (base)))
    {
      addr_base = fold_convert (type, base);
      base = NULL_TREE;
    }

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
        act_elem = fold_build2 (MULT_EXPR, sizetype, act_elem, step);
      addr_off = act_elem;
    }

  act_elem = base;
  if (act_elem)
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, sizetype, addr_off, act_elem);
      else
        addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      offset = fold_convert (sizetype, offset);
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, sizetype, addr_off, offset);
      else
        addr_off = offset;
    }

  if (addr_off)
    {
      if (addr_base)
        addr = fold_build2 (POINTER_PLUS_EXPR, type, addr_base, addr_off);
      else
        addr = fold_convert (type, addr_off);
    }
  else if (addr_base)
    addr = addr_base;
  else
    addr = build_int_cst (type, 0);

  return addr;
}
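
/* When all components are present, the result thus has the form
     ADDR_BASE + (INDEX * STEP + BASE + OFFSET)
   where ADDR_BASE comes from the symbol (or a pointer-typed base), the
   parenthesized part is accumulated in sizetype, and the outer addition is
   a POINTER_PLUS_EXPR.  */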

/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

static bool
valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
                 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}

/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr)
{
  if (!valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.  */
  if (alias_ptr_type
      && (!addr->index || integer_zerop (addr->index))
      && (!addr->base || POINTER_TYPE_P (TREE_TYPE (addr->base))))
    {
      tree base;
      gcc_assert (!addr->symbol ^ !addr->base);
      if (addr->symbol)
        base = build_fold_addr_expr (addr->symbol);
      else
        base = addr->base;
      return fold_build2 (MEM_REF, type, base, addr->offset);
    }

  return build5 (TARGET_MEM_REF, type,
                 addr->symbol, addr->base, addr->index,
                 addr->step, addr->offset);
}

/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (TREE_CODE (obj) == VAR_DECL
          && (TREE_STATIC (obj)
              || DECL_EXTERNAL (obj))
          && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains an address of an object that is a link time constant,
   move it to PARTS->symbol.  */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_one_p (addr->elts[i].coef))
        continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
          && fixed_address_object_p (TREE_OPERAND (val, 0)))
        break;
    }

  if (i == addr->n)
    return;

  parts->symbol = TREE_OPERAND (val, 0);
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
                   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_one_p (addr->elts[i].coef))
        continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
        break;
    }

  if (i == addr->n)
    return;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_one_p (addr->elts[i].coef))
        continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
        break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
        break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = double_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}

/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build2 (POINTER_PLUS_EXPR, type,
                               parts->base,
                               fold_convert (sizetype, elt));
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
                               parts->base, elt);
}
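
/* Note the order used by add_to_parts: an element becomes the index if no
   index has been chosen yet, then the base, and any further elements are
   folded into the base with a PLUS_EXPR or POINTER_PLUS_EXPR.  */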

/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
                              aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  double_int best_mult, amult, amult_neg;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  best_mult = double_int_zero;
  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_fits_in_shwi_p (addr->elts[i].coef))
        continue;

      coef = double_int_to_shwi (addr->elts[i].coef);
      if (coef == 1
          || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
        continue;

      acost = multiply_by_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
        {
          best_mult_cost = acost;
          best_mult = addr->elts[i].coef;
        }
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      amult = addr->elts[i].coef;
      amult_neg = double_int_ext_for_comb (double_int_neg (amult), addr);

      if (double_int_equal_p (amult, best_mult))
        op_code = PLUS_EXPR;
      else if (double_int_equal_p (amult_neg, best_mult))
        op_code = MINUS_EXPR;
      else
        {
          addr->elts[j] = addr->elts[i];
          j++;
          continue;
        }

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
        mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
        mult_elt = elt;
      else
        mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = double_int_to_tree (sizetype, best_mult);
}
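
/* For example, if ADDR is 8 * i - 8 * j + k, and 8 is an allowed address
   multiplier with the highest multiplication cost found above, this sets
   PARTS->index to (i - j), PARTS->step to 8, and leaves only k in ADDR.  */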

/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
               tree base_hint, struct mem_address *parts,
               bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (!double_int_zero_p (addr->offset))
    parts->offset = double_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way to distinguish between a pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (!double_int_one_p (addr->elts[i].coef))
        part = fold_build2 (MULT_EXPR, sizetype, part,
                            double_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
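
/* To summarize the distribution: a link-time-constant address becomes the
   symbol, the loop-variant candidate or the most expensive feasible
   multiplication becomes the index (with its step), a hinted SSA name or a
   dereferenced pointer becomes the base, and whatever remains is summed in
   by add_to_parts.  */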

/* Force the PARTS into registers, inserting any needed statements
   before GSI.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi (gsi, parts->base,
                                            true, NULL_TREE,
                                            true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
                                             true, NULL_TREE,
                                             true, GSI_SAME_STMT);
}

/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the type of the created
   memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
                tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  tree atype;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
                                fold_build2 (MULT_EXPR, sizetype,
                                             parts.index, parts.step),
                                true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = build_addr (parts.symbol, current_function_decl);
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, forcing it into a register if necessary.  */
      if (parts.base)
        {
          gcc_assert (useless_type_conversion_p
                      (sizetype, TREE_TYPE (parts.base)));

          if (parts.index)
            {
              atype = TREE_TYPE (tmp);
              parts.base = force_gimple_operand_gsi (gsi,
                                fold_build2 (POINTER_PLUS_EXPR, atype,
                                             tmp,
                                             fold_convert (sizetype, parts.base)),
                                true, NULL_TREE, true, GSI_SAME_STMT);
            }
          else
            {
              parts.index = parts.base;
              parts.base = tmp;
            }
        }
      else
        parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
        {
          atype = TREE_TYPE (parts.base);
          parts.base = force_gimple_operand_gsi (gsi,
                                fold_build2 (POINTER_PLUS_EXPR, atype,
                                             parts.base,
                                             parts.index),
                                true, NULL_TREE, true, GSI_SAME_STMT);
        }
      else
        parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
        {
          atype = TREE_TYPE (parts.base);
          parts.base = force_gimple_operand_gsi (gsi,
                                fold_build2 (POINTER_PLUS_EXPR, atype,
                                             parts.base,
                                             fold_convert (sizetype, parts.offset)),
                                true, NULL_TREE, true, GSI_SAME_STMT);
        }
      else
        parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts);
      if (mem_ref)
        return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
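
/* The fallback cascade above progressively flattens the address: first the
   step is folded into the index, then the symbol into the base, then the
   index into the base, and finally the offset into the base, ending (if all
   else fails) with a plain register -- which every target must accept,
   hence the gcc_unreachable.  */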

/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  addr->symbol = TMR_SYMBOL (op);
  addr->base = TMR_BASE (op);
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the additional information attached to target_mem_ref FROM to TO.  */

void
copy_mem_ref_info (tree to, tree from)
{
  /* And the info about the original reference.  */
  TREE_SIDE_EFFECTS (to) = TREE_SIDE_EFFECTS (from);
  TREE_THIS_VOLATILE (to) = TREE_THIS_VOLATILE (from);
}

/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree ret, off;

  get_address_description (ref, &addr);

  if (addr.base && TREE_CODE (addr.base) == INTEGER_CST)
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
        {
          off = fold_binary_to_constant (MULT_EXPR, sizetype,
                                         off, addr.step);
          addr.step = NULL_TREE;
        }

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  ret = create_mem_ref_raw (TREE_TYPE (ref), TREE_TYPE (addr.offset), &addr);
  if (!ret)
    return NULL_TREE;

  copy_mem_ref_info (ret, ref);
  return ret;
}
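
/* For instance, a TARGET_MEM_REF with a constant index of 4 and a step of 8
   is rewritten so that 32 is added to its offset and the index and step are
   cleared, provided the resulting reference is still valid for the target.  */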

/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, parts->symbol, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"