Source: git.ipfire.org Git mirror — thirdparty/gcc.git, blob gcc/tree-ssa-address.c
Note: this version of the file uses the vec<> template interface that replaced the old VEC macro-based interface.
[thirdparty/gcc.git] / gcc / tree-ssa-address.c
1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
22 that directly map to addressing modes of the target. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "basic-block.h"
31 #include "tree-pretty-print.h"
32 #include "tree-flow.h"
33 #include "dumpfile.h"
34 #include "flags.h"
35 #include "tree-inline.h"
36 #include "tree-affine.h"
37
38 /* FIXME: We compute address costs using RTL. */
39 #include "insn-config.h"
40 #include "rtl.h"
41 #include "recog.h"
42 #include "expr.h"
43 #include "ggc.h"
44 #include "target.h"
45 #include "expmed.h"
46
47 /* TODO -- handling of symbols (according to Richard Hendersons
48 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
49
50 There are at least 5 different kinds of symbols that we can run up against:
51
52 (1) binds_local_p, small data area.
53 (2) binds_local_p, eg local statics
54 (3) !binds_local_p, eg global variables
55 (4) thread local, local_exec
56 (5) thread local, !local_exec
57
58 Now, (1) won't appear often in an array context, but it certainly can.
59 All you have to do is set -GN high enough, or explicitly mark any
60 random object __attribute__((section (".sdata"))).
61
62 All of these affect whether or not a symbol is in fact a valid address.
63 The only one tested here is (3). And that result may very well
64 be incorrect for (4) or (5).
65
66 An incorrect result here does not cause incorrect results out the
67 back end, because the expander in expr.c validizes the address. However
68 it would be nice to improve the handling here in order to produce more
69 precise results. */
70
71 /* A "template" for memory address, used to determine whether the address is
72 valid for mode. */
73
74 typedef struct GTY (()) mem_addr_template {
75 rtx ref; /* The template. */
76 rtx * GTY ((skip)) step_p; /* The point in template where the step should be
77 filled in. */
78 rtx * GTY ((skip)) off_p; /* The point in template where the offset should
79 be filled in. */
80 } mem_addr_template;
81
82
83 /* The templates. Each of the low five bits of the index corresponds to one
84 component of TARGET_MEM_REF being present, while the high bits identify
85 the address space. See TEMPL_IDX. */
86
87 static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
88
89 #define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
90 (((int) (AS) << 5) \
91 | ((SYMBOL != 0) << 4) \
92 | ((BASE != 0) << 3) \
93 | ((INDEX != 0) << 2) \
94 | ((STEP != 0) << 1) \
95 | (OFFSET != 0))
96
/* Assembles the RTL form of an address from its components and stores it
   to *ADDR, using address mode ADDRESS_MODE.  SYMBOL, BASE, INDEX, STEP
   and OFFSET are the (possibly NULL) component rtxes.

   If STEP_P / OFFSET_P are non-NULL, pointers to the locations inside
   *ADDR where the step and offset operands live are stored to them, so
   that a caller can later patch different step/offset values into a
   shared address template in place.  */

static void
gen_addr_rtx (enum machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  /* Remember where the step operand lives inside the MULT so it
	     can be overwritten in place.  */
	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  /* const0_rtx serves as a "no base" placeholder; skip it.  */
  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  /* A constant symbol plus a constant offset is itself constant;
	     wrap it in CONST as the RTL grammar requires.  */
	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  /* The offset is the entire address, so the patchable offset
	     slot is the address slot itself.  */
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  /* No components at all -- the address is constant zero.  */
  if (!*addr)
    *addr = const0_rtx;
}
176
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  The templates are cached in
   mem_addr_template_list keyed by TEMPL_IDX, and their step/offset
   slots are overwritten on each call, so the returned rtx is only
   valid until the next call with the same component combination.  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  /* A step of one is implicit; represent it as "no step".  */
  if (addr->step && !integer_onep (addr->step))
    st = immed_double_int_const (tree_to_double_int (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  /* Sign-extend the offset from its type's precision so that negative
     offsets survive the widening to pointer_mode.  */
  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_int_const
	    (tree_to_double_int (addr->offset)
	     .sext (TYPE_PRECISION (TREE_TYPE (addr->offset))),
	     pointer_mode);
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  /* Build the template once, with fake operands: a dummy symbol
	     and raw registers just past the virtual register range.  */
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st? const0_rtx : NULL_RTX,
			off? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      /* Patch the actual step and offset into the cached template.  */
      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
259
260 /* Returns address of MEM_REF in TYPE. */
261
262 tree
263 tree_mem_ref_addr (tree type, tree mem_ref)
264 {
265 tree addr;
266 tree act_elem;
267 tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
268 tree addr_base = NULL_TREE, addr_off = NULL_TREE;
269
270 addr_base = fold_convert (type, TMR_BASE (mem_ref));
271
272 act_elem = TMR_INDEX (mem_ref);
273 if (act_elem)
274 {
275 if (step)
276 act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
277 act_elem, step);
278 addr_off = act_elem;
279 }
280
281 act_elem = TMR_INDEX2 (mem_ref);
282 if (act_elem)
283 {
284 if (addr_off)
285 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
286 addr_off, act_elem);
287 else
288 addr_off = act_elem;
289 }
290
291 if (offset && !integer_zerop (offset))
292 {
293 if (addr_off)
294 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
295 fold_convert (TREE_TYPE (addr_off), offset));
296 else
297 addr_off = offset;
298 }
299
300 if (addr_off)
301 addr = fold_build_pointer_plus (addr_base, addr_off);
302 else
303 addr = addr_base;
304
305 return addr;
306 }
307
308 /* Returns true if a memory reference in MODE and with parameters given by
309 ADDR is valid on the current target. */
310
311 static bool
312 valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
313 struct mem_address *addr)
314 {
315 rtx address;
316
317 address = addr_for_mem_ref (addr, as, false);
318 if (!address)
319 return false;
320
321 return memory_address_addr_space_p (mode, address, as);
322 }
323
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.
   NOTE: canonicalizes *ADDR in place (drops a unit step, materializes a
   zero offset in ALIAS_PTR_TYPE).  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  /* A multiplicative step of one carries no information; drop it.  */
  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  /* The offset operand always exists and carries the alias pointer type,
     so materialize a zero if no offset was supplied.  */
  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  /* Decide what goes into the base and index2 operand slots: a symbol
     takes the base slot, pushing any base into index2; a pointer-typed
     base keeps the base slot; otherwise base is a zero pointer and the
     value goes to index2.  */
  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
375
376 /* Returns true if OBJ is an object whose address is a link time constant. */
377
378 static bool
379 fixed_address_object_p (tree obj)
380 {
381 return (TREE_CODE (obj) == VAR_DECL
382 && (TREE_STATIC (obj)
383 || DECL_EXTERNAL (obj))
384 && ! DECL_DLLIMPORT_P (obj));
385 }
386
387 /* If ADDR contains an address of object that is a link time constant,
388 move it to PARTS->symbol. */
389
390 static void
391 move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
392 {
393 unsigned i;
394 tree val = NULL_TREE;
395
396 for (i = 0; i < addr->n; i++)
397 {
398 if (!addr->elts[i].coef.is_one ())
399 continue;
400
401 val = addr->elts[i].val;
402 if (TREE_CODE (val) == ADDR_EXPR
403 && fixed_address_object_p (TREE_OPERAND (val, 0)))
404 break;
405 }
406
407 if (i == addr->n)
408 return;
409
410 parts->symbol = val;
411 aff_combination_remove_elt (addr, i);
412 }
413
414 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
415
416 static void
417 move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
418 aff_tree *addr)
419 {
420 unsigned i;
421 tree val = NULL_TREE;
422 int qual;
423
424 for (i = 0; i < addr->n; i++)
425 {
426 if (!addr->elts[i].coef.is_one ())
427 continue;
428
429 val = addr->elts[i].val;
430 if (operand_equal_p (val, base_hint, 0))
431 break;
432 }
433
434 if (i == addr->n)
435 return;
436
437 /* Cast value to appropriate pointer type. We cannot use a pointer
438 to TYPE directly, as the back-end will assume registers of pointer
439 type are aligned, and just the base itself may not actually be.
440 We use void pointer to the type's address space instead. */
441 qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
442 type = build_qualified_type (void_type_node, qual);
443 parts->base = fold_convert (build_pointer_type (type), val);
444 aff_combination_remove_elt (addr, i);
445 }
446
447 /* If ADDR contains an address of a dereferenced pointer, move it to
448 PARTS->base. */
449
450 static void
451 move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
452 {
453 unsigned i;
454 tree val = NULL_TREE;
455
456 for (i = 0; i < addr->n; i++)
457 {
458 if (!addr->elts[i].coef.is_one ())
459 continue;
460
461 val = addr->elts[i].val;
462 if (POINTER_TYPE_P (TREE_TYPE (val)))
463 break;
464 }
465
466 if (i == addr->n)
467 return;
468
469 parts->base = val;
470 aff_combination_remove_elt (addr, i);
471 }
472
473 /* Moves the loop variant part V in linear address ADDR to be the index
474 of PARTS. */
475
476 static void
477 move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
478 {
479 unsigned i;
480 tree val = NULL_TREE;
481
482 gcc_assert (!parts->index);
483 for (i = 0; i < addr->n; i++)
484 {
485 val = addr->elts[i].val;
486 if (operand_equal_p (val, v, 0))
487 break;
488 }
489
490 if (i == addr->n)
491 return;
492
493 parts->index = fold_convert (sizetype, val);
494 parts->step = double_int_to_tree (sizetype, addr->elts[i].coef);
495 aff_combination_remove_elt (addr, i);
496 }
497
498 /* Adds ELT to PARTS. */
499
500 static void
501 add_to_parts (struct mem_address *parts, tree elt)
502 {
503 tree type;
504
505 if (!parts->index)
506 {
507 parts->index = fold_convert (sizetype, elt);
508 return;
509 }
510
511 if (!parts->base)
512 {
513 parts->base = elt;
514 return;
515 }
516
517 /* Add ELT to base. */
518 type = TREE_TYPE (parts->base);
519 if (POINTER_TYPE_P (type))
520 parts->base = fold_build_pointer_plus (parts->base, elt);
521 else
522 parts->base = fold_build2 (PLUS_EXPR, type,
523 parts->base, elt);
524 }
525
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and move the corresponding
   element(s) to PARTS (index and step).  Elements whose coefficient
   is the negated best multiplier are folded in via subtraction.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  double_int best_mult, amult, amult_neg;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  /* First pass: find the multiplier with the highest synthesis cost that
     the target can still express directly in an address.  */
  best_mult = double_int_zero;
  for (i = 0; i < addr->n; i++)
    {
      if (!addr->elts[i].coef.fits_shwi ())
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = addr->elts[i].coef;
	}
    }

  /* Nothing worth moving was found.  */
  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  Unmatched elements are
     compacted in place; J tracks the new length.  */
  for (i = j = 0; i < addr->n; i++)
    {
      amult = addr->elts[i].coef;
      amult_neg = double_int_ext_for_comb (-amult, addr);

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = double_int_to_tree (sizetype, best_mult);
}
596
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  SPEED selects speed vs. size costs.
   ADDR is consumed: elements are moved out of it as they are assigned
   to PARTS.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
	       bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  /* The constant part of ADDR becomes the offset.  */
  if (!addr->offset.is_zero ())
    parts->offset = double_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way how to distinguish between pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (!addr->elts[i].coef.is_one ())
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    double_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
662
663 /* Force the PARTS to register. */
664
665 static void
666 gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
667 {
668 if (parts->base)
669 parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
670 is_gimple_mem_ref_addr, NULL_TREE,
671 true, GSI_SAME_STMT);
672 if (parts->index)
673 parts->index = force_gimple_operand_gsi (gsi, parts->index,
674 true, NULL_TREE,
675 true, GSI_SAME_STMT);
676 }
677
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the mode
   of created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non NULL if IV_CAND comes from a base address
   object.

   If the initial split is not valid on the target, the address is
   progressively simplified -- fold step into index, symbol into base,
   index into base, offset into base -- retrying after each step until
   a valid reference is formed.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, eventually forcing it to register.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
				(sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      /* Index slot is free: demote the old base to index and let
		 the symbol become the base.  */
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
792
793 /* Copies components of the address from OP to ADDR. */
794
795 void
796 get_address_description (tree op, struct mem_address *addr)
797 {
798 if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
799 {
800 addr->symbol = TMR_BASE (op);
801 addr->base = TMR_INDEX2 (op);
802 }
803 else
804 {
805 addr->symbol = NULL_TREE;
806 if (TMR_INDEX2 (op))
807 {
808 gcc_assert (integer_zerop (TMR_BASE (op)));
809 addr->base = TMR_INDEX2 (op);
810 }
811 else
812 addr->base = TMR_BASE (op);
813 }
814 addr->index = TMR_INDEX (op);
815 addr->step = TMR_STEP (op);
816 addr->offset = TMR_OFFSET (op);
817 }
818
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF: side-effect
   and volatility flags, and -- when possible -- points-to and alignment
   information for the new base pointer.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  Only do so when the new base is an
     SSA name with no pointer info yet.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.
	     The copied alignment stays valid only when the old reference is
	     a MEM_REF and the new one does not add an index2 or a step
	     smaller than the known alignment; otherwise it must be
	     discarded.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      /* Adjust the misalignment by the constant-offset delta
		 between the two references.  */
	      unsigned int inc = (mem_ref_offset (old_ref)
				  - mem_ref_offset (new_ref)).low;
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  /* The old reference accessed a declared object directly; the new
	     pointer therefore points exactly to that variable.  */
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
880
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  /* A constant non-zero base folds directly into the offset.  */
  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  /* A symbol of the form &MEM[ptr + cst]: fold the constant into the
     offset and keep the inner pointer as the new symbol.  */
  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  /* A symbol that is &component-reference: strip the components down to
     the base object and add their accumulated byte offset.  */
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  /* A constant index folds into the offset, scaled by the step if any.  */
  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
				TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
956
957 /* Dump PARTS to FILE. */
958
959 extern void dump_mem_address (FILE *, struct mem_address *);
960 void
961 dump_mem_address (FILE *file, struct mem_address *parts)
962 {
963 if (parts->symbol)
964 {
965 fprintf (file, "symbol: ");
966 print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
967 fprintf (file, "\n");
968 }
969 if (parts->base)
970 {
971 fprintf (file, "base: ");
972 print_generic_expr (file, parts->base, TDF_SLIM);
973 fprintf (file, "\n");
974 }
975 if (parts->index)
976 {
977 fprintf (file, "index: ");
978 print_generic_expr (file, parts->index, TDF_SLIM);
979 fprintf (file, "\n");
980 }
981 if (parts->step)
982 {
983 fprintf (file, "step: ");
984 print_generic_expr (file, parts->step, TDF_SLIM);
985 fprintf (file, "\n");
986 }
987 if (parts->offset)
988 {
989 fprintf (file, "offset: ");
990 print_generic_expr (file, parts->offset, TDF_SLIM);
991 fprintf (file, "\n");
992 }
993 }
994
995 #include "gt-tree-ssa-address.h"