]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-ssa-address.c
* tree-ssa-loop.h: Remove include files.
[thirdparty/gcc.git] / gcc / tree-ssa-address.c
1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "tm_p.h"
29 #include "basic-block.h"
30 #include "tree-pretty-print.h"
31 #include "gimple.h"
32 #include "tree-ssanames.h"
33 #include "tree-ssa-loop-ivopts.h"
34 #include "tree-dfa.h"
35 #include "dumpfile.h"
36 #include "flags.h"
37 #include "tree-inline.h"
38 #include "tree-affine.h"
39
40 /* FIXME: We compute address costs using RTL. */
41 #include "insn-config.h"
42 #include "rtl.h"
43 #include "recog.h"
44 #include "expr.h"
45 #include "ggc.h"
46 #include "target.h"
47 #include "expmed.h"
48 #include "tree-ssa-address.h"
49
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
52
53 There are at least 5 different kinds of symbols that we can run up against:
54
55 (1) binds_local_p, small data area.
56 (2) binds_local_p, eg local statics
57 (3) !binds_local_p, eg global variables
58 (4) thread local, local_exec
59 (5) thread local, !local_exec
60
61 Now, (1) won't appear often in an array context, but it certainly can.
62 All you have to do is set -GN high enough, or explicitly mark any
63 random object __attribute__((section (".sdata"))).
64
65 All of these affect whether or not a symbol is in fact a valid address.
66 The only one tested here is (3). And that result may very well
67 be incorrect for (4) or (5).
68
69 An incorrect result here does not cause incorrect results out the
70 back end, because the expander in expr.c validizes the address. However
71 it would be nice to improve the handling here in order to produce more
72 precise results. */
73
/* A "template" for memory address, used to determine whether the address is
   valid for mode.  Built once per shape by addr_for_mem_ref and then
   patched in place with the concrete step/offset constants.  */

typedef struct GTY (()) mem_addr_template {
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  Points into REF itself (see
				   gen_addr_rtx), hence no separate GC mark.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  Also points into REF.  */
} mem_addr_template;
84
85
/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  Grown lazily and reused by
   addr_for_mem_ref when !really_expand; rooted for GC via GTY.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
91
/* Index into mem_addr_template_list for address space AS and the given
   combination of TARGET_MEM_REF components; each component argument is
   only tested for being nonzero.  Arguments are fully parenthesized so
   that compound expressions (e.g. ones containing operators of lower
   precedence than `!=' or `<<') expand correctly.  */
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  ((((int) (AS)) << 5) \
   | (((SYMBOL) != 0) << 4) \
   | (((BASE) != 0) << 3) \
   | (((INDEX) != 0) << 2) \
   | (((STEP) != 0) << 1) \
   | ((OFFSET) != 0))
99
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  The address
   built is SYMBOL + BASE + INDEX * STEP + OFFSET, with absent parts passed
   as NULL; if everything is absent, *ADDR becomes const0_rtx.  */

static void
gen_addr_rtx (enum machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  /* Record where the step lives inside the MULT so that callers
	     (the template cache in addr_for_mem_ref) can patch it later.  */
	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  /* symbol + offset is a link-time constant; wrap it in CONST so
	     the back end recognizes it as such.  */
	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  /* The offset is the whole address, so the patch point is *ADDR
	     itself.  */
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}
179
/* Description of a memory address.  Any of the parts may be NULL_TREE when
   absent; the address denoted is
   symbol + base + index * step + offset (see tree_mem_ref_addr).  */

struct mem_address
{
  tree symbol, base, index, step, offset;
};
186
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  /* A step of 1 carries no information; treat it as absent.  */
  if (addr->step && !integer_onep (addr->step))
    st = immed_double_int_const (tree_to_double_int (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  /* The offset is sign-extended from its tree type's precision into
     pointer_mode.  */
  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_int_const
	    (tree_to_double_int (addr->offset)
	     .sext (TYPE_PRECISION (TREE_TYPE (addr->offset))),
	     pointer_mode);
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  /* Build the template with a dummy symbol and fake (raw) registers;
	     only the shape matters for validity checking.  */
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st? const0_rtx : NULL_RTX,
			off? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      /* Patch the concrete step/offset constants into the shared template
	 before handing it out.  */
      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
269
270 /* implement addr_for_mem_ref() directly from a tree, which avoids exporting
271 the mem_address structure. */
272
273 rtx
274 addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
275 {
276 struct mem_address addr;
277 get_address_description (exp, &addr);
278 return addr_for_mem_ref (&addr, as, really_expand);
279 }
280
281 /* Returns address of MEM_REF in TYPE. */
282
283 tree
284 tree_mem_ref_addr (tree type, tree mem_ref)
285 {
286 tree addr;
287 tree act_elem;
288 tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
289 tree addr_base = NULL_TREE, addr_off = NULL_TREE;
290
291 addr_base = fold_convert (type, TMR_BASE (mem_ref));
292
293 act_elem = TMR_INDEX (mem_ref);
294 if (act_elem)
295 {
296 if (step)
297 act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
298 act_elem, step);
299 addr_off = act_elem;
300 }
301
302 act_elem = TMR_INDEX2 (mem_ref);
303 if (act_elem)
304 {
305 if (addr_off)
306 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
307 addr_off, act_elem);
308 else
309 addr_off = act_elem;
310 }
311
312 if (offset && !integer_zerop (offset))
313 {
314 if (addr_off)
315 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
316 fold_convert (TREE_TYPE (addr_off), offset));
317 else
318 addr_off = offset;
319 }
320
321 if (addr_off)
322 addr = fold_build_pointer_plus (addr_base, addr_off);
323 else
324 addr = addr_base;
325
326 return addr;
327 }
328
329 /* Returns true if a memory reference in MODE and with parameters given by
330 ADDR is valid on the current target. */
331
332 static bool
333 valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
334 struct mem_address *addr)
335 {
336 rtx address;
337
338 address = addr_for_mem_ref (addr, as, false);
339 if (!address)
340 return false;
341
342 return memory_address_addr_space_p (mode, address, as);
343 }
344
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF (or a plain MEM_REF when that suffices).  If VERIFY is
   false omit the verification step.  Note that ADDR is canonicalized in
   place (step of one dropped, offset converted to ALIAS_PTR_TYPE).  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  /* A step of 1 carries no information.  */
  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  /* The offset operand is always present and carries the alias pointer
     type.  */
  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  /* Choose the first operand: the symbol if there is one (any non-symbol
     base then moves to the second index slot), else a pointer-typed base,
     else a zero pointer constant with the base as second index.  */
  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
396
397 /* Returns true if OBJ is an object whose address is a link time constant. */
398
399 static bool
400 fixed_address_object_p (tree obj)
401 {
402 return (TREE_CODE (obj) == VAR_DECL
403 && (TREE_STATIC (obj)
404 || DECL_EXTERNAL (obj))
405 && ! DECL_DLLIMPORT_P (obj));
406 }
407
408 /* If ADDR contains an address of object that is a link time constant,
409 move it to PARTS->symbol. */
410
411 static void
412 move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
413 {
414 unsigned i;
415 tree val = NULL_TREE;
416
417 for (i = 0; i < addr->n; i++)
418 {
419 if (!addr->elts[i].coef.is_one ())
420 continue;
421
422 val = addr->elts[i].val;
423 if (TREE_CODE (val) == ADDR_EXPR
424 && fixed_address_object_p (TREE_OPERAND (val, 0)))
425 break;
426 }
427
428 if (i == addr->n)
429 return;
430
431 parts->symbol = val;
432 aff_combination_remove_elt (addr, i);
433 }
434
435 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
436
437 static void
438 move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
439 aff_tree *addr)
440 {
441 unsigned i;
442 tree val = NULL_TREE;
443 int qual;
444
445 for (i = 0; i < addr->n; i++)
446 {
447 if (!addr->elts[i].coef.is_one ())
448 continue;
449
450 val = addr->elts[i].val;
451 if (operand_equal_p (val, base_hint, 0))
452 break;
453 }
454
455 if (i == addr->n)
456 return;
457
458 /* Cast value to appropriate pointer type. We cannot use a pointer
459 to TYPE directly, as the back-end will assume registers of pointer
460 type are aligned, and just the base itself may not actually be.
461 We use void pointer to the type's address space instead. */
462 qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
463 type = build_qualified_type (void_type_node, qual);
464 parts->base = fold_convert (build_pointer_type (type), val);
465 aff_combination_remove_elt (addr, i);
466 }
467
468 /* If ADDR contains an address of a dereferenced pointer, move it to
469 PARTS->base. */
470
471 static void
472 move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
473 {
474 unsigned i;
475 tree val = NULL_TREE;
476
477 for (i = 0; i < addr->n; i++)
478 {
479 if (!addr->elts[i].coef.is_one ())
480 continue;
481
482 val = addr->elts[i].val;
483 if (POINTER_TYPE_P (TREE_TYPE (val)))
484 break;
485 }
486
487 if (i == addr->n)
488 return;
489
490 parts->base = val;
491 aff_combination_remove_elt (addr, i);
492 }
493
494 /* Moves the loop variant part V in linear address ADDR to be the index
495 of PARTS. */
496
497 static void
498 move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
499 {
500 unsigned i;
501 tree val = NULL_TREE;
502
503 gcc_assert (!parts->index);
504 for (i = 0; i < addr->n; i++)
505 {
506 val = addr->elts[i].val;
507 if (operand_equal_p (val, v, 0))
508 break;
509 }
510
511 if (i == addr->n)
512 return;
513
514 parts->index = fold_convert (sizetype, val);
515 parts->step = double_int_to_tree (sizetype, addr->elts[i].coef);
516 aff_combination_remove_elt (addr, i);
517 }
518
519 /* Adds ELT to PARTS. */
520
521 static void
522 add_to_parts (struct mem_address *parts, tree elt)
523 {
524 tree type;
525
526 if (!parts->index)
527 {
528 parts->index = fold_convert (sizetype, elt);
529 return;
530 }
531
532 if (!parts->base)
533 {
534 parts->base = elt;
535 return;
536 }
537
538 /* Add ELT to base. */
539 type = TREE_TYPE (parts->base);
540 if (POINTER_TYPE_P (type))
541 parts->base = fold_build_pointer_plus (parts->base, elt);
542 else
543 parts->base = fold_build2 (PLUS_EXPR, type,
544 parts->base, elt);
545 }
546
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and move the corresponding
   element(s) to PARTS (as index and step).  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  double_int best_mult, amult, amult_neg;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  /* Find the multiplier with the highest multiplication cost among those
     the target can encode directly in an address.  */
  best_mult = double_int_zero;
  for (i = 0; i < addr->n; i++)
    {
      if (!addr->elts[i].coef.fits_shwi ())
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = addr->elts[i].coef;
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  Elements whose coefficient
     is the negation of best_mult are subtracted; all other elements are
     kept in ADDR, compacted in place via index J.  */
  for (i = j = 0; i < addr->n; i++)
    {
      amult = addr->elts[i].coef;
      amult_neg = double_int_ext_for_comb (-amult, addr);

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  /* The sum of the collected elements becomes the index, scaled by
     BEST_MULT as the step.  */
  parts->index = mult_elt;
  parts->step = double_int_to_tree (sizetype, best_mult);
}
617
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  SPEED selects speed vs. size costing.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
               bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  /* The constant part of ADDR becomes the offset.  */
  if (!addr->offset.is_zero ())
    parts->offset = double_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way how to distinguish between pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (!addr->elts[i].coef.is_one ())
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    double_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
683
/* Force the PARTS to register, emitting the needed statements at GSI.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  /* The base must satisfy is_gimple_mem_ref_addr so it is directly usable
     as the first operand of a MEM_REF/TARGET_MEM_REF.  */
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  /* The index only needs to be a gimple value.  */
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}
698
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the mode
   of created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non NULL if IV_CAND comes from a base address
   object.  If the full decomposition is not a valid address for the
   target, the parts are progressively folded into the base until a valid
   reference is formed; a plain register base is always valid, so the
   final gcc_unreachable cannot trigger for a well-behaved target.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, eventually forcing it to register.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
				(sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      /* The index slot is free; move the old base there and use
		 the symbol as the new base.  */
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
813
814 /* Copies components of the address from OP to ADDR. */
815
816 void
817 get_address_description (tree op, struct mem_address *addr)
818 {
819 if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
820 {
821 addr->symbol = TMR_BASE (op);
822 addr->base = TMR_INDEX2 (op);
823 }
824 else
825 {
826 addr->symbol = NULL_TREE;
827 if (TMR_INDEX2 (op))
828 {
829 gcc_assert (integer_zerop (TMR_BASE (op)));
830 addr->base = TMR_INDEX2 (op);
831 }
832 else
833 addr->base = TMR_BASE (op);
834 }
835 addr->index = TMR_INDEX (op);
836 addr->step = TMR_STEP (op);
837 addr->offset = TMR_OFFSET (op);
838 }
839
/* Copies the reference information (side effects, volatility, and, where
   possible, points-to and alignment info) from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  Only do so if the new base SSA name
     does not already carry pointer info.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information:
	     the copied alignment stays valid only if OLD_REF is a MEM_REF
	     and NEW_REF does not add an unscaled second index or a step
	     smaller than the known alignment; then only the misalignment
	     needs adjusting by the difference of the constant offsets.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      unsigned int inc = (mem_ref_offset (old_ref)
				  - mem_ref_offset (new_ref)).low;
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  /* A decl base gives an exact points-to solution.  */
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
901
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  /* A constant non-zero base folds into the offset.  */
  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  /* Symbol of the form &MEM[ptr, cst]: use ptr as the symbol operand and
     add cst to the offset.  */
  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      /* Symbol of the form &a.b.c: strip the component reference and add
	 its constant byte offset to the offset operand.  */
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
		         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  /* A constant index, scaled by the step if one is present, also folds
     into the offset.  */
  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     if it is valid in this for on the target - the propagation result
     wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
			        TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
977
978 /* Dump PARTS to FILE. */
979
980 extern void dump_mem_address (FILE *, struct mem_address *);
981 void
982 dump_mem_address (FILE *file, struct mem_address *parts)
983 {
984 if (parts->symbol)
985 {
986 fprintf (file, "symbol: ");
987 print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
988 fprintf (file, "\n");
989 }
990 if (parts->base)
991 {
992 fprintf (file, "base: ");
993 print_generic_expr (file, parts->base, TDF_SLIM);
994 fprintf (file, "\n");
995 }
996 if (parts->index)
997 {
998 fprintf (file, "index: ");
999 print_generic_expr (file, parts->index, TDF_SLIM);
1000 fprintf (file, "\n");
1001 }
1002 if (parts->step)
1003 {
1004 fprintf (file, "step: ");
1005 print_generic_expr (file, parts->step, TDF_SLIM);
1006 fprintf (file, "\n");
1007 }
1008 if (parts->offset)
1009 {
1010 fprintf (file, "offset: ");
1011 print_generic_expr (file, parts->offset, TDF_SLIM);
1012 fprintf (file, "\n");
1013 }
1014 }
1015
1016 #include "gt-tree-ssa-address.h"