1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Utility functions for manipulation of TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "vec.h"
30 #include "double-int.h"
31 #include "input.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "tm_p.h"
40 #include "predict.h"
41 #include "hard-reg-set.h"
42 #include "input.h"
43 #include "function.h"
44 #include "basic-block.h"
45 #include "tree-pretty-print.h"
46 #include "tree-ssa-alias.h"
47 #include "internal-fn.h"
48 #include "gimple-expr.h"
49 #include "is-a.h"
50 #include "gimple.h"
51 #include "gimple-iterator.h"
52 #include "gimplify-me.h"
53 #include "stringpool.h"
54 #include "tree-ssanames.h"
55 #include "tree-ssa-loop-ivopts.h"
56 #include "expr.h"
57 #include "tree-dfa.h"
58 #include "dumpfile.h"
59 #include "flags.h"
60 #include "tree-inline.h"
61 #include "tree-affine.h"
62
63 /* FIXME: We compute address costs using RTL. */
64 #include "insn-config.h"
65 #include "rtl.h"
66 #include "recog.h"
67 #include "expr.h"
68 #include "target.h"
69 #include "expmed.h"
70 #include "tree-ssa-address.h"
71
72 /* TODO -- handling of symbols (according to Richard Henderson's
73 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
74
75 There are at least 5 different kinds of symbols that we can run up against:
76
77 (1) binds_local_p, small data area.
78 (2) binds_local_p, eg local statics
79 (3) !binds_local_p, eg global variables
80 (4) thread local, local_exec
81 (5) thread local, !local_exec
82
83 Now, (1) won't appear often in an array context, but it certainly can.
84 All you have to do is set -GN high enough, or explicitly mark any
85 random object __attribute__((section (".sdata"))).
86
87 All of these affect whether or not a symbol is in fact a valid address.
88 The only one tested here is (3). And that result may very well
89 be incorrect for (4) or (5).
90
91 An incorrect result here does not cause incorrect results out of the
92 back end, because the expander in expr.c validizes the address. However,
93 it would be nice to improve the handling here in order to produce more
94 precise results. */
95
96 /* A "template" for a memory address, used to determine whether the address
97 is valid for a given mode. */
98
99 typedef struct GTY (()) mem_addr_template {
100 rtx ref; /* The template. */
101 rtx * GTY ((skip)) step_p; /* The point in template where the step should be
102 filled in. */
103 rtx * GTY ((skip)) off_p; /* The point in template where the offset should
104 be filled in. */
105 } mem_addr_template;
106
107
108 /* The templates. Each of the low five bits of the index corresponds to one
109 component of TARGET_MEM_REF being present, while the high bits identify
110 the address space. See TEMPL_IDX. */
111
112 static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
113
114 #define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
115 (((int) (AS) << 5) \
116 | ((SYMBOL != 0) << 4) \
117 | ((BASE != 0) << 3) \
118 | ((INDEX != 0) << 2) \
119 | ((STEP != 0) << 1) \
120 | (OFFSET != 0))
121
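/* For example, with AS equal to 0 and only BASE, INDEX and STEP present,
   TEMPL_IDX evaluates to (1 << 3) | (1 << 2) | (1 << 1) == 14, so all such
   addresses share the template stored at index 14 of mem_addr_template_list.  */
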
122 /* Stores the address for a memory reference with parameters SYMBOL, BASE,
123 INDEX, STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores
124 pointers to the locations of the step and the offset in *STEP_P and *OFFSET_P. */
125
126 static void
127 gen_addr_rtx (machine_mode address_mode,
128 rtx symbol, rtx base, rtx index, rtx step, rtx offset,
129 rtx *addr, rtx **step_p, rtx **offset_p)
130 {
131 rtx act_elem;
132
133 *addr = NULL_RTX;
134 if (step_p)
135 *step_p = NULL;
136 if (offset_p)
137 *offset_p = NULL;
138
139 if (index)
140 {
141 act_elem = index;
142 if (step)
143 {
144 act_elem = gen_rtx_MULT (address_mode, act_elem, step);
145
146 if (step_p)
147 *step_p = &XEXP (act_elem, 1);
148 }
149
150 *addr = act_elem;
151 }
152
153 if (base && base != const0_rtx)
154 {
155 if (*addr)
156 *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
157 else
158 *addr = base;
159 }
160
161 if (symbol)
162 {
163 act_elem = symbol;
164 if (offset)
165 {
166 act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);
167
168 if (offset_p)
169 *offset_p = &XEXP (act_elem, 1);
170
171 if (GET_CODE (symbol) == SYMBOL_REF
172 || GET_CODE (symbol) == LABEL_REF
173 || GET_CODE (symbol) == CONST)
174 act_elem = gen_rtx_CONST (address_mode, act_elem);
175 }
176
177 if (*addr)
178 *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
179 else
180 *addr = act_elem;
181 }
182 else if (offset)
183 {
184 if (*addr)
185 {
186 *addr = gen_rtx_PLUS (address_mode, *addr, offset);
187 if (offset_p)
188 *offset_p = &XEXP (*addr, 1);
189 }
190 else
191 {
192 *addr = offset;
193 if (offset_p)
194 *offset_p = addr;
195 }
196 }
197
198 if (!*addr)
199 *addr = const0_rtx;
200 }
201
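/* For illustration: when all of SYMBOL, BASE, INDEX, STEP and OFFSET are
   given, the RTX built by gen_addr_rtx has roughly the shape
     (plus (plus (mult index step) base) (const (plus symbol offset)))
   with the exact operand order of the inner PLUS chosen by
   simplify_gen_binary.  */
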
202 /* Description of a memory address. */
203
204 struct mem_address
205 {
206 tree symbol, base, index, step, offset;
207 };
208
209 /* Returns address for TARGET_MEM_REF with parameters given by ADDR
210 in address space AS.
211 If REALLY_EXPAND is false, just make fake registers instead
212 of really expanding the operands, and perform the expansion in-place
213 by using one of the "templates". */
214
215 rtx
216 addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
217 bool really_expand)
218 {
219 machine_mode address_mode = targetm.addr_space.address_mode (as);
220 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
221 rtx address, sym, bse, idx, st, off;
222 struct mem_addr_template *templ;
223
224 if (addr->step && !integer_onep (addr->step))
225 st = immed_wide_int_const (addr->step, pointer_mode);
226 else
227 st = NULL_RTX;
228
229 if (addr->offset && !integer_zerop (addr->offset))
230 {
231 offset_int dc = offset_int::from (addr->offset, SIGNED);
232 off = immed_wide_int_const (dc, pointer_mode);
233 }
234 else
235 off = NULL_RTX;
236
237 if (!really_expand)
238 {
239 unsigned int templ_index
240 = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);
241
242 if (templ_index >= vec_safe_length (mem_addr_template_list))
243 vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);
244
245 /* Reuse the templates for addresses, so that we do not waste memory. */
246 templ = &(*mem_addr_template_list)[templ_index];
247 if (!templ->ref)
248 {
249 sym = (addr->symbol ?
250 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
251 : NULL_RTX);
252 bse = (addr->base ?
253 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
254 : NULL_RTX);
255 idx = (addr->index ?
256 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
257 : NULL_RTX);
258
259 gen_addr_rtx (pointer_mode, sym, bse, idx,
260 st? const0_rtx : NULL_RTX,
261 off? const0_rtx : NULL_RTX,
262 &templ->ref,
263 &templ->step_p,
264 &templ->off_p);
265 }
266
267 if (st)
268 *templ->step_p = st;
269 if (off)
270 *templ->off_p = off;
271
272 return templ->ref;
273 }
274
275 /* Otherwise really expand the expressions. */
276 sym = (addr->symbol
277 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
278 : NULL_RTX);
279 bse = (addr->base
280 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
281 : NULL_RTX);
282 idx = (addr->index
283 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
284 : NULL_RTX);
285
286 gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
287 if (pointer_mode != address_mode)
288 address = convert_memory_address (address_mode, address);
289 return address;
290 }
291
292 /* Implement addr_for_mem_ref() directly from a tree, which avoids exporting
293 the mem_address structure. */
294
295 rtx
296 addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
297 {
298 struct mem_address addr;
299 get_address_description (exp, &addr);
300 return addr_for_mem_ref (&addr, as, really_expand);
301 }
302
303 /* Returns address of MEM_REF in TYPE. */
304
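/* The address denoted by a TARGET_MEM_REF is
     TMR_BASE + TMR_INDEX * TMR_STEP + TMR_INDEX2 + TMR_OFFSET;
   the function below rebuilds this sum as a tree of type TYPE.  */
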
305 tree
306 tree_mem_ref_addr (tree type, tree mem_ref)
307 {
308 tree addr;
309 tree act_elem;
310 tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
311 tree addr_base = NULL_TREE, addr_off = NULL_TREE;
312
313 addr_base = fold_convert (type, TMR_BASE (mem_ref));
314
315 act_elem = TMR_INDEX (mem_ref);
316 if (act_elem)
317 {
318 if (step)
319 act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
320 act_elem, step);
321 addr_off = act_elem;
322 }
323
324 act_elem = TMR_INDEX2 (mem_ref);
325 if (act_elem)
326 {
327 if (addr_off)
328 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
329 addr_off, act_elem);
330 else
331 addr_off = act_elem;
332 }
333
334 if (offset && !integer_zerop (offset))
335 {
336 if (addr_off)
337 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
338 fold_convert (TREE_TYPE (addr_off), offset));
339 else
340 addr_off = offset;
341 }
342
343 if (addr_off)
344 addr = fold_build_pointer_plus (addr_base, addr_off);
345 else
346 addr = addr_base;
347
348 return addr;
349 }
350
351 /* Returns true if a memory reference in MODE and with parameters given by
352 ADDR is valid on the current target. */
353
354 static bool
355 valid_mem_ref_p (machine_mode mode, addr_space_t as,
356 struct mem_address *addr)
357 {
358 rtx address;
359
360 address = addr_for_mem_ref (addr, as, false);
361 if (!address)
362 return false;
363
364 return memory_address_addr_space_p (mode, address, as);
365 }
366
367 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
368 is valid on the current target and if so, creates and returns the
369 TARGET_MEM_REF. If VERIFY is false, omit the verification step. */
370
371 static tree
372 create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
373 bool verify)
374 {
375 tree base, index2;
376
377 if (verify
378 && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
379 return NULL_TREE;
380
381 if (addr->step && integer_onep (addr->step))
382 addr->step = NULL_TREE;
383
384 if (addr->offset)
385 addr->offset = fold_convert (alias_ptr_type, addr->offset);
386 else
387 addr->offset = build_int_cst (alias_ptr_type, 0);
388
389 if (addr->symbol)
390 {
391 base = addr->symbol;
392 index2 = addr->base;
393 }
394 else if (addr->base
395 && POINTER_TYPE_P (TREE_TYPE (addr->base)))
396 {
397 base = addr->base;
398 index2 = NULL_TREE;
399 }
400 else
401 {
402 base = build_int_cst (ptr_type_node, 0);
403 index2 = addr->base;
404 }
405
406 /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
407 ??? As IVOPTs does not follow restrictions to where the base
408 pointer may point to create a MEM_REF only if we know that
409 base is valid. */
410 if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
411 && (!index2 || integer_zerop (index2))
412 && (!addr->index || integer_zerop (addr->index)))
413 return fold_build2 (MEM_REF, type, base, addr->offset);
414
415 return build5 (TARGET_MEM_REF, type,
416 base, addr->offset, addr->index, addr->step, index2);
417 }
418
419 /* Returns true if OBJ is an object whose address is a link time constant. */
420
421 static bool
422 fixed_address_object_p (tree obj)
423 {
424 return (TREE_CODE (obj) == VAR_DECL
425 && (TREE_STATIC (obj)
426 || DECL_EXTERNAL (obj))
427 && ! DECL_DLLIMPORT_P (obj));
428 }
429
430 /* If ADDR contains the address of an object that is a link time constant,
431 move it to PARTS->symbol. */
432
433 static void
434 move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
435 {
436 unsigned i;
437 tree val = NULL_TREE;
438
439 for (i = 0; i < addr->n; i++)
440 {
441 if (addr->elts[i].coef != 1)
442 continue;
443
444 val = addr->elts[i].val;
445 if (TREE_CODE (val) == ADDR_EXPR
446 && fixed_address_object_p (TREE_OPERAND (val, 0)))
447 break;
448 }
449
450 if (i == addr->n)
451 return;
452
453 parts->symbol = val;
454 aff_combination_remove_elt (addr, i);
455 }
456
457 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
458
459 static void
460 move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
461 aff_tree *addr)
462 {
463 unsigned i;
464 tree val = NULL_TREE;
465 int qual;
466
467 for (i = 0; i < addr->n; i++)
468 {
469 if (addr->elts[i].coef != 1)
470 continue;
471
472 val = addr->elts[i].val;
473 if (operand_equal_p (val, base_hint, 0))
474 break;
475 }
476
477 if (i == addr->n)
478 return;
479
480 /* Cast the value to an appropriate pointer type. We cannot use a pointer
481 to TYPE directly, as the back-end will assume registers of pointer
482 type are aligned, and just the base itself may not actually be.
483 We use a void pointer to the type's address space instead. */
484 qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
485 type = build_qualified_type (void_type_node, qual);
486 parts->base = fold_convert (build_pointer_type (type), val);
487 aff_combination_remove_elt (addr, i);
488 }
489
490 /* If ADDR contains an address of a dereferenced pointer, move it to
491 PARTS->base. */
492
493 static void
494 move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
495 {
496 unsigned i;
497 tree val = NULL_TREE;
498
499 for (i = 0; i < addr->n; i++)
500 {
501 if (addr->elts[i].coef != 1)
502 continue;
503
504 val = addr->elts[i].val;
505 if (POINTER_TYPE_P (TREE_TYPE (val)))
506 break;
507 }
508
509 if (i == addr->n)
510 return;
511
512 parts->base = val;
513 aff_combination_remove_elt (addr, i);
514 }
515
516 /* Moves the loop variant part V in linear address ADDR to be the index
517 of PARTS. */
518
519 static void
520 move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
521 {
522 unsigned i;
523 tree val = NULL_TREE;
524
525 gcc_assert (!parts->index);
526 for (i = 0; i < addr->n; i++)
527 {
528 val = addr->elts[i].val;
529 if (operand_equal_p (val, v, 0))
530 break;
531 }
532
533 if (i == addr->n)
534 return;
535
536 parts->index = fold_convert (sizetype, val);
537 parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
538 aff_combination_remove_elt (addr, i);
539 }
540
541 /* Adds ELT to PARTS. */
542
543 static void
544 add_to_parts (struct mem_address *parts, tree elt)
545 {
546 tree type;
547
548 if (!parts->index)
549 {
550 parts->index = fold_convert (sizetype, elt);
551 return;
552 }
553
554 if (!parts->base)
555 {
556 parts->base = elt;
557 return;
558 }
559
560 /* Add ELT to base. */
561 type = TREE_TYPE (parts->base);
562 if (POINTER_TYPE_P (type))
563 parts->base = fold_build_pointer_plus (parts->base, elt);
564 else
565 parts->base = fold_build2 (PLUS_EXPR, type,
566 parts->base, elt);
567 }
568
569 /* Finds the most expensive multiplication in ADDR that can be
570 expressed in an addressing mode and moves the corresponding
571 element(s) to PARTS. */
572
573 static void
574 most_expensive_mult_to_index (tree type, struct mem_address *parts,
575 aff_tree *addr, bool speed)
576 {
577 addr_space_t as = TYPE_ADDR_SPACE (type);
578 machine_mode address_mode = targetm.addr_space.address_mode (as);
579 HOST_WIDE_INT coef;
580 unsigned best_mult_cost = 0, acost;
581 tree mult_elt = NULL_TREE, elt;
582 unsigned i, j;
583 enum tree_code op_code;
584
585 offset_int best_mult = 0;
586 for (i = 0; i < addr->n; i++)
587 {
588 if (!wi::fits_shwi_p (addr->elts[i].coef))
589 continue;
590
591 coef = addr->elts[i].coef.to_shwi ();
592 if (coef == 1
593 || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
594 continue;
595
596 acost = mult_by_coeff_cost (coef, address_mode, speed);
597
598 if (acost > best_mult_cost)
599 {
600 best_mult_cost = acost;
601 best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
602 }
603 }
604
605 if (!best_mult_cost)
606 return;
607
608 /* Collect elements multiplied by best_mult. */
609 for (i = j = 0; i < addr->n; i++)
610 {
611 offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
612 offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));
613
614 if (amult == best_mult)
615 op_code = PLUS_EXPR;
616 else if (amult_neg == best_mult)
617 op_code = MINUS_EXPR;
618 else
619 {
620 addr->elts[j] = addr->elts[i];
621 j++;
622 continue;
623 }
624
625 elt = fold_convert (sizetype, addr->elts[i].val);
626 if (mult_elt)
627 mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
628 else if (op_code == PLUS_EXPR)
629 mult_elt = elt;
630 else
631 mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
632 }
633 addr->n = j;
634
635 parts->index = mult_elt;
636 parts->step = wide_int_to_tree (sizetype, best_mult);
637 }
638
639 /* Splits address ADDR for a memory access of type TYPE into PARTS.
640 If BASE_HINT is non-NULL, it specifies an SSA name to be used
641 preferentially as base of the reference, and IV_CAND is the selected
642 iv candidate used in ADDR.
643
644 TODO -- be more clever about the distribution of the elements of ADDR
645 to PARTS. Some architectures do not support anything but a single
646 register in an address, possibly with a small integer offset; while
647 create_mem_ref will simplify the address to an acceptable shape
648 later, it would be more efficient to know that asking for complicated
649 addressing modes is useless. */
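
/* As an illustration (assuming a target whose addressing modes allow a
   symbol, an index scaled by 4 and a constant displacement): for an address
   &a + 4 * (sizetype) i + 16, where "a" is a global array, the split would
   typically be symbol = &a, index = (sizetype) i, step = 4, offset = 16,
   and any remaining elements are distributed by add_to_parts.  */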
650
651 static void
652 addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
653 tree base_hint, struct mem_address *parts,
654 bool speed)
655 {
656 tree part;
657 unsigned i;
658
659 parts->symbol = NULL_TREE;
660 parts->base = NULL_TREE;
661 parts->index = NULL_TREE;
662 parts->step = NULL_TREE;
663
664 if (addr->offset != 0)
665 parts->offset = wide_int_to_tree (sizetype, addr->offset);
666 else
667 parts->offset = NULL_TREE;
668
669 /* Try to find a symbol. */
670 move_fixed_address_to_symbol (parts, addr);
671
672 /* No need to do address parts reassociation if the number of parts
673 is <= 2 -- in that case, no loop invariant code motion can be
674 exposed. */
675
676 if (!base_hint && (addr->n > 2))
677 move_variant_to_index (parts, addr, iv_cand);
678
679 /* First move the most expensive feasible multiplication
680 to index. */
681 if (!parts->index)
682 most_expensive_mult_to_index (type, parts, addr, speed);
683
684 /* Try to find a base of the reference. Since at the moment
685 there is no reliable way to distinguish between a pointer and its
686 offset, this is just a guess. */
687 if (!parts->symbol && base_hint)
688 move_hint_to_base (type, parts, base_hint, addr);
689 if (!parts->symbol && !parts->base)
690 move_pointer_to_base (parts, addr);
691
692 /* Then try to process the remaining elements. */
693 for (i = 0; i < addr->n; i++)
694 {
695 part = fold_convert (sizetype, addr->elts[i].val);
696 if (addr->elts[i].coef != 1)
697 part = fold_build2 (MULT_EXPR, sizetype, part,
698 wide_int_to_tree (sizetype, addr->elts[i].coef));
699 add_to_parts (parts, part);
700 }
701 if (addr->rest)
702 add_to_parts (parts, fold_convert (sizetype, addr->rest));
703 }
704
705 /* Force the components of PARTS into registers where necessary. */
706
707 static void
708 gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
709 {
710 if (parts->base)
711 parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
712 is_gimple_mem_ref_addr, NULL_TREE,
713 true, GSI_SAME_STMT);
714 if (parts->index)
715 parts->index = force_gimple_operand_gsi (gsi, parts->index,
716 true, NULL_TREE,
717 true, GSI_SAME_STMT);
718 }
719
720 /* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
721 computations are emitted in front of GSI. TYPE is the type of the
722 created memory reference. IV_CAND is the selected iv candidate in ADDR,
723 and BASE_HINT is non-NULL if IV_CAND comes from a base address
724 object. */
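
/* If the initial split is not a valid address for the target, the code
   below retries with progressively simpler shapes: the step is folded into
   the index, then the symbol, the index and finally the offset are merged
   into the base, until only a bare base register remains.  */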
725
726 tree
727 create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
728 tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
729 {
730 tree mem_ref, tmp;
731 struct mem_address parts;
732
733 addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
734 gimplify_mem_ref_parts (gsi, &parts);
735 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
736 if (mem_ref)
737 return mem_ref;
738
739 /* The expression is too complicated. Try making it simpler. */
740
741 if (parts.step && !integer_onep (parts.step))
742 {
743 /* Move the multiplication to index. */
744 gcc_assert (parts.index);
745 parts.index = force_gimple_operand_gsi (gsi,
746 fold_build2 (MULT_EXPR, sizetype,
747 parts.index, parts.step),
748 true, NULL_TREE, true, GSI_SAME_STMT);
749 parts.step = NULL_TREE;
750
751 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
752 if (mem_ref)
753 return mem_ref;
754 }
755
756 if (parts.symbol)
757 {
758 tmp = parts.symbol;
759 gcc_assert (is_gimple_val (tmp));
760
761 /* Add the symbol to the base, possibly forcing it into a register. */
762 if (parts.base)
763 {
764 gcc_assert (useless_type_conversion_p
765 (sizetype, TREE_TYPE (parts.base)));
766
767 if (parts.index)
768 {
769 parts.base = force_gimple_operand_gsi_1 (gsi,
770 fold_build_pointer_plus (tmp, parts.base),
771 is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
772 }
773 else
774 {
775 parts.index = parts.base;
776 parts.base = tmp;
777 }
778 }
779 else
780 parts.base = tmp;
781 parts.symbol = NULL_TREE;
782
783 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
784 if (mem_ref)
785 return mem_ref;
786 }
787
788 if (parts.index)
789 {
790 /* Add index to base. */
791 if (parts.base)
792 {
793 parts.base = force_gimple_operand_gsi_1 (gsi,
794 fold_build_pointer_plus (parts.base, parts.index),
795 is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
796 }
797 else
798 parts.base = parts.index;
799 parts.index = NULL_TREE;
800
801 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
802 if (mem_ref)
803 return mem_ref;
804 }
805
806 if (parts.offset && !integer_zerop (parts.offset))
807 {
808 /* Try adding offset to base. */
809 if (parts.base)
810 {
811 parts.base = force_gimple_operand_gsi_1 (gsi,
812 fold_build_pointer_plus (parts.base, parts.offset),
813 is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
814 }
815 else
816 parts.base = parts.offset;
817
818 parts.offset = NULL_TREE;
819
820 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
821 if (mem_ref)
822 return mem_ref;
823 }
824
825 /* Verify that the address is in the simplest possible shape
826 (only a register). If we cannot create such a memory reference,
827 something is really wrong. */
828 gcc_assert (parts.symbol == NULL_TREE);
829 gcc_assert (parts.index == NULL_TREE);
830 gcc_assert (!parts.step || integer_onep (parts.step));
831 gcc_assert (!parts.offset || integer_zerop (parts.offset));
832 gcc_unreachable ();
833 }
834
835 /* Copies components of the address from OP to ADDR. */
836
837 void
838 get_address_description (tree op, struct mem_address *addr)
839 {
840 if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
841 {
842 addr->symbol = TMR_BASE (op);
843 addr->base = TMR_INDEX2 (op);
844 }
845 else
846 {
847 addr->symbol = NULL_TREE;
848 if (TMR_INDEX2 (op))
849 {
850 gcc_assert (integer_zerop (TMR_BASE (op)));
851 addr->base = TMR_INDEX2 (op);
852 }
853 else
854 addr->base = TMR_BASE (op);
855 }
856 addr->index = TMR_INDEX (op);
857 addr->step = TMR_STEP (op);
858 addr->offset = TMR_OFFSET (op);
859 }
860
861 /* Copies the reference information from OLD_REF to NEW_REF, where
862 NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */
863
864 void
865 copy_ref_info (tree new_ref, tree old_ref)
866 {
867 tree new_ptr_base = NULL_TREE;
868
869 gcc_assert (TREE_CODE (new_ref) == MEM_REF
870 || TREE_CODE (new_ref) == TARGET_MEM_REF);
871
872 TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
873 TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);
874
875 new_ptr_base = TREE_OPERAND (new_ref, 0);
876
877 /* We can transfer points-to information from an old pointer
878 or decl base to the new one. */
879 if (new_ptr_base
880 && TREE_CODE (new_ptr_base) == SSA_NAME
881 && !SSA_NAME_PTR_INFO (new_ptr_base))
882 {
883 tree base = get_base_address (old_ref);
884 if (!base)
885 ;
886 else if ((TREE_CODE (base) == MEM_REF
887 || TREE_CODE (base) == TARGET_MEM_REF)
888 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
889 && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
890 {
891 struct ptr_info_def *new_pi;
892 unsigned int align, misalign;
893
894 duplicate_ssa_name_ptr_info
895 (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
896 new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
897 /* We have to be careful about transferring alignment information. */
898 if (get_ptr_info_alignment (new_pi, &align, &misalign)
899 && TREE_CODE (old_ref) == MEM_REF
900 && !(TREE_CODE (new_ref) == TARGET_MEM_REF
901 && (TMR_INDEX2 (new_ref)
902 || (TMR_STEP (new_ref)
903 && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
904 < align)))))
905 {
906 unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
907 - mem_ref_offset (new_ref).to_short_addr ());
908 adjust_ptr_info_misalignment (new_pi, inc);
909 }
910 else
911 mark_ptr_info_alignment_unknown (new_pi);
912 }
913 else if (TREE_CODE (base) == VAR_DECL
914 || TREE_CODE (base) == PARM_DECL
915 || TREE_CODE (base) == RESULT_DECL)
916 {
917 struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
918 pt_solution_set_var (&pi->pt, base);
919 }
920 }
921 }
922
923 /* Move constants in TARGET_MEM_REF REF to the offset. Returns the new target
924 mem ref if anything changes, NULL_TREE otherwise. */
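
/* For example, if TMR_INDEX is the constant 3 and TMR_STEP is 4, the
   constant 12 is folded into TMR_OFFSET and the index and step are
   dropped.  */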
925
926 tree
927 maybe_fold_tmr (tree ref)
928 {
929 struct mem_address addr;
930 bool changed = false;
931 tree new_ref, off;
932
933 get_address_description (ref, &addr);
934
935 if (addr.base
936 && TREE_CODE (addr.base) == INTEGER_CST
937 && !integer_zerop (addr.base))
938 {
939 addr.offset = fold_binary_to_constant (PLUS_EXPR,
940 TREE_TYPE (addr.offset),
941 addr.offset, addr.base);
942 addr.base = NULL_TREE;
943 changed = true;
944 }
945
946 if (addr.symbol
947 && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
948 {
949 addr.offset = fold_binary_to_constant
950 (PLUS_EXPR, TREE_TYPE (addr.offset),
951 addr.offset,
952 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
953 addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
954 changed = true;
955 }
956 else if (addr.symbol
957 && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
958 {
959 HOST_WIDE_INT offset;
960 addr.symbol = build_fold_addr_expr
961 (get_addr_base_and_unit_offset
962 (TREE_OPERAND (addr.symbol, 0), &offset));
963 addr.offset = int_const_binop (PLUS_EXPR,
964 addr.offset, size_int (offset));
965 changed = true;
966 }
967
968 if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
969 {
970 off = addr.index;
971 if (addr.step)
972 {
973 off = fold_binary_to_constant (MULT_EXPR, sizetype,
974 off, addr.step);
975 addr.step = NULL_TREE;
976 }
977
978 addr.offset = fold_binary_to_constant (PLUS_EXPR,
979 TREE_TYPE (addr.offset),
980 addr.offset, off);
981 addr.index = NULL_TREE;
982 changed = true;
983 }
984
985 if (!changed)
986 return NULL_TREE;
987
988 /* If we have propagated something into this TARGET_MEM_REF and thus
989 ended up folding it, always create a new TARGET_MEM_REF regardless
990 of whether it is valid in this form on the target - the propagation result
991 wouldn't be valid anyway. */
992 new_ref = create_mem_ref_raw (TREE_TYPE (ref),
993 TREE_TYPE (addr.offset), &addr, false);
994 TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
995 TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
996 return new_ref;
997 }
998
999 /* Dump PARTS to FILE. */
1000
1001 extern void dump_mem_address (FILE *, struct mem_address *);
1002 void
1003 dump_mem_address (FILE *file, struct mem_address *parts)
1004 {
1005 if (parts->symbol)
1006 {
1007 fprintf (file, "symbol: ");
1008 print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
1009 fprintf (file, "\n");
1010 }
1011 if (parts->base)
1012 {
1013 fprintf (file, "base: ");
1014 print_generic_expr (file, parts->base, TDF_SLIM);
1015 fprintf (file, "\n");
1016 }
1017 if (parts->index)
1018 {
1019 fprintf (file, "index: ");
1020 print_generic_expr (file, parts->index, TDF_SLIM);
1021 fprintf (file, "\n");
1022 }
1023 if (parts->step)
1024 {
1025 fprintf (file, "step: ");
1026 print_generic_expr (file, parts->step, TDF_SLIM);
1027 fprintf (file, "\n");
1028 }
1029 if (parts->offset)
1030 {
1031 fprintf (file, "offset: ");
1032 print_generic_expr (file, parts->offset, TDF_SLIM);
1033 fprintf (file, "\n");
1034 }
1035 }
1036
1037 #include "gt-tree-ssa-address.h"