/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */

/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target. */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "tree-inline.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL. */
#include "insn-config.h"
#include "rtl.h"
#include "recog.h"
#include "expr.h"
#include "ggc.h"
#include "target.h"

/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3). And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause the back end to produce
   incorrect code, because the expander in expr.c validates the address.
   However, it would be nice to improve the handling here in order to
   produce more precise results. */

/* A "template" for a memory address, used to determine whether the address
   is valid for a mode. */

typedef struct GTY (()) mem_addr_template {
  rtx ref;                    /* The template. */
  rtx * GTY ((skip)) step_p;  /* The point in the template where the step
                                 should be filled in. */
  rtx * GTY ((skip)) off_p;   /* The point in the template where the offset
                                 should be filled in. */
} mem_addr_template;

DEF_VEC_O (mem_addr_template);
DEF_VEC_ALLOC_O (mem_addr_template, gc);

/* The templates. Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space. See TEMPL_IDX. */

static GTY(()) VEC (mem_addr_template, gc) *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
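
/* For example, an address in address space 0 that consists of just a base
   register and a constant offset is looked up at index
   TEMPL_IDX (0, NULL, base, NULL, NULL, offset) == (1 << 3) | 1 == 9
   (illustrative arithmetic, following the macro above). */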

/* Stores the address for a memory reference with parameters SYMBOL, BASE,
   INDEX, STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores
   pointers to the locations within the address where the step and the offset
   are placed to *STEP_P and *OFFSET_P, respectively. */

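/* When every component is present, the address built below has roughly the
   shape BASE + INDEX * STEP + (SYMBOL + OFFSET), with the symbolic part
   wrapped in a CONST when SYMBOL is a SYMBOL_REF, LABEL_REF or CONST, and
   with absent components simply omitted. */
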
static void
gen_addr_rtx (enum machine_mode address_mode,
              rtx symbol, rtx base, rtx index, rtx step, rtx offset,
              rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
        {
          act_elem = gen_rtx_MULT (address_mode, act_elem, step);

          if (step_p)
            *step_p = &XEXP (act_elem, 1);
        }

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
        *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
        *addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
        {
          act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

          if (offset_p)
            *offset_p = &XEXP (act_elem, 1);

          if (GET_CODE (symbol) == SYMBOL_REF
              || GET_CODE (symbol) == LABEL_REF
              || GET_CODE (symbol) == CONST)
            act_elem = gen_rtx_CONST (address_mode, act_elem);
        }

      if (*addr)
        *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
        *addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
        {
          *addr = gen_rtx_PLUS (address_mode, *addr, offset);
          if (offset_p)
            *offset_p = &XEXP (*addr, 1);
        }
      else
        {
          *addr = offset;
          if (offset_p)
            *offset_p = addr;
        }
    }

  if (!*addr)
    *addr = const0_rtx;
}

/* Returns the address for a TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates". */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
                  bool really_expand)
{
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_double_int_const (tree_to_double_int (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_int_const
            (double_int_sext (tree_to_double_int (addr->offset),
                              TYPE_PRECISION (TREE_TYPE (addr->offset))),
             pointer_mode);
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
        = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index
          >= VEC_length (mem_addr_template, mem_addr_template_list))
        VEC_safe_grow_cleared (mem_addr_template, gc, mem_addr_template_list,
                               templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory. */
      templ = VEC_index (mem_addr_template, mem_addr_template_list, templ_index);
      if (!templ->ref)
        {
          sym = (addr->symbol
                 ? gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
                 : NULL_RTX);
          bse = (addr->base
                 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
                 : NULL_RTX);
          idx = (addr->index
                 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
                 : NULL_RTX);

          gen_addr_rtx (pointer_mode, sym, bse, idx,
                        st ? const0_rtx : NULL_RTX,
                        off ? const0_rtx : NULL_RTX,
                        &templ->ref,
                        &templ->step_p,
                        &templ->off_p);
        }

      if (st)
        *templ->step_p = st;
      if (off)
        *templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions. */
  sym = (addr->symbol
         ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  bse = (addr->base
         ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  idx = (addr->index
         ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}

/* Returns the address of MEM_REF in TYPE. */
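/* Expanded, the returned address is roughly
   TMR_BASE + TMR_INDEX * TMR_STEP + TMR_INDEX2 + TMR_OFFSET,
   with absent operands omitted and the base converted to TYPE first. */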

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
        act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
                                act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
                                addr_off, act_elem);
      else
        addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
                                fold_convert (TREE_TYPE (addr_off), offset));
      else
        addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}

/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target. */

static bool
valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
                 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}

/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and, if so, creates and returns the
   TARGET_MEM_REF. If VERIFY is false, omit the verification step. */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
                    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
           && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow the restrictions on where the base
     pointer may point to, create a MEM_REF only if we know that the
     base is valid. */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
                 base, addr->offset, addr->index, addr->step, index2);
}

/* Returns true if OBJ is an object whose address is a link time constant. */

static bool
fixed_address_object_p (tree obj)
{
  return (TREE_CODE (obj) == VAR_DECL
          && (TREE_STATIC (obj)
              || DECL_EXTERNAL (obj))
          && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains the address of an object that is a link time constant,
   move it to PARTS->symbol. */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_one_p (addr->elts[i].coef))
        continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
          && fixed_address_object_p (TREE_OPERAND (val, 0)))
        break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
                   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_one_p (addr->elts[i].coef))
        continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
        break;
    }

  if (i == addr->n)
    return;

  /* Cast the value to an appropriate pointer type. We cannot use a pointer
     to TYPE directly, as the back end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use a void pointer to the type's address space instead. */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base. */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_one_p (addr->elts[i].coef))
        continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
        break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop-variant part V in linear address ADDR to be the index
   of PARTS. */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
        break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = double_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}

/* Adds ELT to PARTS. */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base. */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
                               parts->base, elt);
}

/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS. */

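/* For illustration: in an affine combination like BASE + 4*I1 - 4*I2 + I3,
   assuming a multiplier of 4 is supported and has a nonzero cost, the
   elements with coefficient +-4 are combined into index (I1 - I2) with
   step 4, while I3 is left in ADDR for later processing. */
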
static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
                              aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  double_int best_mult, amult, amult_neg;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  best_mult = double_int_zero;
  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_fits_in_shwi_p (addr->elts[i].coef))
        continue;

      coef = double_int_to_shwi (addr->elts[i].coef);
      if (coef == 1
          || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
        continue;

      acost = multiply_by_const_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
        {
          best_mult_cost = acost;
          best_mult = addr->elts[i].coef;
        }
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult. */
  for (i = j = 0; i < addr->n; i++)
    {
      amult = addr->elts[i].coef;
      amult_neg = double_int_ext_for_comb (double_int_neg (amult), addr);

      if (double_int_equal_p (amult, best_mult))
        op_code = PLUS_EXPR;
      else if (double_int_equal_p (amult_neg, best_mult))
        op_code = MINUS_EXPR;
      else
        {
          addr->elts[j] = addr->elts[i];
          j++;
          continue;
        }

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
        mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
        mult_elt = elt;
      else
        mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = double_int_to_tree (sizetype, best_mult);
}

/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as the base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS. Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless. */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
               tree base_hint, struct mem_address *parts,
               bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (!double_int_zero_p (addr->offset))
    parts->offset = double_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol. */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed. */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index. */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference. Since at the moment
     there is no reliable way to distinguish between a pointer and its
     offset, this is just a guess. */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements. */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (!double_int_one_p (addr->elts[i].coef))
        part = fold_build2 (MULT_EXPR, sizetype, part,
                            double_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}

/* Force the PARTS into registers. */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
                                              is_gimple_mem_ref_addr, NULL_TREE,
                                              true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
                                             true, NULL_TREE,
                                             true, GSI_SAME_STMT);
}

/* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary,
   computations are emitted in front of GSI. TYPE is the type of the
   created memory reference. IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object. */

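/* Roughly, the fallback strategy below: if the gimplified parts do not form
   a valid reference as-is, fold STEP into INDEX, then SYMBOL into BASE, then
   INDEX into BASE, and finally OFFSET into BASE, retrying after each
   simplification until only a plain base register remains. */
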
tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
                tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated. Try making it simpler. */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index. */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
                                              fold_build2 (MULT_EXPR, sizetype,
                                                           parts.index, parts.step),
                                              true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to the base, possibly forcing it into a register. */
      if (parts.base)
        {
          gcc_assert (useless_type_conversion_p
                        (sizetype, TREE_TYPE (parts.base)));

          if (parts.index)
            {
              parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (tmp, parts.base),
                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
            }
          else
            {
              parts.index = parts.base;
              parts.base = tmp;
            }
        }
      else
        parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base. */
      if (parts.base)
        {
          parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (parts.base, parts.index),
                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
        }
      else
        parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base. */
      if (parts.base)
        {
          parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (parts.base, parts.offset),
                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
        }
      else
        parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register). If we cannot create such a memory reference,
     something is really wrong. */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}

/* Copies components of the address from OP to ADDR. */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
        {
          gcc_assert (integer_zerop (TMR_BASE (op)));
          addr->base = TMR_INDEX2 (op);
        }
      else
        addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the additional information attached to target_mem_ref FROM to TO. */

void
copy_mem_ref_info (tree to, tree from)
{
  /* Copy the info about the original reference. */
  TREE_SIDE_EFFECTS (to) = TREE_SIDE_EFFECTS (from);
  TREE_THIS_VOLATILE (to) = TREE_THIS_VOLATILE (from);
}

/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
              || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one. */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
        ;
      else if ((TREE_CODE (base) == MEM_REF
                || TREE_CODE (base) == TARGET_MEM_REF)
               && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
               && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
        {
          struct ptr_info_def *new_pi;
          unsigned int align, misalign;

          duplicate_ssa_name_ptr_info
            (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
          new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
          /* We have to be careful about transferring alignment information. */
          if (get_ptr_info_alignment (new_pi, &align, &misalign)
              && TREE_CODE (old_ref) == MEM_REF
              && !(TREE_CODE (new_ref) == TARGET_MEM_REF
                   && (TMR_INDEX2 (new_ref)
                       || (TMR_STEP (new_ref)
                           && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
                               < align)))))
            {
              unsigned int inc = double_int_sub (mem_ref_offset (old_ref),
                                                 mem_ref_offset (new_ref)).low;
              adjust_ptr_info_misalignment (new_pi, inc);
            }
          else
            mark_ptr_info_alignment_unknown (new_pi);
        }
      else if (TREE_CODE (base) == VAR_DECL
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
        {
          struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
          pt_solution_set_var (&pi->pt, base);
        }
    }
}

/* Move constants in target_mem_ref REF to offset. Returns the new target
   mem ref if anything changes, NULL_TREE otherwise. */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree ret, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
                      (PLUS_EXPR, TREE_TYPE (addr.offset),
                       addr.offset,
                       TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
           && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
                      (get_addr_base_and_unit_offset
                         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
                                     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
        {
          off = fold_binary_to_constant (MULT_EXPR, sizetype,
                                         off, addr.step);
          addr.step = NULL_TREE;
        }

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target -- the propagation
     result would not be valid anyway. */
  ret = create_mem_ref_raw (TREE_TYPE (ref),
                            TREE_TYPE (addr.offset), &addr, false);
  copy_mem_ref_info (ret, ref);
  return ret;
}

/* Dump PARTS to FILE. */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"