gcc/tree-ssa-address.c
1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
21    that directly map to addressing modes of the target.  */
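/* For orientation (an editorial sketch, not part of the original sources):
   a TARGET_MEM_REF describes a memory access whose address decomposes as

       BASE + INDEX * STEP + INDEX2 + OFFSET

   which mirrors the base/index/scale/displacement shape of typical hardware
   addressing modes.  The helpers in this file build such decompositions,
   check them against the target, and simplify them when necessary.  */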
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "hash-set.h"
28 #include "vec.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "inchash.h"
33 #include "tree.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "tm_p.h"
37 #include "predict.h"
38 #include "hard-reg-set.h"
39 #include "function.h"
40 #include "basic-block.h"
41 #include "tree-pretty-print.h"
42 #include "tree-ssa-alias.h"
43 #include "internal-fn.h"
44 #include "gimple-expr.h"
45 #include "is-a.h"
46 #include "gimple.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "stringpool.h"
50 #include "tree-ssanames.h"
51 #include "tree-ssa-loop-ivopts.h"
52 #include "hashtab.h"
53 #include "rtl.h"
54 #include "flags.h"
55 #include "statistics.h"
56 #include "insn-config.h"
57 #include "expmed.h"
58 #include "dojump.h"
59 #include "explow.h"
60 #include "calls.h"
61 #include "emit-rtl.h"
62 #include "varasm.h"
63 #include "stmt.h"
64 #include "expr.h"
65 #include "tree-dfa.h"
66 #include "dumpfile.h"
67 #include "tree-inline.h"
68 #include "tree-affine.h"
69
70 /* FIXME: We compute address costs using RTL. */
71 #include "recog.h"
72 #include "target.h"
73 #include "tree-ssa-address.h"
74
75 /* TODO -- handling of symbols (according to Richard Henderson's
76    comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
77
78 There are at least 5 different kinds of symbols that we can run up against:
79
80 (1) binds_local_p, small data area.
81 (2) binds_local_p, eg local statics
82 (3) !binds_local_p, eg global variables
83 (4) thread local, local_exec
84 (5) thread local, !local_exec
85
86 Now, (1) won't appear often in an array context, but it certainly can.
87 All you have to do is set -GN high enough, or explicitly mark any
88 random object __attribute__((section (".sdata"))).
89
90 All of these affect whether or not a symbol is in fact a valid address.
91 The only one tested here is (3). And that result may very well
92 be incorrect for (4) or (5).
93
94    An incorrect result here does not cause incorrect results out of the
95    back end, because the expander in expr.c legitimizes the address.  However,
96    it would be nice to improve the handling here in order to produce more
97    precise results.  */
98
99 /* A "template" for a memory address, used to determine whether the address
100    is valid for a given mode.  */
101
102 typedef struct GTY (()) mem_addr_template {
103 rtx ref; /* The template. */
104 rtx * GTY ((skip)) step_p; /* The point in template where the step should be
105 filled in. */
106 rtx * GTY ((skip)) off_p; /* The point in template where the offset should
107 be filled in. */
108 } mem_addr_template;
109
110
111 /* The templates. Each of the low five bits of the index corresponds to one
112 component of TARGET_MEM_REF being present, while the high bits identify
113 the address space. See TEMPL_IDX. */
114
115 static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
116
117 #define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
118 (((int) (AS) << 5) \
119 | ((SYMBOL != 0) << 4) \
120 | ((BASE != 0) << 3) \
121 | ((INDEX != 0) << 2) \
122 | ((STEP != 0) << 1) \
123 | (OFFSET != 0))
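/* A worked example of the packing above (illustrative only): in the generic
   address space 0, a reference with a symbol, a base and an offset but no
   index and no step gets the template index

       (0 << 5) | (1 << 4) | (1 << 3) | (0 << 2) | (0 << 1) | 1 == 25.  */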
124
125 /* Stores the address for a memory reference with parameters SYMBOL, BASE,
126    INDEX, STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores
127    pointers to where STEP and OFFSET are placed in *STEP_P and *OFFSET_P.  */
128
129 static void
130 gen_addr_rtx (machine_mode address_mode,
131 rtx symbol, rtx base, rtx index, rtx step, rtx offset,
132 rtx *addr, rtx **step_p, rtx **offset_p)
133 {
134 rtx act_elem;
135
136 *addr = NULL_RTX;
137 if (step_p)
138 *step_p = NULL;
139 if (offset_p)
140 *offset_p = NULL;
141
142 if (index)
143 {
144 act_elem = index;
145 if (step)
146 {
147 act_elem = gen_rtx_MULT (address_mode, act_elem, step);
148
149 if (step_p)
150 *step_p = &XEXP (act_elem, 1);
151 }
152
153 *addr = act_elem;
154 }
155
156 if (base && base != const0_rtx)
157 {
158 if (*addr)
159 *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
160 else
161 *addr = base;
162 }
163
164 if (symbol)
165 {
166 act_elem = symbol;
167 if (offset)
168 {
169 act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);
170
171 if (offset_p)
172 *offset_p = &XEXP (act_elem, 1);
173
174 if (GET_CODE (symbol) == SYMBOL_REF
175 || GET_CODE (symbol) == LABEL_REF
176 || GET_CODE (symbol) == CONST)
177 act_elem = gen_rtx_CONST (address_mode, act_elem);
178 }
179
180 if (*addr)
181 *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
182 else
183 *addr = act_elem;
184 }
185 else if (offset)
186 {
187 if (*addr)
188 {
189 *addr = gen_rtx_PLUS (address_mode, *addr, offset);
190 if (offset_p)
191 *offset_p = &XEXP (*addr, 1);
192 }
193 else
194 {
195 *addr = offset;
196 if (offset_p)
197 *offset_p = addr;
198 }
199 }
200
201 if (!*addr)
202 *addr = const0_rtx;
203 }
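/* For illustration (editorial sketch): with every component present, the
   routine above yields RTL of roughly the shape

       (plus (plus BASE (mult INDEX STEP))
             (const (plus SYMBOL OFFSET)))

   with *STEP_P and *OFFSET_P left pointing at the STEP and OFFSET slots, so
   that the cached address templates can be patched in place.  */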
204
205 /* Description of a memory address. */
206
207 struct mem_address
208 {
209 tree symbol, base, index, step, offset;
210 };
211
212 /* Returns the address for a TARGET_MEM_REF with parameters given by ADDR
213    in address space AS.
214    If REALLY_EXPAND is false, just make fake registers instead
215    of really expanding the operands, and perform the expansion in place
216    using one of the "templates".  */
217
218 rtx
219 addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
220 bool really_expand)
221 {
222 machine_mode address_mode = targetm.addr_space.address_mode (as);
223 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
224 rtx address, sym, bse, idx, st, off;
225 struct mem_addr_template *templ;
226
227 if (addr->step && !integer_onep (addr->step))
228 st = immed_wide_int_const (addr->step, pointer_mode);
229 else
230 st = NULL_RTX;
231
232 if (addr->offset && !integer_zerop (addr->offset))
233 {
234 offset_int dc = offset_int::from (addr->offset, SIGNED);
235 off = immed_wide_int_const (dc, pointer_mode);
236 }
237 else
238 off = NULL_RTX;
239
240 if (!really_expand)
241 {
242 unsigned int templ_index
243 = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);
244
245 if (templ_index >= vec_safe_length (mem_addr_template_list))
246 vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);
247
248 /* Reuse the templates for addresses, so that we do not waste memory. */
249 templ = &(*mem_addr_template_list)[templ_index];
250 if (!templ->ref)
251 {
252 sym = (addr->symbol ?
253 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
254 : NULL_RTX);
255 bse = (addr->base ?
256 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
257 : NULL_RTX);
258 idx = (addr->index ?
259 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
260 : NULL_RTX);
261
262 gen_addr_rtx (pointer_mode, sym, bse, idx,
263 st? const0_rtx : NULL_RTX,
264 off? const0_rtx : NULL_RTX,
265 &templ->ref,
266 &templ->step_p,
267 &templ->off_p);
268 }
269
270 if (st)
271 *templ->step_p = st;
272 if (off)
273 *templ->off_p = off;
274
275 return templ->ref;
276 }
277
278 /* Otherwise really expand the expressions. */
279 sym = (addr->symbol
280 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
281 : NULL_RTX);
282 bse = (addr->base
283 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
284 : NULL_RTX);
285 idx = (addr->index
286 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
287 : NULL_RTX);
288
289 gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
290 if (pointer_mode != address_mode)
291 address = convert_memory_address (address_mode, address);
292 return address;
293 }
294
295 /* Implement addr_for_mem_ref () directly from a tree, which avoids exporting
296    the mem_address structure.  */
297
298 rtx
299 addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
300 {
301 struct mem_address addr;
302 get_address_description (exp, &addr);
303 return addr_for_mem_ref (&addr, as, really_expand);
304 }
305
306 /* Returns address of MEM_REF in TYPE. */
307
308 tree
309 tree_mem_ref_addr (tree type, tree mem_ref)
310 {
311 tree addr;
312 tree act_elem;
313 tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
314 tree addr_base = NULL_TREE, addr_off = NULL_TREE;
315
316 addr_base = fold_convert (type, TMR_BASE (mem_ref));
317
318 act_elem = TMR_INDEX (mem_ref);
319 if (act_elem)
320 {
321 if (step)
322 act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
323 act_elem, step);
324 addr_off = act_elem;
325 }
326
327 act_elem = TMR_INDEX2 (mem_ref);
328 if (act_elem)
329 {
330 if (addr_off)
331 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
332 addr_off, act_elem);
333 else
334 addr_off = act_elem;
335 }
336
337 if (offset && !integer_zerop (offset))
338 {
339 if (addr_off)
340 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
341 fold_convert (TREE_TYPE (addr_off), offset));
342 else
343 addr_off = offset;
344 }
345
346 if (addr_off)
347 addr = fold_build_pointer_plus (addr_base, addr_off);
348 else
349 addr = addr_base;
350
351 return addr;
352 }
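/* In other words (editorial sketch), the address returned above is

       (TYPE) TMR_BASE + (TMR_INDEX * TMR_STEP + TMR_INDEX2 + TMR_OFFSET)

   with a zero TMR_OFFSET simply dropped.  */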
353
354 /* Returns true if a memory reference in MODE and with parameters given by
355 ADDR is valid on the current target. */
356
357 static bool
358 valid_mem_ref_p (machine_mode mode, addr_space_t as,
359 struct mem_address *addr)
360 {
361 rtx address;
362
363 address = addr_for_mem_ref (addr, as, false);
364 if (!address)
365 return false;
366
367 return memory_address_addr_space_p (mode, address, as);
368 }
369
370 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
371 is valid on the current target and if so, creates and returns the
372 TARGET_MEM_REF. If VERIFY is false omit the verification step. */
373
374 static tree
375 create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
376 bool verify)
377 {
378 tree base, index2;
379
380 if (verify
381 && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
382 return NULL_TREE;
383
384 if (addr->step && integer_onep (addr->step))
385 addr->step = NULL_TREE;
386
387 if (addr->offset)
388 addr->offset = fold_convert (alias_ptr_type, addr->offset);
389 else
390 addr->offset = build_int_cst (alias_ptr_type, 0);
391
392 if (addr->symbol)
393 {
394 base = addr->symbol;
395 index2 = addr->base;
396 }
397 else if (addr->base
398 && POINTER_TYPE_P (TREE_TYPE (addr->base)))
399 {
400 base = addr->base;
401 index2 = NULL_TREE;
402 }
403 else
404 {
405 base = build_int_cst (ptr_type_node, 0);
406 index2 = addr->base;
407 }
408
409   /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
410      ??? As IVOPTs does not follow restrictions on where the base
411      pointer may point, create a MEM_REF only if we know that the
412      base is valid.  */
413 if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
414 && (!index2 || integer_zerop (index2))
415 && (!addr->index || integer_zerop (addr->index)))
416 return fold_build2 (MEM_REF, type, base, addr->offset);
417
418 return build5 (TARGET_MEM_REF, type,
419 base, addr->offset, addr->index, addr->step, index2);
420 }
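/* Example of the shortcut above (illustrative): for a constant base such as
   &a with no index components, the function folds the reference down to
   MEM_REF [&a, offset] rather than emitting a five-operand TARGET_MEM_REF.  */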
421
422 /* Returns true if OBJ is an object whose address is a link time constant. */
423
424 static bool
425 fixed_address_object_p (tree obj)
426 {
427 return (TREE_CODE (obj) == VAR_DECL
428 && (TREE_STATIC (obj)
429 || DECL_EXTERNAL (obj))
430 && ! DECL_DLLIMPORT_P (obj));
431 }
432
433 /* If ADDR contains the address of an object that is a link-time constant,
434    move it to PARTS->symbol.  */
435
436 static void
437 move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
438 {
439 unsigned i;
440 tree val = NULL_TREE;
441
442 for (i = 0; i < addr->n; i++)
443 {
444 if (addr->elts[i].coef != 1)
445 continue;
446
447 val = addr->elts[i].val;
448 if (TREE_CODE (val) == ADDR_EXPR
449 && fixed_address_object_p (TREE_OPERAND (val, 0)))
450 break;
451 }
452
453 if (i == addr->n)
454 return;
455
456 parts->symbol = val;
457 aff_combination_remove_elt (addr, i);
458 }
459
460 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
461
462 static void
463 move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
464 aff_tree *addr)
465 {
466 unsigned i;
467 tree val = NULL_TREE;
468 int qual;
469
470 for (i = 0; i < addr->n; i++)
471 {
472 if (addr->elts[i].coef != 1)
473 continue;
474
475 val = addr->elts[i].val;
476 if (operand_equal_p (val, base_hint, 0))
477 break;
478 }
479
480 if (i == addr->n)
481 return;
482
483   /* Cast the value to an appropriate pointer type.  We cannot use a pointer
484      to TYPE directly, as the back end will assume registers of pointer
485      type are aligned, while the base itself may not actually be.
486      We use a pointer to void in the type's address space instead.  */
487 qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
488 type = build_qualified_type (void_type_node, qual);
489 parts->base = fold_convert (build_pointer_type (type), val);
490 aff_combination_remove_elt (addr, i);
491 }
492
493 /* If ADDR contains an address of a dereferenced pointer, move it to
494 PARTS->base. */
495
496 static void
497 move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
498 {
499 unsigned i;
500 tree val = NULL_TREE;
501
502 for (i = 0; i < addr->n; i++)
503 {
504 if (addr->elts[i].coef != 1)
505 continue;
506
507 val = addr->elts[i].val;
508 if (POINTER_TYPE_P (TREE_TYPE (val)))
509 break;
510 }
511
512 if (i == addr->n)
513 return;
514
515 parts->base = val;
516 aff_combination_remove_elt (addr, i);
517 }
518
519 /* Moves the loop-variant part V of the linear address ADDR to be the index
520    of PARTS.  */
521
522 static void
523 move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
524 {
525 unsigned i;
526 tree val = NULL_TREE;
527
528 gcc_assert (!parts->index);
529 for (i = 0; i < addr->n; i++)
530 {
531 val = addr->elts[i].val;
532 if (operand_equal_p (val, v, 0))
533 break;
534 }
535
536 if (i == addr->n)
537 return;
538
539 parts->index = fold_convert (sizetype, val);
540 parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
541 aff_combination_remove_elt (addr, i);
542 }
543
544 /* Adds ELT to PARTS. */
545
546 static void
547 add_to_parts (struct mem_address *parts, tree elt)
548 {
549 tree type;
550
551 if (!parts->index)
552 {
553 parts->index = fold_convert (sizetype, elt);
554 return;
555 }
556
557 if (!parts->base)
558 {
559 parts->base = elt;
560 return;
561 }
562
563 /* Add ELT to base. */
564 type = TREE_TYPE (parts->base);
565 if (POINTER_TYPE_P (type))
566 parts->base = fold_build_pointer_plus (parts->base, elt);
567 else
568 parts->base = fold_build2 (PLUS_EXPR, type,
569 parts->base, elt);
570 }
571
572 /* Finds the most expensive multiplication in ADDR that can be
573    expressed in an addressing mode and moves the corresponding
574    element(s) to PARTS.  */
575
576 static void
577 most_expensive_mult_to_index (tree type, struct mem_address *parts,
578 aff_tree *addr, bool speed)
579 {
580 addr_space_t as = TYPE_ADDR_SPACE (type);
581 machine_mode address_mode = targetm.addr_space.address_mode (as);
582 HOST_WIDE_INT coef;
583 unsigned best_mult_cost = 0, acost;
584 tree mult_elt = NULL_TREE, elt;
585 unsigned i, j;
586 enum tree_code op_code;
587
588 offset_int best_mult = 0;
589 for (i = 0; i < addr->n; i++)
590 {
591 if (!wi::fits_shwi_p (addr->elts[i].coef))
592 continue;
593
594 coef = addr->elts[i].coef.to_shwi ();
595 if (coef == 1
596 || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
597 continue;
598
599 acost = mult_by_coeff_cost (coef, address_mode, speed);
600
601 if (acost > best_mult_cost)
602 {
603 best_mult_cost = acost;
604 best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
605 }
606 }
607
608 if (!best_mult_cost)
609 return;
610
611 /* Collect elements multiplied by best_mult. */
612 for (i = j = 0; i < addr->n; i++)
613 {
614 offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
615 offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));
616
617 if (amult == best_mult)
618 op_code = PLUS_EXPR;
619 else if (amult_neg == best_mult)
620 op_code = MINUS_EXPR;
621 else
622 {
623 addr->elts[j] = addr->elts[i];
624 j++;
625 continue;
626 }
627
628 elt = fold_convert (sizetype, addr->elts[i].val);
629 if (mult_elt)
630 mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
631 else if (op_code == PLUS_EXPR)
632 mult_elt = elt;
633 else
634 mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
635 }
636 addr->n = j;
637
638 parts->index = mult_elt;
639 parts->step = wide_int_to_tree (sizetype, best_mult);
640 }
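/* A small worked example (editorial): if ADDR is  4*i + 4*j + k  and a scale
   of 4 is both supported in addressing modes and the most expensive feasible
   multiplication, the loop above sets PARTS->index = i + j and PARTS->step = 4,
   leaving  k  in ADDR for later distribution.  */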
641
642 /* Splits address ADDR for a memory access of type TYPE into PARTS.
643    If BASE_HINT is non-NULL, it specifies an SSA name to be used
644    preferentially as the base of the reference, and IV_CAND is the
645    selected iv candidate used in ADDR.
646
647    TODO -- be more clever about the distribution of the elements of ADDR
648    to PARTS.  Some architectures do not support anything but a single
649    register in an address, possibly with a small integer offset; while
650    create_mem_ref will simplify the address to an acceptable shape
651    later, it would be more efficient to know up front that asking for
652    complicated addressing modes is useless.  */
653
654 static void
655 addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
656 tree base_hint, struct mem_address *parts,
657 bool speed)
658 {
659 tree part;
660 unsigned i;
661
662 parts->symbol = NULL_TREE;
663 parts->base = NULL_TREE;
664 parts->index = NULL_TREE;
665 parts->step = NULL_TREE;
666
667 if (addr->offset != 0)
668 parts->offset = wide_int_to_tree (sizetype, addr->offset);
669 else
670 parts->offset = NULL_TREE;
671
672 /* Try to find a symbol. */
673 move_fixed_address_to_symbol (parts, addr);
674
675 /* No need to do address parts reassociation if the number of parts
676 is <= 2 -- in that case, no loop invariant code motion can be
677 exposed. */
678
679 if (!base_hint && (addr->n > 2))
680 move_variant_to_index (parts, addr, iv_cand);
681
682 /* First move the most expensive feasible multiplication
683 to index. */
684 if (!parts->index)
685 most_expensive_mult_to_index (type, parts, addr, speed);
686
687   /* Try to find a base of the reference.  Since at the moment
688      there is no reliable way to distinguish between a pointer and its
689      offset, this is just a guess.  */
690 if (!parts->symbol && base_hint)
691 move_hint_to_base (type, parts, base_hint, addr);
692 if (!parts->symbol && !parts->base)
693 move_pointer_to_base (parts, addr);
694
695 /* Then try to process the remaining elements. */
696 for (i = 0; i < addr->n; i++)
697 {
698 part = fold_convert (sizetype, addr->elts[i].val);
699 if (addr->elts[i].coef != 1)
700 part = fold_build2 (MULT_EXPR, sizetype, part,
701 wide_int_to_tree (sizetype, addr->elts[i].coef));
702 add_to_parts (parts, part);
703 }
704 if (addr->rest)
705 add_to_parts (parts, fold_convert (sizetype, addr->rest));
706 }
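/* Putting it together (editorial sketch): for an access like  a[i + 4],
   with  a  a file-scope array of 4-byte elements, the affine form of the
   address is  &a + 4*i + 16, which is typically split as

       symbol = &a,  index = i,  step = 4,  offset = 16

   leaving the base and the remaining elements empty.  */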
707
708 /* Force the PARTS into registers.  */
709
710 static void
711 gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
712 {
713 if (parts->base)
714 parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
715 is_gimple_mem_ref_addr, NULL_TREE,
716 true, GSI_SAME_STMT);
717 if (parts->index)
718 parts->index = force_gimple_operand_gsi (gsi, parts->index,
719 true, NULL_TREE,
720 true, GSI_SAME_STMT);
721 }
722
723 /* Creates and returns a TARGET_MEM_REF for address ADDR.  Any necessary
724    computations are emitted in front of GSI.  TYPE is the type
725    of the created memory reference.  IV_CAND is the selected iv candidate
726    in ADDR, and BASE_HINT is non-NULL if IV_CAND comes from a base address
727    object.  */
728
729 tree
730 create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
731 tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
732 {
733 tree mem_ref, tmp;
734 struct mem_address parts;
735
736 addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
737 gimplify_mem_ref_parts (gsi, &parts);
738 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
739 if (mem_ref)
740 return mem_ref;
741
742 /* The expression is too complicated. Try making it simpler. */
743
744 if (parts.step && !integer_onep (parts.step))
745 {
746 /* Move the multiplication to index. */
747 gcc_assert (parts.index);
748 parts.index = force_gimple_operand_gsi (gsi,
749 fold_build2 (MULT_EXPR, sizetype,
750 parts.index, parts.step),
751 true, NULL_TREE, true, GSI_SAME_STMT);
752 parts.step = NULL_TREE;
753
754 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
755 if (mem_ref)
756 return mem_ref;
757 }
758
759 if (parts.symbol)
760 {
761 tmp = parts.symbol;
762 gcc_assert (is_gimple_val (tmp));
763
764       /* Add the symbol to the base, possibly forcing it into a register.  */
765 if (parts.base)
766 {
767 gcc_assert (useless_type_conversion_p
768 (sizetype, TREE_TYPE (parts.base)));
769
770 if (parts.index)
771 {
772 parts.base = force_gimple_operand_gsi_1 (gsi,
773 fold_build_pointer_plus (tmp, parts.base),
774 is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
775 }
776 else
777 {
778 parts.index = parts.base;
779 parts.base = tmp;
780 }
781 }
782 else
783 parts.base = tmp;
784 parts.symbol = NULL_TREE;
785
786 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
787 if (mem_ref)
788 return mem_ref;
789 }
790
791 if (parts.index)
792 {
793 /* Add index to base. */
794 if (parts.base)
795 {
796 parts.base = force_gimple_operand_gsi_1 (gsi,
797 fold_build_pointer_plus (parts.base, parts.index),
798 is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
799 }
800 else
801 parts.base = parts.index;
802 parts.index = NULL_TREE;
803
804 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
805 if (mem_ref)
806 return mem_ref;
807 }
808
809 if (parts.offset && !integer_zerop (parts.offset))
810 {
811 /* Try adding offset to base. */
812 if (parts.base)
813 {
814 parts.base = force_gimple_operand_gsi_1 (gsi,
815 fold_build_pointer_plus (parts.base, parts.offset),
816 is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
817 }
818 else
819 parts.base = parts.offset;
820
821 parts.offset = NULL_TREE;
822
823 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
824 if (mem_ref)
825 return mem_ref;
826 }
827
828 /* Verify that the address is in the simplest possible shape
829 (only a register). If we cannot create such a memory reference,
830 something is really wrong. */
831 gcc_assert (parts.symbol == NULL_TREE);
832 gcc_assert (parts.index == NULL_TREE);
833 gcc_assert (!parts.step || integer_onep (parts.step));
834 gcc_assert (!parts.offset || integer_zerop (parts.offset));
835 gcc_unreachable ();
836 }
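/* Editorial note on the fallback order above: when the initial combination is
   not a valid address, the function roughly folds STEP into INDEX, then
   SYMBOL into BASE, then INDEX into BASE, and finally OFFSET into BASE, until
   the target accepts the form; a bare register must always be accepted.  */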
837
838 /* Copies components of the address from OP to ADDR. */
839
840 void
841 get_address_description (tree op, struct mem_address *addr)
842 {
843 if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
844 {
845 addr->symbol = TMR_BASE (op);
846 addr->base = TMR_INDEX2 (op);
847 }
848 else
849 {
850 addr->symbol = NULL_TREE;
851 if (TMR_INDEX2 (op))
852 {
853 gcc_assert (integer_zerop (TMR_BASE (op)));
854 addr->base = TMR_INDEX2 (op);
855 }
856 else
857 addr->base = TMR_BASE (op);
858 }
859 addr->index = TMR_INDEX (op);
860 addr->step = TMR_STEP (op);
861 addr->offset = TMR_OFFSET (op);
862 }
863
864 /* Copies the reference information from OLD_REF to NEW_REF, where
865 NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */
866
867 void
868 copy_ref_info (tree new_ref, tree old_ref)
869 {
870 tree new_ptr_base = NULL_TREE;
871
872 gcc_assert (TREE_CODE (new_ref) == MEM_REF
873 || TREE_CODE (new_ref) == TARGET_MEM_REF);
874
875 TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
876 TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);
877
878 new_ptr_base = TREE_OPERAND (new_ref, 0);
879
880 /* We can transfer points-to information from an old pointer
881 or decl base to the new one. */
882 if (new_ptr_base
883 && TREE_CODE (new_ptr_base) == SSA_NAME
884 && !SSA_NAME_PTR_INFO (new_ptr_base))
885 {
886 tree base = get_base_address (old_ref);
887 if (!base)
888 ;
889 else if ((TREE_CODE (base) == MEM_REF
890 || TREE_CODE (base) == TARGET_MEM_REF)
891 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
892 && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
893 {
894 struct ptr_info_def *new_pi;
895 unsigned int align, misalign;
896
897 duplicate_ssa_name_ptr_info
898 (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
899 new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
900 /* We have to be careful about transferring alignment information. */
901 if (get_ptr_info_alignment (new_pi, &align, &misalign)
902 && TREE_CODE (old_ref) == MEM_REF
903 && !(TREE_CODE (new_ref) == TARGET_MEM_REF
904 && (TMR_INDEX2 (new_ref)
905 || (TMR_STEP (new_ref)
906 && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
907 < align)))))
908 {
909 unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
910 - mem_ref_offset (new_ref).to_short_addr ());
911 adjust_ptr_info_misalignment (new_pi, inc);
912 }
913 else
914 mark_ptr_info_alignment_unknown (new_pi);
915 }
916 else if (TREE_CODE (base) == VAR_DECL
917 || TREE_CODE (base) == PARM_DECL
918 || TREE_CODE (base) == RESULT_DECL)
919 {
920 struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
921 pt_solution_set_var (&pi->pt, base);
922 }
923 }
924 }
925
926 /* Move constants in the TARGET_MEM_REF REF to its offset.  Returns the new
927    TARGET_MEM_REF if anything changed, NULL_TREE otherwise.  */
928
929 tree
930 maybe_fold_tmr (tree ref)
931 {
932 struct mem_address addr;
933 bool changed = false;
934 tree new_ref, off;
935
936 get_address_description (ref, &addr);
937
938 if (addr.base
939 && TREE_CODE (addr.base) == INTEGER_CST
940 && !integer_zerop (addr.base))
941 {
942 addr.offset = fold_binary_to_constant (PLUS_EXPR,
943 TREE_TYPE (addr.offset),
944 addr.offset, addr.base);
945 addr.base = NULL_TREE;
946 changed = true;
947 }
948
949 if (addr.symbol
950 && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
951 {
952 addr.offset = fold_binary_to_constant
953 (PLUS_EXPR, TREE_TYPE (addr.offset),
954 addr.offset,
955 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
956 addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
957 changed = true;
958 }
959 else if (addr.symbol
960 && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
961 {
962 HOST_WIDE_INT offset;
963 addr.symbol = build_fold_addr_expr
964 (get_addr_base_and_unit_offset
965 (TREE_OPERAND (addr.symbol, 0), &offset));
966 addr.offset = int_const_binop (PLUS_EXPR,
967 addr.offset, size_int (offset));
968 changed = true;
969 }
970
971 if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
972 {
973 off = addr.index;
974 if (addr.step)
975 {
976 off = fold_binary_to_constant (MULT_EXPR, sizetype,
977 off, addr.step);
978 addr.step = NULL_TREE;
979 }
980
981 addr.offset = fold_binary_to_constant (PLUS_EXPR,
982 TREE_TYPE (addr.offset),
983 addr.offset, off);
984 addr.index = NULL_TREE;
985 changed = true;
986 }
987
988 if (!changed)
989 return NULL_TREE;
990
991   /* If we have propagated something into this TARGET_MEM_REF and thus
992      ended up folding it, always create a new TARGET_MEM_REF regardless
993      of whether it is valid in this form on the target -- the propagation
994      result would not be valid there anyway.  */
995 new_ref = create_mem_ref_raw (TREE_TYPE (ref),
996 TREE_TYPE (addr.offset), &addr, false);
997 TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
998 TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
999 return new_ref;
1000 }
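/* A small example of the folding above (illustrative): a TARGET_MEM_REF of
   &a with constant index 2, step 4 and offset 0 has the index folded into
   the offset, giving offset 8; the reference is then rebuilt, and may further
   collapse to a plain MEM_REF [&a, 8] via create_mem_ref_raw.  */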
1001
1002 /* Dump PARTS to FILE. */
1003
1004 extern void dump_mem_address (FILE *, struct mem_address *);
1005 void
1006 dump_mem_address (FILE *file, struct mem_address *parts)
1007 {
1008 if (parts->symbol)
1009 {
1010 fprintf (file, "symbol: ");
1011 print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
1012 fprintf (file, "\n");
1013 }
1014 if (parts->base)
1015 {
1016 fprintf (file, "base: ");
1017 print_generic_expr (file, parts->base, TDF_SLIM);
1018 fprintf (file, "\n");
1019 }
1020 if (parts->index)
1021 {
1022 fprintf (file, "index: ");
1023 print_generic_expr (file, parts->index, TDF_SLIM);
1024 fprintf (file, "\n");
1025 }
1026 if (parts->step)
1027 {
1028 fprintf (file, "step: ");
1029 print_generic_expr (file, parts->step, TDF_SLIM);
1030 fprintf (file, "\n");
1031 }
1032 if (parts->offset)
1033 {
1034 fprintf (file, "offset: ");
1035 print_generic_expr (file, parts->offset, TDF_SLIM);
1036 fprintf (file, "\n");
1037 }
1038 }
1039
1040 #include "gt-tree-ssa-address.h"