From 79be8943bb65584a628bcb14c575f52c35b42cb1 Mon Sep 17 00:00:00 2001
From: Eric Botcazou
Date: Thu, 5 Jan 2012 22:24:45 +0000
Subject: [PATCH] re PR tree-optimization/51315 (unaligned memory accesses
 generated with -ftree-sra)

	PR tree-optimization/51315
	* tree-sra.c (tree_non_aligned_mem_for_access_p): New predicate.
	(build_accesses_from_assign): Use it instead of tree_non_aligned_mem_p.

From-SVN: r182933
---
 gcc/ChangeLog                                 |  6 ++++
 gcc/testsuite/ChangeLog                       |  4 +++
 .../gcc.c-torture/execute/20120105-1.c        | 24 +++++++++++++++
 gcc/tree-sra.c                                | 29 ++++++++++++++-----
 4 files changed, 55 insertions(+), 8 deletions(-)
 create mode 100644 gcc/testsuite/gcc.c-torture/execute/20120105-1.c

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index c9a2654764b1..c020ffd20e09 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2012-01-05  Eric Botcazou
+
+	PR tree-optimization/51315
+	* tree-sra.c (tree_non_aligned_mem_for_access_p): New predicate.
+	(build_accesses_from_assign): Use it instead of tree_non_aligned_mem_p.
+
 2012-01-04  Eric Botcazou
 
 	PR tree-optimization/51624
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index e9ffe531c493..6b8a0c4e3a31 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,7 @@
+2012-01-05  Eric Botcazou
+
+	* gcc.c-torture/execute/20120104-1.c: New test.
+
 2012-01-04  Richard Guenther
 
 	PR tree-optimization/49651
diff --git a/gcc/testsuite/gcc.c-torture/execute/20120105-1.c b/gcc/testsuite/gcc.c-torture/execute/20120105-1.c
new file mode 100644
index 000000000000..115ba1509d49
--- /dev/null
+++ b/gcc/testsuite/gcc.c-torture/execute/20120105-1.c
@@ -0,0 +1,24 @@
+struct __attribute__((packed)) S
+{
+  int a, b, c;
+};
+
+static int __attribute__ ((noinline,noclone))
+extract(const char *p)
+{
+  struct S s;
+  __builtin_memcpy (&s, p, sizeof(struct S));
+  return s.a;
+}
+
+volatile int i;
+
+int main (void)
+{
+  char p[sizeof(struct S) + 1];
+
+  __builtin_memset (p, 0, sizeof(struct S) + 1);
+  i = extract (p + 1);
+
+  return 0;
+}
diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index 016c0387d777..63eec95e1268 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -1047,6 +1047,25 @@ tree_non_aligned_mem_p (tree exp, unsigned int align)
   return false;
 }
 
+/* Return true if EXP is a memory reference less aligned than what the access
+   ACC would require.  This is invoked only on strict-alignment targets.  */
+
+static bool
+tree_non_aligned_mem_for_access_p (tree exp, struct access *acc)
+{
+  unsigned int acc_align;
+
+  /* The alignment of the access is that of its expression.  However, it may
+     have been artificially increased, e.g. by a local alignment promotion,
+     so we cap it to the alignment of the type of the base, on the grounds
+     that valid sub-accesses cannot be more aligned than that.  */
+  acc_align = get_object_alignment (acc->expr, BIGGEST_ALIGNMENT);
+  if (acc->base && acc_align > TYPE_ALIGN (TREE_TYPE (acc->base)))
+    acc_align = TYPE_ALIGN (TREE_TYPE (acc->base));
+
+  return tree_non_aligned_mem_p (exp, acc_align);
+}
+
 /* Scan expressions occuring in STMT, create access structures for all accesses
    to candidates for scalarization and remove those candidates which occur in
    statements or expressions that prevent them from being split apart.  Return
@@ -1073,10 +1092,7 @@ build_accesses_from_assign (gimple stmt)
   if (lacc)
     {
       lacc->grp_assignment_write = 1;
-      if (STRICT_ALIGNMENT
-	  && tree_non_aligned_mem_p (rhs,
-				     get_object_alignment (lhs,
-							   BIGGEST_ALIGNMENT)))
+      if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (rhs, lacc))
 	lacc->grp_unscalarizable_region = 1;
     }
 
@@ -1086,10 +1102,7 @@ build_accesses_from_assign (gimple stmt)
       if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
 	  && !is_gimple_reg_type (racc->type))
 	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
-      if (STRICT_ALIGNMENT
-	  && tree_non_aligned_mem_p (lhs,
-				     get_object_alignment (rhs,
-							   BIGGEST_ALIGNMENT)))
+      if (STRICT_ALIGNMENT && tree_non_aligned_mem_for_access_p (lhs, racc))
 	racc->grp_unscalarizable_region = 1;
     }
 
-- 
2.47.2