+2006-12-12 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ PR tree-opt/28436
+ * tree.h (DECL_COMPLEX_GIMPLE_REG_P): Rename to ...
+ (DECL_GIMPLE_REG_P): This.
+ * fold-const.c (fold_indirect_ref_1): Fold *(foo *)&vectorfoo into
+ using BIT_FIELD_REF.
+ * omp-low.c (omp_copy_decl_2): Use the renamed DECL_GIMPLE_REG_P.
+ * tree-gimple.c (is_gimple_reg): Use the renamed DECL_GIMPLE_REG_P
+ and check for VECTOR_TYPE.
+ * expr.c (get_inner_reference): Set the mode for BIT_FIELD_REF with
+ vector types.
+ * tree-flow-inline.h (var_can_have_subvars): Use the renamed
+ DECL_GIMPLE_REG_P.
+ * gimplify.c (internal_get_tmp_var): Use the renamed DECL_GIMPLE_REG_P
+ and check for VECTOR_TYPE.
+ (gimplify_bind_expr): Likewise.
+ (gimplify_function_tree): Likewise.
+ * expmed.c: Include target.h.
+	(extract_bit_field): For vector mode, try to find a better mode first.
+ If that fails use gen_lowpart (for vectors only).
+ * tree-dfa.c (make_rename_temp): Use the renamed DECL_GIMPLE_REG_P
+ and check for VECTOR_TYPE.
+ * tree-ssa-pre.c (create_expressions_by_pieces): Likewise.
+ (insert_into_preds_of_block): Likewise.
+ (insert_fake_stores): Create gimple register store_tmps for
+ vector types.
+ * tree-sra.c (sra_elt): New field, is_vector_lhs.
+ (sra_walk_expr <case BIT_FIELD_REF>): For vector types that
+ are the left hand side, set the element's is_vector_lhs to true.
+	(instantiate_element): For vector types which were on the left
+	hand side, set DECL_GIMPLE_REG_P to false.
+	* tree-nested.c (create_tmp_var_for): Use the renamed
+	DECL_GIMPLE_REG_P.
+	* tree-inline.c (declare_return_variable): Use the renamed
+	DECL_GIMPLE_REG_P and check for VECTOR_TYPE.
+ (copy_decl_to_var): Use the renamed DECL_GIMPLE_REG_P.
+ (copy_result_decl_to_var): Likewise.
+ * tree-vect-transform.c (vect_get_new_vect_var): For vector types,
+ create a gimple register variable.
+ (vect_permute_store_chain): Set DECL_GIMPLE_REG_P to true for the
+ vect_inter_* temp variables.
+ * Makefile.in (expmed.o): Update dependencies.
+
2006-12-12 Peter Bergner <bergner@vnet.ibm.com>
* reload1.c (eliminate_regs_in_insn): Merge the plus_src "else" and
$(CGRAPH_H) except.h sbitmap.h
expmed.o : expmed.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
$(FLAGS_H) insn-config.h $(EXPR_H) $(OPTABS_H) $(RECOG_H) $(REAL_H) \
- toplev.h $(TM_P_H) langhooks.h
+ toplev.h $(TM_P_H) langhooks.h $(TARGET_H)
explow.o : explow.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
$(FLAGS_H) hard-reg-set.h insn-config.h $(EXPR_H) $(OPTABS_H) $(RECOG_H) \
toplev.h $(FUNCTION_H) $(GGC_H) $(TM_P_H) langhooks.h gt-explow.h \
#include "real.h"
#include "recog.h"
#include "langhooks.h"
+#include "target.h"
static void store_fixed_bit_field (rtx, unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
return op0;
}
+ /* See if we can get a better vector mode before extracting. */
+ if (VECTOR_MODE_P (GET_MODE (op0))
+ && !MEM_P (op0)
+ && GET_MODE_INNER (GET_MODE (op0)) != tmode)
+ {
+ enum machine_mode new_mode;
+ int nunits = GET_MODE_NUNITS (GET_MODE (op0));
+
+ if (GET_MODE_CLASS (tmode) == MODE_FLOAT)
+ new_mode = MIN_MODE_VECTOR_FLOAT;
+ else
+ new_mode = MIN_MODE_VECTOR_INT;
+
+ for (; new_mode != VOIDmode ; new_mode = GET_MODE_WIDER_MODE (new_mode))
+ if (GET_MODE_NUNITS (new_mode) == nunits
+ && GET_MODE_INNER (new_mode) == tmode
+ && targetm.vector_mode_supported_p (new_mode))
+ break;
+ if (new_mode != VOIDmode)
+ op0 = gen_lowpart (new_mode, op0);
+ }
+
/* Use vec_extract patterns for extracting parts of vectors whenever
available. */
if (VECTOR_MODE_P (GET_MODE (op0))
{
emit_insn (seq);
emit_insn (pat);
+ if (mode0 != mode)
+ return gen_lowpart (tmode, dest);
return dest;
}
}
{
size_tree = TREE_OPERAND (exp, 1);
*punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
+
+ /* For vector types, with the correct size of access, use the mode of
+ inner type. */
+ if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
+ && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
+ && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
+ mode = TYPE_MODE (TREE_TYPE (exp));
}
else
{
else if (TREE_CODE (optype) == COMPLEX_TYPE
&& type == TREE_TYPE (optype))
return fold_build1 (REALPART_EXPR, type, op);
+ /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
+ else if (TREE_CODE (optype) == VECTOR_TYPE
+ && type == TREE_TYPE (optype))
+ {
+ tree part_width = TYPE_SIZE (type);
+ tree index = bitsize_int (0);
+ return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
+ }
}
/* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
}
}
- if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
- DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
+ if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
+ DECL_GIMPLE_REG_P (t) = 1;
mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
/* Preliminarily mark non-addressed complex variables as eligible
for promotion to gimple registers. We'll transform their uses
as we find them. */
- if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+ if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
&& !TREE_THIS_VOLATILE (t)
&& (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
&& !needs_to_live_in_memory (t))
- DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
+ DECL_GIMPLE_REG_P (t) = 1;
}
gimple_push_bind_expr (bind_expr);
/* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
- DECL_COMPLEX_GIMPLE_REG_P set. */
+ DECL_GIMPLE_REG_P set. */
static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
/* Preliminarily mark non-addressed complex variables as eligible
for promotion to gimple registers. We'll transform their uses
as we find them. */
- if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
+ if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
&& !TREE_THIS_VOLATILE (parm)
&& !needs_to_live_in_memory (parm))
- DECL_COMPLEX_GIMPLE_REG_P (parm) = 1;
+ DECL_GIMPLE_REG_P (parm) = 1;
}
ret = DECL_RESULT (fndecl);
- if (TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
+ if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
&& !needs_to_live_in_memory (ret))
- DECL_COMPLEX_GIMPLE_REG_P (ret) = 1;
+ DECL_GIMPLE_REG_P (ret) = 1;
gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
tree copy = build_decl (VAR_DECL, name, type);
TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
- DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (var);
+ DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
TREE_USED (copy) = 1;
+2006-12-12 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ PR tree-opt/28436
+ * gcc.c-torture/compile/vector-1.c: New test.
+ * gcc.c-torture/compile/vector-2.c: New test.
+ * gcc.c-torture/compile/vector-3.c: New test.
+
2006-12-12 Tobias Schlüter <tobias.schlueter@physik.uni-muenchen.de>
* lib/fortran-torture.exp: Update copyright years. Remove
--- /dev/null
+/* PR tree-opt/28436.  Reading a single float element back out of a
+   vector through a pointer cast must compile; per the accompanying
+   fold-const.c change, *(float *)&y.mVec is folded to a BIT_FIELD_REF
+   of the vector.  */
+#define vector __attribute__((vector_size(16) ))
+struct ss
+{
+ vector float mVec;
+};
+float getCapsule(vector int t)
+{
+ vector float t1 = (vector float)t;
+ struct ss y = {t1};
+ return *((float*)&y.mVec);
+}
--- /dev/null
+/* PR tree-opt/28436.  Storing through a float pointer into a vector
+   produces a BIT_FIELD_REF on the left hand side; per the accompanying
+   tree-sra.c change, the scalarized vector must then be marked as not
+   a gimple register.  This must compile without ICEing.  */
+#define vector __attribute__((vector_size(16) ))
+struct ss
+{
+ vector float mVec;
+};
+vector float getCapsule(vector int t)
+{
+ vector float t1 = (vector float)t;
+ struct ss y = {t1};
+ *((float*)&y.mVec) = 1.0;
+ return y.mVec;
+}
--- /dev/null
+/* PR tree-opt/28436.  Building a vector from a constructor containing
+   a non-constant element must compile without ICEing.  */
+#define vector __attribute__((vector_size(16) ))
+vector float g(void)
+{
+ float t = 1.0f;
+ return (vector float){0.0, 0.0, t, 0.0};
+}
+
{
tree t = create_tmp_var (type, prefix);
- if (TREE_CODE (type) == COMPLEX_TYPE)
- DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
+ if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
+ DECL_GIMPLE_REG_P (t) = 1;
if (gimple_referenced_vars (cfun))
{
/* Complex types variables which are not also a gimple register can
have subvars. */
if (TREE_CODE (TREE_TYPE (v)) == COMPLEX_TYPE
- && !DECL_COMPLEX_GIMPLE_REG_P (v))
+ && !DECL_GIMPLE_REG_P (v))
return true;
return false;
/* Complex values must have been put into ssa form. That is, no
assignments to the individual components. */
- if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
- return DECL_COMPLEX_GIMPLE_REG_P (t);
+ if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
+ return DECL_GIMPLE_REG_P (t);
return true;
}
var = return_slot_addr;
else
var = build_fold_indirect_ref (return_slot_addr);
- if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
- && !DECL_COMPLEX_GIMPLE_REG_P (result)
+ if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
+ && !DECL_GIMPLE_REG_P (result)
&& DECL_P (var))
- DECL_COMPLEX_GIMPLE_REG_P (var) = 0;
+ DECL_GIMPLE_REG_P (var) = 0;
use = NULL;
goto done;
}
use_it = false;
else if (is_global_var (base_m))
use_it = false;
- else if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
- && !DECL_COMPLEX_GIMPLE_REG_P (result)
- && DECL_COMPLEX_GIMPLE_REG_P (base_m))
+ else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
+ && !DECL_GIMPLE_REG_P (result)
+ && DECL_GIMPLE_REG_P (base_m))
use_it = false;
else if (!TREE_ADDRESSABLE (base_m))
use_it = true;
TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
TREE_READONLY (copy) = TREE_READONLY (decl);
TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
- DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
+ DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
return copy_decl_for_dup_finish (id, decl, copy);
}
if (!DECL_BY_REFERENCE (decl))
{
TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
- DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (decl);
+ DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
}
return copy_decl_for_dup_finish (id, decl, copy);
TREE_CHAIN (tmp_var) = info->new_local_var_chain;
DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
if (TREE_CODE (type) == COMPLEX_TYPE)
- DECL_COMPLEX_GIMPLE_REG_P (tmp_var) = 1;
+ DECL_GIMPLE_REG_P (tmp_var) = 1;
info->new_local_var_chain = tmp_var;
/* A flag for use with/after random access traversals. */
bool visited;
+
+ /* True if there is BIT_FIELD_REF on the lhs with a vector. */
+ bool is_vector_lhs;
};
#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)
break;
case BIT_FIELD_REF:
+ /* A bit field reference to a specific vector is scalarized but for
+ ones for inputs need to be marked as used on the left hand size so
+ when we scalarize it, we can mark that variable as non renamable. */
+ if (is_output && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
+ {
+ struct sra_elt *elt = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
+ elt->is_vector_lhs = true;
+ }
/* A bit field reference (access to *multiple* fields simultaneously)
is not currently scalarized. Consider this an access to the
complete outer element, to which walk_tree will bring us next. */
+
goto use_all;
case VIEW_CONVERT_EXPR:
base = base_elt->element;
elt->replacement = var = make_rename_temp (elt->type, "SR");
+
+ /* For vectors, if used on the left hand side with BIT_FIELD_REF,
+ they are not a gimple register. */
+ if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
+ DECL_GIMPLE_REG_P (var) = 0;
+
DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
DECL_ARTIFICIAL (var) = 1;
temp = pretemp;
add_referenced_var (temp);
- if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
- DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
+ if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (expr)) == VECTOR_TYPE)
+ DECL_GIMPLE_REG_P (temp) = 1;
newexpr = build2_gimple (GIMPLE_MODIFY_STMT, temp, newexpr);
name = make_ssa_name (temp, newexpr);
temp = prephitemp;
add_referenced_var (temp);
- if (TREE_CODE (type) == COMPLEX_TYPE)
- DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
+
+ if (TREE_CODE (type) == COMPLEX_TYPE
+ || TREE_CODE (type) == VECTOR_TYPE)
+ DECL_GIMPLE_REG_P (temp) = 1;
temp = create_phi_node (temp, block);
NECESSARY (temp) = 0;
if (!storetemp || TREE_TYPE (rhs) != TREE_TYPE (storetemp))
{
storetemp = create_tmp_var (TREE_TYPE (rhs), "storetmp");
+ if (TREE_CODE (TREE_TYPE (storetemp)) == VECTOR_TYPE)
+ DECL_GIMPLE_REG_P (storetemp) = 1;
get_var_ann (storetemp);
}
else
new_vect_var = create_tmp_var (type, prefix);
+ /* Mark vector typed variable as a gimple register variable. */
+ if (TREE_CODE (type) == VECTOR_TYPE)
+ DECL_GIMPLE_REG_P (new_vect_var) = true;
+
return new_vect_var;
}
and in the case of little endian:
high = interleave_low (vect1, vect2). */
perm_dest = create_tmp_var (vectype, "vect_inter_high");
+ DECL_GIMPLE_REG_P (perm_dest) = 1;
add_referenced_var (perm_dest);
if (BYTES_BIG_ENDIAN)
perm_stmt = build2 (GIMPLE_MODIFY_STMT, void_type_node, perm_dest,
and in the case of little endian:
low = interleave_high (vect1, vect2). */
perm_dest = create_tmp_var (vectype, "vect_inter_low");
+ DECL_GIMPLE_REG_P (perm_dest) = 1;
add_referenced_var (perm_dest);
if (BYTES_BIG_ENDIAN)
perm_stmt = build2 (GIMPLE_MODIFY_STMT, void_type_node, perm_dest,
#define DECL_PRESERVE_P(DECL) \
DECL_COMMON_CHECK (DECL)->decl_common.preserve_flag
-/* For function local variables of COMPLEX type, indicates that the
- variable is not aliased, and that all modifications to the variable
- have been adjusted so that they are killing assignments. Thus the
- variable may now be treated as a GIMPLE register, and use real
- instead of virtual ops in SSA form. */
-#define DECL_COMPLEX_GIMPLE_REG_P(DECL) \
+/* For function local variables of COMPLEX and VECTOR types,
+ indicates that the variable is not aliased, and that all
+ modifications to the variable have been adjusted so that
+ they are killing assignments. Thus the variable may now
+ be treated as a GIMPLE register, and use real instead of
+ virtual ops in SSA form. */
+#define DECL_GIMPLE_REG_P(DECL) \
DECL_COMMON_CHECK (DECL)->decl_common.gimple_reg_flag
/* This is true if DECL is call clobbered in the current function.