you should swap the two operands if OP0 would be constant. */
rtx
-expand_mult (enum machine_mode mode, rtx op0, rtx op1, rtx target, int unsignedp)
+expand_mult (enum machine_mode mode, rtx op0, rtx op1, rtx target,
+             int unsignedp)
{
  rtx const_op1 = op1;
[...]
        }
    }

+  if (GET_CODE (op0) == CONST_DOUBLE)
+    {
+      rtx temp = op0;
+      op0 = op1;
+      op1 = temp;
+    }
+
+  /* Expand x*2.0 as x+x.  */
+  if (GET_CODE (op1) == CONST_DOUBLE
+      && GET_MODE_CLASS (mode) == MODE_FLOAT)
+    {
+      REAL_VALUE_TYPE d;
+      REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
+
+      if (REAL_VALUES_EQUAL (d, dconst2))
+        {
+          op0 = force_reg (GET_MODE (op0), op0);
+          return expand_binop (mode, add_optab, op0, op0,
+                               target, unsignedp, OPTAB_LIB_WIDEN);
+        }
+    }
+
  /* This used to use umul_optab if unsigned, but for non-widening multiply
     there is no difference between signed and unsigned.  */
  op0 = expand_binop (mode,
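
(Not part of the patch -- a minimal illustration of the expmed.c change
above, with invented function names.  Once a CONST_DOUBLE operand equal
to 2.0 reaches expand_mult, the multiply is emitted as a floating-point
add, so the two functions below should compile to the same code:)

    double scale (double x) { return x * 2.0; }  /* expanded as x + x */
    double twice (double x) { return x + x; }
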
[... fold-const.c: fold, case PLUS_EXPR ...]

                                    same));
            }
        }
-      /* See if ARG1 is zero and X + ARG1 reduces to X.  */
-      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
-        return non_lvalue (convert (type, arg0));
-      /* Likewise if the operands are reversed.  */
-      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
-        return non_lvalue (convert (type, arg1));
+      else
+        {
+          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
+          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
+            return non_lvalue (convert (type, arg0));
+
+          /* Likewise if the operands are reversed.  */
+          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
+            return non_lvalue (convert (type, arg1));
+
+          /* Convert x+x into x*2.0.  */
+          if (operand_equal_p (arg0, arg1, 0))
+            return fold (build (MULT_EXPR, type, arg0,
+                                build_real (type, dconst2)));
+
+          /* Convert x*c+x into x*(c+1).  */
+          if (flag_unsafe_math_optimizations
+              && TREE_CODE (arg0) == MULT_EXPR
+              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
+              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
+              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+            {
+              REAL_VALUE_TYPE c;
+
+              c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
+              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
+              return fold (build (MULT_EXPR, type, arg1,
+                                  build_real (type, c)));
+            }
+
+          /* Convert x+x*c into x*(c+1).  */
+          if (flag_unsafe_math_optimizations
+              && TREE_CODE (arg1) == MULT_EXPR
+              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
+              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
+              && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
+            {
+              REAL_VALUE_TYPE c;
+
+              c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
+              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
+              return fold (build (MULT_EXPR, type, arg0,
+                                  build_real (type, c)));
+            }
+
+          /* Convert x*c1+x*c2 into x*(c1+c2).  */
+          if (flag_unsafe_math_optimizations
+              && TREE_CODE (arg0) == MULT_EXPR
+              && TREE_CODE (arg1) == MULT_EXPR
+              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
+              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
+              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
+              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
+              && operand_equal_p (TREE_OPERAND (arg0, 0),
+                                  TREE_OPERAND (arg1, 0), 0))
+            {
+              REAL_VALUE_TYPE c1, c2;
+
+              c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
+              c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
+              real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
+              return fold (build (MULT_EXPR, type,
+                                  TREE_OPERAND (arg0, 0),
+                                  build_real (type, c1)));
+            }
+        }
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
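
(Not part of the patch -- source-level sketches of the new PLUS_EXPR
folds, with invented function names.  The x+x case sits behind no flag
because multiplying by 2.0 is exact in binary floating point; the
constant-combining cases can change rounding, e.g. x*0.1 + x need not
equal x*1.1 exactly, so they are guarded by
flag_unsafe_math_optimizations and need -funsafe-math-optimizations or
-ffast-math:)

    double f1 (double x) { return x + x; }              /* -> x * 2.0 */
    double f2 (double x) { return x * 3.0 + x; }        /* -> x * 4.0 */
    double f3 (double x) { return x + x * 5.0; }        /* -> x * 6.0 */
    double f4 (double x) { return x * 2.0 + x * 4.0; }  /* -> x * 6.0 */
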
[... fold-const.c: fold, case MULT_EXPR ...]

              && real_minus_onep (arg1))
            return fold (build1 (NEGATE_EXPR, type, arg0));

-          /* x*2 is x+x */
-          if (! wins && real_twop (arg1)
-              && (*lang_hooks.decls.global_bindings_p) () == 0
-              && ! CONTAINS_PLACEHOLDER_P (arg0))
-            {
-              tree arg = save_expr (arg0);
-              return fold (build (PLUS_EXPR, type, arg, arg));
-            }
-
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
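
(Not part of the patch -- a note on the deletion above.  The old
tree-level x*2 -> x+x rewrite had to wrap ARG0 in save_expr so that an
operand with side effects was evaluated only once, which is presumably
why it was fenced off by the global_bindings_p and
CONTAINS_PLACEHOLDER_P checks.  Doing the rewrite at RTL expansion
sidesteps all of that: by the time expand_mult runs, the operand has
already been evaluated into a register, so op0 + op0 is trivially safe.
A sketch with an invented function name:)

    extern double get (void);
    double g (void) { return get () * 2.0; }
    /* Naively rewriting the tree to get () + get () would call get
       twice; after force_reg in expand_mult, the add simply reuses the
       one already-computed value.  */
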