===================================================================
@@ -6540,6 +6540,23 @@ commutative_tree_code (enum tree_code co
return false;
}
+/* Return true if CODE represents a ternary tree code for which the
+ first two operands are commutative. Otherwise return false. */
+bool
+commutative_ternary_tree_code (enum tree_code code)
+{
+ switch (code)
+ {
+ case WIDEN_MULT_PLUS_EXPR:
+ case WIDEN_MULT_MINUS_EXPR:
+ return true;
+
+ default:
+ break;
+ }
+ return false;
+}
+
/* Generate a hash value for an expression. This can be used iteratively
by passing a previous result as the VAL argument.
===================================================================
@@ -4820,6 +4820,7 @@ extern tree get_callee_fndecl (const_tre
extern int type_num_arguments (const_tree);
extern bool associative_tree_code (enum tree_code);
extern bool commutative_tree_code (enum tree_code);
+extern bool commutative_ternary_tree_code (enum tree_code);
extern tree upper_bound_in_type (tree, tree);
extern tree lower_bound_in_type (tree, tree);
extern int operand_equal_for_phi_arg_p (const_tree, const_tree);
===================================================================
@@ -865,6 +865,8 @@ gimple_assign_nonnegative_warnv_p (gimpl
gimple_assign_rhs1 (stmt),
gimple_assign_rhs2 (stmt),
strict_overflow_p);
+ case GIMPLE_TERNARY_RHS:
+ return false;
case GIMPLE_SINGLE_RHS:
return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
strict_overflow_p);
@@ -936,6 +938,8 @@ gimple_assign_nonzero_warnv_p (gimple st
gimple_assign_rhs1 (stmt),
gimple_assign_rhs2 (stmt),
strict_overflow_p);
+ case GIMPLE_TERNARY_RHS:
+ return false;
case GIMPLE_SINGLE_RHS:
return tree_single_nonzero_warnv_p (gimple_assign_rhs1 (stmt),
strict_overflow_p);
===================================================================
@@ -2352,6 +2352,10 @@ stmt_has_constants (gimple stmt)
case GIMPLE_BINARY_RHS:
return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
|| is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
+ case GIMPLE_TERNARY_RHS:
+ return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
+ || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
+ || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
case GIMPLE_SINGLE_RHS:
/* Constants inside reference ops are rarely interesting, but
it can take a lot of looking to find them. */
===================================================================
@@ -839,6 +839,23 @@ ccp_visit_phi_node (gimple phi)
return SSA_PROP_NOT_INTERESTING;
}
+/* Get operand number OPNR from the rhs of STMT. Before returning it,
+ simplify it to a constant if possible. */
+
+static tree
+get_rhs_assign_op_for_ccp (gimple stmt, int opnr)
+{
+ tree op = gimple_op (stmt, opnr);
+
+ if (TREE_CODE (op) == SSA_NAME)
+ {
+ prop_value_t *val = get_value (op);
+ if (val->lattice_val == CONSTANT)
+ op = get_value (op)->value;
+ }
+ return op;
+}
+
/* CCP specific front-end to the non-destructive constant folding
routines.
@@ -961,15 +978,7 @@ ccp_fold (gimple stmt)
Note that we know the single operand must be a constant,
so this should almost always return a simplified RHS. */
tree lhs = gimple_assign_lhs (stmt);
- tree op0 = gimple_assign_rhs1 (stmt);
-
- /* Simplify the operand down to a constant. */
- if (TREE_CODE (op0) == SSA_NAME)
- {
- prop_value_t *val = get_value (op0);
- if (val->lattice_val == CONSTANT)
- op0 = get_value (op0)->value;
- }
+ tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
/* Conversions are useless for CCP purposes if they are
value-preserving. Thus the restrictions that
@@ -1006,23 +1015,8 @@ ccp_fold (gimple stmt)
case GIMPLE_BINARY_RHS:
{
/* Handle binary operators that can appear in GIMPLE form. */
- tree op0 = gimple_assign_rhs1 (stmt);
- tree op1 = gimple_assign_rhs2 (stmt);
-
- /* Simplify the operands down to constants when appropriate. */
- if (TREE_CODE (op0) == SSA_NAME)
- {
- prop_value_t *val = get_value (op0);
- if (val->lattice_val == CONSTANT)
- op0 = val->value;
- }
-
- if (TREE_CODE (op1) == SSA_NAME)
- {
- prop_value_t *val = get_value (op1);
- if (val->lattice_val == CONSTANT)
- op1 = val->value;
- }
+ tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
+ tree op1 = get_rhs_assign_op_for_ccp (stmt, 2);
/* Fold &foo + CST into an invariant reference if possible. */
if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
@@ -1039,6 +1033,17 @@ ccp_fold (gimple stmt)
gimple_expr_type (stmt), op0, op1);
}
+ case GIMPLE_TERNARY_RHS:
+ {
+      /* Handle ternary operators that can appear in GIMPLE form.  */
+ tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
+ tree op1 = get_rhs_assign_op_for_ccp (stmt, 2);
+ tree op2 = get_rhs_assign_op_for_ccp (stmt, 3);
+
+ return fold_ternary_loc (loc, subcode,
+ gimple_expr_type (stmt), op0, op1, op2);
+ }
+
default:
gcc_unreachable ();
}
===================================================================
@@ -51,6 +51,7 @@ enum expr_kind
EXPR_SINGLE,
EXPR_UNARY,
EXPR_BINARY,
+ EXPR_TERNARY,
EXPR_CALL
};
@@ -61,7 +62,8 @@ struct hashable_expr
union {
struct { tree rhs; } single;
struct { enum tree_code op; tree opnd; } unary;
- struct { enum tree_code op; tree opnd0; tree opnd1; } binary;
+ struct { enum tree_code op; tree opnd0, opnd1; } binary;
+ struct { enum tree_code op; tree opnd0, opnd1, opnd2; } ternary;
struct { tree fn; bool pure; size_t nargs; tree *args; } call;
} ops;
};
@@ -211,22 +213,30 @@ initialize_hash_element (gimple stmt, tr
switch (get_gimple_rhs_class (subcode))
{
case GIMPLE_SINGLE_RHS:
- expr->kind = EXPR_SINGLE;
- expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
- break;
+ expr->kind = EXPR_SINGLE;
+ expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
+ break;
case GIMPLE_UNARY_RHS:
- expr->kind = EXPR_UNARY;
+ expr->kind = EXPR_UNARY;
expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
- expr->ops.unary.op = subcode;
- expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
- break;
+ expr->ops.unary.op = subcode;
+ expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
+ break;
case GIMPLE_BINARY_RHS:
- expr->kind = EXPR_BINARY;
+ expr->kind = EXPR_BINARY;
expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
- expr->ops.binary.op = subcode;
- expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
- expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
- break;
+ expr->ops.binary.op = subcode;
+ expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
+ expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
+ break;
+ case GIMPLE_TERNARY_RHS:
+ expr->kind = EXPR_TERNARY;
+ expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
+ expr->ops.ternary.op = subcode;
+ expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
+ expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
+ expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
+ break;
default:
gcc_unreachable ();
}
@@ -371,23 +381,40 @@ hashable_expr_equal_p (const struct hash
expr1->ops.unary.opnd, 0);
case EXPR_BINARY:
- {
- if (expr0->ops.binary.op != expr1->ops.binary.op)
- return false;
+ if (expr0->ops.binary.op != expr1->ops.binary.op)
+ return false;
- if (operand_equal_p (expr0->ops.binary.opnd0,
- expr1->ops.binary.opnd0, 0)
- && operand_equal_p (expr0->ops.binary.opnd1,
- expr1->ops.binary.opnd1, 0))
- return true;
+ if (operand_equal_p (expr0->ops.binary.opnd0,
+ expr1->ops.binary.opnd0, 0)
+ && operand_equal_p (expr0->ops.binary.opnd1,
+ expr1->ops.binary.opnd1, 0))
+ return true;
- /* For commutative ops, allow the other order. */
- return (commutative_tree_code (expr0->ops.binary.op)
- && operand_equal_p (expr0->ops.binary.opnd0,
- expr1->ops.binary.opnd1, 0)
- && operand_equal_p (expr0->ops.binary.opnd1,
- expr1->ops.binary.opnd0, 0));
- }
+ /* For commutative ops, allow the other order. */
+ return (commutative_tree_code (expr0->ops.binary.op)
+ && operand_equal_p (expr0->ops.binary.opnd0,
+ expr1->ops.binary.opnd1, 0)
+ && operand_equal_p (expr0->ops.binary.opnd1,
+ expr1->ops.binary.opnd0, 0));
+
+ case EXPR_TERNARY:
+ if (expr0->ops.ternary.op != expr1->ops.ternary.op
+ || !operand_equal_p (expr0->ops.ternary.opnd2,
+ expr1->ops.ternary.opnd2, 0))
+ return false;
+
+ if (operand_equal_p (expr0->ops.ternary.opnd0,
+ expr1->ops.ternary.opnd0, 0)
+ && operand_equal_p (expr0->ops.ternary.opnd1,
+ expr1->ops.ternary.opnd1, 0))
+ return true;
+
+ /* For commutative ops, allow the other order. */
+ return (commutative_ternary_tree_code (expr0->ops.ternary.op)
+ && operand_equal_p (expr0->ops.ternary.opnd0,
+ expr1->ops.ternary.opnd1, 0)
+ && operand_equal_p (expr0->ops.ternary.opnd1,
+ expr1->ops.ternary.opnd0, 0));
case EXPR_CALL:
{
@@ -450,8 +477,8 @@ iterative_hash_hashable_expr (const stru
case EXPR_BINARY:
val = iterative_hash_object (expr->ops.binary.op, val);
if (commutative_tree_code (expr->ops.binary.op))
- val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
- expr->ops.binary.opnd1, val);
+ val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
+ expr->ops.binary.opnd1, val);
else
{
val = iterative_hash_expr (expr->ops.binary.opnd0, val);
@@ -459,6 +486,19 @@ iterative_hash_hashable_expr (const stru
}
break;
+ case EXPR_TERNARY:
+ val = iterative_hash_object (expr->ops.ternary.op, val);
+ if (commutative_ternary_tree_code (expr->ops.ternary.op))
+ val = iterative_hash_exprs_commutative (expr->ops.ternary.opnd0,
+ expr->ops.ternary.opnd1, val);
+ else
+ {
+ val = iterative_hash_expr (expr->ops.ternary.opnd0, val);
+ val = iterative_hash_expr (expr->ops.ternary.opnd1, val);
+ }
+ val = iterative_hash_expr (expr->ops.ternary.opnd2, val);
+ break;
+
case EXPR_CALL:
{
size_t i;
@@ -511,6 +551,16 @@ print_expr_hash_elt (FILE * stream, cons
print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
break;
+ case EXPR_TERNARY:
+ fprintf (stream, " %s <", tree_code_name[element->expr.ops.ternary.op]);
+ print_generic_expr (stream, element->expr.ops.ternary.opnd0, 0);
+ fputs (", ", stream);
+ print_generic_expr (stream, element->expr.ops.ternary.opnd1, 0);
+ fputs (", ", stream);
+ print_generic_expr (stream, element->expr.ops.ternary.opnd2, 0);
+ fputs (">", stream);
+ break;
+
case EXPR_CALL:
{
size_t i;
===================================================================
@@ -986,6 +986,33 @@ fold_gimple_assign (gimple_stmt_iterator
}
break;
+ case GIMPLE_TERNARY_RHS:
+ result = fold_ternary_loc (loc, subcode,
+ TREE_TYPE (gimple_assign_lhs (stmt)),
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt),
+ gimple_assign_rhs3 (stmt));
+
+ if (result)
+ {
+ STRIP_USELESS_TYPE_CONVERSION (result);
+ if (valid_gimple_rhs_p (result))
+ return result;
+
+ /* Fold might have produced non-GIMPLE, so if we trust it blindly
+ we lose canonicalization opportunities. Do not go again
+ through fold here though, or the same non-GIMPLE will be
+ produced. */
+ if (commutative_ternary_tree_code (subcode)
+ && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt), false))
+ return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
+ gimple_assign_rhs2 (stmt),
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs3 (stmt));
+ }
+ break;
+
case GIMPLE_INVALID_RHS:
gcc_unreachable ();
}
===================================================================
@@ -242,14 +242,14 @@ fold_assignment_stmt (gimple stmt)
return fold (rhs);
}
- break;
+
case GIMPLE_UNARY_RHS:
{
tree lhs = gimple_assign_lhs (stmt);
tree op0 = gimple_assign_rhs1 (stmt);
return fold_unary (subcode, TREE_TYPE (lhs), op0);
}
- break;
+
case GIMPLE_BINARY_RHS:
{
tree lhs = gimple_assign_lhs (stmt);
@@ -257,7 +257,16 @@ fold_assignment_stmt (gimple stmt)
tree op1 = gimple_assign_rhs2 (stmt);
return fold_binary (subcode, TREE_TYPE (lhs), op0, op1);
}
- break;
+
+ case GIMPLE_TERNARY_RHS:
+ {
+ tree lhs = gimple_assign_lhs (stmt);
+ tree op0 = gimple_assign_rhs1 (stmt);
+ tree op1 = gimple_assign_rhs2 (stmt);
+ tree op2 = gimple_assign_rhs3 (stmt);
+ return fold_ternary (subcode, TREE_TYPE (lhs), op0, op1, op2);
+ }
+
default:
gcc_unreachable ();
}