Patchwork [tree-optimization] : [1 of 3]: Boolify compares & more

login
register
mail settings
Submitter Kai Tietz
Date July 7, 2011, 4:06 p.m.
Message ID <CAEwic4bEgSHifzK=SKgDVnBaMPc1AeRmMNMoA6cU9ysO6TF_4A@mail.gmail.com>
Download mbox | patch
Permalink /patch/103686/
State New
Headers show

Comments

Kai Tietz - July 7, 2011, 4:06 p.m.
Hello,

This patch - first of series - adds to fold and some helper routines support
for one-bit precision bitwise folding and detection.
This patch is necessary for - next patch of series - boolification of
comparisons.

Bootstrapped and regression tested for all standard-languages (plus
Ada and Obj-C++) on host x86_64-pc-linux-gnu.

Ok for apply?

Regards,
Kai

ChangeLog

2011-07-07  Kai Tietz  <ktietz@redhat.com>

	* fold-const.c (fold_truth_not_expr): Handle
	one bit precision bitwise operations.
	(fold_range_test): Likewise.
	(fold_truthop): Likewise.
	(fold_binary_loc): Likewise.
	(fold_truth_andor): Function replaces truth_andor
	label.
	(fold_ternary_loc): Use truth_value_type_p instead
	of truth_value_p.
	* gimple.c (canonicalize_cond_expr_cond): Likewise.
	* gimplify.c (gimple_boolify): Likewise.
	* tree-ssa-structalias.c (find_func_aliases): Likewise.
	* tree-ssa-forwprop.c (truth_valued_ssa_name): Likewise.
	* tree.h (truth_value_type_p): New function.
	(truth_value_p): Implemented as macro via truth_value_type_p.
Richard Guenther - July 8, 2011, 9:09 a.m.
On Thu, Jul 7, 2011 at 6:06 PM, Kai Tietz <ktietz70@googlemail.com> wrote:
> Hello,
>
> This patch - first of series - adds to fold and some helper routines support
> for one-bit precision bitwise folding and detection.
> This patch is necessary for - next patch of series - boolification of
> comparisons.
>
> Bootstrapped and regression tested for all standard-languages (plus
> Ada and Obj-C++) on host x86_64-pc-linux-gnu.
>
> Ok for apply?

Factoring out fold_truth_andor to a function should be done separately.
A patch that does just that is pre-approved.

Otherwise the patch globs too many changes and lacks reasoning.
Why do we want to handle all this in fold when the boolification
happens only after gimplification?

Thanks,
Richard.

> Regards,
> Kai
>
> ChangeLog
>
> 2011-07-07  Kai Tietz  <ktietz@redhat.com>
>
>        * fold-const.c (fold_truth_not_expr): Handle
>        one bit precision bitwise operations.
>        (fold_range_test): Likewise.
>        (fold_truthop): Likewise.
>        (fold_binary_loc): Likewise.
>        (fold_truth_andor): Function replaces truth_andor
>        label.
>        (fold_ternary_loc): Use truth_value_type_p instead
>        of truth_value_p.
>        * gimple.c (canonicalize_cond_expr_cond): Likewise.
>        * gimplify.c (gimple_boolify): Likewise.
>        * tree-ssa-structalias.c (find_func_aliases): Likewise.
>        * tree-ssa-forwprop.c (truth_valued_ssa_name): Likewise.
>        * tree.h (truth_value_type_p): New function.
>        (truth_value_p): Implemented as macro via truth_value_type_p.
>
>
> Index: gcc-head/gcc/fold-const.c
> ===================================================================
> --- gcc-head.orig/gcc/fold-const.c
> +++ gcc-head/gcc/fold-const.c
> @@ -3074,20 +3074,35 @@ fold_truth_not_expr (location_t loc, tre
>     case INTEGER_CST:
>       return constant_boolean_node (integer_zerop (arg), type);
>
> +    case BIT_AND_EXPR:
> +      if (integer_onep (TREE_OPERAND (arg, 1)))
> +       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
> +      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
> +        return NULL_TREE;
> +      /* fall through */
>     case TRUTH_AND_EXPR:
>       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
>       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
> -      return build2_loc (loc, TRUTH_OR_EXPR, type,
> +      return build2_loc (loc, (code == BIT_AND_EXPR ? BIT_IOR_EXPR
> +                                                   : TRUTH_OR_EXPR), type,
>                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
>                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
>
> +    case BIT_IOR_EXPR:
> +      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
> +        return NULL_TREE;
> +      /* fall through.  */
>     case TRUTH_OR_EXPR:
>       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
>       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
> -      return build2_loc (loc, TRUTH_AND_EXPR, type,
> +      return build2_loc (loc, (code == BIT_IOR_EXPR ? BIT_AND_EXPR
> +                                                   : TRUTH_AND_EXPR), type,
>                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
>                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
> -
> +    case BIT_XOR_EXPR:
> +      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
> +        return NULL_TREE;
> +      /* fall through.  */
>     case TRUTH_XOR_EXPR:
>       /* Here we can invert either operand.  We invert the first operand
>         unless the second operand is a TRUTH_NOT_EXPR in which case our
> @@ -3095,10 +3110,14 @@ fold_truth_not_expr (location_t loc, tre
>         negation of the second operand.  */
>
>       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
> -       return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
> +       return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
> +                          TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
> +      else if (TREE_CODE (TREE_OPERAND (arg, 1)) == BIT_NOT_EXPR
> +              && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 1))) == 1)
> +       return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
>                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
>       else
> -       return build2_loc (loc, TRUTH_XOR_EXPR, type,
> +       return build2_loc (loc, code, type,
>                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
>                           TREE_OPERAND (arg, 1));
>
> @@ -3116,6 +3135,11 @@ fold_truth_not_expr (location_t loc, tre
>                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
>                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
>
> +
> +    case BIT_NOT_EXPR:
> +      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
> +        return NULL_TREE;
> +      /* fall through */
>     case TRUTH_NOT_EXPR:
>       return TREE_OPERAND (arg, 0);
>
> @@ -3158,11 +3182,6 @@ fold_truth_not_expr (location_t loc, tre
>       return build1_loc (loc, TREE_CODE (arg), type,
>                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
>
> -    case BIT_AND_EXPR:
> -      if (!integer_onep (TREE_OPERAND (arg, 1)))
> -       return NULL_TREE;
> -      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
> -
>     case SAVE_EXPR:
>       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
>
> @@ -4800,7 +4819,7 @@ fold_range_test (location_t loc, enum tr
>                 tree op0, tree op1)
>  {
>   int or_op = (code == TRUTH_ORIF_EXPR
> -              || code == TRUTH_OR_EXPR);
> +              || code == TRUTH_OR_EXPR || code == BIT_IOR_EXPR);
>   int in0_p, in1_p, in_p;
>   tree low0, low1, low, high0, high1, high;
>   bool strict_overflow_p = false;
> @@ -5099,8 +5118,9 @@ fold_truthop (location_t loc, enum tree_
>        }
>     }
>
> -  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
> -         ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
> +  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR)
> +    code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
> +           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
>
>   /* If the RHS can be evaluated unconditionally and its operands are
>      simple, it wins to evaluate the RHS unconditionally on machines
> @@ -5115,7 +5135,7 @@ fold_truthop (location_t loc, enum tree_
>       && simple_operand_p (rr_arg))
>     {
>       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
> -      if (code == TRUTH_OR_EXPR
> +      if ((code == TRUTH_OR_EXPR || code == BIT_IOR_EXPR)
>          && lcode == NE_EXPR && integer_zerop (lr_arg)
>          && rcode == NE_EXPR && integer_zerop (rr_arg)
>          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
> @@ -5126,7 +5146,7 @@ fold_truthop (location_t loc, enum tree_
>                           build_int_cst (TREE_TYPE (ll_arg), 0));
>
>       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
> -      if (code == TRUTH_AND_EXPR
> +      if ((code == TRUTH_AND_EXPR || code == BIT_AND_EXPR)
>          && lcode == EQ_EXPR && integer_zerop (lr_arg)
>          && rcode == EQ_EXPR && integer_zerop (rr_arg)
>          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
> @@ -5190,7 +5210,8 @@ fold_truthop (location_t loc, enum tree_
>      fail.  However, we can convert a one-bit comparison against zero into
>      the opposite comparison against that bit being set in the field.  */
>
> -  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
> +  wanted_code = ((code == TRUTH_AND_EXPR || code == BIT_AND_EXPR)
> +                ? EQ_EXPR : NE_EXPR);
>   if (lcode != wanted_code)
>     {
>       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
> @@ -9324,6 +9345,105 @@ get_pointer_modulus_and_residue (tree ex
>   return 1;
>  }
>
> +/* Fold a binary bitwise/truth expression of code CODE and type TYPE with operands
> +   OP0 and OP1.  LOC is the location of the resulting expression.
> +   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
> +   Return the folded expression if folding is successful.  Otherwise,
> +   return NULL_TREE.  */
> +
> +static tree
> +fold_truth_andor (location_t loc, enum tree_code code, tree type,
> +                 tree arg0, tree arg1, tree op0, tree op1)
> +{
> +  tree tem;
> +
> +  /* We only do these simplifications if we are optimizing.  */
> +  if (!optimize)
> +    return NULL_TREE;
> +  /* If code is BIT_AND_EXPR or BIT_IOR_EXPR, type precision has to be one.  */
> +  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
> +      && (!INTEGRAL_TYPE_P (type) || TYPE_PRECISION (type) != 1))
> +    return NULL_TREE;
> +
> +  /* Check for things like (A || B) && (A || C).  We can convert this
> +     to A || (B && C).  Note that either operator can be any of the four
> +     truth and/or operations and the transformation will still be
> +     valid.   Also note that we only care about order for the
> +     ANDIF and ORIF operators.  If B contains side effects, this
> +     might change the truth-value of A.  */
> +  if (TREE_CODE (arg0) == TREE_CODE (arg1)
> +      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
> +         || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
> +         || TREE_CODE (arg0) == BIT_AND_EXPR
> +         || TREE_CODE (arg0) == BIT_IOR_EXPR
> +         || TREE_CODE (arg0) == TRUTH_AND_EXPR
> +         || TREE_CODE (arg0) == TRUTH_OR_EXPR)
> +      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
> +    {
> +      tree a00 = TREE_OPERAND (arg0, 0);
> +      tree a01 = TREE_OPERAND (arg0, 1);
> +      tree a10 = TREE_OPERAND (arg1, 0);
> +      tree a11 = TREE_OPERAND (arg1, 1);
> +      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
> +                         || TREE_CODE (arg0) == TRUTH_AND_EXPR
> +                         || TREE_CODE (arg0) == BIT_AND_EXPR)
> +                        && (code == TRUTH_AND_EXPR
> +                            || code == TRUTH_OR_EXPR
> +                            || code == BIT_IOR_EXPR));
> +
> +      if (operand_equal_p (a00, a10, 0))
> +       return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
> +                           fold_build2_loc (loc, code, type, a01, a11));
> +      else if (commutative && operand_equal_p (a00, a11, 0))
> +       return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
> +                           fold_build2_loc (loc, code, type, a01, a10));
> +      else if (commutative && operand_equal_p (a01, a10, 0))
> +       return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
> +                           fold_build2_loc (loc, code, type, a00, a11));
> +
> +      /* This case is tricky because we must either have commutative
> +        operators or else A10 must not have side-effects.  */
> +
> +      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
> +              && operand_equal_p (a01, a11, 0))
> +       return fold_build2_loc (loc, TREE_CODE (arg0), type,
> +                           fold_build2_loc (loc, code, type, a00, a10),
> +                           a01);
> +    }
> +
> +  /* See if we can build a range comparison.  */
> +  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
> +    return tem;
> +
> +  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
> +      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
> +    {
> +      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
> +      if (tem)
> +       return fold_build2_loc (loc, code, type, tem, arg1);
> +    }
> +
> +  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
> +      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
> +    {
> +      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
> +      if (tem)
> +       return fold_build2_loc (loc, code, type, arg0, tem);
> +    }
> +
> +  /* Check for the possibility of merging component references.  If our
> +     lhs is another similar operation, try to merge its rhs with our
> +     rhs.  Then try to merge our lhs and rhs.  */
> +  if (TREE_CODE (arg0) == code
> +      && 0 != (tem = fold_truthop (loc, code, type,
> +                                  TREE_OPERAND (arg0, 1), arg1)))
> +    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
> +
> +  if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
> +    return tem;
> +
> +  return NULL_TREE;
> +}
>
>  /* Fold a binary expression of code CODE and type TYPE with operands
>    OP0 and OP1.  LOC is the location of the resulting expression.
> @@ -9424,21 +9544,42 @@ fold_binary_loc (location_t loc,
>
>   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
>        || code == EQ_EXPR || code == NE_EXPR)
> -      && ((truth_value_p (TREE_CODE (arg0))
> -          && (truth_value_p (TREE_CODE (arg1))
> +      && (!INTEGRAL_TYPE_P (type) || TYPE_PRECISION (type) != 1)
> +      && ((truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
> +          && (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
>               || (TREE_CODE (arg1) == BIT_AND_EXPR
>                   && integer_onep (TREE_OPERAND (arg1, 1)))))
> -         || (truth_value_p (TREE_CODE (arg1))
> -             && (truth_value_p (TREE_CODE (arg0))
> +         || (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
> +             && (truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
>                  || (TREE_CODE (arg0) == BIT_AND_EXPR
>                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
>     {
>       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
> -                        : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
> -                        : TRUTH_XOR_EXPR,
> -                        boolean_type_node,
> -                        fold_convert_loc (loc, boolean_type_node, arg0),
> -                        fold_convert_loc (loc, boolean_type_node, arg1));
> +                          : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
> +                          : TRUTH_XOR_EXPR,
> +                          boolean_type_node,
> +                          fold_convert_loc (loc, boolean_type_node, arg0),
> +                          fold_convert_loc (loc, boolean_type_node, arg1));
> +
> +      if (code == EQ_EXPR)
> +       tem = invert_truthvalue_loc (loc, tem);
> +
> +      return fold_convert_loc (loc, type, tem);
> +    }
> +  if ((code == EQ_EXPR || code == NE_EXPR)
> +      && ((truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
> +          && (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
> +              || (TREE_CODE (arg1) == BIT_AND_EXPR
> +                  && integer_onep (TREE_OPERAND (arg1, 1)))))
> +         || (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
> +             && (truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
> +                 || (TREE_CODE (arg0) == BIT_AND_EXPR
> +                     && integer_onep (TREE_OPERAND (arg0, 1)))))))
> +    {
> +      tem = fold_build2_loc (loc, BIT_XOR_EXPR,
> +                          boolean_type_node,
> +                          fold_convert_loc (loc, boolean_type_node, arg0),
> +                          fold_convert_loc (loc, boolean_type_node, arg1));
>
>       if (code == EQ_EXPR)
>        tem = invert_truthvalue_loc (loc, tem);
> @@ -10597,6 +10738,57 @@ fold_binary_loc (location_t loc,
>       if (operand_equal_p (arg0, arg1, 0))
>        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
>
> +      if (TYPE_PRECISION (type) == 1 && INTEGRAL_TYPE_P (type))
> +        {
> +         /* If either arg is constant zero, drop it.  */
> +         if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
> +           return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
> +         if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
> +           return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
> +         /* If second arg is constant true, result is true, but we must
> +            evaluate first arg.  */
> +         if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
> +           return omit_one_operand_loc (loc, type, arg1, arg0);
> +         if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
> +           return omit_one_operand_loc (loc, type, arg0, arg1);
> +
> +         /* !X | X is always true.  */
> +         if ((TREE_CODE (arg0) == TRUTH_NOT_EXPR
> +              || TREE_CODE (arg0) == BIT_NOT_EXPR)
> +             && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
> +           return omit_one_operand_loc (loc, type, integer_one_node, arg1);
> +         /* X | !X is always true.  */
> +         if ((TREE_CODE (arg1) == TRUTH_NOT_EXPR
> +             || TREE_CODE (arg1) == BIT_NOT_EXPR)
> +             && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
> +           return omit_one_operand_loc (loc, type, integer_one_node, arg0);
> +
> +         /* (X & !Y) | (!X & Y) is X ^ Y */
> +         if (TREE_CODE (arg0) == BIT_AND_EXPR
> +             && TREE_CODE (arg1) == BIT_AND_EXPR)
> +           {
> +             tree a0, a1, l0, l1, n0, n1;
> +
> +             a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
> +             a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
> +
> +             l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
> +             l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
> +
> +             n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
> +             n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
> +
> +             if ((operand_equal_p (n0, a0, 0)
> +                  && operand_equal_p (n1, a1, 0))
> +                 || (operand_equal_p (n0, a1, 0)
> +                     && operand_equal_p (n1, a0, 0)))
> +               return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
> +           }
> +         tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
> +         if (tem)
> +           return tem;
> +        }
> +
>       /* ~X | X is -1.  */
>       if (TREE_CODE (arg0) == BIT_NOT_EXPR
>          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
> @@ -10758,6 +10950,24 @@ fold_binary_loc (location_t loc,
>       if (operand_equal_p (arg0, arg1, 0))
>        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
>
> +      if (TYPE_PRECISION (type) == 1 && INTEGRAL_TYPE_P (type))
> +        {
> +         /* If the second arg is constant true, this is a logical inversion.  */
> +         if (integer_onep (arg1))
> +           {
> +             tem = invert_truthvalue_loc (loc, arg0);
> +             return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
> +           }
> +         /* !X ^ X is always true.  */
> +         if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
> +             && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
> +           return omit_one_operand_loc (loc, type, integer_one_node, arg1);
> +         /* X ^ !X is always true.  */
> +         if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
> +             && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
> +           return omit_one_operand_loc (loc, type, integer_one_node, arg0);
> +       }
> +
>       /* ~X ^ X is -1.  */
>       if (TREE_CODE (arg0) == BIT_NOT_EXPR
>          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
> @@ -10918,6 +11128,61 @@ fold_binary_loc (location_t loc,
>        return omit_one_operand_loc (loc, type, arg1, arg0);
>       if (operand_equal_p (arg0, arg1, 0))
>        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
> +      /* Note that the operands of this must be ints
> +        and their values must be 0 or 1.
> +        ("true" is a fixed value perhaps depending on the language.)  */
> +      /* If first arg is constant zero, return it.  */
> +      if (integer_zerop (arg0))
> +       return fold_convert_loc (loc, type, arg0);
> +
> +      if (TYPE_PRECISION (type) == 1 && INTEGRAL_TYPE_P (type))
> +        {
> +         /* If either arg is constant true, drop it.  */
> +         if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
> +           return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
> +         if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
> +             /* Preserve sequence points.  */
> +             && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
> +           return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
> +         /* If second arg is constant zero, result is zero, but first arg
> +            must be evaluated.  */
> +         if (integer_zerop (arg1))
> +           return omit_one_operand_loc (loc, type, arg1, arg0);
> +         /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
> +            case will be handled here.  */
> +         if (integer_zerop (arg0))
> +           return omit_one_operand_loc (loc, type, arg0, arg1);
> +
> +         /* !X && X is always false.  */
> +         if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
> +             && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
> +           return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
> +         /* X & !X is always false.  */
> +         if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
> +             && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
> +           return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
> +
> +         /* A < X & A + 1 > Y ==> A < X & A >= Y.  Normally A + 1 > Y
> +            means A >= Y & A != MAX, but in this case we know that
> +            A < X <= MAX.  */
> +
> +         if (!TREE_SIDE_EFFECTS (arg0)
> +             && !TREE_SIDE_EFFECTS (arg1))
> +           {
> +             tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
> +             if (tem && !operand_equal_p (tem, arg0, 0))
> +               return fold_build2_loc (loc, code, type, tem, arg1);
> +
> +             tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
> +             if (tem && !operand_equal_p (tem, arg1, 0))
> +               return fold_build2_loc (loc, code, type, arg0, tem);
> +           }
> +
> +         tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
> +         if (tem)
> +           return tem;
> +
> +       }
>
>       /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
>       if ((TREE_CODE (arg0) == BIT_NOT_EXPR
> @@ -12006,86 +12271,11 @@ fold_binary_loc (location_t loc,
>            return fold_build2_loc (loc, code, type, arg0, tem);
>        }
>
> -    truth_andor:
> -      /* We only do these simplifications if we are optimizing.  */
> -      if (!optimize)
> -       return NULL_TREE;
> -
> -      /* Check for things like (A || B) && (A || C).  We can convert this
> -        to A || (B && C).  Note that either operator can be any of the four
> -        truth and/or operations and the transformation will still be
> -        valid.   Also note that we only care about order for the
> -        ANDIF and ORIF operators.  If B contains side effects, this
> -        might change the truth-value of A.  */
> -      if (TREE_CODE (arg0) == TREE_CODE (arg1)
> -         && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
> -             || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
> -             || TREE_CODE (arg0) == TRUTH_AND_EXPR
> -             || TREE_CODE (arg0) == TRUTH_OR_EXPR)
> -         && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
> -       {
> -         tree a00 = TREE_OPERAND (arg0, 0);
> -         tree a01 = TREE_OPERAND (arg0, 1);
> -         tree a10 = TREE_OPERAND (arg1, 0);
> -         tree a11 = TREE_OPERAND (arg1, 1);
> -         int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
> -                             || TREE_CODE (arg0) == TRUTH_AND_EXPR)
> -                            && (code == TRUTH_AND_EXPR
> -                                || code == TRUTH_OR_EXPR));
> -
> -         if (operand_equal_p (a00, a10, 0))
> -           return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
> -                               fold_build2_loc (loc, code, type, a01, a11));
> -         else if (commutative && operand_equal_p (a00, a11, 0))
> -           return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
> -                               fold_build2_loc (loc, code, type, a01, a10));
> -         else if (commutative && operand_equal_p (a01, a10, 0))
> -           return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
> -                               fold_build2_loc (loc, code, type, a00, a11));
> -
> -         /* This case if tricky because we must either have commutative
> -            operators or else A10 must not have side-effects.  */
> -
> -         else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
> -                  && operand_equal_p (a01, a11, 0))
> -           return fold_build2_loc (loc, TREE_CODE (arg0), type,
> -                               fold_build2_loc (loc, code, type, a00, a10),
> -                               a01);
> -       }
> -
> -      /* See if we can build a range comparison.  */
> -      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
> -       return tem;
> -
> -      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
> -         || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
> -       {
> -         tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
> -         if (tem)
> -           return fold_build2_loc (loc, code, type, tem, arg1);
> -       }
> -
> -      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
> -         || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
> -       {
> -         tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
> -         if (tem)
> -           return fold_build2_loc (loc, code, type, arg0, tem);
> -       }
> -
> -      /* Check for the possibility of merging component references.  If our
> -        lhs is another similar operation, try to merge its rhs with our
> -        rhs.  Then try to merge our lhs and rhs.  */
> -      if (TREE_CODE (arg0) == code
> -         && 0 != (tem = fold_truthop (loc, code, type,
> -                                      TREE_OPERAND (arg0, 1), arg1)))
> -       return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
> -
> -      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
> -       return tem;
> +      tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
> +      if (tem)
> +        return tem;
>
>       return NULL_TREE;
> -
>     case TRUTH_ORIF_EXPR:
>       /* Note that the operands of this must be ints
>         and their values must be 0 or true.
> @@ -12140,7 +12330,11 @@ fold_binary_loc (location_t loc,
>                  && operand_equal_p (n1, a0, 0)))
>            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
>        }
> -      goto truth_andor;
> +      tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
> +      if (tem)
> +        return tem;
> +
> +      return NULL_TREE;
>
>     case TRUTH_XOR_EXPR:
>       /* If the second arg is constant zero, drop it.  */
> @@ -13401,7 +13595,7 @@ fold_ternary_loc (location_t loc, enum t
>
>       /* If the second operand is simpler than the third, swap them
>         since that produces better jump optimization results.  */
> -      if (truth_value_p (TREE_CODE (arg0))
> +      if (truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
>          && tree_swap_operands_p (op1, op2, false))
>        {
>          location_t loc0 = expr_location_or (arg0, loc);
> @@ -13427,7 +13621,7 @@ fold_ternary_loc (location_t loc, enum t
>         over COND_EXPR in cases such as floating point comparisons.  */
>       if (integer_zerop (op1)
>          && integer_onep (op2)
> -         && truth_value_p (TREE_CODE (arg0)))
> +         && truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0)))
>        return pedantic_non_lvalue_loc (loc,
>                                    fold_convert_loc (loc, type,
>                                              invert_truthvalue_loc (loc,
> Index: gcc-head/gcc/gimple.c
> ===================================================================
> --- gcc-head.orig/gcc/gimple.c
> +++ gcc-head/gcc/gimple.c
> @@ -3160,7 +3160,8 @@ canonicalize_cond_expr_cond (tree t)
>  {
>   /* Strip conversions around boolean operations.  */
>   if (CONVERT_EXPR_P (t)
> -      && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
> +      && truth_value_type_p (TREE_CODE (TREE_OPERAND (t, 0)),
> +                            TREE_TYPE (TREE_OPERAND (t, 0))))
>     t = TREE_OPERAND (t, 0);
>
>   /* For !x use x == 0.  */
> Index: gcc-head/gcc/gimplify.c
> ===================================================================
> --- gcc-head.orig/gcc/gimplify.c
> +++ gcc-head/gcc/gimplify.c
> @@ -2837,7 +2837,7 @@ gimple_boolify (tree expr)
>              if (TREE_CODE (arg) == NOP_EXPR
>                  && TREE_TYPE (arg) == TREE_TYPE (call))
>                arg = TREE_OPERAND (arg, 0);
> -             if (truth_value_p (TREE_CODE (arg)))
> +             if (truth_value_type_p (TREE_CODE (arg), TREE_TYPE (arg)))
>                {
>                  arg = gimple_boolify (arg);
>                  CALL_EXPR_ARG (call, 0)
> Index: gcc-head/gcc/tree-ssa-structalias.c
> ===================================================================
> --- gcc-head.orig/gcc/tree-ssa-structalias.c
> +++ gcc-head/gcc/tree-ssa-structalias.c
> @@ -4416,7 +4416,8 @@ find_func_aliases (gimple origt)
>                         && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
>                   || gimple_assign_single_p (t))
>            get_constraint_for_rhs (rhsop, &rhsc);
> -         else if (truth_value_p (code))
> +         else if (truth_value_type_p (code,
> +                                      TREE_TYPE (lhsop)))
>            /* Truth value results are not pointer (parts).  Or at least
>               very very unreasonable obfuscation of a part.  */
>            ;
> Index: gcc-head/gcc/tree.h
> ===================================================================
> --- gcc-head.orig/gcc/tree.h
> +++ gcc-head/gcc/tree.h
> @@ -5309,13 +5309,22 @@ extern tree combine_comparisons (locatio
>  extern void debug_fold_checksum (const_tree);
>
>  /* Return nonzero if CODE is a tree code that represents a truth value.  */
> +#define truth_value_p(CODE)  truth_value_type_p ((CODE), NULL_TREE)
> +
> +/* Return nonzero if CODE is a tree code that represents a truth value.
> +   If TYPE is an integral type, unsigned, and has precision of one, then
> +   additionally return for bitwise-binary and bitwise-invert nonzero.  */
>  static inline bool
> -truth_value_p (enum tree_code code)
> +truth_value_type_p (enum tree_code code, tree type)
>  {
>   return (TREE_CODE_CLASS (code) == tcc_comparison
>          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
>          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
> -         || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
> +         || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR
> +         || ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
> +              || code == BIT_XOR_EXPR || code == BIT_NOT_EXPR)
> +             && type && INTEGRAL_TYPE_P (type)
> +             && TYPE_PRECISION (type) == 1 && TYPE_UNSIGNED (type)));
>  }
>
>
> Index: gcc-head/gcc/tree-ssa-forwprop.c
> ===================================================================
> --- gcc-head.orig/gcc/tree-ssa-forwprop.c
> +++ gcc-head/gcc/tree-ssa-forwprop.c
> @@ -1668,7 +1668,7 @@ truth_valued_ssa_name (tree name)
>     return true;
>   def = SSA_NAME_DEF_STMT (name);
>   if (is_gimple_assign (def))
> -    return truth_value_p (gimple_assign_rhs_code (def));
> +    return truth_value_type_p (gimple_assign_rhs_code (def), type);
>   return false;
>  }
>
Kai Tietz - July 8, 2011, 9:28 a.m.
2011/7/8 Richard Guenther <richard.guenther@gmail.com>:
> On Thu, Jul 7, 2011 at 6:06 PM, Kai Tietz <ktietz70@googlemail.com> wrote:
>> Hello,
>>
>> This patch - first of series - adds to fold and some helper routines support
>> for one-bit precision bitwise folding and detection.
>> This patch is necessary for - next patch of series - boolification of
>> comparisons.
>>
>> Bootstrapped and regression tested for all standard-languages (plus
>> Ada and Obj-C++) on host x86_64-pc-linux-gnu.
>>
>> Ok for apply?
>
> Factoring out fold_truth_andor to a function should be done separately.
> A patch that does just that is pre-approved.

Ok, I will send a separate patch for this. But in fact it only makes
sense together with the 1-bit precision bitwise support.

> Otherwise the patch globs too many changes and lacks reasoning.
> Why do we want to handle all this in fold when the boolification
> happens only after gimplification?

We still rely on truth/bitwise folding in fold-const.  Also we need to
handle this for passes that use fold_binary to optimize and handle
boolified operations - like tree-ssa-reassoc or tree-vect*.
This support in fold-const is necessary when we preserve casts
from/to boolean, as otherwise we no longer fold bitwise-binary
operations with compares properly.  Additionally we have to take care
that we don't introduce TRUTH_(AND|OR|XOR) expressions on boolified
trees, as otherwise tree-cfg will barf.  We also need to take care that
the types of comparisons and TRUTH_NOT expressions are boolean, as
otherwise tree-cfg will again detect incompatible types for those
expressions.

> Thanks,
> Richard.
>
>> Regards,
>> Kai
>>
>> ChangeLog
>>
>> 2011-07-07  Kai Tietz  <ktietz@redhat.com>
>>
>>        * fold-const.c (fold_truth_not_expr): Handle
>>        one bit precision bitwise operations.
>>        (fold_range_test): Likewise.
>>        (fold_truthop): Likewise.
>>        (fold_binary_loc): Likewise.
>>        (fold_truth_andor): Function replaces truth_andor
>>        label.
>>        (fold_ternary_loc): Use truth_value_type_p instead
>>        of truth_value_p.
>>        * gimple.c (canonicalize_cond_expr_cond): Likewise.
>>        * gimplify.c (gimple_boolify): Likewise.
>>        * tree-ssa-structalias.c (find_func_aliases): Likewise.
>>        * tree-ssa-forwprop.c (truth_valued_ssa_name): Likewise.
>>        * tree.h (truth_value_type_p): New function.
>>        (truth_value_p): Implemented as macro via truth_value_type_p.
>>
>>
>> Index: gcc-head/gcc/fold-const.c
>> ===================================================================
>> --- gcc-head.orig/gcc/fold-const.c
>> +++ gcc-head/gcc/fold-const.c
>> @@ -3074,20 +3074,35 @@ fold_truth_not_expr (location_t loc, tre
>>     case INTEGER_CST:
>>       return constant_boolean_node (integer_zerop (arg), type);
>>
>> +    case BIT_AND_EXPR:
>> +      if (integer_onep (TREE_OPERAND (arg, 1)))
>> +       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
>> +      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
>> +        return NULL_TREE;
>> +      /* fall through */
>>     case TRUTH_AND_EXPR:
>>       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
>>       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
>> -      return build2_loc (loc, TRUTH_OR_EXPR, type,
>> +      return build2_loc (loc, (code == BIT_AND_EXPR ? BIT_IOR_EXPR
>> +                                                   : TRUTH_OR_EXPR), type,
>>                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
>>                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
>>
>> +    case BIT_IOR_EXPR:
>> +      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
>> +        return NULL_TREE;
>> +      /* fall through.  */
>>     case TRUTH_OR_EXPR:
>>       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
>>       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
>> -      return build2_loc (loc, TRUTH_AND_EXPR, type,
>> +      return build2_loc (loc, (code == BIT_IOR_EXPR ? BIT_AND_EXPR
>> +                                                   : TRUTH_AND_EXPR), type,
>>                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
>>                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
>> -
>> +    case BIT_XOR_EXPR:
>> +      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
>> +        return NULL_TREE;
>> +      /* fall through.  */
>>     case TRUTH_XOR_EXPR:
>>       /* Here we can invert either operand.  We invert the first operand
>>         unless the second operand is a TRUTH_NOT_EXPR in which case our
>> @@ -3095,10 +3110,14 @@ fold_truth_not_expr (location_t loc, tre
>>         negation of the second operand.  */
>>
>>       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
>> -       return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
>> +       return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
>> +                          TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
>> +      else if (TREE_CODE (TREE_OPERAND (arg, 1)) == BIT_NOT_EXPR
>> +              && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 1))) == 1)
>> +       return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
>>                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
>>       else
>> -       return build2_loc (loc, TRUTH_XOR_EXPR, type,
>> +       return build2_loc (loc, code, type,
>>                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
>>                           TREE_OPERAND (arg, 1));
>>
>> @@ -3116,6 +3135,11 @@ fold_truth_not_expr (location_t loc, tre
>>                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
>>                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
>>
>> +
>> +    case BIT_NOT_EXPR:
>> +      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
>> +        return NULL_TREE;
>> +      /* fall through */
>>     case TRUTH_NOT_EXPR:
>>       return TREE_OPERAND (arg, 0);
>>
>> @@ -3158,11 +3182,6 @@ fold_truth_not_expr (location_t loc, tre
>>       return build1_loc (loc, TREE_CODE (arg), type,
>>                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
>>
>> -    case BIT_AND_EXPR:
>> -      if (!integer_onep (TREE_OPERAND (arg, 1)))
>> -       return NULL_TREE;
>> -      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
>> -
>>     case SAVE_EXPR:
>>       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
>>
>> @@ -4800,7 +4819,7 @@ fold_range_test (location_t loc, enum tr
>>                 tree op0, tree op1)
>>  {
>>   int or_op = (code == TRUTH_ORIF_EXPR
>> -              || code == TRUTH_OR_EXPR);
>> +              || code == TRUTH_OR_EXPR || code == BIT_IOR_EXPR);
>>   int in0_p, in1_p, in_p;
>>   tree low0, low1, low, high0, high1, high;
>>   bool strict_overflow_p = false;
>> @@ -5099,8 +5118,9 @@ fold_truthop (location_t loc, enum tree_
>>        }
>>     }
>>
>> -  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
>> -         ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
>> +  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR)
>> +    code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
>> +           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
>>
>>   /* If the RHS can be evaluated unconditionally and its operands are
>>      simple, it wins to evaluate the RHS unconditionally on machines
>> @@ -5115,7 +5135,7 @@ fold_truthop (location_t loc, enum tree_
>>       && simple_operand_p (rr_arg))
>>     {
>>       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
>> -      if (code == TRUTH_OR_EXPR
>> +      if ((code == TRUTH_OR_EXPR || code == BIT_IOR_EXPR)
>>          && lcode == NE_EXPR && integer_zerop (lr_arg)
>>          && rcode == NE_EXPR && integer_zerop (rr_arg)
>>          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
>> @@ -5126,7 +5146,7 @@ fold_truthop (location_t loc, enum tree_
>>                           build_int_cst (TREE_TYPE (ll_arg), 0));
>>
>>       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
>> -      if (code == TRUTH_AND_EXPR
>> +      if ((code == TRUTH_AND_EXPR || code == BIT_AND_EXPR)
>>          && lcode == EQ_EXPR && integer_zerop (lr_arg)
>>          && rcode == EQ_EXPR && integer_zerop (rr_arg)
>>          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
>> @@ -5190,7 +5210,8 @@ fold_truthop (location_t loc, enum tree_
>>      fail.  However, we can convert a one-bit comparison against zero into
>>      the opposite comparison against that bit being set in the field.  */
>>
>> -  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
>> +  wanted_code = ((code == TRUTH_AND_EXPR || code == BIT_AND_EXPR)
>> +                ? EQ_EXPR : NE_EXPR);
>>   if (lcode != wanted_code)
>>     {
>>       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
>> @@ -9324,6 +9345,105 @@ get_pointer_modulus_and_residue (tree ex
>>   return 1;
>>  }
>>
>> +/* Fold a binary bitwise/truth expression of code CODE and type TYPE
>> with operands
>> +   OP0 and OP1.  LOC is the location of the resulting expression.
>> +   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
>> +   Return the folded expression if folding is successful.  Otherwise,
>> +   return NULL_TREE.  */
>> +
>> +static tree
>> +fold_truth_andor (location_t loc, enum tree_code code, tree type,
>> +                 tree arg0, tree arg1, tree op0, tree op1)
>> +{
>> +  tree tem;
>> +
>> +  /* We only do these simplifications if we are optimizing.  */
>> +  if (!optimize)
>> +    return NULL_TREE;
>> +  /* If code is BIT_AND_EXPR or BIT_IOR_EXPR, type precision has to be one.  */
>> +  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
>> +      && (!INTEGRAL_TYPE_P (type) || TYPE_PRECISION (type) != 1))
>> +    return NULL_TREE;
>> +
>> +  /* Check for things like (A || B) && (A || C).  We can convert this
>> +     to A || (B && C).  Note that either operator can be any of the four
>> +     truth and/or operations and the transformation will still be
>> +     valid.   Also note that we only care about order for the
>> +     ANDIF and ORIF operators.  If B contains side effects, this
>> +     might change the truth-value of A.  */
>> +  if (TREE_CODE (arg0) == TREE_CODE (arg1)
>> +      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
>> +         || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
>> +         || TREE_CODE (arg0) == BIT_AND_EXPR
>> +         || TREE_CODE (arg0) == BIT_IOR_EXPR
>> +         || TREE_CODE (arg0) == TRUTH_AND_EXPR
>> +         || TREE_CODE (arg0) == TRUTH_OR_EXPR)
>> +      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
>> +    {
>> +      tree a00 = TREE_OPERAND (arg0, 0);
>> +      tree a01 = TREE_OPERAND (arg0, 1);
>> +      tree a10 = TREE_OPERAND (arg1, 0);
>> +      tree a11 = TREE_OPERAND (arg1, 1);
>> +      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
>> +                         || TREE_CODE (arg0) == TRUTH_AND_EXPR
>> +                         || TREE_CODE (arg0) == BIT_AND_EXPR)
>> +                        && (code == TRUTH_AND_EXPR
>> +                            || code == TRUTH_OR_EXPR
>> +                            || code == BIT_IOR_EXPR));
>> +
>> +      if (operand_equal_p (a00, a10, 0))
>> +       return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
>> +                           fold_build2_loc (loc, code, type, a01, a11));
>> +      else if (commutative && operand_equal_p (a00, a11, 0))
>> +       return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
>> +                           fold_build2_loc (loc, code, type, a01, a10));
>> +      else if (commutative && operand_equal_p (a01, a10, 0))
>> +       return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
>> +                           fold_build2_loc (loc, code, type, a00, a11));
>> +
>> +      /* This case if tricky because we must either have commutative
>> +        operators or else A10 must not have side-effects.  */
>> +
>> +      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
>> +              && operand_equal_p (a01, a11, 0))
>> +       return fold_build2_loc (loc, TREE_CODE (arg0), type,
>> +                           fold_build2_loc (loc, code, type, a00, a10),
>> +                           a01);
>> +    }
>> +
>> +  /* See if we can build a range comparison. */
>> +  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
>> +    return tem;
>> +
>> +  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
>> +      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
>> +    {
>> +      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
>> +      if (tem)
>> +       return fold_build2_loc (loc, code, type, tem, arg1);
>> +    }
>> +
>> +  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
>> +      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
>> +    {
>> +      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
>> +      if (tem)
>> +       return fold_build2_loc (loc, code, type, arg0, tem);
>> +    }
>> +
>> +  /* Check for the possibility of merging component references.  If our
>> +     lhs is another similar operation, try to merge its rhs with our
>> +     rhs.  Then try to merge our lhs and rhs.  */
>> +  if (TREE_CODE (arg0) == code
>> +      && 0 != (tem = fold_truthop (loc, code, type,
>> +                                  TREE_OPERAND (arg0, 1), arg1)))
>> +    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
>> +
>> +  if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
>> +    return tem;
>> +
>> +  return NULL_TREE;
>> +}
>>
>>  /* Fold a binary expression of code CODE and type TYPE with operands
>>    OP0 and OP1.  LOC is the location of the resulting expression.
>> @@ -9424,21 +9544,42 @@ fold_binary_loc (location_t loc,
>>
>>   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
>>        || code == EQ_EXPR || code == NE_EXPR)
>> -      && ((truth_value_p (TREE_CODE (arg0))
>> -          && (truth_value_p (TREE_CODE (arg1))
>> +      && (!INTEGRAL_TYPE_P (type) || TYPE_PRECISION (type) != 1)
>> +      && ((truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
>> +          && (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
>>               || (TREE_CODE (arg1) == BIT_AND_EXPR
>>                   && integer_onep (TREE_OPERAND (arg1, 1)))))
>> -         || (truth_value_p (TREE_CODE (arg1))
>> -             && (truth_value_p (TREE_CODE (arg0))
>> +         || (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
>> +             && (truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
>>                  || (TREE_CODE (arg0) == BIT_AND_EXPR
>>                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
>>     {
>>       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
>> -                        : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
>> -                        : TRUTH_XOR_EXPR,
>> -                        boolean_type_node,
>> -                        fold_convert_loc (loc, boolean_type_node, arg0),
>> -                        fold_convert_loc (loc, boolean_type_node, arg1));
>> +                          : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
>> +                          : TRUTH_XOR_EXPR,
>> +                          boolean_type_node,
>> +                          fold_convert_loc (loc, boolean_type_node, arg0),
>> +                          fold_convert_loc (loc, boolean_type_node, arg1));
>> +
>> +      if (code == EQ_EXPR)
>> +       tem = invert_truthvalue_loc (loc, tem);
>> +
>> +      return fold_convert_loc (loc, type, tem);
>> +    }
>> +  if ((code == EQ_EXPR || code == NE_EXPR)
>> +      && ((truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
>> +          && (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
>> +              || (TREE_CODE (arg1) == BIT_AND_EXPR
>> +                  && integer_onep (TREE_OPERAND (arg1, 1)))))
>> +         || (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
>> +             && (truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
>> +                 || (TREE_CODE (arg0) == BIT_AND_EXPR
>> +                     && integer_onep (TREE_OPERAND (arg0, 1)))))))
>> +    {
>> +      tem = fold_build2_loc (loc, BIT_XOR_EXPR,
>> +                          boolean_type_node,
>> +                          fold_convert_loc (loc, boolean_type_node, arg0),
>> +                          fold_convert_loc (loc, boolean_type_node, arg1));
>>
>>       if (code == EQ_EXPR)
>>        tem = invert_truthvalue_loc (loc, tem);
>> @@ -10597,6 +10738,57 @@ fold_binary_loc (location_t loc,
>>       if (operand_equal_p (arg0, arg1, 0))
>>        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
>>
>> +      if (TYPE_PRECISION (type) == 1 && INTEGRAL_TYPE_P (type))
>> +        {
>> +         /* If either arg is constant zero, drop it.  */
>> +         if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
>> +           return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
>> +         if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
>> +           return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
>> +         /* If second arg is constant true, result is true, but we must
>> +            evaluate first arg.  */
>> +         if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
>> +           return omit_one_operand_loc (loc, type, arg1, arg0);
>> +         if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
>> +           return omit_one_operand_loc (loc, type, arg0, arg1);
>> +
>> +         /* !X | X is always true.  */
>> +         if ((TREE_CODE (arg0) == TRUTH_NOT_EXPR
>> +              || TREE_CODE (arg0) == BIT_NOT_EXPR)
>> +             && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
>> +           return omit_one_operand_loc (loc, type, integer_one_node, arg1);
>> +         /* X | !X is always true.  */
>> +         if ((TREE_CODE (arg1) == TRUTH_NOT_EXPR
>> +             || TREE_CODE (arg1) == BIT_NOT_EXPR)
>> +             && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
>> +           return omit_one_operand_loc (loc, type, integer_one_node, arg0);
>> +
>> +         /* (X & !Y) | (!X & Y) is X ^ Y */
>> +         if (TREE_CODE (arg0) == BIT_AND_EXPR
>> +             && TREE_CODE (arg1) == BIT_AND_EXPR)
>> +           {
>> +             tree a0, a1, l0, l1, n0, n1;
>> +
>> +             a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
>> +             a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
>> +
>> +             l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
>> +             l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
>> +
>> +             n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
>> +             n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
>> +
>> +             if ((operand_equal_p (n0, a0, 0)
>> +                  && operand_equal_p (n1, a1, 0))
>> +                 || (operand_equal_p (n0, a1, 0)
>> +                     && operand_equal_p (n1, a0, 0)))
>> +               return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
>> +           }
>> +         tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
>> +         if (tem)
>> +           return tem;
>> +        }
>> +
>>       /* ~X | X is -1.  */
>>       if (TREE_CODE (arg0) == BIT_NOT_EXPR
>>          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
>> @@ -10758,6 +10950,24 @@ fold_binary_loc (location_t loc,
>>       if (operand_equal_p (arg0, arg1, 0))
>>        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
>>
>> +      if (TYPE_PRECISION (type) == 1 && INTEGRAL_TYPE_P (type))
>> +        {
>> +         /* If the second arg is constant true, this is a logical inversion.  */
>> +         if (integer_onep (arg1))
>> +           {
>> +             tem = invert_truthvalue_loc (loc, arg0);
>> +             return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
>> +           }
>> +         /* !X ^ X is always true.  */
>> +         if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
>> +             && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
>> +           return omit_one_operand_loc (loc, type, integer_one_node, arg1);
>> +         /* X ^ !X is always true.  */
>> +         if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
>> +             && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
>> +           return omit_one_operand_loc (loc, type, integer_one_node, arg0);
>> +       }
>> +
>>       /* ~X ^ X is -1.  */
>>       if (TREE_CODE (arg0) == BIT_NOT_EXPR
>>          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
>> @@ -10918,6 +11128,61 @@ fold_binary_loc (location_t loc,
>>        return omit_one_operand_loc (loc, type, arg1, arg0);
>>       if (operand_equal_p (arg0, arg1, 0))
>>        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
>> +      /* Note that the operands of this must be ints
>> +        and their values must be 0 or 1.
>> +        ("true" is a fixed value perhaps depending on the language.)  */
>> +      /* If first arg is constant zero, return it.  */
>> +      if (integer_zerop (arg0))
>> +       return fold_convert_loc (loc, type, arg0);
>> +
>> +      if (TYPE_PRECISION (type) == 1 && INTEGRAL_TYPE_P (type))
>> +        {
>> +         /* If either arg is constant true, drop it.  */
>> +         if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
>> +           return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
>> +         if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
>> +             /* Preserve sequence points.  */
>> +             && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
>> +           return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
>> +         /* If second arg is constant zero, result is zero, but first arg
>> +            must be evaluated.  */
>> +         if (integer_zerop (arg1))
>> +           return omit_one_operand_loc (loc, type, arg1, arg0);
>> +         /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
>> +            case will be handled here.  */
>> +         if (integer_zerop (arg0))
>> +           return omit_one_operand_loc (loc, type, arg0, arg1);
>> +
>> +         /* !X && X is always false.  */
>> +         if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
>> +             && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
>> +           return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
>> +         /* X & !X is always false.  */
>> +         if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
>> +             && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
>> +           return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
>> +
>> +         /* A < X & A + 1 > Y ==> A < X & A >= Y.  Normally A + 1 > Y
>> +            means A >= Y & A != MAX, but in this case we know that
>> +            A < X <= MAX.  */
>> +
>> +         if (!TREE_SIDE_EFFECTS (arg0)
>> +             && !TREE_SIDE_EFFECTS (arg1))
>> +           {
>> +             tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
>> +             if (tem && !operand_equal_p (tem, arg0, 0))
>> +               return fold_build2_loc (loc, code, type, tem, arg1);
>> +
>> +             tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
>> +             if (tem && !operand_equal_p (tem, arg1, 0))
>> +               return fold_build2_loc (loc, code, type, arg0, tem);
>> +           }
>> +
>> +         tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
>> +         if (tem)
>> +           return tem;
>> +
>> +       }
>>
>>       /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
>>       if ((TREE_CODE (arg0) == BIT_NOT_EXPR
>> @@ -12006,86 +12271,11 @@ fold_binary_loc (location_t loc,
>>            return fold_build2_loc (loc, code, type, arg0, tem);
>>        }
>>
>> -    truth_andor:
>> -      /* We only do these simplifications if we are optimizing.  */
>> -      if (!optimize)
>> -       return NULL_TREE;
>> -
>> -      /* Check for things like (A || B) && (A || C).  We can convert this
>> -        to A || (B && C).  Note that either operator can be any of the four
>> -        truth and/or operations and the transformation will still be
>> -        valid.   Also note that we only care about order for the
>> -        ANDIF and ORIF operators.  If B contains side effects, this
>> -        might change the truth-value of A.  */
>> -      if (TREE_CODE (arg0) == TREE_CODE (arg1)
>> -         && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
>> -             || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
>> -             || TREE_CODE (arg0) == TRUTH_AND_EXPR
>> -             || TREE_CODE (arg0) == TRUTH_OR_EXPR)
>> -         && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
>> -       {
>> -         tree a00 = TREE_OPERAND (arg0, 0);
>> -         tree a01 = TREE_OPERAND (arg0, 1);
>> -         tree a10 = TREE_OPERAND (arg1, 0);
>> -         tree a11 = TREE_OPERAND (arg1, 1);
>> -         int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
>> -                             || TREE_CODE (arg0) == TRUTH_AND_EXPR)
>> -                            && (code == TRUTH_AND_EXPR
>> -                                || code == TRUTH_OR_EXPR));
>> -
>> -         if (operand_equal_p (a00, a10, 0))
>> -           return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
>> -                               fold_build2_loc (loc, code, type, a01, a11));
>> -         else if (commutative && operand_equal_p (a00, a11, 0))
>> -           return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
>> -                               fold_build2_loc (loc, code, type, a01, a10));
>> -         else if (commutative && operand_equal_p (a01, a10, 0))
>> -           return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
>> -                               fold_build2_loc (loc, code, type, a00, a11));
>> -
>> -         /* This case if tricky because we must either have commutative
>> -            operators or else A10 must not have side-effects.  */
>> -
>> -         else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
>> -                  && operand_equal_p (a01, a11, 0))
>> -           return fold_build2_loc (loc, TREE_CODE (arg0), type,
>> -                               fold_build2_loc (loc, code, type, a00, a10),
>> -                               a01);
>> -       }
>> -
>> -      /* See if we can build a range comparison.  */
>> -      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
>> -       return tem;
>> -
>> -      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
>> -         || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
>> -       {
>> -         tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
>> -         if (tem)
>> -           return fold_build2_loc (loc, code, type, tem, arg1);
>> -       }
>> -
>> -      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
>> -         || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
>> -       {
>> -         tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
>> -         if (tem)
>> -           return fold_build2_loc (loc, code, type, arg0, tem);
>> -       }
>> -
>> -      /* Check for the possibility of merging component references.  If our
>> -        lhs is another similar operation, try to merge its rhs with our
>> -        rhs.  Then try to merge our lhs and rhs.  */
>> -      if (TREE_CODE (arg0) == code
>> -         && 0 != (tem = fold_truthop (loc, code, type,
>> -                                      TREE_OPERAND (arg0, 1), arg1)))
>> -       return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
>> -
>> -      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
>> -       return tem;
>> +      tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
>> +      if (tem)
>> +        return tem;
>>
>>       return NULL_TREE;
>> -
>>     case TRUTH_ORIF_EXPR:
>>       /* Note that the operands of this must be ints
>>         and their values must be 0 or true.
>> @@ -12140,7 +12330,11 @@ fold_binary_loc (location_t loc,
>>                  && operand_equal_p (n1, a0, 0)))
>>            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
>>        }
>> -      goto truth_andor;
>> +      tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
>> +      if (tem)
>> +        return tem;
>> +
>> +      return NULL_TREE;
>>
>>     case TRUTH_XOR_EXPR:
>>       /* If the second arg is constant zero, drop it.  */
>> @@ -13401,7 +13595,7 @@ fold_ternary_loc (location_t loc, enum t
>>
>>       /* If the second operand is simpler than the third, swap them
>>         since that produces better jump optimization results.  */
>> -      if (truth_value_p (TREE_CODE (arg0))
>> +      if (truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
>>          && tree_swap_operands_p (op1, op2, false))
>>        {
>>          location_t loc0 = expr_location_or (arg0, loc);
>> @@ -13427,7 +13621,7 @@ fold_ternary_loc (location_t loc, enum t
>>         over COND_EXPR in cases such as floating point comparisons.  */
>>       if (integer_zerop (op1)
>>          && integer_onep (op2)
>> -         && truth_value_p (TREE_CODE (arg0)))
>> +         && truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0)))
>>        return pedantic_non_lvalue_loc (loc,
>>                                    fold_convert_loc (loc, type,
>>                                              invert_truthvalue_loc (loc,
>> Index: gcc-head/gcc/gimple.c
>> ===================================================================
>> --- gcc-head.orig/gcc/gimple.c
>> +++ gcc-head/gcc/gimple.c
>> @@ -3160,7 +3160,8 @@ canonicalize_cond_expr_cond (tree t)
>>  {
>>   /* Strip conversions around boolean operations.  */
>>   if (CONVERT_EXPR_P (t)
>> -      && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
>> +      && truth_value_type_p (TREE_CODE (TREE_OPERAND (t, 0)),
>> +                            TREE_TYPE (TREE_OPERAND (t, 0))))
>>     t = TREE_OPERAND (t, 0);
>>
>>   /* For !x use x == 0.  */
>> Index: gcc-head/gcc/gimplify.c
>> ===================================================================
>> --- gcc-head.orig/gcc/gimplify.c
>> +++ gcc-head/gcc/gimplify.c
>> @@ -2837,7 +2837,7 @@ gimple_boolify (tree expr)
>>              if (TREE_CODE (arg) == NOP_EXPR
>>                  && TREE_TYPE (arg) == TREE_TYPE (call))
>>                arg = TREE_OPERAND (arg, 0);
>> -             if (truth_value_p (TREE_CODE (arg)))
>> +             if (truth_value_type_p (TREE_CODE (arg), TREE_TYPE (arg)))
>>                {
>>                  arg = gimple_boolify (arg);
>>                  CALL_EXPR_ARG (call, 0)
>> Index: gcc-head/gcc/tree-ssa-structalias.c
>> ===================================================================
>> --- gcc-head.orig/gcc/tree-ssa-structalias.c
>> +++ gcc-head/gcc/tree-ssa-structalias.c
>> @@ -4416,7 +4416,8 @@ find_func_aliases (gimple origt)
>>                         && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
>>                   || gimple_assign_single_p (t))
>>            get_constraint_for_rhs (rhsop, &rhsc);
>> -         else if (truth_value_p (code))
>> +         else if (truth_value_type_p (code,
>> +                                      TREE_TYPE (lhsop)))
>>            /* Truth value results are not pointer (parts).  Or at least
>>               very very unreasonable obfuscation of a part.  */
>>            ;
>> Index: gcc-head/gcc/tree.h
>> ===================================================================
>> --- gcc-head.orig/gcc/tree.h
>> +++ gcc-head/gcc/tree.h
>> @@ -5309,13 +5309,22 @@ extern tree combine_comparisons (locatio
>>  extern void debug_fold_checksum (const_tree);
>>
>>  /* Return nonzero if CODE is a tree code that represents a truth value.  */
>> +#define truth_value_p(CODE)  truth_value_type_p ((CODE), NULL_TREE)
>> +
>> +/* Return nonzero if CODE is a tree code that represents a truth value.
>> +   If TYPE is an integral type, unsigned, and has a precision of one, then
>> +   additionally return nonzero for bitwise-binary and bitwise-invert codes.  */
>>  static inline bool
>> -truth_value_p (enum tree_code code)
>> +truth_value_type_p (enum tree_code code, tree type)
>>  {
>>   return (TREE_CODE_CLASS (code) == tcc_comparison
>>          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
>>          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
>> -         || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
>> +         || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR
>> +         || ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
>> +              || code == BIT_XOR_EXPR || code == BIT_NOT_EXPR)
>> +             && type && INTEGRAL_TYPE_P (type)
>> +             && TYPE_PRECISION (type) == 1 && TYPE_UNSIGNED (type)));
>>  }
>>
>>
>> Index: gcc-head/gcc/tree-ssa-forwprop.c
>> ===================================================================
>> --- gcc-head.orig/gcc/tree-ssa-forwprop.c
>> +++ gcc-head/gcc/tree-ssa-forwprop.c
>> @@ -1668,7 +1668,7 @@ truth_valued_ssa_name (tree name)
>>     return true;
>>   def = SSA_NAME_DEF_STMT (name);
>>   if (is_gimple_assign (def))
>> -    return truth_value_p (gimple_assign_rhs_code (def));
>> +    return truth_value_type_p (gimple_assign_rhs_code (def), type);
>>   return false;
>>  }
>>
>
Richard Guenther - July 8, 2011, 9:44 a.m.
On Fri, Jul 8, 2011 at 11:28 AM, Kai Tietz <ktietz70@googlemail.com> wrote:
> 2011/7/8 Richard Guenther <richard.guenther@gmail.com>:
>> On Thu, Jul 7, 2011 at 6:06 PM, Kai Tietz <ktietz70@googlemail.com> wrote:
>>> Hello,
>>>
>>> This patch - first of series - adds to fold and some helper routines support
>>> for one-bit precision bitwise folding and detection.
>>> This patch is necessary for - next patch of series - boolification of
>>> comparisons.
>>>
>>> Bootstrapped and regression tested for all standard-languages (plus
>>> Ada and Obj-C++) on host x86_64-pc-linux-gnu.
>>>
>>> Ok for apply?
>>
>> Factoring out fold_truth_andor to a function should be done separately.
>> A patch that does just that is pre-approved.
>
> Ok, I will send a separate patch for this. But in fact it just makes
> sense together with the 1-bit precision bitwise support, too.

No, it makes sense anyway to get rid of that goto.  Note _only_ factoring
out the function, not changing anything in it.

>> Otherwise the patch globs too many changes and lacks reasoning.
>> Why do we want to handle all this in fold when the boolification
>> happens only after gimplification?
>
> We still rely on truth/bitwise folding on fold-const.  Also we need to
> handle this for passes, which are using fold_binary to optimize  and
> handle boolified operations - like tree-ssa-reassoc, of tree-vect*.
> This support in fold-const is necessary when we are preserving casts
> from/to boolean, as otherwise we don't fold bitwise-binary with
> compares proper anymore.  Additionally we have to take care that we
> don't enter TRUTH_(AND|OR|XOR) expressions on boolified trees, as
> otherwise tree-cfg will barf. Also we need to take care that types of
> comparisons and TRUTH_NOT expressions are boolean one, as otherwise
> again tree-cfg will detect incompatible types for those expressions.

Sounds like many different things for many individual patches.  Btw,
I'd rather have the tree passes that rely on fold call a gimple specific
wrapper where we can add such things (and also use gimple/SSA
specific optimizations, like less strict typing), like
gimple_fold_binary (....), see also my gimple folding proposal from
earlier this year. http://gcc.gnu.org/ml/gcc-patches/2011-03/msg01099.html

Richard.

>> Thanks,
>> Richard.
>>
>>> Regards,
>>> Kai
>>>
>>> ChangeLog
>>>
>>> 2011-07-07  Kai Tietz  <ktietz@redhat.com>
>>>
>>>        * fold-const.c (fold_truth_not_expr): Handle
>>>        one bit precision bitwise operations.
>>>        (fold_range_test): Likewise.
>>>        (fold_truthop): Likewise.
>>>        (fold_binary_loc): Likewise.
>>>        (fold_truth_andor): Function replaces truth_andor
>>>        label.
>>>        (fold_ternary_loc): Use truth_value_type_p instead
>>>        of truth_value_p.
>>>        * gimple.c (canonicalize_cond_expr_cond): Likewise.
>>>        * gimplify.c (gimple_boolify): Likewise.
>>>        * tree-ssa-structalias.c (find_func_aliases): Likewise.
>>>        * tree-ssa-forwprop.c (truth_valued_ssa_name): Likewise.
>>>        * tree.h (truth_value_type_p): New function.
>>>        (truth_value_p): Implemented as macro via truth_value_type_p.
>>>
>>>
>>> Index: gcc-head/gcc/fold-const.c
>>> ===================================================================
>>> --- gcc-head.orig/gcc/fold-const.c
>>> +++ gcc-head/gcc/fold-const.c
>>> @@ -3074,20 +3074,35 @@ fold_truth_not_expr (location_t loc, tre
>>>     case INTEGER_CST:
>>>       return constant_boolean_node (integer_zerop (arg), type);
>>>
>>> +    case BIT_AND_EXPR:
>>> +      if (integer_onep (TREE_OPERAND (arg, 1)))
>>> +       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
>>> +      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
>>> +        return NULL_TREE;
>>> +      /* fall through */
>>>     case TRUTH_AND_EXPR:
>>>       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
>>>       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
>>> -      return build2_loc (loc, TRUTH_OR_EXPR, type,
>>> +      return build2_loc (loc, (code == BIT_AND_EXPR ? BIT_IOR_EXPR
>>> +                                                   : TRUTH_OR_EXPR), type,
>>>                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
>>>                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
>>>
>>> +    case BIT_IOR_EXPR:
>>> +      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
>>> +        return NULL_TREE;
>>> +      /* fall through.  */
>>>     case TRUTH_OR_EXPR:
>>>       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
>>>       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
>>> -      return build2_loc (loc, TRUTH_AND_EXPR, type,
>>> +      return build2_loc (loc, (code == BIT_IOR_EXPR ? BIT_AND_EXPR
>>> +                                                   : TRUTH_AND_EXPR), type,
>>>                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
>>>                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
>>> -
>>> +    case BIT_XOR_EXPR:
>>> +      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
>>> +        return NULL_TREE;
>>> +      /* fall through.  */
>>>     case TRUTH_XOR_EXPR:
>>>       /* Here we can invert either operand.  We invert the first operand
>>>         unless the second operand is a TRUTH_NOT_EXPR in which case our
>>> @@ -3095,10 +3110,14 @@ fold_truth_not_expr (location_t loc, tre
>>>         negation of the second operand.  */
>>>
>>>       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
>>> -       return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
>>> +       return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
>>> +                          TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
>>> +      else if (TREE_CODE (TREE_OPERAND (arg, 1)) == BIT_NOT_EXPR
>>> +              && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 1))) == 1)
>>> +       return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
>>>                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
>>>       else
>>> -       return build2_loc (loc, TRUTH_XOR_EXPR, type,
>>> +       return build2_loc (loc, code, type,
>>>                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
>>>                           TREE_OPERAND (arg, 1));
>>>
>>> @@ -3116,6 +3135,11 @@ fold_truth_not_expr (location_t loc, tre
>>>                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
>>>                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
>>>
>>> +
>>> +    case BIT_NOT_EXPR:
>>> +      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
>>> +        return NULL_TREE;
>>> +      /* fall through */
>>>     case TRUTH_NOT_EXPR:
>>>       return TREE_OPERAND (arg, 0);
>>>
>>> @@ -3158,11 +3182,6 @@ fold_truth_not_expr (location_t loc, tre
>>>       return build1_loc (loc, TREE_CODE (arg), type,
>>>                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
>>>
>>> -    case BIT_AND_EXPR:
>>> -      if (!integer_onep (TREE_OPERAND (arg, 1)))
>>> -       return NULL_TREE;
>>> -      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
>>> -
>>>     case SAVE_EXPR:
>>>       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
>>>
>>> @@ -4800,7 +4819,7 @@ fold_range_test (location_t loc, enum tr
>>>                 tree op0, tree op1)
>>>  {
>>>   int or_op = (code == TRUTH_ORIF_EXPR
>>> -              || code == TRUTH_OR_EXPR);
>>> +              || code == TRUTH_OR_EXPR || code == BIT_IOR_EXPR);
>>>   int in0_p, in1_p, in_p;
>>>   tree low0, low1, low, high0, high1, high;
>>>   bool strict_overflow_p = false;
>>> @@ -5099,8 +5118,9 @@ fold_truthop (location_t loc, enum tree_
>>>        }
>>>     }
>>>
>>> -  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
>>> -         ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
>>> +  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR)
>>> +    code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
>>> +           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
>>>
>>>   /* If the RHS can be evaluated unconditionally and its operands are
>>>      simple, it wins to evaluate the RHS unconditionally on machines
>>> @@ -5115,7 +5135,7 @@ fold_truthop (location_t loc, enum tree_
>>>       && simple_operand_p (rr_arg))
>>>     {
>>>       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
>>> -      if (code == TRUTH_OR_EXPR
>>> +      if ((code == TRUTH_OR_EXPR || code == BIT_IOR_EXPR)
>>>          && lcode == NE_EXPR && integer_zerop (lr_arg)
>>>          && rcode == NE_EXPR && integer_zerop (rr_arg)
>>>          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
>>> @@ -5126,7 +5146,7 @@ fold_truthop (location_t loc, enum tree_
>>>                           build_int_cst (TREE_TYPE (ll_arg), 0));
>>>
>>>       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
>>> -      if (code == TRUTH_AND_EXPR
>>> +      if ((code == TRUTH_AND_EXPR || code == BIT_AND_EXPR)
>>>          && lcode == EQ_EXPR && integer_zerop (lr_arg)
>>>          && rcode == EQ_EXPR && integer_zerop (rr_arg)
>>>          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
>>> @@ -5190,7 +5210,8 @@ fold_truthop (location_t loc, enum tree_
>>>      fail.  However, we can convert a one-bit comparison against zero into
>>>      the opposite comparison against that bit being set in the field.  */
>>>
>>> -  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
>>> +  wanted_code = ((code == TRUTH_AND_EXPR || code == BIT_AND_EXPR)
>>> +                ? EQ_EXPR : NE_EXPR);
>>>   if (lcode != wanted_code)
>>>     {
>>>       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
>>> @@ -9324,6 +9345,105 @@ get_pointer_modulus_and_residue (tree ex
>>>   return 1;
>>>  }
>>>
>>> +/* Fold a binary bitwise/truth expression of code CODE and type TYPE
>>> with operands
>>> +   OP0 and OP1.  LOC is the location of the resulting expression.
>>> +   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
>>> +   Return the folded expression if folding is successful.  Otherwise,
>>> +   return NULL_TREE.  */
>>> +
>>> +static tree
>>> +fold_truth_andor (location_t loc, enum tree_code code, tree type,
>>> +                 tree arg0, tree arg1, tree op0, tree op1)
>>> +{
>>> +  tree tem;
>>> +
>>> +  /* We only do these simplifications if we are optimizing.  */
>>> +  if (!optimize)
>>> +    return NULL_TREE;
>>> +  /* If code is BIT_AND_EXPR or BIT_IOR_EXPR, type precision has to be one.  */
>>> +  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
>>> +      && (!INTEGRAL_TYPE_P (type) || TYPE_PRECISION (type) != 1))
>>> +    return NULL_TREE;
>>> +
>>> +  /* Check for things like (A || B) && (A || C).  We can convert this
>>> +     to A || (B && C).  Note that either operator can be any of the four
>>> +     truth and/or operations and the transformation will still be
>>> +     valid.   Also note that we only care about order for the
>>> +     ANDIF and ORIF operators.  If B contains side effects, this
>>> +     might change the truth-value of A.  */
>>> +  if (TREE_CODE (arg0) == TREE_CODE (arg1)
>>> +      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
>>> +         || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
>>> +         || TREE_CODE (arg0) == BIT_AND_EXPR
>>> +         || TREE_CODE (arg0) == BIT_IOR_EXPR
>>> +         || TREE_CODE (arg0) == TRUTH_AND_EXPR
>>> +         || TREE_CODE (arg0) == TRUTH_OR_EXPR)
>>> +      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
>>> +    {
>>> +      tree a00 = TREE_OPERAND (arg0, 0);
>>> +      tree a01 = TREE_OPERAND (arg0, 1);
>>> +      tree a10 = TREE_OPERAND (arg1, 0);
>>> +      tree a11 = TREE_OPERAND (arg1, 1);
>>> +      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
>>> +                         || TREE_CODE (arg0) == TRUTH_AND_EXPR
>>> +                         || TREE_CODE (arg0) == BIT_AND_EXPR)
>>> +                        && (code == TRUTH_AND_EXPR
>>> +                            || code == TRUTH_OR_EXPR
>>> +                            || code == BIT_IOR_EXPR));
>>> +
>>> +      if (operand_equal_p (a00, a10, 0))
>>> +       return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
>>> +                           fold_build2_loc (loc, code, type, a01, a11));
>>> +      else if (commutative && operand_equal_p (a00, a11, 0))
>>> +       return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
>>> +                           fold_build2_loc (loc, code, type, a01, a10));
>>> +      else if (commutative && operand_equal_p (a01, a10, 0))
>>> +       return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
>>> +                           fold_build2_loc (loc, code, type, a00, a11));
>>> +
>>> +      /* This case is tricky because we must either have commutative
>>> +        operators or else A10 must not have side-effects.  */
>>> +
>>> +      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
>>> +              && operand_equal_p (a01, a11, 0))
>>> +       return fold_build2_loc (loc, TREE_CODE (arg0), type,
>>> +                           fold_build2_loc (loc, code, type, a00, a10),
>>> +                           a01);
>>> +    }
>>> +
>>> +  /* See if we can build a range comparison. */
>>> +  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
>>> +    return tem;
>>> +
>>> +  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
>>> +      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
>>> +    {
>>> +      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
>>> +      if (tem)
>>> +       return fold_build2_loc (loc, code, type, tem, arg1);
>>> +    }
>>> +
>>> +  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
>>> +      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
>>> +    {
>>> +      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
>>> +      if (tem)
>>> +       return fold_build2_loc (loc, code, type, arg0, tem);
>>> +    }
>>> +
>>> +  /* Check for the possibility of merging component references.  If our
>>> +     lhs is another similar operation, try to merge its rhs with our
>>> +     rhs.  Then try to merge our lhs and rhs.  */
>>> +  if (TREE_CODE (arg0) == code
>>> +      && 0 != (tem = fold_truthop (loc, code, type,
>>> +                                  TREE_OPERAND (arg0, 1), arg1)))
>>> +    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
>>> +
>>> +  if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
>>> +    return tem;
>>> +
>>> +  return NULL_TREE;
>>> +}
>>>
>>>  /* Fold a binary expression of code CODE and type TYPE with operands
>>>    OP0 and OP1.  LOC is the location of the resulting expression.
>>> @@ -9424,21 +9544,42 @@ fold_binary_loc (location_t loc,
>>>
>>>   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
>>>        || code == EQ_EXPR || code == NE_EXPR)
>>> -      && ((truth_value_p (TREE_CODE (arg0))
>>> -          && (truth_value_p (TREE_CODE (arg1))
>>> +      && (!INTEGRAL_TYPE_P (type) || TYPE_PRECISION (type) != 1)
>>> +      && ((truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
>>> +          && (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
>>>               || (TREE_CODE (arg1) == BIT_AND_EXPR
>>>                   && integer_onep (TREE_OPERAND (arg1, 1)))))
>>> -         || (truth_value_p (TREE_CODE (arg1))
>>> -             && (truth_value_p (TREE_CODE (arg0))
>>> +         || (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
>>> +             && (truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
>>>                  || (TREE_CODE (arg0) == BIT_AND_EXPR
>>>                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
>>>     {
>>>       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
>>> -                        : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
>>> -                        : TRUTH_XOR_EXPR,
>>> -                        boolean_type_node,
>>> -                        fold_convert_loc (loc, boolean_type_node, arg0),
>>> -                        fold_convert_loc (loc, boolean_type_node, arg1));
>>> +                          : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
>>> +                          : TRUTH_XOR_EXPR,
>>> +                          boolean_type_node,
>>> +                          fold_convert_loc (loc, boolean_type_node, arg0),
>>> +                          fold_convert_loc (loc, boolean_type_node, arg1));
>>> +
>>> +      if (code == EQ_EXPR)
>>> +       tem = invert_truthvalue_loc (loc, tem);
>>> +
>>> +      return fold_convert_loc (loc, type, tem);
>>> +    }
>>> +  if ((code == EQ_EXPR || code == NE_EXPR)
>>> +      && ((truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
>>> +          && (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
>>> +              || (TREE_CODE (arg1) == BIT_AND_EXPR
>>> +                  && integer_onep (TREE_OPERAND (arg1, 1)))))
>>> +         || (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
>>> +             && (truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
>>> +                 || (TREE_CODE (arg0) == BIT_AND_EXPR
>>> +                     && integer_onep (TREE_OPERAND (arg0, 1)))))))
>>> +    {
>>> +      tem = fold_build2_loc (loc, BIT_XOR_EXPR,
>>> +                          boolean_type_node,
>>> +                          fold_convert_loc (loc, boolean_type_node, arg0),
>>> +                          fold_convert_loc (loc, boolean_type_node, arg1));
>>>
>>>       if (code == EQ_EXPR)
>>>        tem = invert_truthvalue_loc (loc, tem);
>>> @@ -10597,6 +10738,57 @@ fold_binary_loc (location_t loc,
>>>       if (operand_equal_p (arg0, arg1, 0))
>>>        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
>>>
>>> +      if (TYPE_PRECISION (type) == 1 && INTEGRAL_TYPE_P (type))
>>> +        {
>>> +         /* If either arg is constant zero, drop it.  */
>>> +         if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
>>> +           return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
>>> +         if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
>>> +           return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
>>> +         /* If second arg is constant true, result is true, but we must
>>> +            evaluate first arg.  */
>>> +         if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
>>> +           return omit_one_operand_loc (loc, type, arg1, arg0);
>>> +         if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
>>> +           return omit_one_operand_loc (loc, type, arg0, arg1);
>>> +
>>> +         /* !X | X is always true.  */
>>> +         if ((TREE_CODE (arg0) == TRUTH_NOT_EXPR
>>> +              || TREE_CODE (arg0) == BIT_NOT_EXPR)
>>> +             && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
>>> +           return omit_one_operand_loc (loc, type, integer_one_node, arg1);
>>> +         /* X | !X is always true.  */
>>> +         if ((TREE_CODE (arg1) == TRUTH_NOT_EXPR
>>> +             || TREE_CODE (arg1) == BIT_NOT_EXPR)
>>> +             && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
>>> +           return omit_one_operand_loc (loc, type, integer_one_node, arg0);
>>> +
>>> +         /* (X & !Y) | (!X & Y) is X ^ Y */
>>> +         if (TREE_CODE (arg0) == BIT_AND_EXPR
>>> +             && TREE_CODE (arg1) == BIT_AND_EXPR)
>>> +           {
>>> +             tree a0, a1, l0, l1, n0, n1;
>>> +
>>> +             a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
>>> +             a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
>>> +
>>> +             l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
>>> +             l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
>>> +
>>> +             n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
>>> +             n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
>>> +
>>> +             if ((operand_equal_p (n0, a0, 0)
>>> +                  && operand_equal_p (n1, a1, 0))
>>> +                 || (operand_equal_p (n0, a1, 0)
>>> +                     && operand_equal_p (n1, a0, 0)))
>>> +               return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
>>> +           }
>>> +         tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
>>> +         if (tem)
>>> +           return tem;
>>> +        }
>>> +
>>>       /* ~X | X is -1.  */
>>>       if (TREE_CODE (arg0) == BIT_NOT_EXPR
>>>          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
>>> @@ -10758,6 +10950,24 @@ fold_binary_loc (location_t loc,
>>>       if (operand_equal_p (arg0, arg1, 0))
>>>        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
>>>
>>> +      if (TYPE_PRECISION (type) == 1 && INTEGRAL_TYPE_P (type))
>>> +        {
>>> +         /* If the second arg is constant true, this is a logical inversion.  */
>>> +         if (integer_onep (arg1))
>>> +           {
>>> +             tem = invert_truthvalue_loc (loc, arg0);
>>> +             return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
>>> +           }
>>> +         /* !X ^ X is always true.  */
>>> +         if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
>>> +             && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
>>> +           return omit_one_operand_loc (loc, type, integer_one_node, arg1);
>>> +         /* X ^ !X is always true.  */
>>> +         if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
>>> +             && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
>>> +           return omit_one_operand_loc (loc, type, integer_one_node, arg0);
>>> +       }
>>> +
>>>       /* ~X ^ X is -1.  */
>>>       if (TREE_CODE (arg0) == BIT_NOT_EXPR
>>>          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
>>> @@ -10918,6 +11128,61 @@ fold_binary_loc (location_t loc,
>>>        return omit_one_operand_loc (loc, type, arg1, arg0);
>>>       if (operand_equal_p (arg0, arg1, 0))
>>>        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
>>> +      /* Note that the operands of this must be ints
>>> +        and their values must be 0 or 1.
>>> +        ("true" is a fixed value perhaps depending on the language.)  */
>>> +      /* If first arg is constant zero, return it.  */
>>> +      if (integer_zerop (arg0))
>>> +       return fold_convert_loc (loc, type, arg0);
>>> +
>>> +      if (TYPE_PRECISION (type) == 1 && INTEGRAL_TYPE_P (type))
>>> +        {
>>> +         /* If either arg is constant true, drop it.  */
>>> +         if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
>>> +           return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
>>> +         if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
>>> +             /* Preserve sequence points.  */
>>> +             && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
>>> +           return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
>>> +         /* If second arg is constant zero, result is zero, but first arg
>>> +            must be evaluated.  */
>>> +         if (integer_zerop (arg1))
>>> +           return omit_one_operand_loc (loc, type, arg1, arg0);
>>> +         /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
>>> +            case will be handled here.  */
>>> +         if (integer_zerop (arg0))
>>> +           return omit_one_operand_loc (loc, type, arg0, arg1);
>>> +
>>> +         /* !X && X is always false.  */
>>> +         if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
>>> +             && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
>>> +           return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
>>> +         /* X & !X is always false.  */
>>> +         if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
>>> +             && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
>>> +           return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
>>> +
>>> +         /* A < X & A + 1 > Y ==> A < X & A >= Y.  Normally A + 1 > Y
>>> +            means A >= Y & A != MAX, but in this case we know that
>>> +            A < X <= MAX.  */
>>> +
>>> +         if (!TREE_SIDE_EFFECTS (arg0)
>>> +             && !TREE_SIDE_EFFECTS (arg1))
>>> +           {
>>> +             tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
>>> +             if (tem && !operand_equal_p (tem, arg0, 0))
>>> +               return fold_build2_loc (loc, code, type, tem, arg1);
>>> +
>>> +             tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
>>> +             if (tem && !operand_equal_p (tem, arg1, 0))
>>> +               return fold_build2_loc (loc, code, type, arg0, tem);
>>> +           }
>>> +
>>> +         tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
>>> +         if (tem)
>>> +           return tem;
>>> +
>>> +       }
>>>
>>>       /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
>>>       if ((TREE_CODE (arg0) == BIT_NOT_EXPR
>>> @@ -12006,86 +12271,11 @@ fold_binary_loc (location_t loc,
>>>            return fold_build2_loc (loc, code, type, arg0, tem);
>>>        }
>>>
>>> -    truth_andor:
>>> -      /* We only do these simplifications if we are optimizing.  */
>>> -      if (!optimize)
>>> -       return NULL_TREE;
>>> -
>>> -      /* Check for things like (A || B) && (A || C).  We can convert this
>>> -        to A || (B && C).  Note that either operator can be any of the four
>>> -        truth and/or operations and the transformation will still be
>>> -        valid.   Also note that we only care about order for the
>>> -        ANDIF and ORIF operators.  If B contains side effects, this
>>> -        might change the truth-value of A.  */
>>> -      if (TREE_CODE (arg0) == TREE_CODE (arg1)
>>> -         && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
>>> -             || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
>>> -             || TREE_CODE (arg0) == TRUTH_AND_EXPR
>>> -             || TREE_CODE (arg0) == TRUTH_OR_EXPR)
>>> -         && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
>>> -       {
>>> -         tree a00 = TREE_OPERAND (arg0, 0);
>>> -         tree a01 = TREE_OPERAND (arg0, 1);
>>> -         tree a10 = TREE_OPERAND (arg1, 0);
>>> -         tree a11 = TREE_OPERAND (arg1, 1);
>>> -         int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
>>> -                             || TREE_CODE (arg0) == TRUTH_AND_EXPR)
>>> -                            && (code == TRUTH_AND_EXPR
>>> -                                || code == TRUTH_OR_EXPR));
>>> -
>>> -         if (operand_equal_p (a00, a10, 0))
>>> -           return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
>>> -                               fold_build2_loc (loc, code, type, a01, a11));
>>> -         else if (commutative && operand_equal_p (a00, a11, 0))
>>> -           return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
>>> -                               fold_build2_loc (loc, code, type, a01, a10));
>>> -         else if (commutative && operand_equal_p (a01, a10, 0))
>>> -           return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
>>> -                               fold_build2_loc (loc, code, type, a00, a11));
>>> -
>>> -         /* This case if tricky because we must either have commutative
>>> -            operators or else A10 must not have side-effects.  */
>>> -
>>> -         else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
>>> -                  && operand_equal_p (a01, a11, 0))
>>> -           return fold_build2_loc (loc, TREE_CODE (arg0), type,
>>> -                               fold_build2_loc (loc, code, type, a00, a10),
>>> -                               a01);
>>> -       }
>>> -
>>> -      /* See if we can build a range comparison.  */
>>> -      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
>>> -       return tem;
>>> -
>>> -      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
>>> -         || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
>>> -       {
>>> -         tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
>>> -         if (tem)
>>> -           return fold_build2_loc (loc, code, type, tem, arg1);
>>> -       }
>>> -
>>> -      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
>>> -         || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
>>> -       {
>>> -         tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
>>> -         if (tem)
>>> -           return fold_build2_loc (loc, code, type, arg0, tem);
>>> -       }
>>> -
>>> -      /* Check for the possibility of merging component references.  If our
>>> -        lhs is another similar operation, try to merge its rhs with our
>>> -        rhs.  Then try to merge our lhs and rhs.  */
>>> -      if (TREE_CODE (arg0) == code
>>> -         && 0 != (tem = fold_truthop (loc, code, type,
>>> -                                      TREE_OPERAND (arg0, 1), arg1)))
>>> -       return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
>>> -
>>> -      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
>>> -       return tem;
>>> +      tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
>>> +      if (tem)
>>> +        return tem;
>>>
>>>       return NULL_TREE;
>>> -
>>>     case TRUTH_ORIF_EXPR:
>>>       /* Note that the operands of this must be ints
>>>         and their values must be 0 or true.
>>> @@ -12140,7 +12330,11 @@ fold_binary_loc (location_t loc,
>>>                  && operand_equal_p (n1, a0, 0)))
>>>            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
>>>        }
>>> -      goto truth_andor;
>>> +      tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
>>> +      if (tem)
>>> +        return tem;
>>> +
>>> +      return NULL_TREE;
>>>
>>>     case TRUTH_XOR_EXPR:
>>>       /* If the second arg is constant zero, drop it.  */
>>> @@ -13401,7 +13595,7 @@ fold_ternary_loc (location_t loc, enum t
>>>
>>>       /* If the second operand is simpler than the third, swap them
>>>         since that produces better jump optimization results.  */
>>> -      if (truth_value_p (TREE_CODE (arg0))
>>> +      if (truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
>>>          && tree_swap_operands_p (op1, op2, false))
>>>        {
>>>          location_t loc0 = expr_location_or (arg0, loc);
>>> @@ -13427,7 +13621,7 @@ fold_ternary_loc (location_t loc, enum t
>>>         over COND_EXPR in cases such as floating point comparisons.  */
>>>       if (integer_zerop (op1)
>>>          && integer_onep (op2)
>>> -         && truth_value_p (TREE_CODE (arg0)))
>>> +         && truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0)))
>>>        return pedantic_non_lvalue_loc (loc,
>>>                                    fold_convert_loc (loc, type,
>>>                                              invert_truthvalue_loc (loc,
>>> Index: gcc-head/gcc/gimple.c
>>> ===================================================================
>>> --- gcc-head.orig/gcc/gimple.c
>>> +++ gcc-head/gcc/gimple.c
>>> @@ -3160,7 +3160,8 @@ canonicalize_cond_expr_cond (tree t)
>>>  {
>>>   /* Strip conversions around boolean operations.  */
>>>   if (CONVERT_EXPR_P (t)
>>> -      && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
>>> +      && truth_value_type_p (TREE_CODE (TREE_OPERAND (t, 0)),
>>> +                            TREE_TYPE (TREE_OPERAND (t, 0))))
>>>     t = TREE_OPERAND (t, 0);
>>>
>>>   /* For !x use x == 0.  */
>>> Index: gcc-head/gcc/gimplify.c
>>> ===================================================================
>>> --- gcc-head.orig/gcc/gimplify.c
>>> +++ gcc-head/gcc/gimplify.c
>>> @@ -2837,7 +2837,7 @@ gimple_boolify (tree expr)
>>>              if (TREE_CODE (arg) == NOP_EXPR
>>>                  && TREE_TYPE (arg) == TREE_TYPE (call))
>>>                arg = TREE_OPERAND (arg, 0);
>>> -             if (truth_value_p (TREE_CODE (arg)))
>>> +             if (truth_value_type_p (TREE_CODE (arg), TREE_TYPE (arg)))
>>>                {
>>>                  arg = gimple_boolify (arg);
>>>                  CALL_EXPR_ARG (call, 0)
>>> Index: gcc-head/gcc/tree-ssa-structalias.c
>>> ===================================================================
>>> --- gcc-head.orig/gcc/tree-ssa-structalias.c
>>> +++ gcc-head/gcc/tree-ssa-structalias.c
>>> @@ -4416,7 +4416,8 @@ find_func_aliases (gimple origt)
>>>                         && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
>>>                   || gimple_assign_single_p (t))
>>>            get_constraint_for_rhs (rhsop, &rhsc);
>>> -         else if (truth_value_p (code))
>>> +         else if (truth_value_type_p (code,
>>> +                                      TREE_TYPE (lhsop)))
>>>            /* Truth value results are not pointer (parts).  Or at least
>>>               very very unreasonable obfuscation of a part.  */
>>>            ;
>>> Index: gcc-head/gcc/tree.h
>>> ===================================================================
>>> --- gcc-head.orig/gcc/tree.h
>>> +++ gcc-head/gcc/tree.h
>>> @@ -5309,13 +5309,22 @@ extern tree combine_comparisons (locatio
>>>  extern void debug_fold_checksum (const_tree);
>>>
>>>  /* Return nonzero if CODE is a tree code that represents a truth value.  */
>>> +#define truth_value_p(CODE)  truth_value_type_p ((CODE), NULL_TREE)
>>> +
>>> +/* Return nonzero if CODE is a tree code that represents a truth value.
>>> +   If TYPE is an integral type, unsigned, and has precision of one, then
>>> +   additionally return for bitwise-binary and bitwise-invert nonzero.  */
>>>  static inline bool
>>> -truth_value_p (enum tree_code code)
>>> +truth_value_type_p (enum tree_code code, tree type)
>>>  {
>>>   return (TREE_CODE_CLASS (code) == tcc_comparison
>>>          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
>>>          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
>>> -         || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
>>> +         || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR
>>> +         || ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
>>> +              || code == BIT_XOR_EXPR || code == BIT_NOT_EXPR)
>>> +             && type && INTEGRAL_TYPE_P (type)
>>> +             && TYPE_PRECISION (type) == 1 && TYPE_UNSIGNED (type)));
>>>  }
>>>
>>>
>>> Index: gcc-head/gcc/tree-ssa-forwprop.c
>>> ===================================================================
>>> --- gcc-head.orig/gcc/tree-ssa-forwprop.c
>>> +++ gcc-head/gcc/tree-ssa-forwprop.c
>>> @@ -1668,7 +1668,7 @@ truth_valued_ssa_name (tree name)
>>>     return true;
>>>   def = SSA_NAME_DEF_STMT (name);
>>>   if (is_gimple_assign (def))
>>> -    return truth_value_p (gimple_assign_rhs_code (def));
>>> +    return truth_value_type_p (gimple_assign_rhs_code (def), type);
>>>   return false;
>>>  }
>>>
>>
>
Kai Tietz - July 8, 2011, 7:07 p.m.
2011/7/8 Richard Guenther <richard.guenther@gmail.com>:
> On Fri, Jul 8, 2011 at 11:28 AM, Kai Tietz <ktietz70@googlemail.com> wrote
>> 2011/7/8 Richard Guenther <richard.guenther@gmail.com>:
>>> On Thu, Jul 7, 2011 at 6:06 PM, Kai Tietz <ktietz70@googlemail.com> wrote:
>>>> Hello,
>>>>
>>>> This patch - first of series - adds to fold and some helper routines support
>>>> for one-bit precision bitwise folding and detection.
>>>> This patch is necessary for - next patch of series - boolification of
>>>> comparisons.
>>>>
>>>> Bootstrapped and regression tested for all standard-languages (plus
>>>> Ada and Obj-C++) on host x86_64-pc-linux-gnu.
>>>>
>>>> Ok for apply?
>>>
>>> Factoring out fold_truth_andor to a function should be done separately.
>>> A patch that does just that is pre-approved.
>>
>> Ok, I will send a separate patch for this. But in fact it only makes
>> sense together with the 1-bit precision bitwise support, too.
>
> No, it makes sense anyway to get rid of that goto.  Note _only_ factoring
> out the function, not changing anything in it.

Done.

>>> Otherwise the patch globs too many changes and lacks reasoning.
>>> Why do we want to handle all this in fold when the boolification
>>> happens only after gimplification?
>>
>> We still rely on truth/bitwise folding on fold-const.  Also we need to
>> handle this for passes, which are using fold_binary to optimize  and
>> handle boolified operations - like tree-ssa-reassoc, of tree-vect*.
>> This support in fold-const is necessary when we are preserving casts
>> from/to boolean, as otherwise we don't fold bitwise-binary with
>> compares proper anymore.  Additionally we have to take care that we
>> don't enter TRUTH_(AND|OR|XOR) expressions on boolified trees, as
>> otherwise tree-cfg will barf. Also we need to take care that types of
>> comparisons and TRUTH_NOT expressions are boolean one, as otherwise
>> again tree-cfg will detect incompatible types for those expressions.
>
> Sounds like many different things for many individual patches.  Btw,
> I'd rather have the tree passes that rely on fold call a gimple specific
> wrapper where we can add such things (and also use gimple/SSA
> specific optimizations, like less strict typing), like
> gimple_fold_binary (....), see also my gimple folding proposal from
> earlier this year. http://gcc.gnu.org/ml/gcc-patches/2011-03/msg01099.html

Well, this is for sure a good thing, but it doesn't solve the issues about
fold-const and 1-bit precision bitwise operations. We need to handle
them in fold-const, as otherwise even worse things happen there.
 As fold-const happily decides that bitwise binaries with comparisons
or truth-valued arguments get transformed back into
TRUTH_(AND|OR|XOR)[IF]_EXPRs, which is indeed counterproductive on an
already gimplified tree.
For sure it would be better for such passes to avoid fold-const entirely
and instead have a pure SSA-name folding mechanism, but that is a
different story and not part of this patch.  The focus here is that we are
again able to do proper folding on boolified bitwise operations and to
provide to some passes the knowledge that a 1-bit precision bitwise
operation is equivalent to a TRUTH_(AND|OR|XOR) and can be handled.

Regards,
Kai

Patch

Index: gcc-head/gcc/fold-const.c
===================================================================
--- gcc-head.orig/gcc/fold-const.c
+++ gcc-head/gcc/fold-const.c
@@ -3074,20 +3074,35 @@  fold_truth_not_expr (location_t loc, tre
     case INTEGER_CST:
       return constant_boolean_node (integer_zerop (arg), type);

+    case BIT_AND_EXPR:
+      if (integer_onep (TREE_OPERAND (arg, 1)))
+	return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
+      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
+        return NULL_TREE;
+      /* fall through */
     case TRUTH_AND_EXPR:
       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
-      return build2_loc (loc, TRUTH_OR_EXPR, type,
+      return build2_loc (loc, (code == BIT_AND_EXPR ? BIT_IOR_EXPR
+      						    : TRUTH_OR_EXPR), type,
 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

+    case BIT_IOR_EXPR:
+      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
+        return NULL_TREE;
+      /* fall through.  */
     case TRUTH_OR_EXPR:
       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
-      return build2_loc (loc, TRUTH_AND_EXPR, type,
+      return build2_loc (loc, (code == BIT_IOR_EXPR ? BIT_AND_EXPR
+      						    : TRUTH_AND_EXPR), type,
 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
-
+    case BIT_XOR_EXPR:
+      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
+        return NULL_TREE;
+      /* fall through.  */
     case TRUTH_XOR_EXPR:
       /* Here we can invert either operand.  We invert the first operand
 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
@@ -3095,10 +3110,14 @@  fold_truth_not_expr (location_t loc, tre
 	 negation of the second operand.  */

       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
-	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
+	return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
+			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
+      else if (TREE_CODE (TREE_OPERAND (arg, 1)) == BIT_NOT_EXPR
+	       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 1))) == 1)
+	return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
       else
-	return build2_loc (loc, TRUTH_XOR_EXPR, type,
+	return build2_loc (loc, code, type,
 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
 			   TREE_OPERAND (arg, 1));

@@ -3116,6 +3135,11 @@  fold_truth_not_expr (location_t loc, tre
 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

+
+    case BIT_NOT_EXPR:
+      if (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) != 1)
+        return NULL_TREE;
+      /* fall through */
     case TRUTH_NOT_EXPR:
       return TREE_OPERAND (arg, 0);

@@ -3158,11 +3182,6 @@  fold_truth_not_expr (location_t loc, tre
       return build1_loc (loc, TREE_CODE (arg), type,
 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

-    case BIT_AND_EXPR:
-      if (!integer_onep (TREE_OPERAND (arg, 1)))
-	return NULL_TREE;
-      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
-
     case SAVE_EXPR:
       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

@@ -4800,7 +4819,7 @@  fold_range_test (location_t loc, enum tr
 		 tree op0, tree op1)
 {
   int or_op = (code == TRUTH_ORIF_EXPR
-	       || code == TRUTH_OR_EXPR);
+	       || code == TRUTH_OR_EXPR || code == BIT_IOR_EXPR);
   int in0_p, in1_p, in_p;
   tree low0, low1, low, high0, high1, high;
   bool strict_overflow_p = false;
@@ -5099,8 +5118,9 @@  fold_truthop (location_t loc, enum tree_
 	}
     }

-  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
-	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
+  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR)
+    code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
+	    ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

   /* If the RHS can be evaluated unconditionally and its operands are
      simple, it wins to evaluate the RHS unconditionally on machines
@@ -5115,7 +5135,7 @@  fold_truthop (location_t loc, enum tree_
       && simple_operand_p (rr_arg))
     {
       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
-      if (code == TRUTH_OR_EXPR
+      if ((code == TRUTH_OR_EXPR || code == BIT_IOR_EXPR)
 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
@@ -5126,7 +5146,7 @@  fold_truthop (location_t loc, enum tree_
 			   build_int_cst (TREE_TYPE (ll_arg), 0));

       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
-      if (code == TRUTH_AND_EXPR
+      if ((code == TRUTH_AND_EXPR || code == BIT_AND_EXPR)
 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
@@ -5190,7 +5210,8 @@  fold_truthop (location_t loc, enum tree_
      fail.  However, we can convert a one-bit comparison against zero into
      the opposite comparison against that bit being set in the field.  */

-  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
+  wanted_code = ((code == TRUTH_AND_EXPR || code == BIT_AND_EXPR)
+  		 ? EQ_EXPR : NE_EXPR);
   if (lcode != wanted_code)
     {
       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
@@ -9324,6 +9345,105 @@  get_pointer_modulus_and_residue (tree ex
   return 1;
 }

+/* Fold a binary bitwise/truth expression of code CODE and type TYPE
with operands
+   OP0 and OP1.  LOC is the location of the resulting expression.
+   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
+   Return the folded expression if folding is successful.  Otherwise,
+   return NULL_TREE.  */
+
+static tree
+fold_truth_andor (location_t loc, enum tree_code code, tree type,
+		  tree arg0, tree arg1, tree op0, tree op1)
+{
+  tree tem;
+
+  /* We only do these simplifications if we are optimizing.  */
+  if (!optimize)
+    return NULL_TREE;
+  /* If code is BIT_AND_EXPR or BIT_IOR_EXPR, type precision has to be one.  */
+  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
+      && (!INTEGRAL_TYPE_P (type) || TYPE_PRECISION (type) != 1))
+    return NULL_TREE;
+
+  /* Check for things like (A || B) && (A || C).  We can convert this
+     to A || (B && C).  Note that either operator can be any of the four
+     truth and/or operations and the transformation will still be
+     valid.   Also note that we only care about order for the
+     ANDIF and ORIF operators.  If B contains side effects, this
+     might change the truth-value of A.  */
+  if (TREE_CODE (arg0) == TREE_CODE (arg1)
+      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
+	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
+	  || TREE_CODE (arg0) == BIT_AND_EXPR
+	  || TREE_CODE (arg0) == BIT_IOR_EXPR
+	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
+	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
+      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
+    {
+      tree a00 = TREE_OPERAND (arg0, 0);
+      tree a01 = TREE_OPERAND (arg0, 1);
+      tree a10 = TREE_OPERAND (arg1, 0);
+      tree a11 = TREE_OPERAND (arg1, 1);
+      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
+			  || TREE_CODE (arg0) == TRUTH_AND_EXPR
+			  || TREE_CODE (arg0) == BIT_AND_EXPR)
+			 && (code == TRUTH_AND_EXPR
+			     || code == TRUTH_OR_EXPR
+			     || code == BIT_IOR_EXPR));
+
+      if (operand_equal_p (a00, a10, 0))
+	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
+			    fold_build2_loc (loc, code, type, a01, a11));
+      else if (commutative && operand_equal_p (a00, a11, 0))
+	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
+			    fold_build2_loc (loc, code, type, a01, a10));
+      else if (commutative && operand_equal_p (a01, a10, 0))
+	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
+			    fold_build2_loc (loc, code, type, a00, a11));
+
+      /* This case if tricky because we must either have commutative
+	 operators or else A10 must not have side-effects.  */
+
+      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
+	       && operand_equal_p (a01, a11, 0))
+	return fold_build2_loc (loc, TREE_CODE (arg0), type,
+			    fold_build2_loc (loc, code, type, a00, a10),
+			    a01);
+    }
+
+  /* See if we can build a range comparison. */
+  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
+    return tem;
+
+  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
+      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
+    {
+      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
+      if (tem)
+	return fold_build2_loc (loc, code, type, tem, arg1);
+    }
+
+  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
+      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
+    {
+      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
+      if (tem)
+	return fold_build2_loc (loc, code, type, arg0, tem);
+    }
+
+  /* Check for the possibility of merging component references.  If our
+     lhs is another similar operation, try to merge its rhs with our
+     rhs.  Then try to merge our lhs and rhs.  */
+  if (TREE_CODE (arg0) == code
+      && 0 != (tem = fold_truthop (loc, code, type,
+				   TREE_OPERAND (arg0, 1), arg1)))
+    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
+
+  if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
+    return tem;
+
+  return NULL_TREE;
+}

 /* Fold a binary expression of code CODE and type TYPE with operands
    OP0 and OP1.  LOC is the location of the resulting expression.
@@ -9424,21 +9544,42 @@  fold_binary_loc (location_t loc,

   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
        || code == EQ_EXPR || code == NE_EXPR)
-      && ((truth_value_p (TREE_CODE (arg0))
-	   && (truth_value_p (TREE_CODE (arg1))
+      && (!INTEGRAL_TYPE_P (type) || TYPE_PRECISION (type) != 1)
+      && ((truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
+	   && (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
-	  || (truth_value_p (TREE_CODE (arg1))
-	      && (truth_value_p (TREE_CODE (arg0))
+	  || (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
+	      && (truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
     {
       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
-			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
-			 : TRUTH_XOR_EXPR,
-			 boolean_type_node,
-			 fold_convert_loc (loc, boolean_type_node, arg0),
-			 fold_convert_loc (loc, boolean_type_node, arg1));
+			   : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
+			   : TRUTH_XOR_EXPR,
+			   boolean_type_node,
+			   fold_convert_loc (loc, boolean_type_node, arg0),
+			   fold_convert_loc (loc, boolean_type_node, arg1));
+
+      if (code == EQ_EXPR)
+	tem = invert_truthvalue_loc (loc, tem);
+
+      return fold_convert_loc (loc, type, tem);
+    }
+  if ((code == EQ_EXPR || code == NE_EXPR)
+      && ((truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
+	   && (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
+	       || (TREE_CODE (arg1) == BIT_AND_EXPR
+		   && integer_onep (TREE_OPERAND (arg1, 1)))))
+	  || (truth_value_type_p (TREE_CODE (arg1), TREE_TYPE (arg1))
+	      && (truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
+		  || (TREE_CODE (arg0) == BIT_AND_EXPR
+		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
+    {
+      tem = fold_build2_loc (loc, BIT_XOR_EXPR,
+			   boolean_type_node,
+			   fold_convert_loc (loc, boolean_type_node, arg0),
+			   fold_convert_loc (loc, boolean_type_node, arg1));

       if (code == EQ_EXPR)
 	tem = invert_truthvalue_loc (loc, tem);
@@ -10597,6 +10738,57 @@  fold_binary_loc (location_t loc,
       if (operand_equal_p (arg0, arg1, 0))
 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

+      if (TYPE_PRECISION (type) == 1 && INTEGRAL_TYPE_P (type))
+        {
+	  /* If either arg is constant zero, drop it.  */
+	  if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
+	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
+	  if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
+	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
+	  /* If second arg is constant true, result is true, but we must
+	     evaluate first arg.  */
+	  if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
+	    return omit_one_operand_loc (loc, type, arg1, arg0);
+	  if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
+	    return omit_one_operand_loc (loc, type, arg0, arg1);
+
+	  /* !X | X is always true.  */
+	  if ((TREE_CODE (arg0) == TRUTH_NOT_EXPR
+	       || TREE_CODE (arg0) == BIT_NOT_EXPR)
+	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+	    return omit_one_operand_loc (loc, type, integer_one_node, arg1);
+	  /* X | !X is always true.  */
+	  if ((TREE_CODE (arg1) == TRUTH_NOT_EXPR
+	      || TREE_CODE (arg1) == BIT_NOT_EXPR)
+	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+	    return omit_one_operand_loc (loc, type, integer_one_node, arg0);
+
+	  /* (X & !Y) | (!X & Y) is X ^ Y */
+	  if (TREE_CODE (arg0) == BIT_AND_EXPR
+	      && TREE_CODE (arg1) == BIT_AND_EXPR)
+	    {
+	      tree a0, a1, l0, l1, n0, n1;
+
+	      a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
+	      a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
+
+	      l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
+	      l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
+
+	      n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
+	      n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
+
+	      if ((operand_equal_p (n0, a0, 0)
+		   && operand_equal_p (n1, a1, 0))
+		  || (operand_equal_p (n0, a1, 0)
+		      && operand_equal_p (n1, a0, 0)))
+		return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
+	    }
+	  tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
+	  if (tem)
+	    return tem;
+        }
+
       /* ~X | X is -1.  */
       if (TREE_CODE (arg0) == BIT_NOT_EXPR
 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
@@ -10758,6 +10950,24 @@  fold_binary_loc (location_t loc,
       if (operand_equal_p (arg0, arg1, 0))
 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

+      if (TYPE_PRECISION (type) == 1 && INTEGRAL_TYPE_P (type))
+        {
+	  /* If the second arg is constant true, this is a logical inversion.  */
+	  if (integer_onep (arg1))
+	    {
+	      tem = invert_truthvalue_loc (loc, arg0);
+	      return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
+	    }
+	  /* !X ^ X is always true.  */
+	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
+	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+	    return omit_one_operand_loc (loc, type, integer_one_node, arg1);
+	  /* X ^ !X is always true.  */
+	  if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
+	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+	    return omit_one_operand_loc (loc, type, integer_one_node, arg0);
+	}
+
       /* ~X ^ X is -1.  */
       if (TREE_CODE (arg0) == BIT_NOT_EXPR
 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
@@ -10918,6 +11128,61 @@  fold_binary_loc (location_t loc,
 	return omit_one_operand_loc (loc, type, arg1, arg0);
       if (operand_equal_p (arg0, arg1, 0))
 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
+      /* Note that the operands of this must be ints
+	 and their values must be 0 or 1.
+	 ("true" is a fixed value perhaps depending on the language.)  */
+      /* If first arg is constant zero, return it.  */
+      if (integer_zerop (arg0))
+	return fold_convert_loc (loc, type, arg0);
+
+      if (TYPE_PRECISION (type) == 1 && INTEGRAL_TYPE_P (type))
+        {
+	  /* If either arg is constant true, drop it.  */
+	  if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
+	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
+	  if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
+	      /* Preserve sequence points.  */
+	      && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
+	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
+	  /* If second arg is constant zero, result is zero, but first arg
+	     must be evaluated.  */
+	  if (integer_zerop (arg1))
+	    return omit_one_operand_loc (loc, type, arg1, arg0);
+	  /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
+	     case will be handled here.  */
+	  if (integer_zerop (arg0))
+	    return omit_one_operand_loc (loc, type, arg0, arg1);
+
+	  /* !X && X is always false.  */
+	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
+	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
+	    return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
+	  /* X & !X is always false.  */
+	  if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
+	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
+	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
+
+	  /* A < X & A + 1 > Y ==> A < X & A >= Y.  Normally A + 1 > Y
+	     means A >= Y & A != MAX, but in this case we know that
+	     A < X <= MAX.  */
+
+	  if (!TREE_SIDE_EFFECTS (arg0)
+	      && !TREE_SIDE_EFFECTS (arg1))
+	    {
+	      tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
+	      if (tem && !operand_equal_p (tem, arg0, 0))
+		return fold_build2_loc (loc, code, type, tem, arg1);
+
+	      tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
+	      if (tem && !operand_equal_p (tem, arg1, 0))
+		return fold_build2_loc (loc, code, type, arg0, tem);
+	    }
+
+	  tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
+	  if (tem)
+	    return tem;
+
+	}

       /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
       if ((TREE_CODE (arg0) == BIT_NOT_EXPR
@@ -12006,86 +12271,11 @@  fold_binary_loc (location_t loc,
 	    return fold_build2_loc (loc, code, type, arg0, tem);
 	}

-    truth_andor:
-      /* We only do these simplifications if we are optimizing.  */
-      if (!optimize)
-	return NULL_TREE;
-
-      /* Check for things like (A || B) && (A || C).  We can convert this
-	 to A || (B && C).  Note that either operator can be any of the four
-	 truth and/or operations and the transformation will still be
-	 valid.   Also note that we only care about order for the
-	 ANDIF and ORIF operators.  If B contains side effects, this
-	 might change the truth-value of A.  */
-      if (TREE_CODE (arg0) == TREE_CODE (arg1)
-	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
-	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
-	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
-	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
-	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
-	{
-	  tree a00 = TREE_OPERAND (arg0, 0);
-	  tree a01 = TREE_OPERAND (arg0, 1);
-	  tree a10 = TREE_OPERAND (arg1, 0);
-	  tree a11 = TREE_OPERAND (arg1, 1);
-	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
-			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
-			     && (code == TRUTH_AND_EXPR
-				 || code == TRUTH_OR_EXPR));
-
-	  if (operand_equal_p (a00, a10, 0))
-	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
-				fold_build2_loc (loc, code, type, a01, a11));
-	  else if (commutative && operand_equal_p (a00, a11, 0))
-	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
-				fold_build2_loc (loc, code, type, a01, a10));
-	  else if (commutative && operand_equal_p (a01, a10, 0))
-	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
-				fold_build2_loc (loc, code, type, a00, a11));
-
-	  /* This case if tricky because we must either have commutative
-	     operators or else A10 must not have side-effects.  */
-
-	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
-		   && operand_equal_p (a01, a11, 0))
-	    return fold_build2_loc (loc, TREE_CODE (arg0), type,
-				fold_build2_loc (loc, code, type, a00, a10),
-				a01);
-	}
-
-      /* See if we can build a range comparison.  */
-      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
-	return tem;
-
-      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
-	  || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
-	{
-	  tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
-	  if (tem)
-	    return fold_build2_loc (loc, code, type, tem, arg1);
-	}
-
-      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
-	  || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
-	{
-	  tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
-	  if (tem)
-	    return fold_build2_loc (loc, code, type, arg0, tem);
-	}
-
-      /* Check for the possibility of merging component references.  If our
-	 lhs is another similar operation, try to merge its rhs with our
-	 rhs.  Then try to merge our lhs and rhs.  */
-      if (TREE_CODE (arg0) == code
-	  && 0 != (tem = fold_truthop (loc, code, type,
-				       TREE_OPERAND (arg0, 1), arg1)))
-	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
-
-      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
-	return tem;
+      tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
+      if (tem)
+        return tem;

       return NULL_TREE;
-
     case TRUTH_ORIF_EXPR:
       /* Note that the operands of this must be ints
 	 and their values must be 0 or true.
@@ -12140,7 +12330,11 @@  fold_binary_loc (location_t loc,
 		  && operand_equal_p (n1, a0, 0)))
 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
 	}
-      goto truth_andor;
+      tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1);
+      if (tem)
+        return tem;
+
+      return NULL_TREE;

     case TRUTH_XOR_EXPR:
       /* If the second arg is constant zero, drop it.  */
@@ -13401,7 +13595,7 @@  fold_ternary_loc (location_t loc, enum t

       /* If the second operand is simpler than the third, swap them
 	 since that produces better jump optimization results.  */
-      if (truth_value_p (TREE_CODE (arg0))
+      if (truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0))
 	  && tree_swap_operands_p (op1, op2, false))
 	{
 	  location_t loc0 = expr_location_or (arg0, loc);
@@ -13427,7 +13621,7 @@  fold_ternary_loc (location_t loc, enum t
 	 over COND_EXPR in cases such as floating point comparisons.  */
       if (integer_zerop (op1)
 	  && integer_onep (op2)
-	  && truth_value_p (TREE_CODE (arg0)))
+	  && truth_value_type_p (TREE_CODE (arg0), TREE_TYPE (arg0)))
 	return pedantic_non_lvalue_loc (loc,
 				    fold_convert_loc (loc, type,
 					      invert_truthvalue_loc (loc,
Index: gcc-head/gcc/gimple.c
===================================================================
--- gcc-head.orig/gcc/gimple.c
+++ gcc-head/gcc/gimple.c
@@ -3160,7 +3160,8 @@  canonicalize_cond_expr_cond (tree t)
 {
   /* Strip conversions around boolean operations.  */
   if (CONVERT_EXPR_P (t)
-      && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
+      && truth_value_type_p (TREE_CODE (TREE_OPERAND (t, 0)),
+			     TREE_TYPE (TREE_OPERAND (t, 0))))
     t = TREE_OPERAND (t, 0);

   /* For !x use x == 0.  */
Index: gcc-head/gcc/gimplify.c
===================================================================
--- gcc-head.orig/gcc/gimplify.c
+++ gcc-head/gcc/gimplify.c
@@ -2837,7 +2837,7 @@  gimple_boolify (tree expr)
 	      if (TREE_CODE (arg) == NOP_EXPR
 		  && TREE_TYPE (arg) == TREE_TYPE (call))
 		arg = TREE_OPERAND (arg, 0);
-	      if (truth_value_p (TREE_CODE (arg)))
+	      if (truth_value_type_p (TREE_CODE (arg), TREE_TYPE (arg)))
 		{
 		  arg = gimple_boolify (arg);
 		  CALL_EXPR_ARG (call, 0)
Index: gcc-head/gcc/tree-ssa-structalias.c
===================================================================
--- gcc-head.orig/gcc/tree-ssa-structalias.c
+++ gcc-head/gcc/tree-ssa-structalias.c
@@ -4416,7 +4416,8 @@  find_func_aliases (gimple origt)
 			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
 		   || gimple_assign_single_p (t))
 	    get_constraint_for_rhs (rhsop, &rhsc);
-	  else if (truth_value_p (code))
+	  else if (truth_value_type_p (code,
+				       TREE_TYPE (lhsop)))
 	    /* Truth value results are not pointer (parts).  Or at least
 	       very very unreasonable obfuscation of a part.  */
 	    ;
Index: gcc-head/gcc/tree.h
===================================================================
--- gcc-head.orig/gcc/tree.h
+++ gcc-head/gcc/tree.h
@@ -5309,13 +5309,22 @@  extern tree combine_comparisons (locatio
 extern void debug_fold_checksum (const_tree);

 /* Return nonzero if CODE is a tree code that represents a truth value.  */
+#define truth_value_p(CODE)  truth_value_type_p ((CODE), NULL_TREE)
+
+/* Return nonzero if CODE is a tree code that represents a truth value.
+   Additionally, if TYPE is an unsigned integral type with a precision
+   of one, return nonzero for the bitwise binary and bitwise-not codes.  */
 static inline bool
-truth_value_p (enum tree_code code)
+truth_value_type_p (enum tree_code code, tree type)
 {
   return (TREE_CODE_CLASS (code) == tcc_comparison
 	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
 	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
-	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
+	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR
+	  || ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
+	       || code == BIT_XOR_EXPR || code == BIT_NOT_EXPR)
+	      && type && INTEGRAL_TYPE_P (type)
+	      && TYPE_PRECISION (type) == 1 && TYPE_UNSIGNED (type)));
 }


Index: gcc-head/gcc/tree-ssa-forwprop.c
===================================================================
--- gcc-head.orig/gcc/tree-ssa-forwprop.c
+++ gcc-head/gcc/tree-ssa-forwprop.c
@@ -1668,7 +1668,7 @@  truth_valued_ssa_name (tree name)
     return true;
   def = SSA_NAME_DEF_STMT (name);
   if (is_gimple_assign (def))
-    return truth_value_p (gimple_assign_rhs_code (def));
+    return truth_value_type_p (gimple_assign_rhs_code (def), type);
   return false;
 }