
[tree-optimization]: 1 of 2: Add normalization of bitwise operations to tree-ssa-reassoc pass

Message ID CAEwic4YjJ-FHjthHL9F9JGVyAwoJj2HQt_pFkPdXQQuyfQ+rfA@mail.gmail.com
State New

Commit Message

Kai Tietz Oct. 4, 2011, 12:16 p.m. UTC
Hello,

This patch (the first of two) adds code to the tree-ssa-reassociation
pass that expands combined bitwise-binary operations such as
(X | Y) == 0.  It also normalizes bitwise-not operations on
bitwise-binary chains, e.g. ~(X | Y) -> ~X & ~Y.
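
As a rough illustration (hand-written, not taken from the patch), this
is the kind of rewrite the pass performs internally on the GIMPLE for
such expressions; the comments show the intended normalized form:

int
cmp_ior (int x, int y)
{
  return (x | y) == 0;     /* expanded to (x == 0) & (y == 0)  */
}

int
not_ior (int x, int y)
{
  return ~(x | y);         /* normalized to ~x & ~y  */
}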

ChangeLog

2011-10-04  Kai Tietz  <ktietz@redhat.com>

	* tree-ssa-reassoc.c (build_and_add_sum): Add forwarder declaration
	and support for unary expressions.
	(remove_stmt_chain): New function for removing a statement chain
	recursively without checking the visited state.
	(make_new_tmp_statement): New helper function to generate a
	temporary register statement.
	(expand_not_bitwise_binary): Helper for applying bitwise-not
	to a tree chain.
	(expand_cmp_ior): Helper for expanding combined bitwise-binary
	comparison statements.
	(break_up_bitwise_combined_stmt): New function.
	(break_up_subtract_bb): Rename to ...
	(break_up_expr_bb): ... this.
	(do_reassoc): Call break_up_expr_bb instead of break_up_subtract_bb.


ChangeLog

2011-10-04  Kai Tietz  <ktietz@redhat.com>

	* gcc.dg/tree-ssa/reassoc-26.c: New file.
	* gcc.dg/tree-ssa/reassoc-27.c: New file.
	* gcc.dg/tree-ssa/reassoc-28.c: New file.
	* gcc.dg/tree-ssa/reassoc-29.c: New file.
	* gcc.dg/tree-ssa/reassoc-30.c: New file.
	* gcc.dg/tree-ssa/reassoc-31.c: New file.

Bootstrapped and regression-tested for all languages (including Ada
and Obj-C++) on x86_64-pc-linux-gnu.
OK to apply?

Regards,
Kai

Patch

Index: gcc/gcc/tree-ssa-reassoc.c
===================================================================
--- gcc.orig/gcc/tree-ssa-reassoc.c
+++ gcc/gcc/tree-ssa-reassoc.c
@@ -44,6 +44,10 @@  along with GCC; see the file COPYING3.
 #include "params.h"
 #include "diagnostic-core.h"

+/* Forwarders.  */
+static gimple build_and_add_sum (tree, tree, tree, enum tree_code);
+static void remove_stmt_chain (tree);
+
 /*  This is a simple global reassociation pass.  It is, in part, based
     on the LLVM pass of the same name (They do some things more/less
     than we do, in different orders, etc).
@@ -51,7 +55,9 @@  along with GCC; see the file COPYING3.
     It consists of five steps:

     1. Breaking up subtract operations into addition + negate, where
-    it would promote the reassociation of adds.
+    it would promote the reassociation of adds.  Additionally, breaking
+    up combined expressions made out of boolean-typed bitwise expressions
+    to improve simplification.

     2. Left linearization of the expression trees, so that (A+B)+(C+D)
     becomes (((A+B)+C)+D), which is easier for us to rewrite later.
@@ -557,6 +563,412 @@  get_unary_op (tree name, enum tree_code
   return NULL_TREE;
 }

+/* Create a temporary register expression with type TYPE, tree code CODE, and
+   operands OP1 and OP2.  If REF_DEF is a valid gimple statement, we use its
+   location information for the newly generated statement.
+   Returns the left-hand side of the new temporary register.  */
+
+static tree
+make_new_tmp_statement (tree type, enum tree_code code, tree op1, tree op2,
+			gimple ref_def)
+{
+  gimple sum;
+  tree tmpvar = create_tmp_reg (type, NULL);
+  add_referenced_var (tmpvar);
+  sum = build_and_add_sum (tmpvar, op1, op2, code);
+  if (ref_def)
+    gimple_set_location (sum, gimple_location (ref_def));
+  return gimple_get_lhs (sum);
+}
+
+/* Apply the bitwise-not operation to tree LHS, whose optional defining
+   statement is EXPR.  TYPE is a boolean type.  */
+
+static tree
+expand_not_bitwise_binary (tree type, tree lhs, gimple expr)
+{
+  enum tree_code code = ERROR_MARK;
+  tree op1 = NULL, op2 = NULL;
+  gimple s1 = NULL, s2 = NULL;
+
+  if (TREE_CODE (lhs) == INTEGER_CST)
+    return fold_build1 (BIT_NOT_EXPR, type, lhs);
+
+  if (expr && is_gimple_assign (expr))
+    code = gimple_assign_rhs_code (expr);
+
+  /* If LHS is not a single-use SSA name, we don't want to modify its
+     defining statement, so fall back to the default case.  */
+  if (code != ERROR_MARK && !has_single_use (lhs))
+    code = ERROR_MARK;
+
+  if (TREE_CODE_CLASS (code) == tcc_comparison
+      || code == BIT_XOR_EXPR
+      || code == BIT_AND_EXPR
+      || code == BIT_IOR_EXPR)
+    {
+      op1 = gimple_assign_rhs1 (expr);
+      op2 = gimple_assign_rhs2 (expr);
+    }
+  else if (code == BIT_NOT_EXPR)
+    op1 = gimple_assign_rhs1 (expr);
+  else
+    return make_new_tmp_statement (TREE_TYPE (lhs), BIT_NOT_EXPR, lhs,
+				   NULL_TREE, expr);
+
+  /* ~(~X) -> X.  */
+  if (code == BIT_NOT_EXPR)
+    return op1;
+
+  /* Invert comparison if possible, otherwise fall through to
+     default case.  */
+  else if (TREE_CODE_CLASS (code) == tcc_comparison)
+    {
+      enum tree_code ncode;
+      tree op1type = TREE_TYPE (op1);
+
+      ncode = invert_tree_comparison (code,
+				      HONOR_NANS (TYPE_MODE (op1type)));
+      if (ncode != ERROR_MARK)
+	return make_new_tmp_statement (type, ncode, op1, op2, expr);
+    }
+  /* ~(A & B) -> ~A | ~B.  */
+  else if (code == BIT_AND_EXPR)
+    {
+      if (TREE_CODE (op1) != SSA_NAME
+         || !(s1 = SSA_NAME_DEF_STMT (op1))
+         || !is_gimple_assign (s1)
+         || !has_single_use (op1))
+	s1 = NULL;
+      if (TREE_CODE (op2) != SSA_NAME
+         || !(s2 = SSA_NAME_DEF_STMT (op2))
+         || !is_gimple_assign (s2)
+         || !has_single_use (op2))
+	s2 = NULL;
+
+      /* We have to do inversion in one step to avoid looping.  */
+      op1 = expand_not_bitwise_binary (type, op1, s1);
+      op2 = expand_not_bitwise_binary (type, op2, s2);
+      if (TREE_CODE (op2) == INTEGER_CST && integer_zerop (op2))
+        {
+	  remove_stmt_chain (op2);
+	  return op1;
+	}
+      else if (TREE_CODE (op2) == INTEGER_CST && integer_all_onesp (op2))
+        {
+	  remove_stmt_chain (op1);
+	  return op2;
+	}
+      return make_new_tmp_statement (type, BIT_IOR_EXPR, op1, op2, expr);
+    }
+  /* ~(A | B) -> ~A & ~B.  */
+  else if (code == BIT_IOR_EXPR)
+    {
+      if (TREE_CODE (op1) != SSA_NAME
+         || !(s1 = SSA_NAME_DEF_STMT (op1))
+         || !is_gimple_assign (s1)
+         || !has_single_use (op1))
+	s1 = NULL;
+      if (TREE_CODE (op2) != SSA_NAME
+         || !(s2 = SSA_NAME_DEF_STMT (op2))
+         || !is_gimple_assign (s2)
+         || !has_single_use (op2))
+	s2 = NULL;
+      op1 = expand_not_bitwise_binary (type, op1, s1);
+      op2 = expand_not_bitwise_binary (type, op2, s2);
+      if (TREE_CODE (op2) == INTEGER_CST && integer_zerop (op2))
+        {
+	  remove_stmt_chain (op1);
+	  return op2;
+	}
+      else if (TREE_CODE (op2) == INTEGER_CST && integer_all_onesp (op2))
+        {
+	  remove_stmt_chain (op2);
+	  return op1;
+	}
+      return make_new_tmp_statement (type, BIT_AND_EXPR, op1, op2, expr);
+    }
+  /* ~(A ^ B) -> ~A ^ B.  Handle the special cases of B being
+     an integer constant or a bitwise-not here.  */
+  else if (code == BIT_XOR_EXPR)
+    {
+      if (TREE_CODE (op1) != SSA_NAME
+         || !(s1 = SSA_NAME_DEF_STMT (op1))
+         || !is_gimple_assign (s1)
+         || !has_single_use (op1))
+	s1 = NULL;
+      if (TREE_CODE (op2) != SSA_NAME
+         || !(s2 = SSA_NAME_DEF_STMT (op2))
+         || !is_gimple_assign (s2)
+         || !has_single_use (op2))
+	s2 = NULL;
+      if (TREE_CODE (op2) != INTEGER_CST
+	  && (!s2 || gimple_assign_rhs_code (s2) != BIT_NOT_EXPR))
+        op1 = expand_not_bitwise_binary (type, op1, s1);
+      else
+        op2 = expand_not_bitwise_binary (type, op2, s2);
+
+      if (TREE_CODE (op2) == INTEGER_CST && integer_zerop (op2))
+        {
+	  remove_stmt_chain (op2);
+	  return op1;
+	}
+
+      if (TREE_CODE (op2) == INTEGER_CST && integer_all_onesp (op2))
+        {
+	  remove_stmt_chain (op2);
+	  return make_new_tmp_statement (type, BIT_NOT_EXPR, op1, NULL, expr);
+	}
+      return make_new_tmp_statement (type, BIT_XOR_EXPR, op1, op2, expr);
+    }
+
+  gcc_assert (types_compatible_p (type, TREE_TYPE (lhs)));
+
+  /* Default case: lhs -> ~lhs.  */
+  return make_new_tmp_statement (TREE_TYPE (lhs), BIT_NOT_EXPR, lhs,
+				 NULL_TREE, expr);
+}
+
+/* Routine to expand (X | Y) ==/!= 0 and to simplify
+   (X cmp Y) ==/!= 0:
+    - (X | Y) == 0 to (X == 0) & (Y == 0)
+    - (X | Y) != 0 to (X != 0) | (Y != 0).
+    - (X cmp Y) == 0 to (X cmp' Y), where cmp' is the inverse of cmp.
+    - (X cmp Y) != 0 to (X cmp Y).
+
+   The argument CODE can be either NE_EXPR or EQ_EXPR.  It indicates
+   what kind of expansion is performed.  */
+
+static tree
+expand_cmp_ior (tree op, tree type, enum tree_code code)
+{
+  tree op1, op2;
+  gimple s = NULL;
+  enum tree_code hcode;
+
+  if (TREE_CODE (op) == INTEGER_CST)
+    {
+      if (code == EQ_EXPR)
+        return fold_convert (type, (integer_zerop (op) ? integer_one_node
+        						: integer_zero_node));
+      return fold_convert (type, (!integer_zerop (op) ? integer_one_node
+						      : integer_zero_node));
+    }
+  if (TREE_CODE (op) != SSA_NAME
+      || !(s = SSA_NAME_DEF_STMT (op))
+      || !is_gimple_assign (s)
+      || !has_single_use (op))
+    return make_new_tmp_statement (type, code, op,
+				   build_zero_cst (TREE_TYPE (op)), s);
+  hcode = gimple_assign_rhs_code (s);
+
+  if (TREE_CODE_CLASS (hcode) != tcc_comparison
+      && hcode != BIT_IOR_EXPR)
+    return make_new_tmp_statement (type, code, op,
+				   build_zero_cst (TREE_TYPE (op)), s);
+
+  op1 = gimple_assign_rhs1 (s);
+  op2 = gimple_assign_rhs2 (s);
+
+  if (TREE_CODE_CLASS (hcode) == tcc_comparison)
+    {
+      tree op1type = TREE_TYPE (op1);
+
+      if (code == EQ_EXPR)
+        {
+	  enum tree_code ncode;
+	  ncode = invert_tree_comparison (hcode,
+					  HONOR_NANS (TYPE_MODE (op1type)));
+	  if (ncode != ERROR_MARK)
+	    return make_new_tmp_statement (type, ncode, op1, op2, s);
+        }
+      else
+        return make_new_tmp_statement (type, hcode, op1, op2, s);
+    }
+  if (hcode == BIT_IOR_EXPR)
+    {
+      op1 = expand_cmp_ior (op1, type, code);
+      op2 = expand_cmp_ior (op2, type, code);
+      return make_new_tmp_statement (type, (code == EQ_EXPR ? BIT_AND_EXPR
+      							    : BIT_IOR_EXPR),
+				     op1, op2, s);
+    }
+  return make_new_tmp_statement (type, code, op,
+				 build_zero_cst (TREE_TYPE (op)), s);
+}
+
+/* Break up STMT if it is a combined statement made out of
+   bitwise operations.  Handle expansion of (A | B) !=/== 0, ~(A op B),
+   and transform X ==/!= ~0 to ~X ==/!= 0.  */
+
+static bool
+break_up_bitwise_combined_stmt (gimple stmt)
+{
+  tree op1, op2, old_op1, old_op2;
+  gimple op1_def, op2_def;
+  enum tree_code code = gimple_assign_rhs_code (stmt);
+  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
+  gimple_stmt_iterator gsi;
+
+  op1 = gimple_assign_rhs1 (stmt);
+  old_op1 = op1;
+  old_op2 = op2 = NULL_TREE;
+
+  if (code == EQ_EXPR || code == NE_EXPR)
+    old_op2 = op2 = gimple_assign_rhs2 (stmt);
+
+  /* Only handle BIT_NOT_EXPR, EQ_EXPR, and NE_EXPR statements whose
+     first operand is an SSA name.  */
+  if ((code != BIT_NOT_EXPR
+       && code != EQ_EXPR && code != NE_EXPR)
+      || TREE_CODE (op1) != SSA_NAME)
+    return false;
+
+  /* Transform X !=/== ~0 -> ~X !=/== 0.  */
+  if ((code == EQ_EXPR || code == NE_EXPR)
+      && INTEGRAL_TYPE_P (TREE_TYPE (op1))
+      && TREE_CODE (op2) == INTEGER_CST
+      && integer_all_onesp (op2))
+    {
+      op1_def = SSA_NAME_DEF_STMT (op1);
+      if (!op1_def
+	  || !is_gimple_assign (op1_def)
+	  || !has_single_use (op1))
+	op1_def = NULL;
+      op1 = expand_not_bitwise_binary (TREE_TYPE (op1), op1, op1_def);
+      op2 = build_zero_cst (TREE_TYPE (op1));
+
+      gsi = gsi_for_stmt (stmt);
+      gimple_assign_set_rhs_with_ops (&gsi, code, op1, op2);
+      update_stmt (gsi_stmt (gsi));
+      remove_stmt_chain (old_op1);
+      remove_stmt_chain (old_op2);
+      return true;
+    }
+
+  /* If the left-hand operand isn't defined by a gimple assignment, or
+     has more than one use, we can't do anything.  */
+  op1_def = SSA_NAME_DEF_STMT (op1);
+  if (!op1_def
+      || !is_gimple_assign (op1_def)
+      || !has_single_use (op1))
+    return false;
+
+  if (code == NE_EXPR || code == EQ_EXPR)
+    {
+      tree inner_op1, inner_op2;
+
+      /* Check that the operands have integral type and that the
+         second operand is a constant zero.  */
+      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1))
+          || TREE_CODE (op2) != INTEGER_CST
+	  || !integer_zerop (op2))
+	return false;
+
+      /* Check for the patterns (X | Y) == 0 and (X | Y) != 0.  */
+      if (gimple_assign_rhs_code (op1_def) != BIT_IOR_EXPR)
+	return false;
+
+      inner_op1 = gimple_assign_rhs1 (op1_def);
+      inner_op2 = gimple_assign_rhs2 (op1_def);
+
+      /* Expand (X | Y) == 0 -> (X == 0) & (Y == 0)
+         and (X | Y) != 0 -> (X != 0) | (Y != 0)  */
+
+      op1 = expand_cmp_ior (inner_op1, type, code);
+      op2 = expand_cmp_ior (inner_op2, type, code);
+
+      gsi = gsi_for_stmt (stmt);
+      gimple_assign_set_rhs_with_ops (&gsi, (code == NE_EXPR ? BIT_IOR_EXPR
+							     : BIT_AND_EXPR),
+				      op1, op2);
+      update_stmt (gsi_stmt (gsi));
+      remove_stmt_chain (old_op1);
+      remove_stmt_chain (old_op2);
+      return true;
+    }
+  /* Handle expansion of ~X.  */
+  else if (code == BIT_NOT_EXPR)
+    {
+      enum tree_code inner_code = gimple_assign_rhs_code (op1_def);
+
+      if (inner_code == BIT_NOT_EXPR)
+        {
+	  op1 = gimple_assign_rhs1 (op1_def);
+	  gsi = gsi_for_stmt (stmt);
+	  gimple_assign_set_rhs_from_tree (&gsi, op1);
+	  update_stmt (gsi_stmt (gsi));
+	  remove_stmt_chain (old_op1);
+	  return true;
+	}
+      else if (TREE_CODE_CLASS (inner_code) == tcc_comparison)
+        {
+	  enum tree_code ncode;
+	  tree op1type;
+
+	  op1 = gimple_assign_rhs1 (op1_def);
+	  op2 = gimple_assign_rhs2 (op1_def);
+	  op1type = TREE_TYPE (op1);
+	  ncode = invert_tree_comparison (inner_code,
+					  HONOR_NANS (TYPE_MODE (op1type)));
+	  if (ncode == ERROR_MARK)
+	    return false;
+	  gsi = gsi_for_stmt (stmt);
+	  gimple_assign_set_rhs_with_ops (&gsi, ncode, op1, op2);
+	  update_stmt (gsi_stmt (gsi));
+	  remove_stmt_chain (old_op1);
+	  return true;
+	}
+      if (inner_code != BIT_AND_EXPR && inner_code != BIT_IOR_EXPR
+          && inner_code != BIT_XOR_EXPR)
+	return false;
+
+      op1 = gimple_assign_rhs1 (op1_def);
+      op2 = gimple_assign_rhs2 (op1_def);
+
+      op1_def = op2_def = NULL;
+
+      if (TREE_CODE (op1) != SSA_NAME
+	|| (op1_def = SSA_NAME_DEF_STMT (op1)) == NULL
+	|| !is_gimple_assign (op1_def))
+	op1_def = NULL;
+
+      if (TREE_CODE (op2) != SSA_NAME
+	|| (op2_def = SSA_NAME_DEF_STMT (op2)) == NULL
+	|| !is_gimple_assign (op2_def))
+	op2_def = NULL;
+      /* Transform ~(X ^ Y) to whichever representation is better:
+         either ~X ^ Y, or X ^ ~Y.  */
+      if (inner_code == BIT_XOR_EXPR)
+	{
+          if (TREE_CODE (op2) != INTEGER_CST
+	      && (!op2_def || !is_gimple_assign (op2_def)
+	          || !has_single_use (op2)
+	          || gimple_assign_rhs_code (op2_def) != BIT_NOT_EXPR))
+            op1 = expand_not_bitwise_binary (type, op1, op1_def);
+          else
+            op2 = expand_not_bitwise_binary (type, op2, op2_def);
+	}
+      /* Transform ~(X | Y) -> ~X & ~Y, or ~(X & Y) -> ~X | ~Y.  */
+      else
+	{
+          op1 = expand_not_bitwise_binary (type, op1, op1_def);
+          op2 = expand_not_bitwise_binary (type, op2, op2_def);
+	  inner_code = (inner_code == BIT_AND_EXPR ? BIT_IOR_EXPR
+						   : BIT_AND_EXPR);
+	}
+      gsi = gsi_for_stmt (stmt);
+      gimple_assign_set_rhs_with_ops (&gsi, inner_code, op1, op2);
+      update_stmt (gsi_stmt (gsi));
+      remove_stmt_chain (old_op1);
+      return true;
+    }
+
+  return false;
+}
+
 /* If CURR and LAST are a pair of ops that OPCODE allows us to
    eliminate through equivalences, do so, remove them from OPS, and
    return true.  Otherwise, return false.  */
@@ -1018,8 +1430,8 @@  zero_one_operation (tree *def, enum tree
   while (1);
 }

-/* Builds one statement performing OP1 OPCODE OP2 using TMPVAR for
-   the result.  Places the statement after the definition of either
+/* Builds one statement performing OP1 OPCODE OP2, or OPCODE OP1, using
+   TMPVAR for the result.  Places the statement after the definition of either
    OP1 or OP2.  Returns the new statement.  */

 static gimple
@@ -1038,7 +1450,7 @@  build_and_add_sum (tree tmpvar, tree op1
   /* Find an insertion place and insert.  */
   if (TREE_CODE (op1) == SSA_NAME)
     op1def = SSA_NAME_DEF_STMT (op1);
-  if (TREE_CODE (op2) == SSA_NAME)
+  if (op2 && TREE_CODE (op2) == SSA_NAME)
     op2def = SSA_NAME_DEF_STMT (op2);
   if ((!op1def || gimple_nop_p (op1def))
       && (!op2def || gimple_nop_p (op2def)))
@@ -2073,6 +2485,49 @@  remove_visited_stmt_chain (tree var)
     }
 }

+/* Remove the defining statement of VAR if VAR has zero uses, and
+   recurse on its rhs1, rhs2, and rhs3 operands.  */
+
+static void
+remove_stmt_chain (tree var)
+{
+  gimple stmt;
+  gimple_stmt_iterator gsi;
+  tree var2, var3;
+
+  while (1)
+    {
+      if (TREE_CODE (var) != SSA_NAME || !has_zero_uses (var))
+	return;
+      stmt = SSA_NAME_DEF_STMT (var);
+      if (!stmt || !is_gimple_assign (stmt))
+	return;
+      var = gimple_assign_rhs1 (stmt);
+      var2 = var3 = NULL_TREE;
+
+      switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
+        {
+	case  GIMPLE_TERNARY_RHS:
+	  var3 = gimple_assign_rhs3 (stmt);
+	  /* Fall through.  */
+	case GIMPLE_BINARY_RHS:
+	  var2 = gimple_assign_rhs2 (stmt);
+	  break;
+	default:
+	  break;
+	}
+      gsi = gsi_for_stmt (stmt);
+      gsi_remove (&gsi, true);
+      release_defs (stmt);
+      /* Recurse on the optional second and third operands,
+         if they are SSA names.  */
+      if (var2 && TREE_CODE (var2) == SSA_NAME)
+        remove_stmt_chain (var2);
+      if (var3 && TREE_CODE (var3) == SSA_NAME)
+        remove_stmt_chain (var3);
+    }
+}
+
 /* This function checks three consequtive operands in
    passed operands vector OPS starting from OPINDEX and
    swaps two operands if it is profitable for binary operation
@@ -2774,29 +3871,52 @@  can_reassociate_p (tree op)
    we want to break up k = t - q, but we won't until we've transformed q
    = b - r, which won't be broken up until we transform b = c - d.

+   Break up ==/!= 0 comparisons of bitwise-or operations so that
+   combined conditions can be optimized:
+   (A | B) != 0 -> (A != 0) || (B != 0)
+   (A | B) == 0 -> (A == 0) && (B == 0)
+
+   Break up bitwise-not expressions of boolean-typed and/or/xor
+   operations so that combined conditions can be optimized:
+   ~(A | B) -> ~A & ~B
+   ~(A & B) -> ~A | ~B
+   ~(A ^ B) -> ~A ^ B (or A ^ ~B if B is a constant or a bitwise-not)
+
    En passant, clear the GIMPLE visited flag on every statement.  */

 static void
-break_up_subtract_bb (basic_block bb)
+break_up_expr_bb (basic_block bb)
 {
   gimple_stmt_iterator gsi;
   basic_block son;

-  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
     {
       gimple stmt = gsi_stmt (gsi);
       gimple_set_visited (stmt, false);
-
-      if (!is_gimple_assign (stmt)
-	  || !can_reassociate_p (gimple_assign_lhs (stmt)))
+      if (!is_gimple_assign (stmt))
+        {
+	  gsi_next (&gsi);
+	  continue;
+        }
+      if (break_up_bitwise_combined_stmt (stmt))
 	continue;

+      if (!can_reassociate_p (gimple_assign_lhs (stmt)))
+	{
+	  gsi_next (&gsi);
+	  continue;
+	}
+
       /* Look for simple gimple subtract operations.  */
       if (gimple_assign_rhs_code (stmt) == MINUS_EXPR)
 	{
 	  if (!can_reassociate_p (gimple_assign_rhs1 (stmt))
 	      || !can_reassociate_p (gimple_assign_rhs2 (stmt)))
-	    continue;
+	    {
+	      gsi_next (&gsi);
+	      continue;
+	    }

 	  /* Check for a subtract used only in an addition.  If this
 	     is the case, transform it into add of a negate for better
@@ -2808,11 +3928,12 @@  break_up_subtract_bb (basic_block bb)
       else if (gimple_assign_rhs_code (stmt) == NEGATE_EXPR
 	       && can_reassociate_p (gimple_assign_rhs1 (stmt)))
 	VEC_safe_push (tree, heap, plus_negates, gimple_assign_lhs (stmt));
+      gsi_next (&gsi);
     }
   for (son = first_dom_son (CDI_DOMINATORS, bb);
        son;
        son = next_dom_son (CDI_DOMINATORS, son))
-    break_up_subtract_bb (son);
+    break_up_expr_bb (son);
 }

 /* Reassociate expressions in basic block BB and its post-dominator as
@@ -2981,7 +4102,8 @@  debug_ops_vector (VEC (operand_entry_t,
 static void
 do_reassoc (void)
 {
-  break_up_subtract_bb (ENTRY_BLOCK_PTR);
+  break_up_expr_bb (ENTRY_BLOCK_PTR);
+
   reassociate_bb (EXIT_BLOCK_PTR);
 }

Index: gcc/gcc/testsuite/gcc.dg/tree-ssa/reassoc-30.c
===================================================================
--- /dev/null
+++ gcc/gcc/testsuite/gcc.dg/tree-ssa/reassoc-30.c
@@ -0,0 +1,12 @@ 
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized -ffast-math" } */
+
+int foo (int a, int b, int c, int d)
+{
+  int r1 = a != 0 & c != 0 & b != 0;
+  int r2 = a == 0 | b != 0 | d == 0;
+  return (r1 != 0 & r2 == 0);
+}
+
+/* { dg-final { scan-tree-dump-times "return 0" 1 "optimized"} } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */
Index: gcc/gcc/testsuite/gcc.dg/tree-ssa/reassoc-31.c
===================================================================
--- /dev/null
+++ gcc/gcc/testsuite/gcc.dg/tree-ssa/reassoc-31.c
@@ -0,0 +1,12 @@ 
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized -ffast-math" } */
+
+int foo (int a, int b, int c, int d)
+{
+  int r1 = a != 0 & c != 0 & b != 0;
+  int r2 = a == 0 | b != 0 | d == 0;
+  return (r1 == 0 | r2 != 0);
+}
+
+/* { dg-final { scan-tree-dump-times "return 1" 1 "optimized"} } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */
Index: gcc/gcc/testsuite/gcc.dg/tree-ssa/reassoc-26.c
===================================================================
--- /dev/null
+++ gcc/gcc/testsuite/gcc.dg/tree-ssa/reassoc-26.c
@@ -0,0 +1,12 @@ 
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized -ffast-math" } */
+
+int foo (int a, int b, int c, int d)
+{
+  int r1 = (a | b | c) == 0;
+  int r2 = (a | d | c) != 0 | b == 0;
+  return (r1 == 0 | r2 != 0);
+}
+
+/* { dg-final { scan-tree-dump-times "return 1" 1 "optimized"} } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */
Index: gcc/gcc/testsuite/gcc.dg/tree-ssa/reassoc-27.c
===================================================================
--- /dev/null
+++ gcc/gcc/testsuite/gcc.dg/tree-ssa/reassoc-27.c
@@ -0,0 +1,12 @@ 
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized -ffast-math" } */
+
+int foo (int a, int b, int c, int d)
+{
+  int r1 = a & ~c & b;
+  int r2 = ~a | b | ~d;
+  return (r1 & ~r2);
+}
+
+/* { dg-final { scan-tree-dump-times "return 0" 1 "optimized"} } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */
Index: gcc/gcc/testsuite/gcc.dg/tree-ssa/reassoc-28.c
===================================================================
--- /dev/null
+++ gcc/gcc/testsuite/gcc.dg/tree-ssa/reassoc-28.c
@@ -0,0 +1,12 @@ 
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized -ffast-math" } */
+
+int foo (int a, int b, int c, int d)
+{
+  int r1 = a & ~c & b;
+  int r2 = ~a | b | ~d;
+  return (~r1 | r2);
+}
+
+/* { dg-final { scan-tree-dump-times "return -1" 1 "optimized"} } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */
Index: gcc/gcc/testsuite/gcc.dg/tree-ssa/reassoc-29.c
===================================================================
--- /dev/null
+++ gcc/gcc/testsuite/gcc.dg/tree-ssa/reassoc-29.c
@@ -0,0 +1,12 @@ 
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized -ffast-math" } */
+
+int foo (int a, int b, int c, int d)
+{
+  int r1 = ~(a | b | c);
+  int r2 = (a | d | c) | ~b;
+  return (~r1 | r2);
+}
+
+/* { dg-final { scan-tree-dump-times "return -1" 1 "optimized"} } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */