===================================================================
@@ -3110,6 +3110,366 @@ linearize_expr_tree (VEC(operand_entry_t
add_to_ops_vec (ops, binrhs);
}
+/* Split up a binary tree-chain of code CODE, starting at STMT, into
+ three vectors:
+ - vector VCST stores integer constant values.
+ - vector VNOT stores bitwise-not expressions.
+ - vector VEXPR stores the remaining operands.
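+ For example, walking the chain (~a & b) & (c & 0x0f) with CODE
+ BIT_AND_EXPR yields VCST = {0x0f}, VNOT = {~a}, and VEXPR = {b, c},
+ provided each intermediate result has a single use. */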
+
+static void
+walk_bitwise_stmt_elems (gimple stmt, enum tree_code code,
+ VEC(tree, heap) **vcst,
+ VEC(tree, heap) **vnot,
+ VEC(tree, heap) **vexpr)
+{
+ gimple s;
+ tree l;
+
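+ /* Classify the first operand: integer constants go to VCST, operands
+ defined by a single-use statement of CODE are walked recursively,
+ bitwise-not results go to VNOT, everything else goes to VEXPR. */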
+ l = gimple_assign_rhs1 (stmt);
+ if (TREE_CODE (l) == INTEGER_CST)
+ VEC_safe_push (tree, heap, *vcst, l);
+ else if (TREE_CODE (l) != SSA_NAME
+ || (s = SSA_NAME_DEF_STMT (l)) == NULL
+ || !is_gimple_assign (s)
+ || !has_single_use (l))
+ VEC_safe_push (tree, heap, *vexpr, l);
+ else if (gimple_assign_rhs_code (s) == code)
+ walk_bitwise_stmt_elems (s, code, vcst, vnot, vexpr);
+ else if (gimple_assign_rhs_code (s) == BIT_NOT_EXPR)
+ VEC_safe_push (tree, heap, *vnot, l);
+ else
+ VEC_safe_push (tree, heap, *vexpr, l);
+
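+ /* Classify the second operand the same way; an integer constant ends
+ the walk, as constants are always leaves of the chain. */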
+ l = gimple_assign_rhs2 (stmt);
+
+ if (TREE_CODE (l) == INTEGER_CST)
+ {
+ VEC_safe_push (tree, heap, *vcst, l);
+ return;
+ }
+ if (TREE_CODE (l) != SSA_NAME
+ || (s = SSA_NAME_DEF_STMT (l)) == NULL
+ || !is_gimple_assign (s)
+ || !has_single_use (l))
+ VEC_safe_push (tree, heap, *vexpr, l);
+ else if (gimple_assign_rhs_code (s) == code)
+ walk_bitwise_stmt_elems (s, code, vcst, vnot, vexpr);
+ else if (gimple_assign_rhs_code (s) == BIT_NOT_EXPR)
+ VEC_safe_push (tree, heap, *vnot, l);
+ else
+ VEC_safe_push (tree, heap, *vexpr, l);
+}
+
+/* Helper function to rebuild a binary tree of CODE operations from
+ the elements of vector VEC.
+ If LASTP is NULL, then all elements are combined and the result is
+ returned. Otherwise the last element of VEC is stored in LASTP and
+ all but the last element are combined and returned.
+ Note: for a vector with just one element, that element is returned
+ and LASTP, if provided, is set to NULL.
+ If INNER_LEFT is TRUE, the RHS1 operands of the statements defining
+ the VEC elements are combined instead of the elements themselves.
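+ For example, for VEC = {a, b, c}: with LASTP == NULL the result is
+ (a CODE b) CODE c; with LASTP non-NULL the result is a CODE b and
+ *LASTP is set to c. */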
+
+static tree
+rebuild_vector_tree (VEC(tree, heap) *vec,
+ enum tree_code code, tree *lastp, bool inner_left)
+{
+ gimple s;
+ unsigned int i = 0;
+ tree r = NULL_TREE, x, r2 = NULL_TREE;
+
+ FOR_EACH_VEC_ELT (tree, vec, i, x)
+ {
+ s = SSA_NAME_DEF_STMT (x);
+
+ if (inner_left)
+ x = gimple_assign_rhs1 (s);
+ if (!r)
+ r = x;
+ else if (!r2)
+ r2 = x;
+ else
+ {
+ r = make_new_tmp_statement (TREE_TYPE (r), code, r, r2, s);
+ r2 = x;
+ }
+ }
+ if (lastp)
+ *lastp = r2;
+ else if (r && r2)
+ {
+ s = SSA_NAME_DEF_STMT (r);
+ r = make_new_tmp_statement (TREE_TYPE (r), code, r, r2, s);
+ }
+ return r;
+}
+
+/* Sink bitwise-not expressions out of the xor-expression sequence
+ rooted at STMT. VNOT holds the collected not-operands, VEXPR the
+ remaining operands, and TCST the folded constant part, if any.
+ The statement at PGSI is always modified, so TRUE is returned.
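+ For example, ~a ^ ~b ^ c ^ 5 (VNOT = {~a, ~b}, VEXPR = {c}, TCST = 5)
+ is rebuilt as ((a ^ b) ^ c) ^ 5; the even number of not-expressions
+ simply cancels. */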
+
+static bool
+operate_bitwise_xor_stmt (gimple_stmt_iterator *pgsi, gimple stmt, tree tcst,
+ VEC(tree, heap) *vnot,
+ VEC(tree, heap) *vexpr)
+{
+ unsigned int i = VEC_length (tree, vnot);
+ tree l = NULL_TREE, r = NULL_TREE;
+ bool inv = false;
+
+ /* If the number of not-expressions is odd, then we have two cases:
+ a) we have a constant, so we can sink one NOT into the integral
+ constant, as ~X ^ CST -> X ^ CST' with CST' = ~CST.
+ b) we need to wrap the whole statement in a bitwise-not expression. */
+ if ((i & 1) != 0)
+ {
+ if (tcst)
+ {
+ tcst = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (tcst), tcst);
+ /* See if we can eliminate the constant. */
+ if (integer_zerop (tcst))
+ tcst = NULL;
+ }
+ else
+ inv = true;
+ }
+
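+ /* Rebuild the xor-chain: operands from VNOT contribute their inner
+ (un-negated) operand, operands from VEXPR are used as-is, and TCST,
+ if present, becomes the last xor operand. */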
+ if (vnot && vexpr)
+ {
+ l = rebuild_vector_tree (vnot, BIT_XOR_EXPR, NULL, true);
+ r = rebuild_vector_tree (vexpr, BIT_XOR_EXPR, NULL, false);
+ if (tcst)
+ {
+ l = make_new_tmp_statement (TREE_TYPE (l), BIT_XOR_EXPR, l, r, stmt);
+ r = tcst;
+ }
+ }
+ else if (vnot)
+ {
+ l = rebuild_vector_tree (vnot, BIT_XOR_EXPR, &r, true);
+ if (tcst)
+ {
+ if (!r)
+ r = tcst;
+ else
+ {
+ l = make_new_tmp_statement (TREE_TYPE (l), BIT_XOR_EXPR, l, r, stmt);
+ r = tcst;
+ }
+ }
+ }
+ else if (tcst)
+ {
+ l = rebuild_vector_tree (vexpr, BIT_XOR_EXPR, NULL, false);
+ if (l)
+ r = tcst;
+ else
+ {
+ l = tcst;
+ r = NULL_TREE;
+ }
+ }
+ else
+ l = rebuild_vector_tree (vexpr, BIT_XOR_EXPR, &r, false);
+
+ if (inv)
+ {
+ if (r)
+ l = make_new_tmp_statement (TREE_TYPE (l), BIT_XOR_EXPR, l, r, stmt);
+ gimple_assign_set_rhs_with_ops (pgsi, BIT_NOT_EXPR, l, NULL_TREE);
+ }
+ else
+ {
+ if (r)
+ gimple_assign_set_rhs_with_ops (pgsi, BIT_XOR_EXPR, l, r);
+ else
+ gimple_assign_set_rhs_from_tree (pgsi, l);
+ }
+ return true;
+}
+
+/* Sink bitwise-not expressions out of the bitwise-binary sequence
+ rooted at STMT. CODE is the tree-code of the statement sequence.
+ The following patterns are used for the transformations:
+ ~X and ~Y -> ~(X or Y)
+ ~X or ~Y -> ~(X and Y)
+ ~X xor ~Y -> X xor Y
+ ~X xor Y -> ~(X xor Y)
+ ~X xor CST -> X xor CST' (with CST' = ~CST).
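+ These patterns extend to whole chains, e.g. ~a & ~b & ~c becomes
+ ~(a | b | c). */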
+
+static bool
+operate_bitwise_stmt (gimple_stmt_iterator *pgsi, gimple stmt,
+ enum tree_code code)
+{
+ unsigned int i = 0;
+ tree x, tcst = NULL_TREE;
+ tree tnot = NULL_TREE, torg = NULL_TREE;
+ VEC(tree, heap) *vcst = NULL;
+ VEC(tree, heap) *vnot = NULL;
+ VEC(tree, heap) *vexpr = NULL;
+ enum tree_code icode;
+ bool do_repropagate = false;
+
+ gcc_assert (gimple_assign_rhs_code (stmt) == code);
+
+ walk_bitwise_stmt_elems (stmt, code, &vcst, &vnot, &vexpr);
+
+ /* There is more than one constant value. */
+ if (VEC_length (tree, vcst) > 1)
+ do_repropagate = true;
+ /* There is more than one not-expression. */
+ else if (VEC_length (tree, vnot) > 1)
+ do_repropagate = true;
+ /* If we have (~X & CST), we convert to ~(X | CST') with CST'=~CST.
+ If we have (~X | CST), we convert to ~(X & CST') with CST'=~CST. */
+ else if (VEC_length (tree, vnot) == 1
+ && VEC_length (tree, vexpr) == 0
+ && VEC_length (tree, vcst) >= 1)
+ do_repropagate = true;
+ /* For xor-expressions we do the following conversions:
+ a) (~X ^ CST) -> X ^ CST'; with CST'=~CST.
+ b) (~X ^ Y) -> ~(X ^ Y). */
+ else if (code == BIT_XOR_EXPR
+ && VEC_length (tree, vnot) == 1)
+ do_repropagate = true;
+
+ if (!do_repropagate)
+ {
+ VEC_free (tree, heap, vcst);
+ VEC_free (tree, heap, vnot);
+ VEC_free (tree, heap, vexpr);
+ return false;
+ }
+
+ /* First combine integral constant bitwise operations. */
+ FOR_EACH_VEC_ELT (tree, vcst, i, x)
+ {
+ if (!tcst)
+ tcst = x;
+ else
+ tcst = fold_build2 (code, TREE_TYPE (x), tcst, x);
+ }
+ VEC_free (tree, heap, vcst);
+ vcst = NULL;
+
+ /* If we deal only with constants, assign the
+ calculated constant. */
+ if (tcst && !vnot && !vexpr)
+ {
+ gimple_assign_set_rhs_from_tree (pgsi, tcst);
+ return true;
+ }
+
+ if (code == BIT_XOR_EXPR)
+ {
+ operate_bitwise_xor_stmt (pgsi, stmt, tcst, vnot, vexpr);
+ VEC_free (tree, heap, vnot);
+ VEC_free (tree, heap, vexpr);
+ return true;
+ }
+
+ /* Propagate bitwise and/or statements. */
+
+ /* Inverse for binary and/or. */
+ icode = (code == BIT_IOR_EXPR ? BIT_AND_EXPR : BIT_IOR_EXPR);
+
+ /* Build inverse tree for bitwise-not part. */
+ if (VEC_length (tree, vnot) > 0)
+ {
+ tnot = rebuild_vector_tree (vnot, icode, NULL, true);
+ torg = rebuild_vector_tree (vexpr, code, NULL, false);
+ if (tnot && !tcst && !torg)
+ {
+ gimple_assign_set_rhs_with_ops (pgsi, BIT_NOT_EXPR, tnot, NULL_TREE);
+ VEC_free (tree, heap, vnot);
+ VEC_free (tree, heap, vexpr);
+ return true;
+ }
+
+ /* If only a constant remains besides the not-operands, sink the
+ bitwise-not into the constant: (~X & CST) -> ~(X | CST') and
+ (~X | CST) -> ~(X & CST') with CST' = ~CST. */
+ if (tnot && !torg && tcst)
+ {
+ tcst = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (tcst), tcst);
+ tnot = make_new_tmp_statement (TREE_TYPE (tnot), icode, tnot, tcst,
+ stmt);
+ gimple_assign_set_rhs_with_ops (pgsi, BIT_NOT_EXPR, tnot, NULL_TREE);
+ VEC_free (tree, heap, vnot);
+ VEC_free (tree, heap, vexpr);
+ return true;
+ }
+
+ tnot = make_new_tmp_statement (TREE_TYPE (tnot), BIT_NOT_EXPR, tnot,
+ NULL_TREE, SSA_NAME_DEF_STMT (tnot));
+ if (torg)
+ {
+ if (!tcst)
+ {
+ gimple_assign_set_rhs_with_ops (pgsi, code, tnot, torg);
+ VEC_free (tree, heap, vnot);
+ VEC_free (tree, heap, vexpr);
+ return true;
+ }
+ tnot = make_new_tmp_statement (TREE_TYPE (tnot), code, tnot, torg,
+ SSA_NAME_DEF_STMT (tnot));
+ }
+ gimple_assign_set_rhs_with_ops (pgsi, code, tnot, tcst);
+ VEC_free (tree, heap, vnot);
+ VEC_free (tree, heap, vexpr);
+ return true;
+ }
+ if (!tcst)
+ torg = rebuild_vector_tree (vexpr, code, &tcst, false);
+ else
+ torg = rebuild_vector_tree (vexpr, code, NULL, false);
+ if (tcst)
+ gimple_assign_set_rhs_with_ops (pgsi, code, torg, tcst);
+ else
+ gimple_assign_set_rhs_from_tree (pgsi, torg);
+
+ VEC_free (tree, heap, vnot);
+ VEC_free (tree, heap, vexpr);
+
+ return true;
+}
+
+/* Repropagate bitwise operations in BB and all of its dominated blocks
+ back to de-normalized form, e.g. ~X op ~Y -> ~(X op' Y). */
+
+static void
+repropagate_bitwise (basic_block bb)
+{
+ gimple_stmt_iterator gsi;
+ basic_block son;
+
+ /* First, combine constants and sink bitwise-not expressions. */
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple sub;
+ gimple stmt = gsi_stmt (gsi);
+ enum tree_code code;
+ tree l, r, lhs;
+
+ if (!is_gimple_assign (stmt))
+ continue;
+
+ code = gimple_assign_rhs_code (stmt);
+ if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
+ && code != BIT_XOR_EXPR)
+ continue;
+
+ /* If the single user of this statement has the same tree-code as
+ this statement, skip processing here for performance reasons: the
+ whole chain is handled once its root statement is reached. */
+ lhs = gimple_assign_lhs (stmt);
+ if (has_single_use (lhs)
+ && (sub = get_single_immediate_use (lhs)) != NULL
+ && is_gimple_assign (sub)
+ && gimple_assign_rhs_code (sub) == code)
+ continue;
+ l = gimple_assign_rhs1 (stmt);
+ r = gimple_assign_rhs2 (stmt);
+ if (!operate_bitwise_stmt (&gsi, stmt, code))
+ continue;
+ stmt = gsi_stmt (gsi);
+ update_stmt (stmt);
+ remove_stmt_chain (l);
+ remove_stmt_chain (r);
+ }
+
+ for (son = first_dom_son (CDI_DOMINATORS, bb);
+ son;
+ son = next_dom_son (CDI_DOMINATORS, son))
+ repropagate_bitwise (son);
+}
+
/* Repropagate the negates back into subtracts, since no other pass
currently does it. */
@@ -3553,6 +3913,7 @@ execute_reassoc (void)
do_reassoc ();
repropagate_negates ();
+ repropagate_bitwise (ENTRY_BLOCK_PTR);
fini_reassoc ();
return 0;
===================================================================
@@ -0,0 +1,26 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized -ffast-math" } */
+
+int foo (int a, int b, int c, int d)
+{
+ return (~a & ~b) ^ (~c | ~d);
+}
+
+int foo2 (int a, int b, int c, int d)
+{
+ return (~a & ~b & ~c & ~d) ^ 0xff;
+}
+
+int foo3 (int a, int b, int c, int d)
+{
+ return ~a ^ b ^ ~c ^ ~d ^ 0xff;
+}
+
+int foo4 (int a, int b, int c, int d)
+{
+ return ~a ^ ~b ^ ~c ^ ~d;
+}
+
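+/* After reassociation all bitwise-not expressions should be sunk or
+ cancelled: foo becomes (a | b) ^ (c & d), foo2 becomes
+ (a | b | c | d) ^ -256, foo3 becomes a ^ b ^ c ^ d ^ -256, and foo4
+ becomes a ^ b ^ c ^ d. */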
+/* { dg-final { scan-tree-dump-times " ~" 0 "optimized" } } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */
+