From patchwork Wed Jul 20 13:05:33 2011 Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit X-Patchwork-Submitter: Kai Tietz X-Patchwork-Id: 105680 Return-Path: X-Original-To: incoming@patchwork.ozlabs.org Delivered-To: patchwork-incoming@bilbo.ozlabs.org Received: from sourceware.org (server1.sourceware.org [209.132.180.131]) by ozlabs.org (Postfix) with SMTP id 7995BB6F81 for ; Wed, 20 Jul 2011 23:06:22 +1000 (EST) Received: (qmail 28094 invoked by alias); 20 Jul 2011 13:06:21 -0000 Received: (qmail 28084 invoked by uid 22791); 20 Jul 2011 13:06:18 -0000 X-SWARE-Spam-Status: No, hits=-1.8 required=5.0 tests=AWL, BAYES_00, DKIM_SIGNED, DKIM_VALID, DKIM_VALID_AU, FREEMAIL_ENVFROM_END_DIGIT, FREEMAIL_FROM, RCVD_IN_DNSWL_LOW, TW_TM X-Spam-Check-By: sourceware.org Received: from mail-qy0-f175.google.com (HELO mail-qy0-f175.google.com) (209.85.216.175) by sourceware.org (qpsmtpd/0.43rc1) with ESMTP; Wed, 20 Jul 2011 13:05:35 +0000 Received: by qyk30 with SMTP id 30so2739745qyk.20 for ; Wed, 20 Jul 2011 06:05:33 -0700 (PDT) MIME-Version: 1.0 Received: by 10.229.89.142 with SMTP id e14mr2283078qcm.230.1311167133679; Wed, 20 Jul 2011 06:05:33 -0700 (PDT) Received: by 10.229.38.195 with HTTP; Wed, 20 Jul 2011 06:05:33 -0700 (PDT) In-Reply-To: References: Date: Wed, 20 Jul 2011 15:05:33 +0200 Message-ID: Subject: Re: [patch tree-optimization]: [2 of 3]: Boolify compares & more From: Kai Tietz To: Richard Guenther Cc: GCC Patches X-IsSubscribed: yes Mailing-List: contact gcc-patches-help@gcc.gnu.org; run by ezmlm Precedence: bulk List-Id: List-Unsubscribe: List-Archive: List-Post: List-Help: Sender: gcc-patches-owner@gcc.gnu.org Delivered-To: mailing list gcc-patches@gcc.gnu.org Hello, this is the revised version of the partial pre-approved patch for preserving type-casts from/to boolean-types. It fixes additionally the regression in tree-ssa/builtin-expect-5.c testcase, which was caused by fold_builtin_expect. 
Additionally there was a regression in gcc.dg/pr28685-1.c, which is fixed by the change in tree-ssa-forwprop.c's function simplify_bitwise_binary. This is just temporarily necessary. As soon as we are boolifying comparisons in the gimplifier, the pattern-matching in tree-ssa-reassoc will match for 2 branched cases again and we can remove the hunk from forward-propagation again. 2011-07-20 Kai Tietz * fold-const.c (fold_unary_loc): Preserve non-boolean-typed casts. * tree-ssa.c (useless_type_conversion_p): Preserve incompatible casts from/to boolean, * builtins.c (fold_builtin_expect): See through the cast from truthvalue_type_node to long. * tree-ssa-forwprop.c (simplify_bitwise_binary): Add simplification for CMP bitwise-binary CMP. Bootstrapped and regression tested for all standard languages plus Ada and Obj-C++ on host x86_64-pc-linux-gnu. Ok for apply? 2011-07-20 Kai Tietz * gcc.dg/binop-xor1.c: Mark it as expected fail. * gcc.dg/binop-xor3.c: Likewise. Index: gcc-head/gcc/builtins.c =================================================================== --- gcc-head.orig/gcc/builtins.c +++ gcc-head/gcc/builtins.c @@ -6264,13 +6264,22 @@ build_builtin_expect_predicate (location static tree fold_builtin_expect (location_t loc, tree arg0, tree arg1) { - tree inner, fndecl; + tree inner, fndecl, inner_arg0; enum tree_code code; + /* Distribute the expected value over short-circuiting operators. + See through the cast from truthvalue_type_node to long. */ + inner_arg0 = arg0; + while (TREE_CODE (inner_arg0) == NOP_EXPR + && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0)) + && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0)))) + inner_arg0 = TREE_OPERAND (inner_arg0, 0); + /* If this is a builtin_expect within a builtin_expect keep the inner one. See through a comparison against a constant. It might have been added to create a thruthvalue. 
*/ - inner = arg0; + inner = inner_arg0; + if (COMPARISON_CLASS_P (inner) && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST) inner = TREE_OPERAND (inner, 0); @@ -6281,14 +6290,7 @@ fold_builtin_expect (location_t loc, tre && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT) return arg0; - /* Distribute the expected value over short-circuiting operators. - See through the cast from truthvalue_type_node to long. */ - inner = arg0; - while (TREE_CODE (inner) == NOP_EXPR - && INTEGRAL_TYPE_P (TREE_TYPE (inner)) - && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0)))) - inner = TREE_OPERAND (inner, 0); - + inner = inner_arg0; code = TREE_CODE (inner); if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR) { @@ -6303,13 +6305,13 @@ fold_builtin_expect (location_t loc, tre } /* If the argument isn't invariant then there's nothing else we can do. */ - if (!TREE_CONSTANT (arg0)) + if (!TREE_CONSTANT (inner_arg0)) return NULL_TREE; /* If we expect that a comparison against the argument will fold to a constant return the constant. In practice, this means a true constant or the address of a non-weak symbol. */ - inner = arg0; + inner = inner_arg0; STRIP_NOPS (inner); if (TREE_CODE (inner) == ADDR_EXPR) { Index: gcc-head/gcc/fold-const.c =================================================================== --- gcc-head.orig/gcc/fold-const.c +++ gcc-head/gcc/fold-const.c @@ -7665,11 +7665,11 @@ fold_unary_loc (location_t loc, enum tre non-integral type. Do not fold the result as that would not simplify further, also folding again results in recursions. 
*/ - if (INTEGRAL_TYPE_P (type)) + if (TREE_CODE (type) == BOOLEAN_TYPE) return build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1)); - else + else if (!INTEGRAL_TYPE_P (type)) return build3_loc (loc, COND_EXPR, type, op0, fold_convert (type, boolean_true_node), fold_convert (type, boolean_false_node)); Index: gcc-head/gcc/tree-ssa.c =================================================================== --- gcc-head.orig/gcc/tree-ssa.c +++ gcc-head/gcc/tree-ssa.c @@ -1306,10 +1306,10 @@ useless_type_conversion_p (tree outer_ty || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type)) return false; - /* Preserve conversions to BOOLEAN_TYPE if it is not of precision - one. */ - if (TREE_CODE (inner_type) != BOOLEAN_TYPE - && TREE_CODE (outer_type) == BOOLEAN_TYPE + /* Preserve conversions to/from BOOLEAN_TYPE if types are not + of precision one. */ + if (((TREE_CODE (inner_type) == BOOLEAN_TYPE) + != (TREE_CODE (outer_type) == BOOLEAN_TYPE)) && TYPE_PRECISION (outer_type) != 1) return false; Index: gcc-head/gcc/testsuite/gcc.dg/binop-xor1.c =================================================================== --- gcc-head.orig/gcc/testsuite/gcc.dg/binop-xor1.c +++ gcc-head/gcc/testsuite/gcc.dg/binop-xor1.c @@ -7,8 +7,5 @@ foo (int a, int b, int c) return ((a && !b && c) || (!a && b && c)); } -/* We expect to see ""; confirm that, so that we know to count - it in the real test. 
*/ -/* { dg-final { scan-tree-dump-times "\]*>" 5 "optimized" } } */ -/* { dg-final { scan-tree-dump-times "\\\^" 1 "optimized" } } */ +/* { dg-final { scan-tree-dump-times "\\\^" 1 "optimized" { xfail *-*-* } } } */ /* { dg-final { cleanup-tree-dump "optimized" } } */ Index: gcc-head/gcc/testsuite/gcc.dg/binop-xor3.c =================================================================== --- gcc-head.orig/gcc/testsuite/gcc.dg/binop-xor3.c +++ gcc-head/gcc/testsuite/gcc.dg/binop-xor3.c @@ -7,8 +7,5 @@ foo (int a, int b) return ((a && !b) || (!a && b)); } -/* We expect to see ""; confirm that, so that we know to count - it in the real test. */ -/* { dg-final { scan-tree-dump-times "\]*>" 1 "optimized" } } */ -/* { dg-final { scan-tree-dump-times "\\\^" 1 "optimized" } } */ +/* { dg-final { scan-tree-dump-times "\\\^" 1 "optimized" { xfail *-*-* } } } */ /* { dg-final { cleanup-tree-dump "optimized" } } */ Index: gcc-head/gcc/tree-ssa-forwprop.c =================================================================== --- gcc-head.orig/gcc/tree-ssa-forwprop.c +++ gcc-head/gcc/tree-ssa-forwprop.c @@ -1874,6 +1874,50 @@ simplify_bitwise_binary (gimple_stmt_ite return true; } + /* See if we can combine comparisons for & or |. */ + if (TREE_CODE_CLASS (def1_code) == tcc_comparison + && TREE_CODE_CLASS (def2_code) == tcc_comparison) + { + if (code == BIT_AND_EXPR) + res = maybe_fold_and_comparisons (def1_code, def1_arg1, + gimple_assign_rhs2 (def1), + def2_code, def2_arg1, + gimple_assign_rhs2 (def2)); + else if (code == BIT_IOR_EXPR) + res = maybe_fold_or_comparisons (def1_code, def1_arg1, + gimple_assign_rhs2 (def1), + def2_code, def2_arg1, + gimple_assign_rhs2 (def2)); + + /* We handle here only constant results and + cases (X cmp-1 Y) | (X cmp-2 Y) -> X cmp-3 Y. 
*/ + if (res && TREE_CODE (res) != INTEGER_CST + && (TREE_CODE_CLASS (TREE_CODE (res)) != tcc_comparison + || TREE_OPERAND (res, 0) != def1_arg1 + || TREE_OPERAND (res, 1) != gimple_assign_rhs2 (def1))) + res = NULL_TREE; + if (res) + { + /* maybe_fold_and_comparisons and maybe_fold_or_comparisons + always give us a boolean_type_node value back. If the original + BIT_AND_EXPR or BIT_IOR_EXPR was of a wider integer type, + we need to convert. */ + if (TREE_CODE (res) == INTEGER_CST) + { + if (!useless_type_conversion_p (TREE_TYPE (arg1), + TREE_TYPE (res))) + res = fold_convert (TREE_TYPE (arg1), res); + gimple_assign_set_rhs_from_tree (gsi, res); + } + else + gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (res), + TREE_OPERAND (res, 0), + TREE_OPERAND (res, 1)); + + update_stmt (gsi_stmt (*gsi)); + return true; + } + } return false; }