Message ID: alpine.DEB.2.02.1305141334420.6987@stedding.saclay.inria.fr
State: New
On Tue, May 14, 2013 at 1:47 PM, Marc Glisse <marc.glisse@inria.fr> wrote: > On Tue, 14 May 2013, Richard Biener wrote: > >> On Mon, May 13, 2013 at 1:40 PM, Marc Glisse <marc.glisse@inria.fr> wrote: >>> >>> On Mon, 13 May 2013, Richard Biener wrote: >>> >>>> On Sat, May 11, 2013 at 11:38 AM, Marc Glisse <marc.glisse@inria.fr> >>>> wrote: >>>>> >>>>> @@ -8274,28 +8269,34 @@ fold_unary_loc (location_t loc, enum tre >>>>> { >>>>> elem = VECTOR_CST_ELT (arg0, i); >>>>> elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE >>>>> (type), >>>>> elem); >>>>> if (elem == NULL_TREE) >>>>> break; >>>>> elements[i] = elem; >>>>> } >>>>> if (i == count) >>>>> return build_vector (type, elements); >>>>> } >>>>> + else if (COMPARISON_CLASS_P (arg0) && VECTOR_INTEGER_TYPE_P >>>>> (type)) >>>>> + { >>>>> + tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); >>>>> + enum tree_code subcode = invert_tree_comparison (TREE_CODE >>>>> (arg0), >>>>> + HONOR_NANS (TYPE_MODE (op_type))); >>>>> + if (subcode != ERROR_MARK) >>>>> + return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, >>>>> 0), >>>>> + TREE_OPERAND (arg0, 1)); >>>>> + } >>>>> + >>>> >>>> >>>> >>>> I wonder why you restrict this to VECTOR_INTEGER_TYPE_P - for >>>> TYPE_PRECISION == 1 type this should work, too. >>> >>> >>> >>> If TYPE_PRECISION == 1, wouldn't it be better to turn BIT_NOT_EXPR into >>> TRUTH_NOT_EXPR? Then it will be handled by fold_truth_not_expr. >> >> >> Hmm, not sure - on GIMPLE we are no longer having the TRUTH_* tree >> codes, so we don't want to fold BIT_* to TRUTH_*. > > > ! > I had never noticed that... > > >>>> Also there should >>>> never be a comparison resulting in a non-integer vector type, no? >>> >>> >>> >>> Yes, I was going to write VECTOR_TYPE_P, and adding integer seemed more >>> explicit, but I can go back. >> >> >> Works for me. >> >>>>> return NULL_TREE; >>>>> >>>>> case TRUTH_NOT_EXPR: >>>>> - /* The argument to invert_truthvalue must have Boolean type. */ >>>>> - if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE) >>>>> - arg0 = fold_convert_loc (loc, boolean_type_node, arg0); >>>>> - >>>>> /* Note that the operand of this must be an int >>>>> and its values must be 0 or 1. >>>>> ("true" is a fixed value perhaps depending on the language, >>>>> but we don't handle values other than 1 correctly yet.) */ >>>>> tem = fold_truth_not_expr (loc, arg0); >>>>> if (!tem) >>>>> return NULL_TREE; >>>>> return fold_convert_loc (loc, type, tem); >>>>> >>>>> case REALPART_EXPR: >>>>> @@ -9579,21 +9580,21 @@ fold_comparison (location_t loc, enum tr >>>>> { >>>>> tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); >>>>> return fold_build2_loc (loc, code, type, >>>>> fold_convert_loc (loc, cmp_type, >>>>> TREE_OPERAND (arg1, 0)), >>>>> TREE_OPERAND (arg0, 0)); >>>>> } >>>>> >>>>> /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. 
>>>>> */ >>>>> if (TREE_CODE (arg0) == BIT_NOT_EXPR >>>>> - && TREE_CODE (arg1) == INTEGER_CST) >>>>> + && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == >>>>> VECTOR_CST)) >>>>> { >>>>> tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); >>>>> return fold_build2_loc (loc, swap_tree_comparison (code), type, >>>>> TREE_OPERAND (arg0, 0), >>>>> fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type, >>>>> fold_convert_loc (loc, cmp_type, >>>>> arg1))); >>>>> } >>>>> >>>>> return NULL_TREE; >>>>> } >>>>> @@ -14030,61 +14031,67 @@ fold_ternary_loc (location_t loc, enum t >>>>> return tem; >>>>> } >>>>> >>>>> if (COMPARISON_CLASS_P (arg0) >>>>> && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), >>>>> op2, >>>>> TREE_OPERAND (arg0, 1)) >>>>> && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2)))) >>>>> { >>>>> location_t loc0 = expr_location_or (arg0, loc); >>>>> - tem = fold_truth_not_expr (loc0, arg0); >>>>> + tem = fold_unary_loc (loc0, VECTOR_TYPE_P (type) >>>>> + ? BIT_NOT_EXPR >>>>> + : TRUTH_NOT_EXPR, >>>>> + TREE_TYPE (arg0), arg0); >>>> >>>> >>>> >>>> since you don't restrict it here either .... >>>> >>>>> if (tem && COMPARISON_CLASS_P (tem)) >>>>> { >>>>> tem = fold_cond_expr_with_comparison (loc, type, tem, >>>>> op2, >>>>> op1); >>>>> if (tem) >>>>> return tem; >>>>> } >>>>> } >>>>> >>>>> - /* ??? Fixup the code below for VEC_COND_EXPR. */ >>>>> - if (code == VEC_COND_EXPR) >>>>> - return NULL_TREE; >>>>> - >>>>> /* If the second operand is simpler than the third, swap them >>>>> since that produces better jump optimization results. */ >>>>> if (truth_value_p (TREE_CODE (arg0)) >>>>> && tree_swap_operands_p (op1, op2, false)) >>>>> { >>>>> location_t loc0 = expr_location_or (arg0, loc); >>>>> /* See if this can be inverted. If it can't, possibly >>>>> because >>>>> it was a floating-point inequality comparison, don't do >>>>> anything. */ >>>>> - tem = fold_truth_not_expr (loc0, arg0); >>>>> + tem = fold_unary_loc (loc0, VECTOR_TYPE_P (type) >>>>> + ? BIT_NOT_EXPR >>>>> + : TRUTH_NOT_EXPR, >>>>> + TREE_TYPE (arg0), arg0); >>>> >>>> >>>> >>>> Btw, splitting this out to a helper would be nice. >>> >>> >>> >>> Called fold_truth_not_expr? Oups ;-) >>> fold_truth_invert? fold_invert_truth? >> >> >> Well, fold_invert_truthvalue maybe? Inverting a truthvalue is >> well-defined >> for vectors and non-vectors and is more appropriate spelling for GIMPLE >> where we don't have any TRUTH_NOT_EXPR anymore. > > > Here is what I tested during the night, I'll just rename the function. > I took the chance to remove an unnecessary alternative in TRUTH_XOR_EXPR. > > Passes bootstrap+testsuite on x86_64-linux-gnu. Ok. Thanks, Richard. > 2013-05-14 Marc Glisse <marc.glisse@inria.fr> > > > gcc/ > * fold-const.c (fold_negate_expr): Handle vectors. > (fold_truth_not_expr): Make it static. > (fold_invert_truth): New static function. > > (invert_truthvalue_loc): Handle vectors. Do not call > fold_truth_not_expr directly. > (fold_unary_loc) <BIT_NOT_EXPR>: Handle comparisons. > > <TRUTH_NOT_EXPR>: Do not cast to boolean. > (fold_comparison): Handle vector constants. > (fold_binary_loc) <TRUTH_XOR_EXPR>: Remove redundant code. > > (fold_ternary_loc) <VEC_COND_EXPR>: Adapt more COND_EXPR > optimizations. > * tree.h (fold_truth_not_expr): Remove declaration. > > gcc/testsuite/ > * g++.dg/ext/vector22.C: New testcase. > * gcc.dg/binop-xor3.c: Remove xfail. 
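The HONOR_NANS argument passed to invert_tree_comparison in the BIT_NOT_EXPR hunk above is what keeps the fold away from ordered floating-point comparisons: when a NaN can appear, negating a < b does not give a >= b, so invert_tree_comparison may return ERROR_MARK and the fold is skipped. A minimal scalar illustration (not from the patch, purely to show the semantic point):

#include <stdio.h>
#include <math.h>

int
main (void)
{
  double a = 1.0, b = NAN;
  /* a < b is false when b is a NaN, so its logical negation is true...  */
  printf ("!(a < b) = %d\n", !(a < b));   /* prints 1 */
  /* ...but the naively "inverted" comparison is also false with a NaN.  */
  printf ("a >= b   = %d\n", a >= b);     /* prints 0 */
  return 0;
}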
> > -- > Marc Glisse > > Index: fold-const.c > =================================================================== > --- fold-const.c (revision 198853) > +++ fold-const.c (working copy) > @@ -519,21 +519,21 @@ fold_negate_expr (location_t loc, tree t > { > tree type = TREE_TYPE (t); > tree tem; > > switch (TREE_CODE (t)) > { > /* Convert - (~A) to A + 1. */ > case BIT_NOT_EXPR: > if (INTEGRAL_TYPE_P (type)) > return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0), > - build_int_cst (type, 1)); > + build_one_cst (type)); > break; > > case INTEGER_CST: > tem = fold_negate_const (t, type); > if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t) > || !TYPE_OVERFLOW_TRAPS (type)) > return tem; > break; > > case REAL_CST: > @@ -3078,21 +3078,21 @@ omit_two_operands_loc (location_t loc, t > } > > > /* Return a simplified tree node for the truth-negation of ARG. This > never alters ARG itself. We assume that ARG is an operation that > returns a truth value (0 or 1). > > FIXME: one would think we would fold the result, but it causes > problems with the dominator optimizer. */ > > -tree > +static tree > fold_truth_not_expr (location_t loc, tree arg) > { > tree type = TREE_TYPE (arg); > enum tree_code code = TREE_CODE (arg); > location_t loc1, loc2; > > /* If this is a comparison, we can simply invert it, except for > floating-point non-equality comparisons, in which case we just > enclose a TRUTH_NOT_EXPR around what we have. */ > > @@ -3213,40 +3213,50 @@ fold_truth_not_expr (location_t loc, tre > case CLEANUP_POINT_EXPR: > loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); > return build1_loc (loc, CLEANUP_POINT_EXPR, type, > invert_truthvalue_loc (loc1, TREE_OPERAND (arg, > 0))); > > default: > return NULL_TREE; > } > } > > +/* Fold the truth-negation of ARG. This never alters ARG itself. We > + assume that ARG is an operation that returns a truth value (0 or 1 > + for scalars, 0 or -1 for vectors). Return the folded expression if > + folding is successful. Otherwise, return NULL_TREE. */ > + > +static tree > +fold_invert_truth (location_t loc, tree arg) > +{ > + tree type = TREE_TYPE (arg); > + return fold_unary_loc (loc, VECTOR_TYPE_P (type) > + ? BIT_NOT_EXPR > + : TRUTH_NOT_EXPR, > + type, arg); > +} > + > /* Return a simplified tree node for the truth-negation of ARG. This > never alters ARG itself. We assume that ARG is an operation that > - returns a truth value (0 or 1). > - > - FIXME: one would think we would fold the result, but it causes > - problems with the dominator optimizer. */ > + returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */ > > tree > invert_truthvalue_loc (location_t loc, tree arg) > { > - tree tem; > - > if (TREE_CODE (arg) == ERROR_MARK) > return arg; > > - tem = fold_truth_not_expr (loc, arg); > - if (!tem) > - tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg); > - > - return tem; > + tree type = TREE_TYPE (arg); > + return fold_build1_loc (loc, VECTOR_TYPE_P (type) > + ? BIT_NOT_EXPR > + : TRUTH_NOT_EXPR, > + type, arg); > } > > /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both > operands are another bit-wise operation with a common input. If so, > distribute the bit operations to save an operation and possibly two if > constants are involved. For example, convert > (A | B) & (A | C) into A | (B & C) > Further simplification will occur if B and C are constants. > > If this optimization cannot be done, 0 will be returned. 
*/ > @@ -8274,28 +8284,36 @@ fold_unary_loc (location_t loc, enum tre > { > elem = VECTOR_CST_ELT (arg0, i); > elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), > elem); > if (elem == NULL_TREE) > break; > elements[i] = elem; > } > if (i == count) > return build_vector (type, elements); > } > + else if (COMPARISON_CLASS_P (arg0) > + && (VECTOR_TYPE_P (type) > + || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == > 1))) > + { > + tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); > + enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0), > + HONOR_NANS (TYPE_MODE (op_type))); > + if (subcode != ERROR_MARK) > + return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0), > + TREE_OPERAND (arg0, 1)); > + } > + > > return NULL_TREE; > > case TRUTH_NOT_EXPR: > - /* The argument to invert_truthvalue must have Boolean type. */ > - if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE) > - arg0 = fold_convert_loc (loc, boolean_type_node, arg0); > - > /* Note that the operand of this must be an int > and its values must be 0 or 1. > ("true" is a fixed value perhaps depending on the language, > but we don't handle values other than 1 correctly yet.) */ > tem = fold_truth_not_expr (loc, arg0); > if (!tem) > return NULL_TREE; > return fold_convert_loc (loc, type, tem); > > case REALPART_EXPR: > @@ -9579,21 +9597,21 @@ fold_comparison (location_t loc, enum tr > { > tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); > return fold_build2_loc (loc, code, type, > fold_convert_loc (loc, cmp_type, > TREE_OPERAND (arg1, 0)), > TREE_OPERAND (arg0, 0)); > } > > /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */ > if (TREE_CODE (arg0) == BIT_NOT_EXPR > - && TREE_CODE (arg1) == INTEGER_CST) > + && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == > VECTOR_CST)) > { > tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); > return fold_build2_loc (loc, swap_tree_comparison (code), type, > TREE_OPERAND (arg0, 0), > fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type, > fold_convert_loc (loc, cmp_type, > arg1))); > } > > return NULL_TREE; > } > @@ -12671,25 +12689,21 @@ fold_binary_loc (location_t loc, > > return NULL_TREE; > > case TRUTH_XOR_EXPR: > /* If the second arg is constant zero, drop it. */ > if (integer_zerop (arg1)) > return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); > /* If the second arg is constant true, this is a logical inversion. > */ > if (integer_onep (arg1)) > { > - /* Only call invert_truthvalue if operand is a truth value. */ > - if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE) > - tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), > arg0); > - else > - tem = invert_truthvalue_loc (loc, arg0); > + tem = invert_truthvalue_loc (loc, arg0); > return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); > } > /* Identical arguments cancel to zero. */ > if (operand_equal_p (arg0, arg1, 0)) > return omit_one_operand_loc (loc, type, integer_zero_node, arg0); > > /* !X ^ X is always true. 
*/ > if (TREE_CODE (arg0) == TRUTH_NOT_EXPR > && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) > return omit_one_operand_loc (loc, type, integer_one_node, arg1); > @@ -14036,61 +14050,61 @@ fold_ternary_loc (location_t loc, enum t > return tem; > } > > if (COMPARISON_CLASS_P (arg0) > && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), > op2, > TREE_OPERAND (arg0, 1)) > && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2)))) > { > location_t loc0 = expr_location_or (arg0, loc); > - tem = fold_truth_not_expr (loc0, arg0); > + tem = fold_invert_truth (loc0, arg0); > if (tem && COMPARISON_CLASS_P (tem)) > { > tem = fold_cond_expr_with_comparison (loc, type, tem, op2, > op1); > if (tem) > return tem; > } > } > > - /* ??? Fixup the code below for VEC_COND_EXPR. */ > - if (code == VEC_COND_EXPR) > - return NULL_TREE; > - > /* If the second operand is simpler than the third, swap them > since that produces better jump optimization results. */ > if (truth_value_p (TREE_CODE (arg0)) > && tree_swap_operands_p (op1, op2, false)) > { > location_t loc0 = expr_location_or (arg0, loc); > /* See if this can be inverted. If it can't, possibly because > it was a floating-point inequality comparison, don't do > anything. */ > - tem = fold_truth_not_expr (loc0, arg0); > + tem = fold_invert_truth (loc0, arg0); > if (tem) > return fold_build3_loc (loc, code, type, tem, op2, op1); > } > > /* Convert A ? 1 : 0 to simply A. */ > - if (integer_onep (op1) > + if ((code == VEC_COND_EXPR ? integer_all_onesp (op1) > + : (integer_onep (op1) > + && !VECTOR_TYPE_P (type))) > && integer_zerop (op2) > /* If we try to convert OP0 to our type, the > call to fold will try to move the conversion inside > a COND, which will recurse. In that case, the COND_EXPR > is probably the best choice, so leave it alone. */ > && type == TREE_TYPE (arg0)) > return pedantic_non_lvalue_loc (loc, arg0); > > /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR > over COND_EXPR in cases such as floating point comparisons. */ > if (integer_zerop (op1) > - && integer_onep (op2) > + && (code == VEC_COND_EXPR ? integer_all_onesp (op2) > + : (integer_onep (op2) > + && !VECTOR_TYPE_P (type))) > && truth_value_p (TREE_CODE (arg0))) > return pedantic_non_lvalue_loc (loc, > fold_convert_loc (loc, type, > invert_truthvalue_loc (loc, > > arg0))); > > /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */ > if (TREE_CODE (arg0) == LT_EXPR > && integer_zerop (TREE_OPERAND (arg0, 1)) > && integer_zerop (op2) > @@ -14193,60 +14207,67 @@ fold_ternary_loc (location_t loc, enum t > && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR > && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), > arg1, OEP_ONLY_CONST)) > return pedantic_non_lvalue_loc (loc, > fold_convert_loc (loc, type, > TREE_OPERAND (arg0, > 0))); > > /* Convert A ? B : 0 into A && B if A and B are truth values. */ > if (integer_zerop (op2) > && truth_value_p (TREE_CODE (arg0)) > - && truth_value_p (TREE_CODE (arg1))) > - return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, > - fold_convert_loc (loc, type, arg0), > - arg1); > + && truth_value_p (TREE_CODE (arg1)) > + && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type))) > + return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR > + : > TRUTH_ANDIF_EXPR, > + type, fold_convert_loc (loc, type, arg0), > arg1); > > /* Convert A ? B : 1 into !A || B if A and B are truth values. */ > - if (integer_onep (op2) > + if (code == VEC_COND_EXPR ? 
integer_all_onesp (op2) : integer_onep > (op2) > && truth_value_p (TREE_CODE (arg0)) > - && truth_value_p (TREE_CODE (arg1))) > + && truth_value_p (TREE_CODE (arg1)) > + && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type))) > { > location_t loc0 = expr_location_or (arg0, loc); > /* Only perform transformation if ARG0 is easily inverted. */ > - tem = fold_truth_not_expr (loc0, arg0); > + tem = fold_invert_truth (loc0, arg0); > if (tem) > - return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, > - fold_convert_loc (loc, type, tem), > - arg1); > + return fold_build2_loc (loc, code == VEC_COND_EXPR > + ? BIT_IOR_EXPR > + : TRUTH_ORIF_EXPR, > + type, fold_convert_loc (loc, type, tem), > + arg1); > } > > /* Convert A ? 0 : B into !A && B if A and B are truth values. */ > if (integer_zerop (arg1) > && truth_value_p (TREE_CODE (arg0)) > - && truth_value_p (TREE_CODE (op2))) > + && truth_value_p (TREE_CODE (op2)) > + && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type))) > { > location_t loc0 = expr_location_or (arg0, loc); > /* Only perform transformation if ARG0 is easily inverted. */ > - tem = fold_truth_not_expr (loc0, arg0); > + tem = fold_invert_truth (loc0, arg0); > if (tem) > - return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, > - fold_convert_loc (loc, type, tem), > - op2); > + return fold_build2_loc (loc, code == VEC_COND_EXPR > + ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR, > + type, fold_convert_loc (loc, type, tem), > + op2); > } > > /* Convert A ? 1 : B into A || B if A and B are truth values. */ > - if (integer_onep (arg1) > + if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep > (arg1) > && truth_value_p (TREE_CODE (arg0)) > - && truth_value_p (TREE_CODE (op2))) > - return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, > - fold_convert_loc (loc, type, arg0), > - op2); > + && truth_value_p (TREE_CODE (op2)) > + && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type))) > + return fold_build2_loc (loc, code == VEC_COND_EXPR > + ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR, > + type, fold_convert_loc (loc, type, arg0), > op2); > > return NULL_TREE; > > case CALL_EXPR: > /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses > of fold_ternary on them. 
*/ > gcc_unreachable (); > > case BIT_FIELD_REF: > if ((TREE_CODE (arg0) == VECTOR_CST > Index: testsuite/gcc.dg/binop-xor3.c > =================================================================== > --- testsuite/gcc.dg/binop-xor3.c (revision 198853) > +++ testsuite/gcc.dg/binop-xor3.c (working copy) > @@ -1,11 +1,11 @@ > /* { dg-do compile } */ > /* { dg-options "-O2 -fdump-tree-optimized" } */ > > int > foo (int a, int b) > { > return ((a && !b) || (!a && b)); > } > > -/* { dg-final { scan-tree-dump-times "\\\^" 1 "optimized" { xfail *-*-* } } > } */ > +/* { dg-final { scan-tree-dump-times "\\\^" 1 "optimized" } } */ > /* { dg-final { cleanup-tree-dump "optimized" } } */ > Index: testsuite/g++.dg/ext/vector22.C > =================================================================== > --- testsuite/g++.dg/ext/vector22.C (revision 0) > +++ testsuite/g++.dg/ext/vector22.C (revision 0) > @@ -0,0 +1,20 @@ > +/* { dg-do compile } */ > +/* { dg-options "-O -fdump-tree-gimple" } */ > + > +typedef unsigned vec __attribute__((vector_size(4*sizeof(int)))); > + > +void f(vec*a,vec*b){ > + *a=(*a)?-1:(*b<10); > + *b=(*b)?(*a<10):0; > +} > +void g(vec*a,vec*b){ > + *a=(*a)?(*a<*a):-1; > + *b=(*b)?-1:(*b<*b); > +} > +void h(vec*a){ > + *a=(~*a==5); > +} > + > +/* { dg-final { scan-tree-dump-not "~" "gimple" } } */ > +/* { dg-final { scan-tree-dump-not "VEC_COND_EXPR" "gimple" } } */ > +/* { dg-final { cleanup-tree-dump "gimple" } } */ > > Property changes on: testsuite/g++.dg/ext/vector22.C > ___________________________________________________________________ > Added: svn:keywords > + Author Date Id Revision URL > Added: svn:eol-style > + native > > Index: tree.h > =================================================================== > --- tree.h (revision 198853) > +++ tree.h (working copy) > @@ -5763,21 +5763,20 @@ extern int operand_equal_p (const_tree, > extern int multiple_of_p (tree, const_tree, const_tree); > #define omit_one_operand(T1,T2,T3)\ > omit_one_operand_loc (UNKNOWN_LOCATION, T1, T2, T3) > extern tree omit_one_operand_loc (location_t, tree, tree, tree); > #define omit_two_operands(T1,T2,T3,T4)\ > omit_two_operands_loc (UNKNOWN_LOCATION, T1, T2, T3, T4) > extern tree omit_two_operands_loc (location_t, tree, tree, tree, tree); > #define invert_truthvalue(T)\ > invert_truthvalue_loc(UNKNOWN_LOCATION, T) > extern tree invert_truthvalue_loc (location_t, tree); > -extern tree fold_truth_not_expr (location_t, tree); > extern tree fold_unary_to_constant (enum tree_code, tree, tree); > extern tree fold_binary_to_constant (enum tree_code, tree, tree, tree); > extern tree fold_read_from_constant_string (tree); > extern tree int_const_binop (enum tree_code, const_tree, const_tree); > #define build_fold_addr_expr(T)\ > build_fold_addr_expr_loc (UNKNOWN_LOCATION, (T)) > extern tree build_fold_addr_expr_loc (location_t, tree); > #define build_fold_addr_expr_with_type(T,TYPE)\ > build_fold_addr_expr_with_type_loc (UNKNOWN_LOCATION, (T), TYPE) > extern tree build_fold_addr_expr_with_type_loc (location_t, tree, tree); >
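In the GNU vector extension a comparison produces a 0 / -1 mask per element, which is why the patch uses BIT_NOT_EXPR rather than TRUTH_NOT_EXPR for vector truth values and tests integer_all_onesp instead of integer_onep in the new VEC_COND_EXPR cases. The following GNU C sketch (illustrative only, not taken from the patch; the exact GIMPLE may of course differ) shows the kind of source the new folds target, in the spirit of the vector22.C test:

typedef int v4si __attribute__ ((vector_size (4 * sizeof (int))));

/* Each element of (a < b) is 0 or -1, so the bitwise NOT of the mask is
   the mask of the inverted comparison; fold_unary_loc is now expected to
   rewrite this as a >= b.  */
v4si
not_of_cmp (v4si a, v4si b)
{
  return ~(a < b);
}

/* The fold_comparison hunk extends ~X op C -> X op' ~C to VECTOR_CST, so
   the BIT_NOT_EXPR below should turn into an elementwise comparison of a
   against ~5, much like h() in the new testcase.  */
v4si
cmp_of_not (v4si a)
{
  return ~a == (v4si) {5, 5, 5, 5};
}

With 0 / -1 masks, c ? -1 : x degenerates to c | x and c ? x : 0 to c & x, which is exactly what the new BIT_IOR_EXPR / BIT_AND_EXPR arms in fold_ternary_loc produce for VEC_COND_EXPR.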