
[v2,3/4] tree-object-size: Handle PHI + CST type offsets

Message ID 20240920164029.63843-4-siddhesh@gotplt.org
State New
Series tree-object-size: Improved offset handling

Commit Message

Siddhesh Poyarekar Sept. 20, 2024, 4:40 p.m. UTC
Propagate constant additions and subtractions to an offset when
computing an estimate for a PHI of constants.
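
For example (mirroring the new test13 hunks below; the exact values assume
a 10-byte buffer as in the tests):

  char p[10];

  unsigned long maxsize, minsize;

  void
  f (unsigned cond)
  {
    unsigned t = cond ? 5 : 1;  /* Becomes a PHI of the constants 5 and 1.  */
    t += 4;                     /* t is now 9 or 5.  */
    maxsize = __builtin_object_size (&p[t], 0);  /* 10 - 5 == 5  */
    minsize = __builtin_object_size (&p[t], 2);  /* 10 - 9 == 1  */
  }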

gcc/ChangeLog:

	PR tree-optimization/116556
	* tree-object-size.cc (try_collapsing_offset): New parameters
	CST and CODE.  Use them to propagate constant PLUS_EXPR and
	MINUS_EXPR offsets through the recursive calls.
	(plus_stmt_object_size): Adjust call to try_collapsing_offset.

gcc/testsuite/ChangeLog:

	PR tree-optimization/116556
	* gcc.dg/builtin-object-size-1.c (test13): Test constant
	addition and subtraction.
	* gcc.dg/builtin-object-size-3.c (test13): Likewise.

Signed-off-by: Siddhesh Poyarekar <siddhesh@gotplt.org>
---
 gcc/testsuite/gcc.dg/builtin-object-size-1.c | 20 +++++++
 gcc/testsuite/gcc.dg/builtin-object-size-3.c | 20 +++++++
 gcc/tree-object-size.cc                      | 58 ++++++++++++++++----
 3 files changed, 86 insertions(+), 12 deletions(-)

Comments

Jakub Jelinek Sept. 27, 2024, 11:24 a.m. UTC | #1
On Fri, Sep 20, 2024 at 12:40:28PM -0400, Siddhesh Poyarekar wrote:
> --- a/gcc/tree-object-size.cc
> +++ b/gcc/tree-object-size.cc
> @@ -1473,7 +1473,7 @@ merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
>     with all of its targets are constants.  */
>  
>  static tree
> -try_collapsing_offset (tree op, int object_size_type)
> +try_collapsing_offset (tree op, tree cst, tree_code code, int object_size_type)
>  {
>    gcc_assert (!(object_size_type & OST_DYNAMIC));

I believe you were just returning op here if it isn't SSA_NAME.
Now, if it is INTEGER_CST, it will misbehave whenever cst != NULL,
as it will just return op rather than op + cst or op - cst.
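I.e. if the early return for non-SSA_NAMEs is kept, it would need to fold
the pending constant in, something like (completely untested):

  if (TREE_CODE (op) != SSA_NAME)
    {
      if (cst && TREE_CODE (op) == INTEGER_CST)
        /* Fold the pending constant offset into the constant being
           returned, as the PHI case does.  */
        return fold_build2 (code, TREE_TYPE (op), op,
                            fold_convert (TREE_TYPE (op), cst));
      return op;
    }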

> @@ -1485,13 +1485,41 @@ try_collapsing_offset (tree op, int object_size_type)
>    switch (gimple_code (stmt))
>      {
>      case GIMPLE_ASSIGN:
> -      /* Peek through casts.  */
> -      if (gimple_assign_rhs_code (stmt) == NOP_EXPR)
> +      switch (gimple_assign_rhs_code (stmt))
>  	{
> -	  tree ret = try_collapsing_offset (gimple_assign_rhs1 (stmt),
> -					    object_size_type);
> -	  if (TREE_CODE (ret) == INTEGER_CST)
> -	    return ret;
> +	  /* Peek through casts.  */
> +	case NOP_EXPR:

That would be CASE_CONVERT:

> +	    {

Wrong indentation: the { should be indented 2 columns relative to the case
label, and tree ret 4 columns (i.e. GNU style).

> +	      tree ret = try_collapsing_offset (gimple_assign_rhs1 (stmt),
> +						NULL_TREE, NOP_EXPR,
> +						object_size_type);

This loses cst and code from the caller if it isn't NULL_TREE, NOP_EXPR,
again causing miscompilations.
I'd think it should just pass through cst, code from the caller here
(but then one needs to be extra careful, because cst can have a different
type from op because of the recursion on casts).
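So with cst/code forwarded, the cast case could look something like this
(untested; the type mismatch is only flagged in a comment, not handled):

          /* Peek through casts, keeping any pending constant offset.  */
        CASE_CONVERT:
          {
            /* Note: CST (if non-NULL) still has the type it had in the
               outer expression, which may be wider or narrower than the
               type of this operand.  */
            tree ret = try_collapsing_offset (gimple_assign_rhs1 (stmt),
                                              cst, code, object_size_type);
            if (TREE_CODE (ret) == INTEGER_CST)
              return ret;
            break;
          }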

> +	      if (TREE_CODE (ret) == INTEGER_CST)
> +		return ret;
> +	      break;
> +	    }
> +	  /* Propagate constant offsets to PHI expressions.  */
> +	case PLUS_EXPR:
> +	case MINUS_EXPR:
> +	    {
> +	      tree rhs1 = gimple_assign_rhs1 (stmt);
> +	      tree rhs2 = gimple_assign_rhs2 (stmt);
> +	      tree ret = NULL_TREE;
> +

And I think this should just punt on code != NOP_EXPR (or cst != NULL_TREE,
one is enough).  Because if you have
  # _2 = PHI...
  _3 = _2 + 3;
  _4 = 7 - _3;
or something similar, it will handle just the inner operation and not the
outer one.
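I.e. something like this (untested) at the start of the PLUS_EXPR/MINUS_EXPR
block:

              /* Punt if a constant offset is already pending from an
                 outer PLUS_EXPR or MINUS_EXPR; only a single level of
                 constant adjustment is folded into the PHI.  */
              if (cst != NULL_TREE)
                break;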

> +	      if (TREE_CODE (rhs1) == INTEGER_CST)
> +		ret = try_collapsing_offset (rhs2, rhs1,
> +					     gimple_assign_rhs_code (stmt),
> +					     object_size_type);
> +	      else if (TREE_CODE (rhs2) == INTEGER_CST)
> +		ret = try_collapsing_offset (rhs1, rhs2,
> +					     gimple_assign_rhs_code (stmt),
> +					     object_size_type);
> +
> +	      if (ret && TREE_CODE (ret) == INTEGER_CST)
> +		return ret;
> +	      break;
> +	    }
> +	default:
> +	  break;
>  	}
>        break;
>      case GIMPLE_PHI:
> @@ -1507,14 +1535,20 @@ try_collapsing_offset (tree op, int object_size_type)
>  	      if (TREE_CODE (rhs) != INTEGER_CST)
>  		return op;
>  
> +	      if (cst)
> +		rhs = fold_build2 (code, sizetype,
> +				   fold_convert (ptrdiff_type_node, rhs),
> +				   fold_convert (ptrdiff_type_node, cst));

This can be done on wide_int too.  But one needs to think through the cast
cases: what happens if the cast was widening (from signed or unsigned),
and what happens if it was narrowing.
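E.g. (untested, and the widening/narrowing question above still needs
answering) the arithmetic on the PHI argument could be done as offset_int
instead of building trees:

              offset_int orhs = wi::to_offset (rhs);
              if (cst)
                orhs = (code == PLUS_EXPR
                        ? orhs + wi::to_offset (cst)
                        : orhs - wi::to_offset (cst));
              rhs = wide_int_to_tree (ptrdiff_type_node, orhs);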

> +	      else
> +		rhs = fold_convert (ptrdiff_type_node, rhs);
> +
>  	      /* Note that this is the *opposite* of what we usually do with
>  		 sizes, because the maximum offset estimate here will give us a
>  		 minimum size estimate and vice versa.  */
> -	      enum tree_code code = (object_size_type & OST_MINIMUM
> -				     ? MAX_EXPR : MIN_EXPR);
> +	      enum tree_code selection_code = (object_size_type & OST_MINIMUM
> +					       ? MAX_EXPR : MIN_EXPR);
>  
> -	      off = fold_build2 (code, ptrdiff_type_node, off,
> -				 fold_convert (ptrdiff_type_node, rhs));
> +	      off = fold_build2 (selection_code, ptrdiff_type_node, off, rhs);
>  	    }
>  	  return fold_convert (sizetype, off);
>  	}
> @@ -1558,7 +1592,7 @@ plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
>      return false;
>  
>    if (!(object_size_type & OST_DYNAMIC) && TREE_CODE (op1) != INTEGER_CST)
> -    op1 = try_collapsing_offset (op1, object_size_type);
> +    op1 = try_collapsing_offset (op1, NULL_TREE, NOP_EXPR, object_size_type);
>  
>    /* Handle PTR + OFFSET here.  */
>    if (size_valid_p (op1, object_size_type)
> -- 
> 2.45.1

	Jakub

Patch

diff --git a/gcc/testsuite/gcc.dg/builtin-object-size-1.c b/gcc/testsuite/gcc.dg/builtin-object-size-1.c
index 1f5cd5d99a1..6ffe12be683 100644
--- a/gcc/testsuite/gcc.dg/builtin-object-size-1.c
+++ b/gcc/testsuite/gcc.dg/builtin-object-size-1.c
@@ -769,6 +769,26 @@  test13 (unsigned cond)
   if (__builtin_object_size (&p[t], 0) != 9)
     FAIL ();
 #endif
+
+  t += 4;
+
+#ifdef __builtin_object_size
+  if (__builtin_object_size (&p[t], 0) != (cond ? 1 : 5))
+    FAIL ();
+#else
+  if (__builtin_object_size (&p[t], 0) != 5)
+    FAIL ();
+#endif
+
+  t -= 5;
+
+#ifdef __builtin_object_size
+  if (__builtin_object_size (&p[t], 0) != (cond ? 6 : 10))
+    FAIL ();
+#else
+  if (__builtin_object_size (&p[t], 0) != 10)
+    FAIL ();
+#endif
 }
 
 int
diff --git a/gcc/testsuite/gcc.dg/builtin-object-size-3.c b/gcc/testsuite/gcc.dg/builtin-object-size-3.c
index 7fad106fc27..c51cb69c01b 100644
--- a/gcc/testsuite/gcc.dg/builtin-object-size-3.c
+++ b/gcc/testsuite/gcc.dg/builtin-object-size-3.c
@@ -777,6 +777,26 @@  test13 (unsigned cond)
   if (__builtin_object_size (&p[t], 2) != 5)
     FAIL ();
 #endif
+
+  t += 4;
+
+#ifdef __builtin_object_size
+  if (__builtin_object_size (&p[t], 2) != (cond ? 1 : 5))
+    FAIL ();
+#else
+  if (__builtin_object_size (&p[t], 2) != 1)
+    FAIL ();
+#endif
+
+  t -= 5;
+
+#ifdef __builtin_object_size
+  if (__builtin_object_size (&p[t], 2) != (cond ? 6 : 10))
+    FAIL ();
+#else
+  if (__builtin_object_size (&p[t], 2) != 6)
+    FAIL ();
+#endif
 }
 
 int
diff --git a/gcc/tree-object-size.cc b/gcc/tree-object-size.cc
index 2dfc28407ab..1b569c3d12b 100644
--- a/gcc/tree-object-size.cc
+++ b/gcc/tree-object-size.cc
@@ -1473,7 +1473,7 @@  merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
    with all of its targets are constants.  */
 
 static tree
-try_collapsing_offset (tree op, int object_size_type)
+try_collapsing_offset (tree op, tree cst, tree_code code, int object_size_type)
 {
   gcc_assert (!(object_size_type & OST_DYNAMIC));
 
@@ -1485,13 +1485,41 @@  try_collapsing_offset (tree op, int object_size_type)
   switch (gimple_code (stmt))
     {
     case GIMPLE_ASSIGN:
-      /* Peek through casts.  */
-      if (gimple_assign_rhs_code (stmt) == NOP_EXPR)
+      switch (gimple_assign_rhs_code (stmt))
 	{
-	  tree ret = try_collapsing_offset (gimple_assign_rhs1 (stmt),
-					    object_size_type);
-	  if (TREE_CODE (ret) == INTEGER_CST)
-	    return ret;
+	  /* Peek through casts.  */
+	case NOP_EXPR:
+	    {
+	      tree ret = try_collapsing_offset (gimple_assign_rhs1 (stmt),
+						NULL_TREE, NOP_EXPR,
+						object_size_type);
+	      if (TREE_CODE (ret) == INTEGER_CST)
+		return ret;
+	      break;
+	    }
+	  /* Propagate constant offsets to PHI expressions.  */
+	case PLUS_EXPR:
+	case MINUS_EXPR:
+	    {
+	      tree rhs1 = gimple_assign_rhs1 (stmt);
+	      tree rhs2 = gimple_assign_rhs2 (stmt);
+	      tree ret = NULL_TREE;
+
+	      if (TREE_CODE (rhs1) == INTEGER_CST)
+		ret = try_collapsing_offset (rhs2, rhs1,
+					     gimple_assign_rhs_code (stmt),
+					     object_size_type);
+	      else if (TREE_CODE (rhs2) == INTEGER_CST)
+		ret = try_collapsing_offset (rhs1, rhs2,
+					     gimple_assign_rhs_code (stmt),
+					     object_size_type);
+
+	      if (ret && TREE_CODE (ret) == INTEGER_CST)
+		return ret;
+	      break;
+	    }
+	default:
+	  break;
 	}
       break;
     case GIMPLE_PHI:
@@ -1507,14 +1535,20 @@  try_collapsing_offset (tree op, int object_size_type)
 	      if (TREE_CODE (rhs) != INTEGER_CST)
 		return op;
 
+	      if (cst)
+		rhs = fold_build2 (code, sizetype,
+				   fold_convert (ptrdiff_type_node, rhs),
+				   fold_convert (ptrdiff_type_node, cst));
+	      else
+		rhs = fold_convert (ptrdiff_type_node, rhs);
+
 	      /* Note that this is the *opposite* of what we usually do with
 		 sizes, because the maximum offset estimate here will give us a
 		 minimum size estimate and vice versa.  */
-	      enum tree_code code = (object_size_type & OST_MINIMUM
-				     ? MAX_EXPR : MIN_EXPR);
+	      enum tree_code selection_code = (object_size_type & OST_MINIMUM
+					       ? MAX_EXPR : MIN_EXPR);
 
-	      off = fold_build2 (code, ptrdiff_type_node, off,
-				 fold_convert (ptrdiff_type_node, rhs));
+	      off = fold_build2 (selection_code, ptrdiff_type_node, off, rhs);
 	    }
 	  return fold_convert (sizetype, off);
 	}
@@ -1558,7 +1592,7 @@  plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
     return false;
 
   if (!(object_size_type & OST_DYNAMIC) && TREE_CODE (op1) != INTEGER_CST)
-    op1 = try_collapsing_offset (op1, object_size_type);
+    op1 = try_collapsing_offset (op1, NULL_TREE, NOP_EXPR, object_size_type);
 
   /* Handle PTR + OFFSET here.  */
   if (size_valid_p (op1, object_size_type)