
[3/n] Fix PR50444, misalignment issues and SRA

Message ID alpine.LNX.2.00.1201241633040.4999@zhemvz.fhfr.qr

Commit Message

Richard Biener Jan. 24, 2012, 3:38 p.m. UTC
This addresses a defect in handling movmisalign-handled stores
when they are wrapped in component references (think of a
vector mode aggregate containing a single vector, wrapped with
a BIT_FIELD_REF to store a single vector element).  I ran into
this issue while trying to fix PR50444 (and made SRA create
such wrapped movmisalign-handled stores).  We cannot simply
use expand_normal on the inner reference if that possibly
goes through the movmisalign path, as that would produce
an rvalue instead of a MEM we can re-use.  A simple fix is
to delay expanding the destination and instead build a pseudo
holding the value, which we can then store via movmisalign.
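
For concreteness, a purely illustrative C sketch of the kind of
access involved (not the PR50444 reproducer; the type, function
and alignment assumptions are made up for this note):

  /* Illustrative only: an aggregate containing a single vector can
     take the vector's mode (here V2DF), so stores into it may need
     movmisalign when the pointer is not known to be 16-byte
     aligned.  */
  typedef double v2df __attribute__ ((vector_size (16)));
  struct S { v2df v; };

  void
  f (struct S *p, double d)
  {
    /* Storing one element; the destination can end up looking
       roughly like
	 BIT_FIELD_REF <*p, 64, 0> = d;
       i.e. a component reference wrapped around a possibly
       misaligned MEM_REF, which is the case this patch handles.  */
    p->v[0] = d;
  }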

That the MEM_REF path now goes through the handled_component_p
path unconditionally uncovered an issue in store_field, which
sets MEM_IN_STRUCT_P on any non-MEM_SCALAR_P destination.
That is clearly wrong for MEMs where we have no idea whether
they are scalar or part of a struct (which is the reason this
is a tri-state flag combo).  Hence the second patchlet below.
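
These are the lines the second patchlet removes (quoted from the
store_field hunk at the end of this page), with the problem spelled
out in a comment added here for illustration:

  /* In store_field: any destination MEM not known to be a scalar was
     marked as being inside a struct, even when nothing is known about
     it at all -- collapsing the "unknown" state of the tri-state
     MEM_IN_STRUCT_P / MEM_SCALAR_P combination.  */
  if (!MEM_SCALAR_P (to_rtx))
    MEM_IN_STRUCT_P (to_rtx) = 1;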

Bootstrapped and tested on x86_64-unknown-linux-gnu, ok for trunk?

Thanks,
Richard.

2012-01-24  Richard Guenther  <rguenther@suse.de>

	PR tree-optimization/50444
	* expr.c (expand_assignment): Handle misaligned bases consistently,
	even when wrapped inside component references.

Index: gcc/expr.c
===================================================================
*** gcc/expr.c	(revision 183470)
--- gcc/expr.c	(working copy)
*************** expand_assignment (tree to, tree from, b
*** 4556,4564 ****
  {
    rtx to_rtx = 0;
    rtx result;
-   enum machine_mode mode;
-   unsigned int align;
-   enum insn_code icode;
  
    /* Don't crash if the lhs of the assignment was erroneous.  */
    if (TREE_CODE (to) == ERROR_MARK)
--- 4556,4561 ----
*************** expand_assignment (tree to, tree from, b
*** 4571,4647 ****
    if (operand_equal_p (to, from, 0))
      return;
  
-   mode = TYPE_MODE (TREE_TYPE (to));
-   if ((TREE_CODE (to) == MEM_REF
-        || TREE_CODE (to) == TARGET_MEM_REF)
-       && mode != BLKmode
-       && ((align = get_object_or_type_alignment (to))
- 	  < GET_MODE_ALIGNMENT (mode))
-       && ((icode = optab_handler (movmisalign_optab, mode))
- 	  != CODE_FOR_nothing))
-     {
-       struct expand_operand ops[2];
-       enum machine_mode address_mode;
-       rtx reg, op0, mem;
- 
-       reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
-       reg = force_not_mem (reg);
- 
-       if (TREE_CODE (to) == MEM_REF)
- 	{
- 	  addr_space_t as
- 	    = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
- 	  tree base = TREE_OPERAND (to, 0);
- 	  address_mode = targetm.addr_space.address_mode (as);
- 	  op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
- 	  op0 = convert_memory_address_addr_space (address_mode, op0, as);
- 	  if (!integer_zerop (TREE_OPERAND (to, 1)))
- 	    {
- 	      rtx off
- 		  = immed_double_int_const (mem_ref_offset (to), address_mode);
- 	      op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
- 	    }
- 	  op0 = memory_address_addr_space (mode, op0, as);
- 	  mem = gen_rtx_MEM (mode, op0);
- 	  set_mem_attributes (mem, to, 0);
- 	  set_mem_addr_space (mem, as);
- 	}
-       else if (TREE_CODE (to) == TARGET_MEM_REF)
- 	{
- 	  addr_space_t as
- 	    = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
- 	  struct mem_address addr;
- 
- 	  get_address_description (to, &addr);
- 	  op0 = addr_for_mem_ref (&addr, as, true);
- 	  op0 = memory_address_addr_space (mode, op0, as);
- 	  mem = gen_rtx_MEM (mode, op0);
- 	  set_mem_attributes (mem, to, 0);
- 	  set_mem_addr_space (mem, as);
- 	}
-       else
- 	gcc_unreachable ();
-       if (TREE_THIS_VOLATILE (to))
- 	MEM_VOLATILE_P (mem) = 1;
- 
-       create_fixed_operand (&ops[0], mem);
-       create_input_operand (&ops[1], reg, mode);
-       /* The movmisalign<mode> pattern cannot fail, else the assignment would
-          silently be omitted.  */
-       expand_insn (icode, 2, ops);
-       return;
-     }
- 
    /* Assignment of a structure component needs special treatment
       if the structure component's rtx is not simply a MEM.
       Assignment of an array element at a constant index, and assignment of
       an array element in an unaligned packed structure field, has the same
       problem.  */
    if (handled_component_p (to)
!       /* ???  We only need to handle MEM_REF here if the access is not
!          a full access of the base object.  */
!       || (TREE_CODE (to) == MEM_REF
! 	  && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
        || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
      {
        enum machine_mode mode1;
--- 4568,4581 ----
    if (operand_equal_p (to, from, 0))
      return;
  
    /* Assignment of a structure component needs special treatment
       if the structure component's rtx is not simply a MEM.
       Assignment of an array element at a constant index, and assignment of
       an array element in an unaligned packed structure field, has the same
       problem.  */
    if (handled_component_p (to)
!       || TREE_CODE (to) == MEM_REF
!       || TREE_CODE (to) == TARGET_MEM_REF
        || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
      {
        enum machine_mode mode1;
*************** expand_assignment (tree to, tree from, b
*** 4652,4657 ****
--- 4586,4595 ----
        int unsignedp;
        int volatilep = 0;
        tree tem;
+       enum machine_mode mode;
+       unsigned int align;
+       enum insn_code icode;
+       bool misalignp;
  
        push_temp_slots ();
        tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
*************** expand_assignment (tree to, tree from, b
*** 4664,4671 ****
  
        /* If we are going to use store_bit_field and extract_bit_field,
  	 make sure to_rtx will be safe for multiple use.  */
! 
!       to_rtx = expand_normal (tem);
  
        /* If the bitfield is volatile, we want to access it in the
  	 field's mode, not the computed mode.
--- 4602,4624 ----
  
        /* If we are going to use store_bit_field and extract_bit_field,
  	 make sure to_rtx will be safe for multiple use.  */
!       mode = TYPE_MODE (TREE_TYPE (tem));
!       if ((TREE_CODE (tem) == MEM_REF
! 	   || TREE_CODE (tem) == TARGET_MEM_REF)
! 	  && mode != BLKmode
! 	  && ((align = get_object_or_type_alignment (tem))
! 	      < GET_MODE_ALIGNMENT (mode))
! 	  && ((icode = optab_handler (movmisalign_optab, mode))
! 	      != CODE_FOR_nothing))
! 	{
! 	  misalignp = true;
! 	  to_rtx = gen_reg_rtx (mode);
! 	}
!       else
! 	{
! 	  misalignp = false;
! 	  to_rtx = expand_normal (tem);
! 	}
  
        /* If the bitfield is volatile, we want to access it in the
  	 field's mode, not the computed mode.
*************** expand_assignment (tree to, tree from, b
*** 4811,4816 ****
--- 4764,4819 ----
  				  nontemporal);
  	}
  
+       if (misalignp)
+ 	{
+ 	  struct expand_operand ops[2];
+ 	  enum machine_mode address_mode;
+ 	  rtx op0, mem;
+ 
+ 	  if (TREE_CODE (tem) == MEM_REF)
+ 	    {
+ 	      addr_space_t as = TYPE_ADDR_SPACE
+ 		  (TREE_TYPE (TREE_TYPE (TREE_OPERAND (tem, 0))));
+ 	      tree base = TREE_OPERAND (tem, 0);
+ 	      address_mode = targetm.addr_space.address_mode (as);
+ 	      op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ 	      op0 = convert_memory_address_addr_space (address_mode, op0, as);
+ 	      if (!integer_zerop (TREE_OPERAND (tem, 1)))
+ 		{
+ 		  rtx off = immed_double_int_const (mem_ref_offset (tem),
+ 						    address_mode);
+ 		  op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
+ 		}
+ 	      op0 = memory_address_addr_space (mode, op0, as);
+ 	      mem = gen_rtx_MEM (mode, op0);
+ 	      set_mem_attributes (mem, tem, 0);
+ 	      set_mem_addr_space (mem, as);
+ 	    }
+ 	  else if (TREE_CODE (tem) == TARGET_MEM_REF)
+ 	    {
+ 	      addr_space_t as = TYPE_ADDR_SPACE
+ 		  (TREE_TYPE (TREE_TYPE (TREE_OPERAND (tem, 0))));
+ 	      struct mem_address addr;
+ 
+ 	      get_address_description (tem, &addr);
+ 	      op0 = addr_for_mem_ref (&addr, as, true);
+ 	      op0 = memory_address_addr_space (mode, op0, as);
+ 	      mem = gen_rtx_MEM (mode, op0);
+ 	      set_mem_attributes (mem, tem, 0);
+ 	      set_mem_addr_space (mem, as);
+ 	    }
+ 	  else
+ 	    gcc_unreachable ();
+ 	  if (TREE_THIS_VOLATILE (tem))
+ 	    MEM_VOLATILE_P (mem) = 1;
+ 
+ 	  create_fixed_operand (&ops[0], mem);
+ 	  create_input_operand (&ops[1], to_rtx, mode);
+ 	  /* The movmisalign<mode> pattern cannot fail, else the assignment
+ 	     would silently be omitted.  */
+ 	  expand_insn (icode, 2, ops);
+ 	}
+ 
        if (result)
  	preserve_temp_slots (result);
        free_temp_slots ();


2012-01-24  Richard Guenther  <rguenther@suse.de>

	* expr.c (store_field): Do not set MEM_IN_STRUCT_P.

Comments

Jakub Jelinek Jan. 26, 2012, 12:23 p.m. UTC | #1
On Tue, Jan 24, 2012 at 04:38:46PM +0100, Richard Guenther wrote:
> Bootstrapped and tested on x86_64-unknown-linux-gnu, ok for trunk?
> 2012-01-24  Richard Guenther  <rguenther@suse.de>
> 
> 	PR tree-optimization/50444
> 	* expr.c (expand_assignment): Handle misaligned bases consistently,
> 	even when wrapped inside component references.

This is ok for trunk, unless somebody objects within next 24 hours.

	Jakub

Patch

Index: gcc/expr.c
===================================================================
--- gcc/expr.c	(revision 183470)
+++ gcc/expr.c	(working copy)
@@ -6429,8 +6432,6 @@  store_field (rtx target, HOST_WIDE_INT b
       if (to_rtx == target)
 	to_rtx = copy_rtx (to_rtx);
 
-      if (!MEM_SCALAR_P (to_rtx))
-	MEM_IN_STRUCT_P (to_rtx) = 1;
       if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
 	set_mem_alias_set (to_rtx, alias_set);