Patchwork RFA: PATCH to make fold_indirect_ref_1 fold more things

login
register
mail settings
Submitter Jason Merrill
Date Oct. 31, 2010, 8:27 p.m.
Message ID <4CCDD13E.5030403@redhat.com>
Download mbox | patch
Permalink /patch/69736/
State New
Headers show

Comments

Jason Merrill - Oct. 31, 2010, 8:27 p.m.
On 10/29/2010 07:32 PM, Richard Guenther wrote:
>> I guess I can just do this folding in the constexpr expander...
>
> Yeah, I think that should be safer.

OK, how about this patch that only adds folding INDIRECT_REF of 
POINTER_PLUS_EXPR to ARRAY_REF (and combines the two POINTER_PLUS_EXPR 
hunks)?
commit 49fd7e2c159bc5de19055c64c19e26778167cc28
Author: Jason Merrill <jason@redhat.com>
Date:   Tue Oct 26 14:49:37 2010 -0400

    	* fold-const.c (fold_indirect_ref_1): Handle folding
    	POINTER_PLUS_EXPR to ARRAY_REF.
Richard Guenther - Nov. 1, 2010, 10:12 p.m.
On Sun, Oct 31, 2010 at 9:27 PM, Jason Merrill <jason@redhat.com> wrote:
> On 10/29/2010 07:32 PM, Richard Guenther wrote:
>>>
>>> I guess I can just do this folding in the constexpr expander...
>>
>> Yeah, I think that should be safer.
>
> OK, how about this patch that only adds folding INDIRECT_REF of
> POINTER_PLUS_EXPR to ARRAY_REF (and combines the two POINTER_PLUS_EXPR
> hunks)?

That works for me.

Thanks,
Richard.

> commit 49fd7e2c159bc5de19055c64c19e26778167cc28
> Author: Jason Merrill <jason@redhat.com>
> Date:   Tue Oct 26 14:49:37 2010 -0400
>
>        * fold-const.c (fold_indirect_ref_1): Handle folding
>        POINTER_PLUS_EXPR to ARRAY_REF.
>
> diff --git a/gcc/fold-const.c b/gcc/fold-const.c
> index decb0fb..dd69a20 100644
> --- a/gcc/fold-const.c
> +++ b/gcc/fold-const.c
> @@ -15649,53 +15649,59 @@ fold_indirect_ref_1 (location_t loc, tree type, tree op0)
>        }
>     }
>
> -  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
>   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
>       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
>     {
>       tree op00 = TREE_OPERAND (sub, 0);
>       tree op01 = TREE_OPERAND (sub, 1);
> -      tree op00type;
>
>       STRIP_NOPS (op00);
> -      op00type = TREE_TYPE (op00);
> -      if (TREE_CODE (op00) == ADDR_EXPR
> -          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
> -          && type == TREE_TYPE (TREE_TYPE (op00type)))
> +      if (TREE_CODE (op00) == ADDR_EXPR)
>        {
> -         HOST_WIDE_INT offset = tree_low_cst (op01, 0);
> -         tree part_width = TYPE_SIZE (type);
> -         unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
> -         unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
> -         tree index = bitsize_int (indexi);
> +         tree op00type;
> +         op00 = TREE_OPERAND (op00, 0);
> +         op00type = TREE_TYPE (op00);
>
> -         if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
> -           return fold_build3_loc (loc,
> -                               BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
> -                               part_width, index);
> +         /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
> +         if (TREE_CODE (op00type) == VECTOR_TYPE
> +             && type == TREE_TYPE (op00type))
> +           {
> +             HOST_WIDE_INT offset = tree_low_cst (op01, 0);
> +             tree part_width = TYPE_SIZE (type);
> +             unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
> +             unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
> +             tree index = bitsize_int (indexi);
>
> -       }
> -    }
> +             if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
> +               return fold_build3_loc (loc,
> +                                       BIT_FIELD_REF, type, op00,
> +                                       part_width, index);
>
> -
> -  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
> -  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
> -      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
> -    {
> -      tree op00 = TREE_OPERAND (sub, 0);
> -      tree op01 = TREE_OPERAND (sub, 1);
> -      tree op00type;
> -
> -      STRIP_NOPS (op00);
> -      op00type = TREE_TYPE (op00);
> -      if (TREE_CODE (op00) == ADDR_EXPR
> -         && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
> -         && type == TREE_TYPE (TREE_TYPE (op00type)))
> -       {
> -         tree size = TYPE_SIZE_UNIT (type);
> -         if (tree_int_cst_equal (size, op01))
> -           return fold_build1_loc (loc, IMAGPART_EXPR, type,
> -                               TREE_OPERAND (op00, 0));
> +           }
> +         /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
> +         else if (TREE_CODE (op00type) == COMPLEX_TYPE
> +                  && type == TREE_TYPE (op00type))
> +           {
> +             tree size = TYPE_SIZE_UNIT (type);
> +             if (tree_int_cst_equal (size, op01))
> +               return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
> +           }
> +         /* ((foo *)&fooarray)[1] => fooarray[1] */
> +         else if (TREE_CODE (op00type) == ARRAY_TYPE
> +                  && type == TREE_TYPE (op00type))
> +           {
> +             tree type_domain = TYPE_DOMAIN (op00type);
> +             tree min_val = size_zero_node;
> +             if (type_domain && TYPE_MIN_VALUE (type_domain))
> +               min_val = TYPE_MIN_VALUE (type_domain);
> +             op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
> +                                    TYPE_SIZE_UNIT (type));
> +             op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
> +             op0 = build4 (ARRAY_REF, type, op00, op01,
> +                           NULL_TREE, NULL_TREE);
> +             SET_EXPR_LOCATION (op0, loc);
> +             return op0;
> +           }
>        }
>     }
>
>
>

Patch

diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index decb0fb..dd69a20 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -15649,53 +15649,59 @@  fold_indirect_ref_1 (location_t loc, tree type, tree op0)
 	}
     }
 
-  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
     {
       tree op00 = TREE_OPERAND (sub, 0);
       tree op01 = TREE_OPERAND (sub, 1);
-      tree op00type;
 
       STRIP_NOPS (op00);
-      op00type = TREE_TYPE (op00);
-      if (TREE_CODE (op00) == ADDR_EXPR
-          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
-          && type == TREE_TYPE (TREE_TYPE (op00type)))
+      if (TREE_CODE (op00) == ADDR_EXPR)
 	{
-	  HOST_WIDE_INT offset = tree_low_cst (op01, 0);
-	  tree part_width = TYPE_SIZE (type);
-	  unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
-	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
-	  tree index = bitsize_int (indexi);
+	  tree op00type;
+	  op00 = TREE_OPERAND (op00, 0);
+	  op00type = TREE_TYPE (op00);
 
-	  if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
-	    return fold_build3_loc (loc,
-				BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
-				part_width, index);
+	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
+	  if (TREE_CODE (op00type) == VECTOR_TYPE
+	      && type == TREE_TYPE (op00type))
+	    {
+	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
+	      tree part_width = TYPE_SIZE (type);
+	      unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
+	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
+	      tree index = bitsize_int (indexi);
 
-	}
-    }
+	      if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
+		return fold_build3_loc (loc,
+					BIT_FIELD_REF, type, op00,
+					part_width, index);
 
-
-  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
-  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
-      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
-    {
-      tree op00 = TREE_OPERAND (sub, 0);
-      tree op01 = TREE_OPERAND (sub, 1);
-      tree op00type;
-
-      STRIP_NOPS (op00);
-      op00type = TREE_TYPE (op00);
-      if (TREE_CODE (op00) == ADDR_EXPR
- 	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
-	  && type == TREE_TYPE (TREE_TYPE (op00type)))
-	{
-	  tree size = TYPE_SIZE_UNIT (type);
-	  if (tree_int_cst_equal (size, op01))
-	    return fold_build1_loc (loc, IMAGPART_EXPR, type,
-				TREE_OPERAND (op00, 0));
+	    }
+	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
+	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
+		   && type == TREE_TYPE (op00type))
+	    {
+	      tree size = TYPE_SIZE_UNIT (type);
+	      if (tree_int_cst_equal (size, op01))
+		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
+	    }
+	  /* ((foo *)&fooarray)[1] => fooarray[1] */
+	  else if (TREE_CODE (op00type) == ARRAY_TYPE
+		   && type == TREE_TYPE (op00type))
+	    {
+	      tree type_domain = TYPE_DOMAIN (op00type);
+	      tree min_val = size_zero_node;
+	      if (type_domain && TYPE_MIN_VALUE (type_domain))
+		min_val = TYPE_MIN_VALUE (type_domain);
+	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
+				     TYPE_SIZE_UNIT (type));
+	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
+	      op0 = build4 (ARRAY_REF, type, op00, op01,
+			    NULL_TREE, NULL_TREE);
+	      SET_EXPR_LOCATION (op0, loc);
+	      return op0;
+	    }
 	}
     }