Add ARRAY_REF based access path disambiguation
diff mbox series

Message ID 20190719142249.e7fuzf3exlfb437j@kam.mff.cuni.cz
State New
Headers show
Series
  • Add ARRAY_REF based access path disambiguation
Related show

Commit Message

Jan Hubicka July 19, 2019, 2:22 p.m. UTC
Hi,
this patch adds bare bones of disambiguation of access paths via
ARRAY_REF.  Similarly to COMPONENT_REF we need a matched ARRAY_REF size
and prove that indexes are actually different.

This adds about 20 new disambiguations to tramp3d.

Bootstrapped/regtested x86_64-linux, OK?

	* tree-ssa-alias.c (nonoverlapping_component_refs_since_match_p):
	Rename to ...
	(nonoverlapping_refs_since_match_p): ... this; handle also ARRAY_REFs.
	(alias_stats): Update stats.
	(dump_alias_stats): Likewise.
	(aliasing_matching_component_refs_p): Add partial_overlap argument;
	pass it to nonoverlapping_refs_since_match_p.
	(aliasing_component_refs_walk): Update call of
	aliasing_matching_component_refs_p
	(nonoverlapping_array_refs_p): New function.
	(decl_refs_may_alias_p, indirect_ref_may_alias_decl_p,
	indirect_refs_may_alias_p): Update calls of
	nonoverlapping_refs_since_match_p.
	* gcc.dg/tree-ssa/alias-access-path-10.c: New testcase.

Comments

Richard Biener July 23, 2019, 1:13 p.m. UTC | #1
On Fri, 19 Jul 2019, Jan Hubicka wrote:

> Hi,
> this patch adds bare bones of disambiguation of access paths via
> ARRAY_REF.  Similarly to COMPONENT_REF we need a matched ARRAY_REF size
> and prove that indexes are actually different.
> 
> This adds about 20 new disambiguations to tramp3d.
> 
> Bootstrapped/regtested x86_64-linux, OK?
> 
> 	* tree-ssa-alias.c (nonoverlapping_component_refs_since_match_p):
> 	Rename to ...
> 	(nonoverlapping_refs_since_match_p): ... this; handle also ARRAY_REFs.
> 	(alias_stats): Update stats.
> 	(dump_alias_stats): Likewise.
> 	(aliasing_matching_component_refs_p): Add partial_overlap argument;
> 	pass it to nonoverlapping_refs_since_match_p.
> 	(aliasing_component_refs_walk): Update call of
> 	aliasing_matching_component_refs_p
> 	(nonoverlapping_array_refs_p): New function.
> 	(decl_refs_may_alias_p, indirect_ref_may_alias_decl_p,
> 	indirect_refs_may_alias_p): Update calls of
> 	nonoverlapping_refs_since_match_p.
> 	* gcc.dg/tree-ssa/alias-access-path-10.c: New testcase.
> 
> Index: tree-ssa-alias.c
> ===================================================================
> --- tree-ssa-alias.c	(revision 273570)
> +++ tree-ssa-alias.c	(working copy)
> @@ -87,7 +87,7 @@ along with GCC; see the file COPYING3.
>     this file.  Low-level disambiguators dealing with points-to
>     information are in tree-ssa-structalias.c.  */
>  
> -static int nonoverlapping_component_refs_since_match_p (tree, tree, tree, tree);
> +static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
>  static bool nonoverlapping_component_refs_p (const_tree, const_tree);
>  
>  /* Query statistics for the different low-level disambiguators.
> @@ -104,9 +104,9 @@ static struct {
>    unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
>    unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
>    unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
> -  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_may_alias;
> -  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_must_overlap;
> -  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_no_alias;
> +  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
> +  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
> +  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
>  } alias_stats;
>  
>  void
> @@ -137,15 +137,15 @@ dump_alias_stats (FILE *s)
>  	   alias_stats.nonoverlapping_component_refs_p_no_alias,
>  	   alias_stats.nonoverlapping_component_refs_p_no_alias
>  	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
> -  fprintf (s, "  nonoverlapping_component_refs_since_match_p: "
> +  fprintf (s, "  nonoverlapping_refs_since_match_p: "
>  	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
>  	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
>  	   HOST_WIDE_INT_PRINT_DEC" queries\n",
> -	   alias_stats.nonoverlapping_component_refs_since_match_p_no_alias,
> -	   alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap,
> -	   alias_stats.nonoverlapping_component_refs_since_match_p_no_alias
> -	   + alias_stats.nonoverlapping_component_refs_since_match_p_may_alias
> -	   + alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap);
> +	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
> +	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
> +	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
> +	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
> +	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
>    fprintf (s, "  aliasing_component_refs_p: "
>  	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
>  	   HOST_WIDE_INT_PRINT_DEC" queries\n",
> @@ -856,7 +856,8 @@ type_has_components_p (tree type)
>  
>  /* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
>     respectively are either pointing to same address or are completely
> -   disjoint.
> +   disjoint. If PARITAL_OVERLAP is true, assume that outermost arrays may
> +   just partly overlap.
>  
>     Try to disambiguate using the access path starting from the match
>     and return false if there is no conflict.
> @@ -867,24 +868,27 @@ static bool
>  aliasing_matching_component_refs_p (tree match1, tree ref1,
>  				    poly_int64 offset1, poly_int64 max_size1,
>  				    tree match2, tree ref2,
> -				    poly_int64 offset2, poly_int64 max_size2)
> +				    poly_int64 offset2, poly_int64 max_size2,
> +				    bool partial_overlap)
>  {
>    poly_int64 offadj, sztmp, msztmp;
>    bool reverse;
>  
> -
> -  get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
> -  offset2 -= offadj;
> -  get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
> -  offset1 -= offadj;
> -  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
> +  if (!partial_overlap)
>      {
> -      ++alias_stats.aliasing_component_refs_p_no_alias;
> -      return false;
> +      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
> +      offset2 -= offadj;
> +      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
> +      offset1 -= offadj;
> +      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
> +	{
> +	  ++alias_stats.aliasing_component_refs_p_no_alias;
> +	  return false;
> +	}
>      }
>  
> -  int cmp = nonoverlapping_component_refs_since_match_p (match1, ref1,
> -							 match2, ref2);
> +  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
> +					       partial_overlap);
>    if (cmp == 1
>        || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
>      {
> @@ -964,6 +968,8 @@ aliasing_component_refs_walk (tree ref1,
>      }
>    if (same_p == 1)
>      {
> +      bool partial_overlap = false;
> +
>        /* We assume that arrays can overlap by multiple of their elements
>  	 size as tested in gcc.dg/torture/alias-2.c.
>  	 This partial overlap happen only when both arrays are bases of
> @@ -973,15 +979,18 @@ aliasing_component_refs_walk (tree ref1,
>  	  && (!TYPE_SIZE (TREE_TYPE (base1))
>  	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
>  	      || ref == base2))
> -	/* Setting maybe_match to true triggers
> -	   nonoverlapping_component_refs_p test later that still may do
> -	   useful disambiguation.  */
> -	*maybe_match = true;
> -      else
> -	return aliasing_matching_component_refs_p (base1, ref1,
> -						   offset1, max_size1,
> -						   ref, ref2,
> -						   offset2, max_size2);
> +	{
> +	  /* Setting maybe_match to true triggers
> +	     nonoverlapping_component_refs_p test later that still may do
> +	     useful disambiguation.  */
> +	  *maybe_match = true;
> +	  partial_overlap = true;
> +	}
> +      return aliasing_matching_component_refs_p (base1, ref1,
> +						 offset1, max_size1,
> +						 ref, ref2,
> +						 offset2, max_size2,
> +						 partial_overlap);
>      }
>    return -1;
>  }
> @@ -1225,10 +1234,57 @@ nonoverlapping_component_refs_p_1 (const
>    return -1;
>  }
>  
> +/* REF1 and REF2 are ARRAY_REFs which either start at the same address or
> +   are completely disjoint.

So the ARRAY_REF array bases are at the same address, not the ARRAY_REF
itself?

> +   Return 1 if the refs are non-overlapping.
> +   Return 0 if they are possibly overlapping but if so the overlap again
> +   starts on the same address.
> +   Return -1 otherwise.  */
> +
> +int
> +nonoverlapping_array_refs_p (tree ref1, tree ref2)
> +{
> +  tree low_bound1 = array_ref_low_bound (ref1);
> +  tree low_bound2 = array_ref_low_bound (ref2);
> +  tree index1 = TREE_OPERAND (ref1, 1);
> +  tree index2 = TREE_OPERAND (ref2, 1);
> +
> +  /* Handle zero offsets first: we do not need to match type size in this
> +     case.  */
> +  if (operand_equal_p (index1, low_bound1, 0)
> +      && operand_equal_p (index2, low_bound2, 0))
> +    return 0;
> +
> +  /* If type sizes are different, give up.  */
> +  if (!operand_equal_p (array_ref_element_size (ref1),
> +			array_ref_element_size (ref2), 0))
> +    return -1;

So both array_ref_low_bound and array_ref_element_size are quite 
expensive.  I wonder if you should resort to operand_equal_p
of TREE_OPERAND (ref1, 2) or the raw TYPE_MIN_VALUE of the
domain here since you are not interested in the actual size
or the actual minimum value.

> +  /* Since we know that type sizes are the same, there is no need to return
> +     -1 after this point, since partial overlap can not be introduced.  */
> +
> +  /* We may need to fold trees in this case.
> +     TODO: Handle integer constant case at least.  */
> +  if (!operand_equal_p (low_bound1, low_bound2, 0))
> +    return 0;
> +
> +  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
> +    {
> +      if (tree_int_cst_equal (index1, index2))
> +	return 0;
> +      return 1;
> +    }
> +  /* TODO: We can use VRP to further disambiguate here. */
> +  return 0;
> +}
> +
>  /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
>     MATCH2 either point to the same address or are disjoint.
>     MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
>     respectively or NULL in the case we established equivalence of bases.
> +   If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
> +   overlap by exact multiply of their element size.
>  
>     This test works by matching the initial segment of the access path
>     and does not rely on TBAA thus is safe for !flag_strict_aliasing if
> @@ -1247,8 +1303,9 @@ nonoverlapping_component_refs_p_1 (const
>     oracles.  */
>  
>  static int
> -nonoverlapping_component_refs_since_match_p (tree match1, tree ref1,
> -					     tree match2, tree ref2)
> +nonoverlapping_refs_since_match_p (tree match1, tree ref1,
> +				   tree match2, tree ref2,
> +				   bool partial_overlap)
>  {
>    /* Early return if there are no references to match, we do not need
>       to walk the access paths.
> @@ -1301,7 +1358,7 @@ nonoverlapping_component_refs_since_matc
>  	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
>  				  TREE_OPERAND (ref2, 1))))
>      {
> -      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
> +      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
>        return -1;
>      }
>  
> @@ -1318,18 +1375,75 @@ nonoverlapping_component_refs_since_matc
>       case the return value will precisely be false.  */
>    while (true)
>      {
> -      bool seen_noncomponent_ref_p = false;
> +      /* Track if we seen unmatched ref with non-zero offset.  In this case
> +	 we must look for partial overlaps.  */
> +      bool seen_unmatched_ref_p = false;
> +
> +      /* First match ARRAY_REFs an try to disambiguate.  */
> +      if (!component_refs1.is_empty ()
> +	  && !component_refs2.is_empty ())
> +	{
> +	  unsigned int narray_refs1=0, narray_refs2=0;
> +
> +	  /* We generally assume that both access paths starts by same sequence
> +	     of refs.  However if number of array refs is not in sync, try
> +	     to recover and pop elts until number match.  This helps the case
> +	     where one access path starts by array and other by element.  */
> +	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
> +	       narray_refs1++)
> +	    if (TREE_CODE (component_refs1 [component_refs1.length()
> +					    - 1 - narray_refs1]) != ARRAY_REF)
> +	      break;
> +
> +	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
> +	       narray_refs2++)
> +	    if (TREE_CODE (component_refs2 [component_refs2.length()
> +					    - 1 - narray_refs2]) != ARRAY_REF)
> +	      break;

so here we now have narray_refs1,2, the number of ARRAY_REFs at the
end of the component_refs.  I wonder if this part can be easily
done during component_refN construction - simply ++count
if pushing an ARRAY_REF, count = 0 if not?  What about ARRAY_RANGE_REF?
Will those invalidate any of the stuff and thus we need to handle
them conservative in some way?

> +	  for (; narray_refs1 > narray_refs2; narray_refs1--)
> +	    {
> +	      ref1 = component_refs1.pop ();
> +	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
> +				    array_ref_low_bound (ref1), 0))
> +	        seen_unmatched_ref_p = true;
> +	    }
> +	  for (; narray_refs2 > narray_refs1; narray_refs2--)
> +	    {
> +	      ref2 = component_refs2.pop ();
> +	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
> +				    array_ref_low_bound (ref2), 0))
> +	        seen_unmatched_ref_p = true;
> +	    }

here we're trying to make them equal by popping.  seen_unmatched_ref_p
is magic, you have to add comments.

> +	  /* Try to disambiguate matched arrays.  */
> +	  for (unsigned int i = 0; i < narray_refs1; i++)
> +	    {
> +	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
> +						     component_refs2.pop ());
> +	      if (cmp == 1 && !partial_overlap)
> +		{
> +		  ++alias_stats
> +		    .nonoverlapping_refs_since_match_p_no_alias;
> +		  return 1;
> +		}
> +	      partial_overlap = false;
> +	      if (cmp == -1)
> +		seen_unmatched_ref_p = true;
> +	    }
> +	}
> +
> +      /* Next look for component_refs.  */
> +

So for below we assume that we will never skip an ARRAY_REF without
seeing two COMPONENT_REFs, correct?  And the mismatching ARRAY_REF
counts can only happen for the outermost components (which are
innermost on the component_ref stacks).  So I really wonder if
we should not work from the other side of the component_ref
arrays?  Or handle the above case in the "tail" (I'm not sure
the case of [i][j] vs. [i][j][2] will happen often since
whole-array refs are not common in the IL).

Richard.

>        do
>  	{
>  	  if (component_refs1.is_empty ())
>  	    {
>  	      ++alias_stats
> -		.nonoverlapping_component_refs_since_match_p_must_overlap;
> +		.nonoverlapping_refs_since_match_p_must_overlap;
>  	      return 0;
>  	    }
>  	  ref1 = component_refs1.pop ();
>  	  if (TREE_CODE (ref1) != COMPONENT_REF)
> -	    seen_noncomponent_ref_p = true;
> +	    seen_unmatched_ref_p = true;
>  	}
>        while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
>  
> @@ -1338,12 +1452,12 @@ nonoverlapping_component_refs_since_matc
>  	  if (component_refs2.is_empty ())
>  	    {
>  	      ++alias_stats
> -		.nonoverlapping_component_refs_since_match_p_must_overlap;
> +		.nonoverlapping_refs_since_match_p_must_overlap;
>  	      return 0;
>  	    }
>  	  ref2 = component_refs2.pop ();
>  	  if (TREE_CODE (ref2) != COMPONENT_REF)
> -	    seen_noncomponent_ref_p = true;
> +	    seen_unmatched_ref_p = true;
>  	}
>        while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
>  
> @@ -1361,13 +1475,15 @@ nonoverlapping_component_refs_since_matc
>        tree type1 = DECL_CONTEXT (field1);
>        tree type2 = DECL_CONTEXT (field2);
>  
> +      partial_overlap = false;
> +
>        /* If we skipped array refs on type of different sizes, we can
>  	 no longer be sure that there are not partial overlaps.  */
> -      if (seen_noncomponent_ref_p
> +      if (seen_unmatched_ref_p
>  	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
>  	{
>  	  ++alias_stats
> -	    .nonoverlapping_component_refs_since_match_p_may_alias;
> +	    .nonoverlapping_refs_since_match_p_may_alias;
>  	  return -1;
>  	}
>  
> @@ -1375,18 +1491,18 @@ nonoverlapping_component_refs_since_matc
>        if (cmp == -1)
>  	{
>  	  ++alias_stats
> -	    .nonoverlapping_component_refs_since_match_p_may_alias;
> +	    .nonoverlapping_refs_since_match_p_may_alias;
>  	  return -1;
>  	}
>        else if (cmp == 1)
>  	{
>  	  ++alias_stats
> -	    .nonoverlapping_component_refs_since_match_p_no_alias;
> +	    .nonoverlapping_refs_since_match_p_no_alias;
>  	  return 1;
>  	}
>      }
>  
> -  ++alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap;
> +  ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
>    return 0;
>  }
>  
> @@ -1583,8 +1699,7 @@ decl_refs_may_alias_p (tree ref1, tree b
>       so we disambiguate component references manually.  */
>    if (ref1 && ref2
>        && handled_component_p (ref1) && handled_component_p (ref2)
> -      && nonoverlapping_component_refs_since_match_p (NULL, ref1,
> -						      NULL, ref2) == 1)
> +      && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
>      return false;
>  
>    return true;     
> @@ -1709,19 +1824,22 @@ indirect_ref_may_alias_decl_p (tree ref1
>         || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
>         && (TREE_CODE (dbase2) != TARGET_MEM_REF
>  	   || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
> -      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1
> -      && (TREE_CODE (TREE_TYPE (base1)) != ARRAY_TYPE
> -	  || (TYPE_SIZE (TREE_TYPE (base1))
> -	      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) == INTEGER_CST)))
> +      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
>      {
> -      if (!ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
> +      bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
> +			      && (TYPE_SIZE (TREE_TYPE (base1))
> +			      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
> +				 != INTEGER_CST));
> +      if (!partial_overlap
> +	  && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
>  	return false;
>        if (!ref1 || !ref2
>  	  /* If there is must alias, there is no use disambiguating further.  */
> -	  || (known_eq (size1, max_size1) && known_eq (size2, max_size2)))
> +	  || (!partial_overlap
> +	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
>  	return true;
> -      int res = nonoverlapping_component_refs_since_match_p (base1, ref1,
> -							     base2, ref2);
> +      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
> +						   partial_overlap);
>        if (res == -1)
>  	return !nonoverlapping_component_refs_p (ref1, ref2);
>        return !res;
> @@ -1805,8 +1923,8 @@ indirect_refs_may_alias_p (tree ref1 ATT
>  	return true;
>        if (ref1 && ref2)
>  	{
> -	  int res = nonoverlapping_component_refs_since_match_p (NULL, ref1,
> -								 NULL, ref2);
> +	  int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
> +						       false);
>  	  if (res != -1)
>  	    return !res;
>  	}
> @@ -1844,19 +1962,22 @@ indirect_refs_may_alias_p (tree ref1 ATT
>        && (TREE_CODE (base2) != TARGET_MEM_REF
>  	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
>        && same_type_for_tbaa (TREE_TYPE (ptrtype1),
> -			     TREE_TYPE (ptrtype2)) == 1
> +			     TREE_TYPE (ptrtype2)) == 1)
> +    {
>        /* But avoid treating arrays as "objects", instead assume they
>           can overlap by an exact multiple of their element size.
>           See gcc.dg/torture/alias-2.c.  */
> -      && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
> -    {
> -      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
> +      bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
> +
> +      if (!partial_overlap
> +	  && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
>  	return false;
>        if (!ref1 || !ref2
> -	  || (known_eq (size1, max_size1) && known_eq (size2, max_size2)))
> +	  || (!partial_overlap
> +	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
>  	return true;
> -      int res = nonoverlapping_component_refs_since_match_p (base1, ref1,
> -							     base2, ref2);
> +      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
> +						   partial_overlap);
>        if (res == -1)
>  	return !nonoverlapping_component_refs_p (ref1, ref2);
>        return !res;
> Index: testsuite/gcc.dg/tree-ssa/alias-access-path-10.c
> ===================================================================
> --- testsuite/gcc.dg/tree-ssa/alias-access-path-10.c	(nonexistent)
> +++ testsuite/gcc.dg/tree-ssa/alias-access-path-10.c	(working copy)
> @@ -0,0 +1,12 @@
> +/* { dg-do compile } */
> +/* { dg-options "-O2 -fdump-tree-fre1" } */
> +
> +struct a {int array[3];} a[10];
> +int
> +test(int i,int j)
> +{
> +  a[i].array[1]=123;
> +  a[j].array[2]=2;
> +  return a[i].array[1];
> +}
> +/* { dg-final { scan-tree-dump-times "return 123" 1 "fre1"} } */
>
Jan Hubicka July 29, 2019, 10:20 a.m. UTC | #2
Hello,
I missed your email initially, so sorry for late reaction.
> > +/* REF1 and REF2 are ARRAY_REFs which either start at the same address or
> > +   are completely disjoint.
> 
> So the ARRAY_REF array bases are at the same address, not the ARRAY_REF
> itself?

Yes, updated the comment.

> 
> > +   Return 1 if the refs are non-overlapping.
> > +   Return 0 if they are possibly overlapping but if so the overlap again
> > +   starts on the same address.
> > +   Return -1 otherwise.  */
> > +
> > +int
> > +nonoverlapping_array_refs_p (tree ref1, tree ref2)
> > +{
> > +  tree low_bound1 = array_ref_low_bound (ref1);
> > +  tree low_bound2 = array_ref_low_bound (ref2);
> > +  tree index1 = TREE_OPERAND (ref1, 1);
> > +  tree index2 = TREE_OPERAND (ref2, 1);
> > +
> > +  /* Handle zero offsets first: we do not need to match type size in this
> > +     case.  */
> > +  if (operand_equal_p (index1, low_bound1, 0)
> > +      && operand_equal_p (index2, low_bound2, 0))
> > +    return 0;
> > +
> > +  /* If type sizes are different, give up.  */
> > +  if (!operand_equal_p (array_ref_element_size (ref1),
> > +			array_ref_element_size (ref2), 0))
> > +    return -1;
> 
> So both array_ref_low_bound and array_ref_element_size are quite 
> expensive.  I wonder if you should resort to operand_equal_p
> of TREE_OPERAND (ref1, 2) or the raw TYPE_MIN_VALUE of the
> domain here since you are not interested in the actual size
> or the actual minimum value.

I added the operand_equal_p checks. I think it will be OK for
-fstrict-aliasing. This is because we tend to only walk paths where
types are the same (others are ruled out by the alias checks).
For -fno-strict-aliasing we may want to handle at
least the case where everything is constant, but that can be done incrementally.
> > +	  /* We generally assume that both access paths starts by same sequence
> > +	     of refs.  However if number of array refs is not in sync, try
> > +	     to recover and pop elts until number match.  This helps the case
> > +	     where one access path starts by array and other by element.  */
> > +	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
> > +	       narray_refs1++)
> > +	    if (TREE_CODE (component_refs1 [component_refs1.length()
> > +					    - 1 - narray_refs1]) != ARRAY_REF)
> > +	      break;
> > +
> > +	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
> > +	       narray_refs2++)
> > +	    if (TREE_CODE (component_refs2 [component_refs2.length()
> > +					    - 1 - narray_refs2]) != ARRAY_REF)
> > +	      break;
> 
> so here we now have narray_refs1,2, the number of ARRAY_REFs at the
> end of the component_refs.  I wonder if this part can be easily

I think this is not completely easy to do since before we hit the
outermost component ref in the walk we do not know which block of array
refs is supposed to match which.

I am also thinking to reorg all the access path functions to work on
vectors summarizing the refs (so we do not need them twice for IPA
mod-ref) and then we do not want to insert such implementation details.

> done during component_refN construction - simply ++count
> if pushing an ARRAY_REF, count = 0 if not?  What about ARRAY_RANGE_REF?
> Will those invalidate any of the stuff and thus we need to handle
> them conservative in some way?

For now I cowardly ignore ARRAY_RANGE_REF.  We will get unmatched_ref
there and only synchronize on next component ref if present.
> 
> > +	  for (; narray_refs1 > narray_refs2; narray_refs1--)
> > +	    {
> > +	      ref1 = component_refs1.pop ();
> > +	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
> > +				    array_ref_low_bound (ref1), 0))
> > +	        seen_unmatched_ref_p = true;
> > +	    }
> > +	  for (; narray_refs2 > narray_refs1; narray_refs2--)
> > +	    {
> > +	      ref2 = component_refs2.pop ();
> > +	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
> > +				    array_ref_low_bound (ref2), 0))
> > +	        seen_unmatched_ref_p = true;
> > +	    }
> 
> here we're trying to make them equal by popping.  seen_unmatched_ref_p
> is magic, you have to add comments.

Comment added.
> 
> > +	  /* Try to disambiguate matched arrays.  */
> > +	  for (unsigned int i = 0; i < narray_refs1; i++)
> > +	    {
> > +	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
> > +						     component_refs2.pop ());
> > +	      if (cmp == 1 && !partial_overlap)
> > +		{
> > +		  ++alias_stats
> > +		    .nonoverlapping_refs_since_match_p_no_alias;
> > +		  return 1;
> > +		}
> > +	      partial_overlap = false;
> > +	      if (cmp == -1)
> > +		seen_unmatched_ref_p = true;
> > +	    }
> > +	}
> > +
> > +      /* Next look for component_refs.  */
> > +
> 
> So for below we assume that we will never skip an ARRAY_REF without
> seeing two COMPONENT_REFs, correct?  And the mismatching ARRAY_REF
> counts can only happen for the outermost components (which are
> innermost on the component_ref stacks).  So I really wonder if
> we should not work from the other side of the component_ref
> arrays?  Or handle the above case in the "tail" (I'm not sure
> the case of [i][j] vs. [i][j][2] will happen often since
> whole-array refs are not common in the IL).

I think most access paths end by the type w/o component so we want
to match something like
  a[i][j][k].bar;
with ptr[j][k].bar where ptr==&a

So we want to get rid of the outermost a[i].
You are right that if there is an access path of arrays which ends by an array (in
C I think you cannot write it), things will get out of sync and we will
punt.  I may add extra logic trying to match the innermost array, but it
is not very easy to do right now given that we do not have array type
comparison that would work with LTO (types_same_for_tbaa always returns
-1 on array types).

Here is updated patch. Looks better?
Bootstrapped/regtested x86_64
Honza

	* tree-ssa-alias.c (nonoverlapping_component_refs_since_match_p):
	Rename to ...
	(nonoverlapping_refs_since_match_p): ... this; handle also ARRAY_REFs.
	(alias_stats): Update stats.
	(dump_alias_stats): Likewise.
	(cheap_array_ref_low_bound): New function.
	(aliasing_matching_component_refs_p): Add partial_overlap argument;
	pass it to nonoverlapping_refs_since_match_p.
	(aliasing_component_refs_walk): Update call of
	aliasing_matching_component_refs_p
	(nonoverlapping_array_refs_p): New function.
	(decl_refs_may_alias_p, indirect_ref_may_alias_decl_p,
	indirect_refs_may_alias_p): Update calls of
	nonoverlapping_refs_since_match_p.
	* gcc.dg/tree-ssa/alias-access-path-10.c: New testcase.

Index: testsuite/gcc.dg/tree-ssa/alias-access-path-10.c
===================================================================
--- testsuite/gcc.dg/tree-ssa/alias-access-path-10.c	(nonexistent)
+++ testsuite/gcc.dg/tree-ssa/alias-access-path-10.c	(working copy)
@@ -0,0 +1,12 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-fre1" } */
+
+struct a {int array[3];} a[10];
+int
+test(int i,int j)
+{
+  a[i].array[1]=123;
+  a[j].array[2]=2;
+  return a[i].array[1];
+}
+/* { dg-final { scan-tree-dump-times "return 123" 1 "fre1"} } */
Index: tree-ssa-alias.c
===================================================================
--- tree-ssa-alias.c	(revision 273865)
+++ tree-ssa-alias.c	(working copy)
@@ -87,7 +87,7 @@ along with GCC; see the file COPYING3.
    this file.  Low-level disambiguators dealing with points-to
    information are in tree-ssa-structalias.c.  */
 
-static int nonoverlapping_component_refs_since_match_p (tree, tree, tree, tree);
+static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
 
 /* Query statistics for the different low-level disambiguators.
@@ -104,9 +104,9 @@ static struct {
   unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
   unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
   unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
-  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_may_alias;
-  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_must_overlap;
-  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_no_alias;
+  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
+  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
+  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
 } alias_stats;
 
 void
@@ -137,15 +137,15 @@ dump_alias_stats (FILE *s)
 	   alias_stats.nonoverlapping_component_refs_p_no_alias,
 	   alias_stats.nonoverlapping_component_refs_p_no_alias
 	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
-  fprintf (s, "  nonoverlapping_component_refs_since_match_p: "
+  fprintf (s, "  nonoverlapping_refs_since_match_p: "
 	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
 	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
 	   HOST_WIDE_INT_PRINT_DEC" queries\n",
-	   alias_stats.nonoverlapping_component_refs_since_match_p_no_alias,
-	   alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap,
-	   alias_stats.nonoverlapping_component_refs_since_match_p_no_alias
-	   + alias_stats.nonoverlapping_component_refs_since_match_p_may_alias
-	   + alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap);
+	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
+	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
+	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
+	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
+	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
   fprintf (s, "  aliasing_component_refs_p: "
 	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
 	   HOST_WIDE_INT_PRINT_DEC" queries\n",
@@ -856,7 +856,8 @@ type_has_components_p (tree type)
 
 /* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
    respectively are either pointing to same address or are completely
-   disjoint.
+   disjoint. If PARITAL_OVERLAP is true, assume that outermost arrays may
+   just partly overlap.
 
    Try to disambiguate using the access path starting from the match
    and return false if there is no conflict.
@@ -867,24 +868,27 @@ static bool
 aliasing_matching_component_refs_p (tree match1, tree ref1,
 				    poly_int64 offset1, poly_int64 max_size1,
 				    tree match2, tree ref2,
-				    poly_int64 offset2, poly_int64 max_size2)
+				    poly_int64 offset2, poly_int64 max_size2,
+				    bool partial_overlap)
 {
   poly_int64 offadj, sztmp, msztmp;
   bool reverse;
 
-
-  get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
-  offset2 -= offadj;
-  get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
-  offset1 -= offadj;
-  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
+  if (!partial_overlap)
     {
-      ++alias_stats.aliasing_component_refs_p_no_alias;
-      return false;
+      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
+      offset2 -= offadj;
+      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
+      offset1 -= offadj;
+      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
+	{
+	  ++alias_stats.aliasing_component_refs_p_no_alias;
+	  return false;
+	}
     }
 
-  int cmp = nonoverlapping_component_refs_since_match_p (match1, ref1,
-							 match2, ref2);
+  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
+					       partial_overlap);
   if (cmp == 1
       || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
     {
@@ -964,6 +968,8 @@ aliasing_component_refs_walk (tree ref1,
     }
   if (same_p == 1)
     {
+      bool partial_overlap = false;
+
       /* We assume that arrays can overlap by multiple of their elements
 	 size as tested in gcc.dg/torture/alias-2.c.
 	 This partial overlap happen only when both arrays are bases of
@@ -973,15 +979,18 @@ aliasing_component_refs_walk (tree ref1,
 	  && (!TYPE_SIZE (TREE_TYPE (base1))
 	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
 	      || ref == base2))
-	/* Setting maybe_match to true triggers
-	   nonoverlapping_component_refs_p test later that still may do
-	   useful disambiguation.  */
-	*maybe_match = true;
-      else
-	return aliasing_matching_component_refs_p (base1, ref1,
-						   offset1, max_size1,
-						   ref, ref2,
-						   offset2, max_size2);
+	{
+	  /* Setting maybe_match to true triggers
+	     nonoverlapping_component_refs_p test later that still may do
+	     useful disambiguation.  */
+	  *maybe_match = true;
+	  partial_overlap = true;
+	}
+      return aliasing_matching_component_refs_p (base1, ref1,
+						 offset1, max_size1,
+						 ref, ref2,
+						 offset2, max_size2,
+						 partial_overlap);
     }
   return -1;
 }
@@ -1225,10 +1234,98 @@ nonoverlapping_component_refs_p_1 (const
   return -1;
 }
 
+/* Return low bound of array. Do not produce new trees
+   and thus do not care about particular type of integer constant
+   and placeholder exprs.  */
+
+static tree
+cheap_array_ref_low_bound (tree ref)
+{
+  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
+
+  /* Avoid expensive array_ref_low_bound.
+     low bound is either stored in operand2, or it is TYPE_MIN_VALUE of domain
+     type or it is zero.  */
+  if (TREE_OPERAND (ref, 2))
+    return TREE_OPERAND (ref, 2);
+  else if (domain_type && TYPE_MIN_VALUE (domain_type))
+    return TYPE_MIN_VALUE (domain_type);
+  else
+    return integer_zero_node;
+}
+
+/* REF1 and REF2 are ARRAY_REFs with either same base address or which are
+   completely disjoint.
+
+   Return 1 if the refs are non-overlapping.
+   Return 0 if they are possibly overlapping but if so the overlap again
+   starts on the same address.
+   Return -1 otherwise.  */
+
+int
+nonoverlapping_array_refs_p (tree ref1, tree ref2)
+{
+  tree index1 = TREE_OPERAND (ref1, 1);
+  tree index2 = TREE_OPERAND (ref2, 1);
+  tree low_bound1 = cheap_array_ref_low_bound(ref1);
+  tree low_bound2 = cheap_array_ref_low_bound(ref2);
+
+  /* Handle zero offsets first: we do not need to match type size in this
+     case.  */
+  if (operand_equal_p (index1, low_bound1, 0)
+      && operand_equal_p (index2, low_bound2, 0))
+    return 0;
+
+  /* If type sizes are different, give up.
+
+     Avoid expensive array_ref_element_size.
+     If operand 3 is present it denotes size in the alignment units.
+     Otherwise size is TYPE_SIZE of the element type.
+     Handle only common cases where types are of the same "kind".  */
+  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
+    return -1;
+
+  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
+  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
+
+  if (TREE_OPERAND (ref1, 3))
+    {
+      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
+	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
+			       TREE_OPERAND (ref2, 3), 0))
+	return -1;
+    }
+  else
+    {
+      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
+			    TYPE_SIZE_UNIT (elmt_type2), 0))
+	return -1;
+    }
+
+  /* Since we know that type sizes are the same, there is no need to return
+     -1 after this point. Partial overlap can not be introduced.  */
+
+  /* We may need to fold trees in this case.
+     TODO: Handle integer constant case at least.  */
+  if (!operand_equal_p (low_bound1, low_bound2, 0))
+    return 0;
+
+  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
+    {
+      if (tree_int_cst_equal (index1, index2))
+	return 0;
+      return 1;
+    }
+  /* TODO: We can use VRP to further disambiguate here. */
+  return 0;
+}
+
 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
    MATCH2 either point to the same address or are disjoint.
    MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
    respectively or NULL in the case we established equivalence of bases.
+   If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
+   overlap by an exact multiple of their element size.
 
    This test works by matching the initial segment of the access path
    and does not rely on TBAA thus is safe for !flag_strict_aliasing if
@@ -1247,8 +1344,9 @@ nonoverlapping_component_refs_p_1 (const
    oracles.  */
 
 static int
-nonoverlapping_component_refs_since_match_p (tree match1, tree ref1,
-					     tree match2, tree ref2)
+nonoverlapping_refs_since_match_p (tree match1, tree ref1,
+				   tree match2, tree ref2,
+				   bool partial_overlap)
 {
   /* Early return if there are no references to match, we do not need
      to walk the access paths.
@@ -1301,7 +1399,7 @@ nonoverlapping_component_refs_since_matc
 	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
 				  TREE_OPERAND (ref2, 1))))
     {
-      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
+      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
       return -1;
     }
 
@@ -1318,18 +1416,79 @@ nonoverlapping_component_refs_since_matc
      case the return value will precisely be false.  */
   while (true)
     {
-      bool seen_noncomponent_ref_p = false;
+      /* Track if we have seen an unmatched ref with a non-zero offset.  In
+	 this case we must look for partial overlaps.  */
+      bool seen_unmatched_ref_p = false;
+
+      /* First match ARRAY_REFs and try to disambiguate.  */
+      if (!component_refs1.is_empty ()
+	  && !component_refs2.is_empty ())
+	{
+	  unsigned int narray_refs1=0, narray_refs2=0;
+
+	  /* We generally assume that both access paths start by the same sequence
+	     of refs.  However if number of array refs is not in sync, try
+	     to recover and pop elts until the numbers match.  This helps the case
+	     where one access path starts by array and other by element.  */
+	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
+	       narray_refs1++)
+	    if (TREE_CODE (component_refs1 [component_refs1.length()
+					    - 1 - narray_refs1]) != ARRAY_REF)
+	      break;
+
+	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
+	       narray_refs2++)
+	    if (TREE_CODE (component_refs2 [component_refs2.length()
+					    - 1 - narray_refs2]) != ARRAY_REF)
+	      break;
+	  for (; narray_refs1 > narray_refs2; narray_refs1--)
+	    {
+	      ref1 = component_refs1.pop ();
+	      /* Track whether we possibly introduced partial overlap assuming
+		 that innermost type sizes do not match.  This only can
+		 happen if the offset introduced by the ARRAY_REF
+		 is non-zero.  */
+	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
+				    cheap_array_ref_low_bound (ref1), 0))
+	        seen_unmatched_ref_p = true;
+	    }
+	  for (; narray_refs2 > narray_refs1; narray_refs2--)
+	    {
+	      ref2 = component_refs2.pop ();
+	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
+				    cheap_array_ref_low_bound (ref2), 0))
+	        seen_unmatched_ref_p = true;
+	    }
+	  /* Try to disambiguate matched arrays.  */
+	  for (unsigned int i = 0; i < narray_refs1; i++)
+	    {
+	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
+						     component_refs2.pop ());
+	      if (cmp == 1 && !partial_overlap)
+		{
+		  ++alias_stats
+		    .nonoverlapping_refs_since_match_p_no_alias;
+		  return 1;
+		}
+	      partial_overlap = false;
+	      if (cmp == -1)
+		seen_unmatched_ref_p = true;
+	    }
+	}
+
+      /* Next look for component_refs.  */
+
       do
 	{
 	  if (component_refs1.is_empty ())
 	    {
 	      ++alias_stats
-		.nonoverlapping_component_refs_since_match_p_must_overlap;
+		.nonoverlapping_refs_since_match_p_must_overlap;
 	      return 0;
 	    }
 	  ref1 = component_refs1.pop ();
 	  if (TREE_CODE (ref1) != COMPONENT_REF)
-	    seen_noncomponent_ref_p = true;
+	    seen_unmatched_ref_p = true;
 	}
       while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
 
@@ -1338,12 +1497,12 @@ nonoverlapping_component_refs_since_matc
 	  if (component_refs2.is_empty ())
 	    {
 	      ++alias_stats
-		.nonoverlapping_component_refs_since_match_p_must_overlap;
+		.nonoverlapping_refs_since_match_p_must_overlap;
 	      return 0;
 	    }
 	  ref2 = component_refs2.pop ();
 	  if (TREE_CODE (ref2) != COMPONENT_REF)
-	    seen_noncomponent_ref_p = true;
+	    seen_unmatched_ref_p = true;
 	}
       while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
 
@@ -1361,13 +1520,15 @@ nonoverlapping_component_refs_since_matc
       tree type1 = DECL_CONTEXT (field1);
       tree type2 = DECL_CONTEXT (field2);
 
+      partial_overlap = false;
+
       /* If we skipped array refs on type of different sizes, we can
 	 no longer be sure that there are not partial overlaps.  */
-      if (seen_noncomponent_ref_p
+      if (seen_unmatched_ref_p
 	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
 	{
 	  ++alias_stats
-	    .nonoverlapping_component_refs_since_match_p_may_alias;
+	    .nonoverlapping_refs_since_match_p_may_alias;
 	  return -1;
 	}
 
@@ -1375,18 +1536,18 @@ nonoverlapping_component_refs_since_matc
       if (cmp == -1)
 	{
 	  ++alias_stats
-	    .nonoverlapping_component_refs_since_match_p_may_alias;
+	    .nonoverlapping_refs_since_match_p_may_alias;
 	  return -1;
 	}
       else if (cmp == 1)
 	{
 	  ++alias_stats
-	    .nonoverlapping_component_refs_since_match_p_no_alias;
+	    .nonoverlapping_refs_since_match_p_no_alias;
 	  return 1;
 	}
     }
 
-  ++alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap;
+  ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
   return 0;
 }
 
@@ -1583,8 +1744,7 @@ decl_refs_may_alias_p (tree ref1, tree b
      so we disambiguate component references manually.  */
   if (ref1 && ref2
       && handled_component_p (ref1) && handled_component_p (ref2)
-      && nonoverlapping_component_refs_since_match_p (NULL, ref1,
-						      NULL, ref2) == 1)
+      && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
     return false;
 
   return true;     
@@ -1709,19 +1869,22 @@ indirect_ref_may_alias_decl_p (tree ref1
        || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
        && (TREE_CODE (dbase2) != TARGET_MEM_REF
 	   || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
-      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1
-      && (TREE_CODE (TREE_TYPE (base1)) != ARRAY_TYPE
-	  || (TYPE_SIZE (TREE_TYPE (base1))
-	      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) == INTEGER_CST)))
+      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
     {
-      if (!ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
+      bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
+			      && (TYPE_SIZE (TREE_TYPE (base1))
+			      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
+				 != INTEGER_CST));
+      if (!partial_overlap
+	  && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
 	return false;
       if (!ref1 || !ref2
 	  /* If there is must alias, there is no use disambiguating further.  */
-	  || (known_eq (size1, max_size1) && known_eq (size2, max_size2)))
+	  || (!partial_overlap
+	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
 	return true;
-      int res = nonoverlapping_component_refs_since_match_p (base1, ref1,
-							     base2, ref2);
+      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
+						   partial_overlap);
       if (res == -1)
 	return !nonoverlapping_component_refs_p (ref1, ref2);
       return !res;
@@ -1805,8 +1968,8 @@ indirect_refs_may_alias_p (tree ref1 ATT
 	return true;
       if (ref1 && ref2)
 	{
-	  int res = nonoverlapping_component_refs_since_match_p (NULL, ref1,
-								 NULL, ref2);
+	  int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
+						       false);
 	  if (res != -1)
 	    return !res;
 	}
@@ -1844,19 +2007,22 @@ indirect_refs_may_alias_p (tree ref1 ATT
       && (TREE_CODE (base2) != TARGET_MEM_REF
 	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
       && same_type_for_tbaa (TREE_TYPE (ptrtype1),
-			     TREE_TYPE (ptrtype2)) == 1
+			     TREE_TYPE (ptrtype2)) == 1)
+    {
       /* But avoid treating arrays as "objects", instead assume they
          can overlap by an exact multiple of their element size.
          See gcc.dg/torture/alias-2.c.  */
-      && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
-    {
-      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
+      bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
+
+      if (!partial_overlap
+	  && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
 	return false;
       if (!ref1 || !ref2
-	  || (known_eq (size1, max_size1) && known_eq (size2, max_size2)))
+	  || (!partial_overlap
+	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
 	return true;
-      int res = nonoverlapping_component_refs_since_match_p (base1, ref1,
-							     base2, ref2);
+      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
+						   partial_overlap);
       if (res == -1)
 	return !nonoverlapping_component_refs_p (ref1, ref2);
       return !res;
Richard Biener Aug. 1, 2019, 3:13 p.m. UTC | #3
On Mon, 29 Jul 2019, Jan Hubicka wrote:

> Hello,
> I missed your email initially, so sorry for late reaction.
> > > +/* REF1 and REF2 are ARRAY_REFs which either start at the same address or
> > > +   are completely disjoint.
> > 
> > So the ARRAY_REF array bases are at the same address, not the ARRAY_REF
> > itself?
> 
> Yes, updated the comment.
> 
> > 
> > > +   Return 1 if the refs are non-overlapping.
> > > +   Return 0 if they are possibly overlapping but if so the overlap again
> > > +   starts on the same address.
> > > +   Return -1 otherwise.  */
> > > +
> > > +int
> > > +nonoverlapping_array_refs_p (tree ref1, tree ref2)
> > > +{
> > > +  tree low_bound1 = array_ref_low_bound (ref1);
> > > +  tree low_bound2 = array_ref_low_bound (ref2);
> > > +  tree index1 = TREE_OPERAND (ref1, 1);
> > > +  tree index2 = TREE_OPERAND (ref2, 1);
> > > +
> > > +  /* Handle zero offsets first: we do not need to match type size in this
> > > +     case.  */
> > > +  if (operand_equal_p (index1, low_bound1, 0)
> > > +      && operand_equal_p (index2, low_bound2, 0))
> > > +    return 0;
> > > +
> > > +  /* If type sizes are different, give up.  */
> > > +  if (!operand_equal_p (array_ref_element_size (ref1),
> > > +			array_ref_element_size (ref2), 0))
> > > +    return -1;
> > 
> > So both array_ref_low_bound and array_ref_element_size are quite 
> > expensive.  I wonder if you should resort to operand_equal_p
> > of TREE_OPERAND (ref1, 2) or the raw TYPE_MIN_VALUE of the
> > domain here since you are not interested in the actual size
> > or the actual minimum value.
> 
> I added the operand_equal_p checks. I think it will be OK for
> -fstrict-aliasing. This is because we tend to only walk paths where
> types are the same (others are ruled out by the alias checks).
> For -fno-strict-aliasing we may want to handle at
> least the case where everything is constant, but that can be done incrementally.
> > > +	  /* We generally assume that both access paths starts by same sequence
> > > +	     of refs.  However if number of array refs is not in sync, try
> > > +	     to recover and pop elts until number match.  This helps the case
> > > +	     where one access path starts by array and other by element.  */
> > > +	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
> > > +	       narray_refs1++)
> > > +	    if (TREE_CODE (component_refs1 [component_refs1.length()
> > > +					    - 1 - narray_refs1]) != ARRAY_REF)
> > > +	      break;
> > > +
> > > +	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
> > > +	       narray_refs2++)
> > > +	    if (TREE_CODE (component_refs2 [component_refs2.length()
> > > +					    - 1 - narray_refs2]) != ARRAY_REF)
> > > +	      break;
> > 
> > so here we now have narray_refs1,2, the number of ARRAY_REFs at the
> > end of the component_refs.  I wonder if this part can be easily
> 
> I think this is not completely easy to do since before we hit the
> outermost component ref in the walk we do not know which block of array
> refs is supposed to match which.
> 
> I am also thinking to reorg all the access path functions to work on
> vectors summarizing the refs (so we do not need them twice for IPA
> mod-ref) and then we do not want to insert such implementation details.
> 
> > done during component_refN construction - simply ++count
> > if pushing an ARRAY_REF, count = 0 if not?  What about ARRAY_RANGE_REF?
> > Will those invalidate any of the stuff and thus we need to handle
> > them conservative in some way?
> 
> For now I cowardly ignore ARRAY_RANGE_REF.  We will get unmatched_ref
> there and only synchronize on next component ref if present.
> > 
> > > +	  for (; narray_refs1 > narray_refs2; narray_refs1--)
> > > +	    {
> > > +	      ref1 = component_refs1.pop ();
> > > +	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
> > > +				    array_ref_low_bound (ref1), 0))
> > > +	        seen_unmatched_ref_p = true;
> > > +	    }
> > > +	  for (; narray_refs2 > narray_refs1; narray_refs2--)
> > > +	    {
> > > +	      ref2 = component_refs2.pop ();
> > > +	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
> > > +				    array_ref_low_bound (ref2), 0))
> > > +	        seen_unmatched_ref_p = true;
> > > +	    }
> > 
> > here we're trying to make them equal by popping.  seen_unmatched_ref_p
> > is magic, you have to add comments.
> 
> Comment added.
> > 
> > > +	  /* Try to disambiguate matched arrays.  */
> > > +	  for (unsigned int i = 0; i < narray_refs1; i++)
> > > +	    {
> > > +	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
> > > +						     component_refs2.pop ());
> > > +	      if (cmp == 1 && !partial_overlap)
> > > +		{
> > > +		  ++alias_stats
> > > +		    .nonoverlapping_refs_since_match_p_no_alias;
> > > +		  return 1;
> > > +		}
> > > +	      partial_overlap = false;
> > > +	      if (cmp == -1)
> > > +		seen_unmatched_ref_p = true;
> > > +	    }
> > > +	}
> > > +
> > > +      /* Next look for component_refs.  */
> > > +
> > 
> > So for below we assume that we will never skip an ARRAY_REF without
> > seeing two COMPONENT_REFs, correct?  And the mismatching ARRAY_REF
> > counts can only happen for the outermost components (which are
> > innermost on the component_ref stacks).  So I really wonder if
> > we should not work from the other side of the component_ref
> > arrays?  Or handle the above case in the "tail" (I'm not sure
> > the case of [i][j] vs. [i][j][2] will happen often since
> > whole-array refs are not common in the IL).
> 
> I think most access paths ends by the type w/o component so we want
> to match something like
>   a[i][j][k].bar;
> with ptr[j][k].bar where ptr==&a
> 
> So we want to get rid of the outermost a[i].
> You are right if there is access path of arrays which ends by array (in
> C I think you can not write it) things will get out of sync and we will
> punt.  I may add extra logic trying to match the innermost array, but it
> is not very easy to do right now given that we do not have array type
> comparison that would work with LTO (types_same_for_tbaa always returns
> -1 on array types).
> 
> Here is updated patch. Looks better?
> Bootstrapped/regtested x86_64
> Honza
> 
> 	* tree-ssa-alias.c (nonoverlapping_component_refs_since_match_p):
> 	Rename to ...
> 	(nonoverlapping_refs_since_match_p): ... this; handle also ARRAY_REFs.
> 	(alias_stats): Update stats.
> 	(dump_alias_stats): Likewise.
> 	(cheap_array_ref_low_bound): New function.
> 	(aliasing_matching_component_refs_p): Add partial_overlap argument;
> 	pass it to nonoverlapping_refs_since_match_p.
> 	(aliasing_component_refs_walk): Update call of
> 	aliasing_matching_component_refs_p
> 	(nonoverlapping_array_refs_p): New function.
> 	(decl_refs_may_alias_p, indirect_ref_may_alias_decl_p,
> 	indirect_refs_may_alias_p): Update calls of
> 	nonoverlapping_refs_since_match_p.
> 	* gcc.dg/tree-ssa/alias-access-path-10.c: New testcase.
> 
> Index: testsuite/gcc.dg/tree-ssa/alias-access-path-10.c
> ===================================================================
> --- testsuite/gcc.dg/tree-ssa/alias-access-path-10.c	(nonexistent)
> +++ testsuite/gcc.dg/tree-ssa/alias-access-path-10.c	(working copy)
> @@ -0,0 +1,12 @@
> +/* { dg-do compile } */
> +/* { dg-options "-O2 -fdump-tree-fre1" } */
> +
> +struct a {int array[3];} a[10];
> +int
> +test(int i,int j)
> +{
> +  a[i].array[1]=123;
> +  a[j].array[2]=2;
> +  return a[i].array[1];
> +}
> +/* { dg-final { scan-tree-dump-times "return 123" 1 "fre1"} } */
> Index: tree-ssa-alias.c
> ===================================================================
> --- tree-ssa-alias.c	(revision 273865)
> +++ tree-ssa-alias.c	(working copy)
> @@ -87,7 +87,7 @@ along with GCC; see the file COPYING3.
>     this file.  Low-level disambiguators dealing with points-to
>     information are in tree-ssa-structalias.c.  */
>  
> -static int nonoverlapping_component_refs_since_match_p (tree, tree, tree, tree);
> +static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
>  static bool nonoverlapping_component_refs_p (const_tree, const_tree);
>  
>  /* Query statistics for the different low-level disambiguators.
> @@ -104,9 +104,9 @@ static struct {
>    unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
>    unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
>    unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
> -  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_may_alias;
> -  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_must_overlap;
> -  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_no_alias;
> +  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
> +  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
> +  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
>  } alias_stats;
>  
>  void
> @@ -137,15 +137,15 @@ dump_alias_stats (FILE *s)
>  	   alias_stats.nonoverlapping_component_refs_p_no_alias,
>  	   alias_stats.nonoverlapping_component_refs_p_no_alias
>  	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
> -  fprintf (s, "  nonoverlapping_component_refs_since_match_p: "
> +  fprintf (s, "  nonoverlapping_refs_since_match_p: "
>  	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
>  	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
>  	   HOST_WIDE_INT_PRINT_DEC" queries\n",
> -	   alias_stats.nonoverlapping_component_refs_since_match_p_no_alias,
> -	   alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap,
> -	   alias_stats.nonoverlapping_component_refs_since_match_p_no_alias
> -	   + alias_stats.nonoverlapping_component_refs_since_match_p_may_alias
> -	   + alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap);
> +	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
> +	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
> +	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
> +	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
> +	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
>    fprintf (s, "  aliasing_component_refs_p: "
>  	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
>  	   HOST_WIDE_INT_PRINT_DEC" queries\n",
> @@ -856,7 +856,8 @@ type_has_components_p (tree type)
>  
>  /* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
>     respectively are either pointing to same address or are completely
> -   disjoint.
> +   disjoint. If PARTIAL_OVERLAP is true, assume that outermost arrays may
> +   just partly overlap.
>  
>     Try to disambiguate using the access path starting from the match
>     and return false if there is no conflict.
> @@ -867,24 +868,27 @@ static bool
>  aliasing_matching_component_refs_p (tree match1, tree ref1,
>  				    poly_int64 offset1, poly_int64 max_size1,
>  				    tree match2, tree ref2,
> -				    poly_int64 offset2, poly_int64 max_size2)
> +				    poly_int64 offset2, poly_int64 max_size2,
> +				    bool partial_overlap)
>  {
>    poly_int64 offadj, sztmp, msztmp;
>    bool reverse;
>  
> -
> -  get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
> -  offset2 -= offadj;
> -  get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
> -  offset1 -= offadj;
> -  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
> +  if (!partial_overlap)
>      {
> -      ++alias_stats.aliasing_component_refs_p_no_alias;
> -      return false;
> +      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
> +      offset2 -= offadj;
> +      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
> +      offset1 -= offadj;
> +      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
> +	{
> +	  ++alias_stats.aliasing_component_refs_p_no_alias;
> +	  return false;
> +	}
>      }
>  
> -  int cmp = nonoverlapping_component_refs_since_match_p (match1, ref1,
> -							 match2, ref2);
> +  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
> +					       partial_overlap);
>    if (cmp == 1
>        || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
>      {
> @@ -964,6 +968,8 @@ aliasing_component_refs_walk (tree ref1,
>      }
>    if (same_p == 1)
>      {
> +      bool partial_overlap = false;
> +
>        /* We assume that arrays can overlap by multiple of their elements
>  	 size as tested in gcc.dg/torture/alias-2.c.
>  	 This partial overlap happen only when both arrays are bases of
> @@ -973,15 +979,18 @@ aliasing_component_refs_walk (tree ref1,
>  	  && (!TYPE_SIZE (TREE_TYPE (base1))
>  	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
>  	      || ref == base2))
> -	/* Setting maybe_match to true triggers
> -	   nonoverlapping_component_refs_p test later that still may do
> -	   useful disambiguation.  */
> -	*maybe_match = true;
> -      else
> -	return aliasing_matching_component_refs_p (base1, ref1,
> -						   offset1, max_size1,
> -						   ref, ref2,
> -						   offset2, max_size2);
> +	{
> +	  /* Setting maybe_match to true triggers
> +	     nonoverlapping_component_refs_p test later that still may do
> +	     useful disambiguation.  */
> +	  *maybe_match = true;
> +	  partial_overlap = true;
> +	}
> +      return aliasing_matching_component_refs_p (base1, ref1,
> +						 offset1, max_size1,
> +						 ref, ref2,
> +						 offset2, max_size2,
> +						 partial_overlap);
>      }
>    return -1;
>  }
> @@ -1225,10 +1234,98 @@ nonoverlapping_component_refs_p_1 (const
>    return -1;
>  }
>  
> +/* Return low bound of array. Do not produce new trees
> +   and thus do not care about particular type of integer constant
> +   and placeholder exprs.  */
> +
> +static tree
> +cheap_array_ref_low_bound (tree ref)
> +{
> +  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
> +
> +  /* Avoid expensive array_ref_low_bound.
> +     low bound is either stored in operand2, or it is TYPE_MIN_VALUE of domain
> +     type or it is zero.  */
> +  if (TREE_OPERAND (ref, 2))
> +    return TREE_OPERAND (ref, 2);
> +  else if (domain_type && TYPE_MIN_VALUE (domain_type))
> +    return TYPE_MIN_VALUE (domain_type);
> +  else
> +    return integer_zero_node;
> +}
> +
> +/* REF1 and REF2 are ARRAY_REFs with either same base address or which are
> +   completely disjoint.
> +
> +   Return 1 if the refs are non-overlapping.
> +   Return 0 if they are possibly overlapping but if so the overlap again
> +   starts on the same address.
> +   Return -1 otherwise.  */
> +
> +int
> +nonoverlapping_array_refs_p (tree ref1, tree ref2)
> +{
> +  tree index1 = TREE_OPERAND (ref1, 1);
> +  tree index2 = TREE_OPERAND (ref2, 1);
> +  tree low_bound1 = cheap_array_ref_low_bound(ref1);
> +  tree low_bound2 = cheap_array_ref_low_bound(ref2);
> +
> +  /* Handle zero offsets first: we do not need to match type size in this
> +     case.  */
> +  if (operand_equal_p (index1, low_bound1, 0)
> +      && operand_equal_p (index2, low_bound2, 0))
> +    return 0;
> +
> +  /* If type sizes are different, give up.
> +
> +     Avoid expensive array_ref_element_size.
> +     If operand 3 is present it denotes size in the alignment units.
> +     Otherwise size is TYPE_SIZE of the element type.
> +     Handle only common cases where types are of the same "kind".  */
> +  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
> +    return -1;
> +
> +  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
> +  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
> +
> +  if (TREE_OPERAND (ref1, 3))
> +    {
> +      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
> +	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
> +			       TREE_OPERAND (ref2, 3), 0))
> +	return -1;
> +    }
> +  else
> +    {
> +      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
> +			    TYPE_SIZE_UNIT (elmt_type2), 0))
> +	return -1;
> +    }

Yep, exactly ;)

> +  /* Since we know that type sizes are the same, there is no need to return
> +     -1 after this point. Partial overlap can not be introduced.  */
> +
> +  /* We may need to fold trees in this case.
> +     TODO: Handle integer constant case at least.  */

Yep, handling that case would be nice.

> +  if (!operand_equal_p (low_bound1, low_bound2, 0))
> +    return 0;
> +
> +  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
> +    {
> +      if (tree_int_cst_equal (index1, index2))
> +	return 0;
> +      return 1;
> +    }
> +  /* TODO: We can use VRP to further disambiguate here. */
> +  return 0;
> +}
> +
>  /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
>     MATCH2 either point to the same address or are disjoint.
>     MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
>     respectively or NULL in the case we established equivalence of bases.
> +   If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
> +   overlap by an exact multiple of their element size.
>  
>     This test works by matching the initial segment of the access path
>     and does not rely on TBAA thus is safe for !flag_strict_aliasing if
> @@ -1247,8 +1344,9 @@ nonoverlapping_component_refs_p_1 (const
>     oracles.  */
>  
>  static int
> -nonoverlapping_component_refs_since_match_p (tree match1, tree ref1,
> -					     tree match2, tree ref2)
> +nonoverlapping_refs_since_match_p (tree match1, tree ref1,
> +				   tree match2, tree ref2,
> +				   bool partial_overlap)
>  {
>    /* Early return if there are no references to match, we do not need
>       to walk the access paths.
> @@ -1301,7 +1399,7 @@ nonoverlapping_component_refs_since_matc
>  	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
>  				  TREE_OPERAND (ref2, 1))))
>      {
> -      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
> +      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
>        return -1;
>      }
>  
> @@ -1318,18 +1416,79 @@ nonoverlapping_component_refs_since_matc
>       case the return value will precisely be false.  */
>    while (true)
>      {
> -      bool seen_noncomponent_ref_p = false;
> +      /* Track whether we have seen an unmatched ref with non-zero offset.
> +	 In this case we must look for partial overlaps.  */
> +      bool seen_unmatched_ref_p = false;
> +
> +      /* First match ARRAY_REFs and try to disambiguate.  */
> +      if (!component_refs1.is_empty ()
> +	  && !component_refs2.is_empty ())
> +	{
> +	  unsigned int narray_refs1=0, narray_refs2=0;
> +
> +	  /* We generally assume that both access paths start by the same
> +	     sequence of refs.  However if the number of array refs is not in
> +	     sync, try to recover and pop elts until the numbers match.  This
> +	     helps the case where one access path starts by an array and the
> +	     other by an element.  */

I think part of the confusion is that we're doing this in the outer
while loop, so "starts" applies to all sub-paths we consider?

Or only to the innermost?

So - why's this inside the loop?  Some actual access path pair
examples in the comment would help.  And definitely more testcases
since the single one you add is too simplistic to need all this code ;)

> +	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
> +	       narray_refs1++)
> +	    if (TREE_CODE (component_refs1 [component_refs1.length()
> +					    - 1 - narray_refs1]) != ARRAY_REF)
> +	      break;
> +
> +	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
> +	       narray_refs2++)
> +	    if (TREE_CODE (component_refs2 [component_refs2.length()
> +					    - 1 - narray_refs2]) != ARRAY_REF)
> +	      break;
> +	  for (; narray_refs1 > narray_refs2; narray_refs1--)
> +	    {
> +	      ref1 = component_refs1.pop ();
> +	      /* Track whether we possibly introduced partial overlap assuming
> +		 that innermost type sizes do not match.  This can only
> +		 happen if the offset introduced by the ARRAY_REF
> +		 is non-zero.  */
> +	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
> +				    cheap_array_ref_low_bound (ref1), 0))
> +	        seen_unmatched_ref_p = true;
> +	    }
> +	  for (; narray_refs2 > narray_refs1; narray_refs2--)
> +	    {
> +	      ref2 = component_refs2.pop ();
> +	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
> +				    cheap_array_ref_low_bound (ref2), 0))
> +	        seen_unmatched_ref_p = true;
> +	    }
> +	  /* Try to disambiguate matched arrays.  */
> +	  for (unsigned int i = 0; i < narray_refs1; i++)
> +	    {
> +	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
> +						     component_refs2.pop ());
> +	      if (cmp == 1 && !partial_overlap)
> +		{
> +		  ++alias_stats
> +		    .nonoverlapping_refs_since_match_p_no_alias;
> +		  return 1;
> +		}
> +	      partial_overlap = false;
> +	      if (cmp == -1)
> +		seen_unmatched_ref_p = true;
> +	    }
> +	}
> +
> +      /* Next look for component_refs.  */
> +

Still ugh ... :/  (excess vertical space above)

So - better but not yet there.

Richard.

>        do
>  	{
>  	  if (component_refs1.is_empty ())
>  	    {
>  	      ++alias_stats
> -		.nonoverlapping_component_refs_since_match_p_must_overlap;
> +		.nonoverlapping_refs_since_match_p_must_overlap;
>  	      return 0;
>  	    }
>  	  ref1 = component_refs1.pop ();
>  	  if (TREE_CODE (ref1) != COMPONENT_REF)
> -	    seen_noncomponent_ref_p = true;
> +	    seen_unmatched_ref_p = true;
>  	}
>        while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
>  
> @@ -1338,12 +1497,12 @@ nonoverlapping_component_refs_since_matc
>  	  if (component_refs2.is_empty ())
>  	    {
>  	      ++alias_stats
> -		.nonoverlapping_component_refs_since_match_p_must_overlap;
> +		.nonoverlapping_refs_since_match_p_must_overlap;
>  	      return 0;
>  	    }
>  	  ref2 = component_refs2.pop ();
>  	  if (TREE_CODE (ref2) != COMPONENT_REF)
> -	    seen_noncomponent_ref_p = true;
> +	    seen_unmatched_ref_p = true;
>  	}
>        while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
>  
> @@ -1361,13 +1520,15 @@ nonoverlapping_component_refs_since_matc
>        tree type1 = DECL_CONTEXT (field1);
>        tree type2 = DECL_CONTEXT (field2);
>  
> +      partial_overlap = false;
> +
>        /* If we skipped array refs on type of different sizes, we can
>  	 no longer be sure that there are not partial overlaps.  */
> -      if (seen_noncomponent_ref_p
> +      if (seen_unmatched_ref_p
>  	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
>  	{
>  	  ++alias_stats
> -	    .nonoverlapping_component_refs_since_match_p_may_alias;
> +	    .nonoverlapping_refs_since_match_p_may_alias;
>  	  return -1;
>  	}
>  
> @@ -1375,18 +1536,18 @@ nonoverlapping_component_refs_since_matc
>        if (cmp == -1)
>  	{
>  	  ++alias_stats
> -	    .nonoverlapping_component_refs_since_match_p_may_alias;
> +	    .nonoverlapping_refs_since_match_p_may_alias;
>  	  return -1;
>  	}
>        else if (cmp == 1)
>  	{
>  	  ++alias_stats
> -	    .nonoverlapping_component_refs_since_match_p_no_alias;
> +	    .nonoverlapping_refs_since_match_p_no_alias;
>  	  return 1;
>  	}
>      }
>  
> -  ++alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap;
> +  ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
>    return 0;
>  }
>  
> @@ -1583,8 +1744,7 @@ decl_refs_may_alias_p (tree ref1, tree b
>       so we disambiguate component references manually.  */
>    if (ref1 && ref2
>        && handled_component_p (ref1) && handled_component_p (ref2)
> -      && nonoverlapping_component_refs_since_match_p (NULL, ref1,
> -						      NULL, ref2) == 1)
> +      && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
>      return false;
>  
>    return true;     
> @@ -1709,19 +1869,22 @@ indirect_ref_may_alias_decl_p (tree ref1
>         || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
>         && (TREE_CODE (dbase2) != TARGET_MEM_REF
>  	   || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
> -      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1
> -      && (TREE_CODE (TREE_TYPE (base1)) != ARRAY_TYPE
> -	  || (TYPE_SIZE (TREE_TYPE (base1))
> -	      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) == INTEGER_CST)))
> +      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
>      {
> -      if (!ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
> +      bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
> +			      && (TYPE_SIZE (TREE_TYPE (base1))
> +			      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
> +				 != INTEGER_CST));
> +      if (!partial_overlap
> +	  && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
>  	return false;
>        if (!ref1 || !ref2
>  	  /* If there is must alias, there is no use disambiguating further.  */
> -	  || (known_eq (size1, max_size1) && known_eq (size2, max_size2)))
> +	  || (!partial_overlap
> +	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
>  	return true;
> -      int res = nonoverlapping_component_refs_since_match_p (base1, ref1,
> -							     base2, ref2);
> +      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
> +						   partial_overlap);
>        if (res == -1)
>  	return !nonoverlapping_component_refs_p (ref1, ref2);
>        return !res;
> @@ -1805,8 +1968,8 @@ indirect_refs_may_alias_p (tree ref1 ATT
>  	return true;
>        if (ref1 && ref2)
>  	{
> -	  int res = nonoverlapping_component_refs_since_match_p (NULL, ref1,
> -								 NULL, ref2);
> +	  int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
> +						       false);
>  	  if (res != -1)
>  	    return !res;
>  	}
> @@ -1844,19 +2007,22 @@ indirect_refs_may_alias_p (tree ref1 ATT
>        && (TREE_CODE (base2) != TARGET_MEM_REF
>  	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
>        && same_type_for_tbaa (TREE_TYPE (ptrtype1),
> -			     TREE_TYPE (ptrtype2)) == 1
> +			     TREE_TYPE (ptrtype2)) == 1)
> +    {
>        /* But avoid treating arrays as "objects", instead assume they
>           can overlap by an exact multiple of their element size.
>           See gcc.dg/torture/alias-2.c.  */
> -      && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
> -    {
> -      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
> +      bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
> +
> +      if (!partial_overlap
> +	  && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
>  	return false;
>        if (!ref1 || !ref2
> -	  || (known_eq (size1, max_size1) && known_eq (size2, max_size2)))
> +	  || (!partial_overlap
> +	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
>  	return true;
> -      int res = nonoverlapping_component_refs_since_match_p (base1, ref1,
> -							     base2, ref2);
> +      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
> +						   partial_overlap);
>        if (res == -1)
>  	return !nonoverlapping_component_refs_p (ref1, ref2);
>        return !res;
>
Jan Hubicka Aug. 15, 2019, 1:22 p.m. UTC | #4
Hi,
here is updated version.
> > +	  /* We generally assume that both access paths starts by same sequence
> > +	     of refs.  However if number of array refs is not in sync, try
> > +	     to recover and pop elts until number match.  This helps the case
> > +	     where one access path starts by array and other by element.  */
> 
> I think part of the confusion is that we're doing this in the outer
> while loop, so "starts" applies to all sub-paths we consider?
> 
> Or only to the innermost?
> 
> So - why's this inside the loop?  Some actual access path pair
> examples in the comment would help.  And definitely more testcases
> since the single one you add is too simplistic to need all this code ;)

I have added a testcase.  Basically we hope that the chain of array refs
ends with the same type, and in that case we want to peel off the
outermost.  You are right that it does not always work, especially if the
innermost reference is an array (which is rare).

I suppose one can do type matching once actually have
same_type_for_tbaa_p working on array_ref.

I have added a testcase; if you would prefer to move the logic out of the
walking loop, I can do that.  I can also just drop it and handle this
later.

I put it into the inner loop basically to increase the chances that we
successfully walk access paths of different types in situations where
-fno-strict-aliasing is used and the type sequences are not fully
compatible.

I plan to put some love into -fno-strict-aliasing incrementally.

This patch adds testcase for the access paths of different lengths and
fixes other issues discussed. It is another testcase where fre1 seems to
give up and fre3 is needed.

Before fre1 we get:
test (int i, int j)
{
  int[10][10] * innerptr;
  int[10][10][10] * barptr.0_1;
  int[10][10][10] * barptr.1_2;
  int _9;

  <bb 2> :
  innerptr_4 = barptr;
  barptr.0_1 = barptr;
  (*barptr.0_1)[i_5(D)][2][j_6(D)] = 10;
  (*innerptr_4)[3][j_6(D)] = 11;
  barptr.1_2 = barptr;
  _9 = (*barptr.1_2)[i_5(D)][2][j_6(D)];
  return _9;

}

that is optimized to:
test (int i, int j)
{
  int[10][10] * innerptr;
  int _9;

  <bb 2> :
  innerptr_4 = barptr;
  MEM[(int[10][10][10] *)innerptr_4][i_5(D)][2][j_6(D)] = 10;
  (*innerptr_4)[3][j_6(D)] = 11;
  _9 = MEM[(int[10][10][10] *)innerptr_4][i_5(D)][2][j_6(D)];
  return _9;

}

before fre3 we get:
test (int i, int j)
{
  int[10][10] * innerptr;
  int _7;

  <bb 2> [local count: 1073741824]:
  innerptr_2 = barptr;
  MEM[(int[10][10][10] *)innerptr_2][i_3(D)][2][j_4(D)] = 10;
  (*innerptr_2)[3][j_4(D)] = 11;
  _7 = MEM[(int[10][10][10] *)innerptr_2][i_3(D)][2][j_4(D)];
  return _7;

}
and fre3 does:
test (int i, int j)
{
  int[10][10] * innerptr;

  <bb 2> [local count: 1073741824]:
  innerptr_2 = barptr;
  MEM[(int[10][10][10] *)innerptr_2][i_3(D)][2][j_4(D)] = 10;
  (*innerptr_2)[3][j_4(D)] = 11;
  return 10;

}

Honza

	* tree-ssa-alias.c (nonoverlapping_component_refs_since_match_p):
	Rename to ...
	(nonoverlapping_refs_since_match_p): ... this; handle also
	ARRAY_REFs.
	(alias_stats): Update stats.
	(dump_alias_stats): Likewise.
	(cheap_array_ref_low_bound): New function.
	(aliasing_matching_component_refs_p): Add partial_overlap argument;
	pass it to nonoverlapping_refs_since_match_p.
	(aliasing_component_refs_walk): Update call of
	aliasing_matching_component_refs_p.
	(nonoverlapping_array_refs_p): New function.
	(decl_refs_may_alias_p, indirect_ref_may_alias_decl_p,
	indirect_refs_may_alias_p): Update calls of
	nonoverlapping_refs_since_match_p.
	* gcc.dg/tree-ssa/alias-access-path-10.c: New testcase.

Index: testsuite/gcc.dg/tree-ssa/alias-access-path-10.c
===================================================================
--- testsuite/gcc.dg/tree-ssa/alias-access-path-10.c	(nonexistent)
+++ testsuite/gcc.dg/tree-ssa/alias-access-path-10.c	(working copy)
@@ -0,0 +1,12 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-fre1" } */
+
+struct a {int array[3];} a[10];
+int
+test(int i,int j)
+{
+  a[i].array[1]=123;
+  a[j].array[2]=2;
+  return a[i].array[1];
+}
+/* { dg-final { scan-tree-dump-times "return 123" 1 "fre1"} } */
Index: testsuite/gcc.dg/tree-ssa/alias-access-path-11.c
===================================================================
--- testsuite/gcc.dg/tree-ssa/alias-access-path-11.c	(nonexistent)
+++ testsuite/gcc.dg/tree-ssa/alias-access-path-11.c	(working copy)
@@ -0,0 +1,15 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fno-strict-aliasing -fdump-tree-fre3" } */
+typedef int outerarray[10][10][10];
+typedef int innerarray[10][10];
+outerarray *barptr;
+
+int
+test(int i,int j)
+{
+  innerarray *innerptr = (innerarray *)barptr;
+  (*barptr)[i][2][j]=10;;
+  (*innerptr)[3][j]=11;
+  return (*barptr)[i][2][j];
+}
+/* { dg-final { scan-tree-dump-times "return 10" 1 "fre3"} } */
Index: tree-ssa-alias.c
===================================================================
--- tree-ssa-alias.c	(revision 274527)
+++ tree-ssa-alias.c	(working copy)
@@ -87,7 +87,7 @@ along with GCC; see the file COPYING3.
    this file.  Low-level disambiguators dealing with points-to
    information are in tree-ssa-structalias.c.  */
 
-static int nonoverlapping_component_refs_since_match_p (tree, tree, tree, tree);
+static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
 
 /* Query statistics for the different low-level disambiguators.
@@ -104,9 +104,9 @@ static struct {
   unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
   unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
   unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
-  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_may_alias;
-  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_must_overlap;
-  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_no_alias;
+  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
+  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
+  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
 } alias_stats;
 
 void
@@ -137,15 +137,15 @@ dump_alias_stats (FILE *s)
 	   alias_stats.nonoverlapping_component_refs_p_no_alias,
 	   alias_stats.nonoverlapping_component_refs_p_no_alias
 	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
-  fprintf (s, "  nonoverlapping_component_refs_since_match_p: "
+  fprintf (s, "  nonoverlapping_refs_since_match_p: "
 	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
 	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
 	   HOST_WIDE_INT_PRINT_DEC" queries\n",
-	   alias_stats.nonoverlapping_component_refs_since_match_p_no_alias,
-	   alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap,
-	   alias_stats.nonoverlapping_component_refs_since_match_p_no_alias
-	   + alias_stats.nonoverlapping_component_refs_since_match_p_may_alias
-	   + alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap);
+	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
+	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
+	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
+	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
+	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
   fprintf (s, "  aliasing_component_refs_p: "
 	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
 	   HOST_WIDE_INT_PRINT_DEC" queries\n",
@@ -856,7 +856,8 @@ type_has_components_p (tree type)
 
 /* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
    respectively are either pointing to same address or are completely
-   disjoint.
+   disjoint.  If PARTIAL_OVERLAP is true, assume that outermost arrays may
+   just partly overlap.
 
    Try to disambiguate using the access path starting from the match
    and return false if there is no conflict.
@@ -867,24 +868,27 @@ static bool
 aliasing_matching_component_refs_p (tree match1, tree ref1,
 				    poly_int64 offset1, poly_int64 max_size1,
 				    tree match2, tree ref2,
-				    poly_int64 offset2, poly_int64 max_size2)
+				    poly_int64 offset2, poly_int64 max_size2,
+				    bool partial_overlap)
 {
   poly_int64 offadj, sztmp, msztmp;
   bool reverse;
 
-
-  get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
-  offset2 -= offadj;
-  get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
-  offset1 -= offadj;
-  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
+  if (!partial_overlap)
     {
-      ++alias_stats.aliasing_component_refs_p_no_alias;
-      return false;
+      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
+      offset2 -= offadj;
+      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
+      offset1 -= offadj;
+      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
+	{
+	  ++alias_stats.aliasing_component_refs_p_no_alias;
+	  return false;
+	}
     }
 
-  int cmp = nonoverlapping_component_refs_since_match_p (match1, ref1,
-							 match2, ref2);
+  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
+					       partial_overlap);
   if (cmp == 1
       || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
     {
@@ -964,6 +968,8 @@ aliasing_component_refs_walk (tree ref1,
     }
   if (same_p == 1)
     {
+      bool partial_overlap = false;
+
       /* We assume that arrays can overlap by multiple of their elements
 	 size as tested in gcc.dg/torture/alias-2.c.
 	 This partial overlap happen only when both arrays are bases of
@@ -973,15 +979,18 @@ aliasing_component_refs_walk (tree ref1,
 	  && (!TYPE_SIZE (TREE_TYPE (base1))
 	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
 	      || ref == base2))
-	/* Setting maybe_match to true triggers
-	   nonoverlapping_component_refs_p test later that still may do
-	   useful disambiguation.  */
-	*maybe_match = true;
-      else
-	return aliasing_matching_component_refs_p (base1, ref1,
-						   offset1, max_size1,
-						   ref, ref2,
-						   offset2, max_size2);
+	{
+	  /* Setting maybe_match to true triggers
+	     nonoverlapping_component_refs_p test later that still may do
+	     useful disambiguation.  */
+	  *maybe_match = true;
+	  partial_overlap = true;
+	}
+      return aliasing_matching_component_refs_p (base1, ref1,
+						 offset1, max_size1,
+						 ref, ref2,
+						 offset2, max_size2,
+						 partial_overlap);
     }
   return -1;
 }
@@ -1225,10 +1234,98 @@ nonoverlapping_component_refs_p_1 (const
   return -1;
 }
 
+/* Return the low bound of an ARRAY_REF.  Do not produce new trees
+   and thus do not care about the particular type of integer constants
+   and placeholder exprs.  */
+
+static tree
+cheap_array_ref_low_bound (tree ref)
+{
+  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
+
+  /* Avoid expensive array_ref_low_bound.
+     The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE
+     of the domain type, or it is zero.  */
+  if (TREE_OPERAND (ref, 2))
+    return TREE_OPERAND (ref, 2);
+  else if (domain_type && TYPE_MIN_VALUE (domain_type))
+    return TYPE_MIN_VALUE (domain_type);
+  else
+    return integer_zero_node;
+}
+
+/* REF1 and REF2 are ARRAY_REFs which either have the same base address or
+   are completely disjoint.
+
+   Return 1 if the refs are non-overlapping.
+   Return 0 if they are possibly overlapping but if so the overlap again
+   starts on the same address.
+   Return -1 otherwise.  */
+
+int
+nonoverlapping_array_refs_p (tree ref1, tree ref2)
+{
+  tree index1 = TREE_OPERAND (ref1, 1);
+  tree index2 = TREE_OPERAND (ref2, 1);
+  tree low_bound1 = cheap_array_ref_low_bound(ref1);
+  tree low_bound2 = cheap_array_ref_low_bound(ref2);
+
+  /* Handle zero offsets first: we do not need to match type size in this
+     case.  */
+  if (operand_equal_p (index1, low_bound1, 0)
+      && operand_equal_p (index2, low_bound2, 0))
+    return 0;
+
+  /* If type sizes are different, give up.
+
+     Avoid expensive array_ref_element_size.
+     If operand 3 is present it denotes the size in alignment units.
+     Otherwise size is TYPE_SIZE of the element type.
+     Handle only common cases where types are of the same "kind".  */
+  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
+    return -1;
+
+  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
+  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
+
+  if (TREE_OPERAND (ref1, 3))
+    {
+      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
+	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
+			       TREE_OPERAND (ref2, 3), 0))
+	return -1;
+    }
+  else
+    {
+      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
+			    TYPE_SIZE_UNIT (elmt_type2), 0))
+	return -1;
+    }
+
+  /* Since we know that type sizes are the same, there is no need to return
+     -1 after this point.  Partial overlap cannot be introduced.  */
+
+  /* We may need to fold trees in this case.
+     TODO: Handle integer constant case at least.  */
+  if (!operand_equal_p (low_bound1, low_bound2, 0))
+    return 0;
+
+  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
+    {
+      if (tree_int_cst_equal (index1, index2))
+	return 0;
+      return 1;
+    }
+  /* TODO: We can use VRP to further disambiguate here. */
+  return 0;
+}
+
 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
    MATCH2 either point to the same address or are disjoint.
    MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
    respectively or NULL in the case we established equivalence of bases.
+   If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
+   overlap by an exact multiple of their element size.
 
    This test works by matching the initial segment of the access path
    and does not rely on TBAA thus is safe for !flag_strict_aliasing if
@@ -1247,8 +1344,9 @@ nonoverlapping_component_refs_p_1 (const
    oracles.  */
 
 static int
-nonoverlapping_component_refs_since_match_p (tree match1, tree ref1,
-					     tree match2, tree ref2)
+nonoverlapping_refs_since_match_p (tree match1, tree ref1,
+				   tree match2, tree ref2,
+				   bool partial_overlap)
 {
   /* Early return if there are no references to match, we do not need
      to walk the access paths.
@@ -1301,7 +1399,7 @@ nonoverlapping_component_refs_since_matc
 	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
 				  TREE_OPERAND (ref2, 1))))
     {
-      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
+      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
       return -1;
     }
 
@@ -1318,18 +1416,78 @@ nonoverlapping_component_refs_since_matc
      case the return value will precisely be false.  */
   while (true)
     {
-      bool seen_noncomponent_ref_p = false;
+      /* Track whether we have seen an unmatched ref with non-zero offset.
+	 In this case we must look for partial overlaps.  */
+      bool seen_unmatched_ref_p = false;
+
+      /* First match ARRAY_REFs and try to disambiguate.  */
+      if (!component_refs1.is_empty ()
+	  && !component_refs2.is_empty ())
+	{
+	  unsigned int narray_refs1=0, narray_refs2=0;
+
+	  /* We generally assume that both access paths start by the same
+	     sequence of refs.  However if the number of array refs is not in
+	     sync, try to recover and pop elts until the numbers match.  This
+	     helps the case where one access path starts by an array and the
+	     other by an element.  */
+	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
+	       narray_refs1++)
+	    if (TREE_CODE (component_refs1 [component_refs1.length()
+					    - 1 - narray_refs1]) != ARRAY_REF)
+	      break;
+
+	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
+	       narray_refs2++)
+	    if (TREE_CODE (component_refs2 [component_refs2.length()
+					    - 1 - narray_refs2]) != ARRAY_REF)
+	      break;
+	  for (; narray_refs1 > narray_refs2; narray_refs1--)
+	    {
+	      ref1 = component_refs1.pop ();
+	      /* Track whether we possibly introduced partial overlap assuming
+		 that innermost type sizes do not match.  This can only
+		 happen if the offset introduced by the ARRAY_REF
+		 is non-zero.  */
+	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
+				    cheap_array_ref_low_bound (ref1), 0))
+	        seen_unmatched_ref_p = true;
+	    }
+	  for (; narray_refs2 > narray_refs1; narray_refs2--)
+	    {
+	      ref2 = component_refs2.pop ();
+	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
+				    cheap_array_ref_low_bound (ref2), 0))
+	        seen_unmatched_ref_p = true;
+	    }
+	  /* Try to disambiguate matched arrays.  */
+	  for (unsigned int i = 0; i < narray_refs1; i++)
+	    {
+	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
+						     component_refs2.pop ());
+	      if (cmp == 1 && !partial_overlap)
+		{
+		  ++alias_stats
+		    .nonoverlapping_refs_since_match_p_no_alias;
+		  return 1;
+		}
+	      partial_overlap = false;
+	      if (cmp == -1)
+		seen_unmatched_ref_p = true;
+	    }
+	}
+
+      /* Next look for component_refs.  */
       do
 	{
 	  if (component_refs1.is_empty ())
 	    {
 	      ++alias_stats
-		.nonoverlapping_component_refs_since_match_p_must_overlap;
+		.nonoverlapping_refs_since_match_p_must_overlap;
 	      return 0;
 	    }
 	  ref1 = component_refs1.pop ();
 	  if (TREE_CODE (ref1) != COMPONENT_REF)
-	    seen_noncomponent_ref_p = true;
+	    seen_unmatched_ref_p = true;
 	}
       while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
 
@@ -1338,12 +1496,12 @@ nonoverlapping_component_refs_since_matc
 	  if (component_refs2.is_empty ())
 	    {
 	      ++alias_stats
-		.nonoverlapping_component_refs_since_match_p_must_overlap;
+		.nonoverlapping_refs_since_match_p_must_overlap;
 	      return 0;
 	    }
 	  ref2 = component_refs2.pop ();
 	  if (TREE_CODE (ref2) != COMPONENT_REF)
-	    seen_noncomponent_ref_p = true;
+	    seen_unmatched_ref_p = true;
 	}
       while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
 
@@ -1361,13 +1519,15 @@ nonoverlapping_component_refs_since_matc
       tree type1 = DECL_CONTEXT (field1);
       tree type2 = DECL_CONTEXT (field2);
 
+      partial_overlap = false;
+
       /* If we skipped array refs on type of different sizes, we can
 	 no longer be sure that there are not partial overlaps.  */
-      if (seen_noncomponent_ref_p
+      if (seen_unmatched_ref_p
 	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
 	{
 	  ++alias_stats
-	    .nonoverlapping_component_refs_since_match_p_may_alias;
+	    .nonoverlapping_refs_since_match_p_may_alias;
 	  return -1;
 	}
 
@@ -1375,18 +1535,18 @@ nonoverlapping_component_refs_since_matc
       if (cmp == -1)
 	{
 	  ++alias_stats
-	    .nonoverlapping_component_refs_since_match_p_may_alias;
+	    .nonoverlapping_refs_since_match_p_may_alias;
 	  return -1;
 	}
       else if (cmp == 1)
 	{
 	  ++alias_stats
-	    .nonoverlapping_component_refs_since_match_p_no_alias;
+	    .nonoverlapping_refs_since_match_p_no_alias;
 	  return 1;
 	}
     }
 
-  ++alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap;
+  ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
   return 0;
 }
 
@@ -1583,8 +1743,7 @@ decl_refs_may_alias_p (tree ref1, tree b
      so we disambiguate component references manually.  */
   if (ref1 && ref2
       && handled_component_p (ref1) && handled_component_p (ref2)
-      && nonoverlapping_component_refs_since_match_p (NULL, ref1,
-						      NULL, ref2) == 1)
+      && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
     return false;
 
   return true;     
@@ -1709,19 +1868,22 @@ indirect_ref_may_alias_decl_p (tree ref1
        || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
        && (TREE_CODE (dbase2) != TARGET_MEM_REF
 	   || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
-      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1
-      && (TREE_CODE (TREE_TYPE (base1)) != ARRAY_TYPE
-	  || (TYPE_SIZE (TREE_TYPE (base1))
-	      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) == INTEGER_CST)))
+      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
     {
-      if (!ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
+      bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
+			      && (TYPE_SIZE (TREE_TYPE (base1))
+			      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
+				 != INTEGER_CST));
+      if (!partial_overlap
+	  && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
 	return false;
       if (!ref1 || !ref2
 	  /* If there is must alias, there is no use disambiguating further.  */
-	  || (known_eq (size1, max_size1) && known_eq (size2, max_size2)))
+	  || (!partial_overlap
+	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
 	return true;
-      int res = nonoverlapping_component_refs_since_match_p (base1, ref1,
-							     base2, ref2);
+      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
+						   partial_overlap);
       if (res == -1)
 	return !nonoverlapping_component_refs_p (ref1, ref2);
       return !res;
@@ -1805,8 +1967,8 @@ indirect_refs_may_alias_p (tree ref1 ATT
 	return true;
       if (ref1 && ref2)
 	{
-	  int res = nonoverlapping_component_refs_since_match_p (NULL, ref1,
-								 NULL, ref2);
+	  int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
+						       false);
 	  if (res != -1)
 	    return !res;
 	}
@@ -1844,19 +2006,22 @@ indirect_refs_may_alias_p (tree ref1 ATT
       && (TREE_CODE (base2) != TARGET_MEM_REF
 	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
       && same_type_for_tbaa (TREE_TYPE (ptrtype1),
-			     TREE_TYPE (ptrtype2)) == 1
+			     TREE_TYPE (ptrtype2)) == 1)
+    {
       /* But avoid treating arrays as "objects", instead assume they
          can overlap by an exact multiple of their element size.
          See gcc.dg/torture/alias-2.c.  */
-      && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
-    {
-      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
+      bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
+
+      if (!partial_overlap
+	  && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
 	return false;
       if (!ref1 || !ref2
-	  || (known_eq (size1, max_size1) && known_eq (size2, max_size2)))
+	  || (!partial_overlap
+	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
 	return true;
-      int res = nonoverlapping_component_refs_since_match_p (base1, ref1,
-							     base2, ref2);
+      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
+						   partial_overlap);
       if (res == -1)
 	return !nonoverlapping_component_refs_p (ref1, ref2);
       return !res;

Patch
diff mbox series

Index: tree-ssa-alias.c
===================================================================
--- tree-ssa-alias.c	(revision 273570)
+++ tree-ssa-alias.c	(working copy)
@@ -87,7 +87,7 @@  along with GCC; see the file COPYING3.
    this file.  Low-level disambiguators dealing with points-to
    information are in tree-ssa-structalias.c.  */
 
-static int nonoverlapping_component_refs_since_match_p (tree, tree, tree, tree);
+static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
 
 /* Query statistics for the different low-level disambiguators.
@@ -104,9 +104,9 @@  static struct {
   unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
   unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
   unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
-  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_may_alias;
-  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_must_overlap;
-  unsigned HOST_WIDE_INT nonoverlapping_component_refs_since_match_p_no_alias;
+  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
+  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
+  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
 } alias_stats;
 
 void
@@ -137,15 +137,15 @@  dump_alias_stats (FILE *s)
 	   alias_stats.nonoverlapping_component_refs_p_no_alias,
 	   alias_stats.nonoverlapping_component_refs_p_no_alias
 	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
-  fprintf (s, "  nonoverlapping_component_refs_since_match_p: "
+  fprintf (s, "  nonoverlapping_refs_since_match_p: "
 	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
 	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
 	   HOST_WIDE_INT_PRINT_DEC" queries\n",
-	   alias_stats.nonoverlapping_component_refs_since_match_p_no_alias,
-	   alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap,
-	   alias_stats.nonoverlapping_component_refs_since_match_p_no_alias
-	   + alias_stats.nonoverlapping_component_refs_since_match_p_may_alias
-	   + alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap);
+	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
+	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
+	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
+	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
+	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
   fprintf (s, "  aliasing_component_refs_p: "
 	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
 	   HOST_WIDE_INT_PRINT_DEC" queries\n",
@@ -856,7 +856,8 @@  type_has_components_p (tree type)
 
 /* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
    respectively are either pointing to same address or are completely
-   disjoint.
+   disjoint.  If PARTIAL_OVERLAP is true, assume that the outermost arrays
+   may just partly overlap.
 
    Try to disambiguate using the access path starting from the match
    and return false if there is no conflict.
@@ -867,24 +868,27 @@  static bool
 aliasing_matching_component_refs_p (tree match1, tree ref1,
 				    poly_int64 offset1, poly_int64 max_size1,
 				    tree match2, tree ref2,
-				    poly_int64 offset2, poly_int64 max_size2)
+				    poly_int64 offset2, poly_int64 max_size2,
+				    bool partial_overlap)
 {
   poly_int64 offadj, sztmp, msztmp;
   bool reverse;
 
-
-  get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
-  offset2 -= offadj;
-  get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
-  offset1 -= offadj;
-  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
+  if (!partial_overlap)
     {
-      ++alias_stats.aliasing_component_refs_p_no_alias;
-      return false;
+      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
+      offset2 -= offadj;
+      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
+      offset1 -= offadj;
+      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
+	{
+	  ++alias_stats.aliasing_component_refs_p_no_alias;
+	  return false;
+	}
     }
 
-  int cmp = nonoverlapping_component_refs_since_match_p (match1, ref1,
-							 match2, ref2);
+  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
+					       partial_overlap);
   if (cmp == 1
       || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
     {
@@ -964,6 +968,8 @@  aliasing_component_refs_walk (tree ref1,
     }
   if (same_p == 1)
     {
+      bool partial_overlap = false;
+
       /* We assume that arrays can overlap by multiple of their elements
 	 size as tested in gcc.dg/torture/alias-2.c.
 	 This partial overlap happen only when both arrays are bases of
@@ -973,15 +979,18 @@  aliasing_component_refs_walk (tree ref1,
 	  && (!TYPE_SIZE (TREE_TYPE (base1))
 	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
 	      || ref == base2))
-	/* Setting maybe_match to true triggers
-	   nonoverlapping_component_refs_p test later that still may do
-	   useful disambiguation.  */
-	*maybe_match = true;
-      else
-	return aliasing_matching_component_refs_p (base1, ref1,
-						   offset1, max_size1,
-						   ref, ref2,
-						   offset2, max_size2);
+	{
+	  /* Setting maybe_match to true triggers
+	     nonoverlapping_component_refs_p test later that still may do
+	     useful disambiguation.  */
+	  *maybe_match = true;
+	  partial_overlap = true;
+	}
+      return aliasing_matching_component_refs_p (base1, ref1,
+						 offset1, max_size1,
+						 ref, ref2,
+						 offset2, max_size2,
+						 partial_overlap);
     }
   return -1;
 }
@@ -1225,10 +1234,57 @@  nonoverlapping_component_refs_p_1 (const
   return -1;
 }
 
+/* REF1 and REF2 are ARRAY_REFs which either start at the same address or
+   are completely disjoint.
+
+   Return 1 if the refs are non-overlapping.
+   Return 0 if they are possibly overlapping but if so the overlap again
+   starts on the same address.
+   Return -1 otherwise.  */
+
+int
+nonoverlapping_array_refs_p (tree ref1, tree ref2)
+{
+  tree low_bound1 = array_ref_low_bound (ref1);
+  tree low_bound2 = array_ref_low_bound (ref2);
+  tree index1 = TREE_OPERAND (ref1, 1);
+  tree index2 = TREE_OPERAND (ref2, 1);
+
+  /* Handle zero offsets first: we do not need to match type size in this
+     case.  */
+  if (operand_equal_p (index1, low_bound1, 0)
+      && operand_equal_p (index2, low_bound2, 0))
+    return 0;
+
+  /* If type sizes are different, give up.  */
+  if (!operand_equal_p (array_ref_element_size (ref1),
+			array_ref_element_size (ref2), 0))
+    return -1;
+
+  /* Since we know that type sizes are the same, there is no need to return
+     -1 after this point, since partial overlap cannot be introduced.  */
+
+  /* We may need to fold trees in this case.
+     TODO: Handle integer constant case at least.  */
+  if (!operand_equal_p (low_bound1, low_bound2, 0))
+    return 0;
+
+  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
+    {
+      if (tree_int_cst_equal (index1, index2))
+	return 0;
+      return 1;
+    }
+  /* TODO: We can use VRP to further disambiguate here. */
+  return 0;
+}
+
 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
    MATCH2 either point to the same address or are disjoint.
    MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
    respectively or NULL in the case we established equivalence of bases.
+   If PARTIAL_OVERLAP is true, assume that the toplevel arrays may actually
+   overlap by an exact multiple of their element size.
 
    This test works by matching the initial segment of the access path
    and does not rely on TBAA thus is safe for !flag_strict_aliasing if
@@ -1247,8 +1303,9 @@  nonoverlapping_component_refs_p_1 (const
    oracles.  */
 
 static int
-nonoverlapping_component_refs_since_match_p (tree match1, tree ref1,
-					     tree match2, tree ref2)
+nonoverlapping_refs_since_match_p (tree match1, tree ref1,
+				   tree match2, tree ref2,
+				   bool partial_overlap)
 {
   /* Early return if there are no references to match, we do not need
      to walk the access paths.
@@ -1301,7 +1358,7 @@  nonoverlapping_component_refs_since_matc
 	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
 				  TREE_OPERAND (ref2, 1))))
     {
-      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
+      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
       return -1;
     }
 
@@ -1318,18 +1375,75 @@  nonoverlapping_component_refs_since_matc
      case the return value will precisely be false.  */
   while (true)
     {
-      bool seen_noncomponent_ref_p = false;
+      /* Track whether we have seen an unmatched ref with a non-zero offset.
+	 In this case we must look for partial overlaps.  */
+      bool seen_unmatched_ref_p = false;
+
+      /* First match ARRAY_REFs and try to disambiguate.  */
+      if (!component_refs1.is_empty ()
+	  && !component_refs2.is_empty ())
+	{
+	  unsigned int narray_refs1=0, narray_refs2=0;
+
+	  /* We generally assume that both access paths start with the same
+	     sequence of refs.  However, if the number of array refs is not
+	     in sync, try to recover and pop elts until the numbers match.
+	     This helps the case where one access path starts with an array
+	     and the other with an element.  */
+	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
+	       narray_refs1++)
+	    if (TREE_CODE (component_refs1 [component_refs1.length()
+					    - 1 - narray_refs1]) != ARRAY_REF)
+	      break;
+
+	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
+	       narray_refs2++)
+	    if (TREE_CODE (component_refs2 [component_refs2.length()
+					    - 1 - narray_refs2]) != ARRAY_REF)
+	      break;
+	  for (; narray_refs1 > narray_refs2; narray_refs1--)
+	    {
+	      ref1 = component_refs1.pop ();
+	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
+				    array_ref_low_bound (ref1), 0))
+	        seen_unmatched_ref_p = true;
+	    }
+	  for (; narray_refs2 > narray_refs1; narray_refs2--)
+	    {
+	      ref2 = component_refs2.pop ();
+	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
+				    array_ref_low_bound (ref2), 0))
+	        seen_unmatched_ref_p = true;
+	    }
+	  /* Try to disambiguate matched arrays.  */
+	  for (unsigned int i = 0; i < narray_refs1; i++)
+	    {
+	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
+						     component_refs2.pop ());
+	      if (cmp == 1 && !partial_overlap)
+		{
+		  ++alias_stats
+		    .nonoverlapping_refs_since_match_p_no_alias;
+		  return 1;
+		}
+	      partial_overlap = false;
+	      if (cmp == -1)
+		seen_unmatched_ref_p = true;
+	    }
+	}
+
+      /* Next look for component_refs.  */
+
       do
 	{
 	  if (component_refs1.is_empty ())
 	    {
 	      ++alias_stats
-		.nonoverlapping_component_refs_since_match_p_must_overlap;
+		.nonoverlapping_refs_since_match_p_must_overlap;
 	      return 0;
 	    }
 	  ref1 = component_refs1.pop ();
 	  if (TREE_CODE (ref1) != COMPONENT_REF)
-	    seen_noncomponent_ref_p = true;
+	    seen_unmatched_ref_p = true;
 	}
       while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
 
@@ -1338,12 +1452,12 @@  nonoverlapping_component_refs_since_matc
 	  if (component_refs2.is_empty ())
 	    {
 	      ++alias_stats
-		.nonoverlapping_component_refs_since_match_p_must_overlap;
+		.nonoverlapping_refs_since_match_p_must_overlap;
 	      return 0;
 	    }
 	  ref2 = component_refs2.pop ();
 	  if (TREE_CODE (ref2) != COMPONENT_REF)
-	    seen_noncomponent_ref_p = true;
+	    seen_unmatched_ref_p = true;
 	}
       while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
 
@@ -1361,13 +1475,15 @@  nonoverlapping_component_refs_since_matc
       tree type1 = DECL_CONTEXT (field1);
       tree type2 = DECL_CONTEXT (field2);
 
+      partial_overlap = false;
+
       /* If we skipped array refs on type of different sizes, we can
 	 no longer be sure that there are not partial overlaps.  */
-      if (seen_noncomponent_ref_p
+      if (seen_unmatched_ref_p
 	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
 	{
 	  ++alias_stats
-	    .nonoverlapping_component_refs_since_match_p_may_alias;
+	    .nonoverlapping_refs_since_match_p_may_alias;
 	  return -1;
 	}
 
@@ -1375,18 +1491,18 @@  nonoverlapping_component_refs_since_matc
       if (cmp == -1)
 	{
 	  ++alias_stats
-	    .nonoverlapping_component_refs_since_match_p_may_alias;
+	    .nonoverlapping_refs_since_match_p_may_alias;
 	  return -1;
 	}
       else if (cmp == 1)
 	{
 	  ++alias_stats
-	    .nonoverlapping_component_refs_since_match_p_no_alias;
+	    .nonoverlapping_refs_since_match_p_no_alias;
 	  return 1;
 	}
     }
 
-  ++alias_stats.nonoverlapping_component_refs_since_match_p_must_overlap;
+  ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
   return 0;
 }
 
@@ -1583,8 +1699,7 @@  decl_refs_may_alias_p (tree ref1, tree b
      so we disambiguate component references manually.  */
   if (ref1 && ref2
       && handled_component_p (ref1) && handled_component_p (ref2)
-      && nonoverlapping_component_refs_since_match_p (NULL, ref1,
-						      NULL, ref2) == 1)
+      && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
     return false;
 
   return true;     
@@ -1709,19 +1824,22 @@  indirect_ref_may_alias_decl_p (tree ref1
        || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
        && (TREE_CODE (dbase2) != TARGET_MEM_REF
 	   || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
-      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1
-      && (TREE_CODE (TREE_TYPE (base1)) != ARRAY_TYPE
-	  || (TYPE_SIZE (TREE_TYPE (base1))
-	      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) == INTEGER_CST)))
+      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
     {
-      if (!ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
+      bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
+			      && (TYPE_SIZE (TREE_TYPE (base1))
+			      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
+				 != INTEGER_CST));
+      if (!partial_overlap
+	  && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
 	return false;
       if (!ref1 || !ref2
 	  /* If there is must alias, there is no use disambiguating further.  */
-	  || (known_eq (size1, max_size1) && known_eq (size2, max_size2)))
+	  || (!partial_overlap
+	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
 	return true;
-      int res = nonoverlapping_component_refs_since_match_p (base1, ref1,
-							     base2, ref2);
+      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
+						   partial_overlap);
       if (res == -1)
 	return !nonoverlapping_component_refs_p (ref1, ref2);
       return !res;
@@ -1805,8 +1923,8 @@  indirect_refs_may_alias_p (tree ref1 ATT
 	return true;
       if (ref1 && ref2)
 	{
-	  int res = nonoverlapping_component_refs_since_match_p (NULL, ref1,
-								 NULL, ref2);
+	  int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
+						       false);
 	  if (res != -1)
 	    return !res;
 	}
@@ -1844,19 +1962,22 @@  indirect_refs_may_alias_p (tree ref1 ATT
       && (TREE_CODE (base2) != TARGET_MEM_REF
 	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
       && same_type_for_tbaa (TREE_TYPE (ptrtype1),
-			     TREE_TYPE (ptrtype2)) == 1
+			     TREE_TYPE (ptrtype2)) == 1)
+    {
       /* But avoid treating arrays as "objects", instead assume they
          can overlap by an exact multiple of their element size.
          See gcc.dg/torture/alias-2.c.  */
-      && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
-    {
-      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
+      bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
+
+      if (!partial_overlap
+	  && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
 	return false;
       if (!ref1 || !ref2
-	  || (known_eq (size1, max_size1) && known_eq (size2, max_size2)))
+	  || (!partial_overlap
+	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
 	return true;
-      int res = nonoverlapping_component_refs_since_match_p (base1, ref1,
-							     base2, ref2);
+      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
+						   partial_overlap);
       if (res == -1)
 	return !nonoverlapping_component_refs_p (ref1, ref2);
       return !res;
Index: testsuite/gcc.dg/tree-ssa/alias-access-path-10.c
===================================================================
--- testsuite/gcc.dg/tree-ssa/alias-access-path-10.c	(nonexistent)
+++ testsuite/gcc.dg/tree-ssa/alias-access-path-10.c	(working copy)
@@ -0,0 +1,12 @@ 
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-fre1" } */
+
+struct a {int array[3];} a[10];
+int
+test(int i,int j)
+{
+  a[i].array[1]=123;
+  a[j].array[2]=2;
+  return a[i].array[1];
+}
+/* { dg-final { scan-tree-dump-times "return 123" 1 "fre1"} } */