tree-optimization/105517 - avoid offset truncation during VN

Message ID 20220509114214.C89E113AA5@imap2.suse-dmz.suse.de
State New

Commit Message

Richard Biener May 9, 2022, 11:42 a.m. UTC
When value-numbering an address expression like
&p_74(D)->a1x[4294967295].a1; we are accumulating the byte offset
in a 64-bit integer.  When later exploiting the duality between
that and a POINTER_PLUS_EXPR we should avoid truncating that
offset to fit in the target-specific sizetype.  While such
overflows are generally undefined behavior, exploiting them
may lead to spurious missing diagnostics.

Bootstrapped and tested on x86_64-unknown-linux-gnu, pushed.

2022-05-09  Richard Biener  <rguenther@suse.de>

	PR tree-optimization/105517
	* tree-ssa-sccvn.cc (vn_reference_lookup): Make sure the accumulated
	offset can be represented in the POINTER_PLUS_EXPR IL.
	(vn_reference_insert): Likewise.
	* poly-int.h (sext_hwi): Add poly version of sext_hwi.
---
 gcc/poly-int.h        | 13 +++++++++++++
 gcc/tree-ssa-sccvn.cc | 14 ++++++++++++--
 2 files changed, 25 insertions(+), 2 deletions(-)

Patch

diff --git a/gcc/poly-int.h b/gcc/poly-int.h
index 2bf9d98599f..d085544a57e 100644
--- a/gcc/poly-int.h
+++ b/gcc/poly-int.h
@@ -1178,6 +1178,19 @@  lshift (const poly_int_pod<N, Ca> &a, const Cb &b)
     }
 }
 
+/* Poly version of sext_hwi, with the same interface.  */
+
+template<unsigned int N, typename C>
+inline poly_int<N, HOST_WIDE_INT>
+sext_hwi (const poly_int<N, C> &a, unsigned int precision)
+{
+  poly_int_pod<N, HOST_WIDE_INT> r;
+  for (unsigned int i = 0; i < N; i++)
+    r.coeffs[i] = sext_hwi (a.coeffs[i], precision);
+  return r;
+}
+
+
 /* Return true if a0 + a1 * x might equal b0 + b1 * x for some nonnegative
    integer x.  */
 
diff --git a/gcc/tree-ssa-sccvn.cc b/gcc/tree-ssa-sccvn.cc
index 76587632312..3732d06b0bb 100644
--- a/gcc/tree-ssa-sccvn.cc
+++ b/gcc/tree-ssa-sccvn.cc
@@ -3684,7 +3684,12 @@  vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
 	    break;
 	  off += vro->off;
 	}
-      if (i == operands.length () - 1)
+      if (i == operands.length () - 1
+	  /* Make sure the offset we accumulated in a 64bit int
+	     fits the address computation carried out in target
+	     offset precision.  */
+	  && (off.coeffs[0]
+	      == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
 	{
 	  gcc_assert (operands[i-1].opcode == MEM_REF);
 	  tree ops[2];
@@ -3808,7 +3813,12 @@  vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
 	    break;
 	  off += vro->off;
 	}
-      if (i == operands.length () - 1)
+      if (i == operands.length () - 1
+	  /* Make sure the offset we accumulated in a 64bit int
+	     fits the address computation carried out in target
+	     offset precision.  */
+	  && (off.coeffs[0]
+	      == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
 	{
 	  gcc_assert (operands[i-1].opcode == MEM_REF);
 	  tree ops[2];
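
As a standalone illustration of the two pieces above (a sketch only, it
deliberately avoids GCC's internal headers: "poly2" stands in for
poly_int<2, HOST_WIDE_INT>, "sext_hwi_like" for sext_hwi, and the 32-bit
precision is a hypothetical sizetype), the following shows the
coefficient-wise sign extension the new template performs and the
"does the accumulated offset survive sizetype precision?" guard added to
vn_reference_lookup and vn_reference_insert:

  #include <cstdint>
  #include <cstdio>

  struct poly2 { int64_t coeffs[2]; };  /* stand-in for poly_int<2, HOST_WIDE_INT> */

  /* Sign-extend the low PRECISION bits of X (same contract as sext_hwi).  */
  static int64_t
  sext_hwi_like (int64_t x, unsigned precision)
  {
    if (precision == 64)
      return x;
    uint64_t low = (uint64_t) x & ((uint64_t (1) << precision) - 1);
    uint64_t sign = uint64_t (1) << (precision - 1);
    return (int64_t) ((low ^ sign) - sign);
  }

  /* Coefficient-wise version, mirroring the new poly overload.  */
  static poly2
  sext_hwi_like (const poly2 &a, unsigned precision)
  {
    poly2 r;
    for (unsigned i = 0; i < 2; i++)
      r.coeffs[i] = sext_hwi_like (a.coeffs[i], precision);
    return r;
  }

  int
  main ()
  {
    poly2 off = { { int64_t (4294967295) * 4, 0 } };  /* accumulated byte offset */
    unsigned sizetype_prec = 32;                      /* hypothetical 32-bit sizetype */

    /* The guard from the patch: only use the POINTER_PLUS_EXPR form when the
       constant coefficient is unchanged by sign extension to sizetype.  */
    if (off.coeffs[0] == sext_hwi_like (off.coeffs[0], sizetype_prec))
      std::puts ("offset fits sizetype: POINTER_PLUS_EXPR form is safe");
    else
      std::puts ("offset would be truncated: keep the original reference");
    return 0;
  }

Running the same check with a 64-bit precision makes the sign extension an
identity and the guard accepts the offset, which matches why the problem only
surfaces on targets whose sizetype is narrower than the 64-bit accumulator.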