@@ -1178,6 +1178,19 @@ lshift (const poly_int_pod<N, Ca> &a, const Cb &b)
}
}
+/* Poly version of sext_hwi, with the same interface. */
+
+template<unsigned int N, typename C>
+inline poly_int<N, HOST_WIDE_INT>
+sext_hwi (const poly_int<N, C> &a, unsigned int precision)
+{
+ poly_int_pod<N, HOST_WIDE_INT> r;
+ for (unsigned int i = 0; i < N; i++)
+ r.coeffs[i] = sext_hwi (a.coeffs[i], precision);
+ return r;
+}
+
+
/* Return true if a0 + a1 * x might equal b0 + b1 * x for some nonnegative
integer x. */
@@ -3684,7 +3684,12 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
break;
off += vro->off;
}
- if (i == operands.length () - 1)
+ if (i == operands.length () - 1
+	   /* Make sure the offset we accumulated in a 64bit int
+ fits the address computation carried out in target
+ offset precision. */
+ && (off.coeffs[0]
+ == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
{
gcc_assert (operands[i-1].opcode == MEM_REF);
tree ops[2];
@@ -3808,7 +3813,12 @@ vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
break;
off += vro->off;
}
- if (i == operands.length () - 1)
+ if (i == operands.length () - 1
+	   /* Make sure the offset we accumulated in a 64bit int
+ fits the address computation carried out in target
+ offset precision. */
+ && (off.coeffs[0]
+ == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
{
gcc_assert (operands[i-1].opcode == MEM_REF);
tree ops[2];