tree-optimization/113895 - copy_reference_ops_from_ref vs. bitfields

Message ID 20240213124207.19C343858299@sourceware.org
State New
Series tree-optimization/113895 - copy_reference_ops_from_ref vs. bitfields

Commit Message

Richard Biener Feb. 13, 2024, 12:41 p.m. UTC
The recent enhancement to discover constant array indices via the range
info used by get_ref_base_and_extent doesn't work when the outermost
component reference is to a bitfield, because we track the running
offset in the reference ops in bytes.  The following does the same as
ao_ref_init_from_vn_reference and recovers the bitfield offset manually,
tracking the offset used to discover the constant array index in bits
instead.
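
As a concrete illustration (a sketch for this mail, not part of the
patch): under #pragma pack(1) the testcase's anonymous bit-fields occupy
bits 0-35, so f6 starts at bit 36 and each array element spans 48 bits.
A byte-granular running offset cannot represent the 36-bit position of
f6, which is where the byte-based tracking breaks down.  The bit
positions below are derived by hand from the declaration order, not
queried from GCC.

/* Sketch mirroring the testcase layout under #pragma pack(1).  */
#pragma pack(1)
struct S {
  signed : 17;      /* bits  0..16 */
  signed : 6;       /* bits 17..22 */
  unsigned : 13;    /* bits 23..35 */
  unsigned f6 : 12; /* bits 36..47 - starts off a byte boundary */
};

int main (void)
{
  /* Expected to be 6 on x86_64 GCC: 48 bits packed into 6 bytes.  */
  return sizeof (struct S) == 6 ? 0 : 1;
}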

Bootstrapped and tested on x86_64-unknown-linux-gnu, pushed.

	PR tree-optimization/113895
	* tree-ssa-sccvn.cc (copy_reference_ops_from_ref): Track
	offset to discover constant array indices in bits, handle
	COMPONENT_REF to bitfields.

	* gcc.dg/torture/pr113895-1.c: New testcase.
---
 gcc/testsuite/gcc.dg/torture/pr113895-1.c | 16 ++++++++++++++
 gcc/tree-ssa-sccvn.cc                     | 26 ++++++++++++++++++++---
 2 files changed, 39 insertions(+), 3 deletions(-)
 create mode 100644 gcc/testsuite/gcc.dg/torture/pr113895-1.c

Patch

diff --git a/gcc/testsuite/gcc.dg/torture/pr113895-1.c b/gcc/testsuite/gcc.dg/torture/pr113895-1.c
new file mode 100644
index 00000000000..e96cb2f33e1
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr113895-1.c
@@ -0,0 +1,16 @@ 
+/* { dg-do compile } */
+
+int main_i;
+void transparent_crc(int);
+#pragma pack(1)
+struct {
+  signed : 17;
+  signed : 6;
+  unsigned : 13;
+  unsigned f6 : 12;
+} g_20[1];
+int main()
+{
+  transparent_crc(g_20[main_i].f6);
+  return 0;
+}
diff --git a/gcc/tree-ssa-sccvn.cc b/gcc/tree-ssa-sccvn.cc
index 95670ae2ed6..d6b8c734e7b 100644
--- a/gcc/tree-ssa-sccvn.cc
+++ b/gcc/tree-ssa-sccvn.cc
@@ -1119,14 +1119,14 @@  copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
 	      unsigned HOST_WIDE_INT elsz
 		= tree_to_uhwi (op.op2) * vn_ref_op_align_unit (&op);
 	      unsigned HOST_WIDE_INT idx
-		= (coffset / BITS_PER_UNIT - off.to_constant ()) / elsz;
+		= (coffset - off.to_constant ()) / BITS_PER_UNIT / elsz;
 	      if (idx == 0)
 		op.op0 = op.op1;
 	      else
 		op.op0 = wide_int_to_tree (TREE_TYPE (op.op0),
 					   wi::to_poly_wide (op.op1) + idx);
 	      op.off = idx * elsz;
-	      off += op.off;
+	      off += op.off * BITS_PER_UNIT;
 	    }
 	  else
 	    {
@@ -1140,10 +1140,30 @@  copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
 		       || TREE_CODE_CLASS (op.opcode) == tcc_constant)
 		/* end-of ref.  */
 		gcc_assert (i == result->length ());
+	      else if (op.opcode == COMPONENT_REF)
+		{
+		  /* op.off is tracked in bytes, re-do it manually
+		     because of bitfields.  */
+		  tree field = op.op0;
+		  /* We do not have a complete COMPONENT_REF tree here so we
+		     cannot use component_ref_field_offset.  Do the interesting
+		     parts manually.  */
+		  tree this_offset = DECL_FIELD_OFFSET (field);
+		  if (op.op1 || !poly_int_tree_p (this_offset))
+		    gcc_unreachable ();
+		  else
+		    {
+		      poly_offset_int woffset
+			= (wi::to_poly_offset (this_offset)
+			   << LOG2_BITS_PER_UNIT);
+		      woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
+		      off += woffset.force_shwi ();
+		    }
+		}
 	      else
 		{
 		  gcc_assert (known_ne (op.off, -1));
-		  off += op.off;
+		  off += op.off * BITS_PER_UNIT;
 		}
 	    }
 	}
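
For readers unfamiliar with GCC's field layout trees, here is a minimal
standalone sketch of the recovery the new COMPONENT_REF branch performs,
with plain integers standing in for the trees and values assumed purely
for illustration: the field's byte offset (DECL_FIELD_OFFSET) is shifted
into bits and the within-unit part (DECL_FIELD_BIT_OFFSET) is added, so
positions that are not byte-aligned stay exact.

#include <stdio.h>

#define LOG2_BITS_PER_UNIT 3  /* 8-bit units, as on x86_64 */

int main (void)
{
  /* Assumed example values, not taken from a compiler dump:
     a bit-field placed 4 bytes plus 4 bits into its record.  */
  unsigned long field_offset = 4;     /* stand-in for DECL_FIELD_OFFSET (bytes) */
  unsigned long field_bit_offset = 4; /* stand-in for DECL_FIELD_BIT_OFFSET (bits) */

  unsigned long woffset = (field_offset << LOG2_BITS_PER_UNIT) + field_bit_offset;
  printf ("field starts at bit %lu\n", woffset);  /* prints 36 */
  return 0;
}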