===================================================================
@@ -3445,10 +3445,279 @@ perform_intra_sra (void)
return ret;
}
+/* Record of a bitfield access; the hash table commons accesses that
+ have the same base object and DECL_BIT_FIELD_REPRESENTATIVE. */
+
+struct bfaccess
+{
+ bfaccess (tree r) : ref (r), count (1) {}
+
+ tree ref; /* The bitfield COMPONENT_REF. */
+ unsigned count; /* Number of accesses with this base and representative. */
+
+ /* hash_table descriptor support. */
+ typedef bfaccess value_type;
+ typedef bfaccess compare_type;
+ static inline hashval_t hash (const bfaccess *);
+ static inline int equal (const bfaccess*, const bfaccess *);
+ static inline void remove (bfaccess*);
+};
+/* Hash on the access base object and the representative's DECL_UID. */
+hashval_t
+bfaccess::hash (const bfaccess *a)
+{
+ return iterative_hash_hashval_t
+ (iterative_hash_expr (TREE_OPERAND (a->ref, 0), 0),
+ DECL_UID (DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (a->ref, 1))));
+}
+/* Accesses are equal if they share a representative and their bases
+ compare equal with operand_equal_p. */
+int
+bfaccess::equal (const bfaccess *a, const bfaccess *b)
+{
+ return ((DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (a->ref, 1))
+ == DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (b->ref, 1)))
+ && operand_equal_p (TREE_OPERAND (a->ref, 0),
+ TREE_OPERAND (b->ref, 0), 0));
+}
+/* Entries are heap-allocated; free them when the table drops them. */
+void
+bfaccess::remove (bfaccess *a)
+{
+ delete a;
+}
+
+/* Return whether REF is a bitfield access; if OFF is not NULL also
+ store the bit offset of the bitfield within its representative in *OFF. */
+
+static bool
+bitfield_access_p (tree ref, unsigned HOST_WIDE_INT *off)
+{
+ if (TREE_CODE (ref) != COMPONENT_REF)
+ return false;
+
+ tree field = TREE_OPERAND (ref, 1);
+ if (!DECL_BIT_FIELD_TYPE (field))
+ return false; /* Not a bitfield member. */
+
+ tree rep = DECL_BIT_FIELD_REPRESENTATIVE (field);
+ if (!rep)
+ return false;
+
+ if (!off)
+ return true; /* Caller only wants the predicate. */
+ /* *OFF is FIELD's position minus REP's, in bits. */
+ if (host_integerp (DECL_FIELD_OFFSET (field), 1)
+ && host_integerp (DECL_FIELD_OFFSET (rep), 1))
+ *off = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
+ - tree_low_cst (DECL_FIELD_OFFSET (rep), 1)) * BITS_PER_UNIT;
+ else
+ *off = 0; /* NOTE(review): assumes FIELD and REP share any variable offset. */
+ *off += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+ - tree_low_cst (DECL_FIELD_BIT_OFFSET (rep), 1));
+
+ return true;
+}
+
+
+/* Lower the bitfield read at *GSI, the offset of the bitfield
+ relative to the bitfield representative is OFF bits. */
+
+static void
+lower_bitfield_read (gimple_stmt_iterator *gsi, unsigned HOST_WIDE_INT off)
+{
+ gimple stmt = gsi_stmt (*gsi);
+ tree ref = gimple_assign_rhs1 (stmt);
+ tree field = TREE_OPERAND (ref, 1);
+ tree rep = DECL_BIT_FIELD_REPRESENTATIVE (field);
+ /* Load the whole representative into a new SSA name, reusing the
+ original statement's VUSE. */
+ tree loadres = make_ssa_name (TREE_TYPE (rep), NULL);
+ gimple load
+ = gimple_build_assign (loadres,
+ build3 (COMPONENT_REF, TREE_TYPE (rep),
+ TREE_OPERAND (ref, 0), rep,
+ NULL_TREE));
+ gimple_set_vuse (load, gimple_vuse (stmt));
+ gsi_insert_before (gsi, load, GSI_SAME_STMT);
+ gimple_assign_set_rhs1 (stmt,
+ build3 (BIT_FIELD_REF, TREE_TYPE (ref),
+ loadres,
+ DECL_SIZE (field),
+ bitsize_int (off)));
+ update_stmt (stmt);
+}
+
+/* Lower the bitfield write at *GSI, the offset of the bitfield
+ relative to the bitfield representative is OFF bits. */
+
+static void
+lower_bitfield_write (gimple_stmt_iterator *gsi, unsigned HOST_WIDE_INT off)
+{
+ gimple stmt = gsi_stmt (*gsi);
+ tree ref = gimple_assign_lhs (stmt);
+ tree field = TREE_OPERAND (ref, 1);
+ tree rep = DECL_BIT_FIELD_REPRESENTATIVE (field);
+ /* Load the whole representative, reusing the store's VUSE. */
+ tree loadres = make_ssa_name (TREE_TYPE (rep), NULL);
+ gimple load
+ = gimple_build_assign (loadres,
+ build3 (COMPONENT_REF, TREE_TYPE (rep),
+ unshare_expr
+ (TREE_OPERAND (ref, 0)),
+ rep,
+ NULL_TREE));
+ gimple_set_vuse (load, gimple_vuse (stmt));
+ gsi_insert_before (gsi, load, GSI_SAME_STMT);
+ /* FIXME: BIT_FIELD_EXPR. */
+ /* Clear the bits of the field out of the loaded value. */
+ tree masked = make_ssa_name (TREE_TYPE (rep), NULL);
+ tree mask
+ = double_int_to_tree (TREE_TYPE (rep),
+ ~double_int::mask
+ (TREE_INT_CST_LOW (DECL_SIZE (field)))
+ .lshift (off));
+ gimple tems
+ = gimple_build_assign_with_ops (BIT_AND_EXPR,
+ masked, loadres, mask);
+ gsi_insert_before (gsi, tems, GSI_SAME_STMT);
+ /* For a signed field first convert the value to its unsigned
+ variant so the widening below zero-extends. */
+ tree tem2;
+ if (!TYPE_UNSIGNED (TREE_TYPE (field)))
+ {
+ tem2 = make_ssa_name (unsigned_type_for (TREE_TYPE (field)),
+ NULL);
+ tems = gimple_build_assign_with_ops (NOP_EXPR, tem2,
+ gimple_assign_rhs1 (stmt),
+ NULL_TREE);
+ gsi_insert_before (gsi, tems, GSI_SAME_STMT);
+ }
+ else
+ tem2 = gimple_assign_rhs1 (stmt);
+ tree tem = make_ssa_name (TREE_TYPE (rep), NULL);
+ tems = gimple_build_assign_with_ops (NOP_EXPR, tem,
+ tem2, NULL_TREE);
+ gsi_insert_before (gsi, tems, GSI_SAME_STMT);
+ /* Shift the value into place. */
+ if (off != 0)
+ {
+ tem2 = make_ssa_name (TREE_TYPE (rep), NULL);
+ tems = gimple_build_assign_with_ops (LSHIFT_EXPR, tem2, tem,
+ size_int (off));
+ gsi_insert_before (gsi, tems, GSI_SAME_STMT);
+ }
+ else
+ tem2 = tem;
+ /* OR the new field value into the masked load. */
+ tree modres = make_ssa_name (TREE_TYPE (rep), NULL);
+ gimple mod
+ = gimple_build_assign_with_ops (BIT_IOR_EXPR, modres,
+ masked, tem2);
+ gsi_insert_before (gsi, mod, GSI_SAME_STMT);
+ /* Finally rewrite the store to store the full representative. */
+ gimple_assign_set_rhs1 (stmt, modres);
+ gimple_assign_set_lhs (stmt,
+ build3 (COMPONENT_REF, TREE_TYPE (rep),
+ TREE_OPERAND (ref, 0), rep,
+ NULL_TREE));
+ update_stmt (stmt);
+}
+
+/* Lower bitfield accesses to accesses of their DECL_BIT_FIELD_REPRESENTATIVE;
+ if ALL is false only lower accesses commoned more than once in a block. */
+
+static void
+lower_bitfields (bool all)
+{
+ basic_block bb;
+
+ hash_table <bfaccess> bf;
+ bf.create (1);
+
+ FOR_EACH_BB (bb)
+ {
+ bool any = false;
+ bf.empty (); /* Access counts are local to each basic block. */
+
+ /* We do two passes, the first one identifying interesting
+ bitfield accesses and the second one actually lowering them. */
+ if (!all)
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
+ !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+ if (!gimple_assign_single_p (stmt)
+ || gimple_has_volatile_ops (stmt))
+ continue;
+
+ tree ref = gimple_assign_rhs1 (stmt);
+ if (bitfield_access_p (ref, NULL))
+ {
+ bfaccess a(ref);
+ bfaccess **slot = bf.find_slot (&a, INSERT);
+ if (*slot)
+ (*slot)->count++;
+ else
+ *slot = new bfaccess(a);
+ if ((*slot)->count > 1)
+ any = true;
+ }
+
+ ref = gimple_assign_lhs (stmt);
+ if (bitfield_access_p (ref, NULL))
+ {
+ bfaccess a(ref);
+ bfaccess **slot = bf.find_slot (&a, INSERT);
+ if (*slot)
+ (*slot)->count++;
+ else
+ *slot = new bfaccess(a);
+ if ((*slot)->count > 1)
+ any = true;
+ }
+ }
+
+ if (!all && !any)
+ continue; /* No commonable bitfield access in this block. */
+
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
+ !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+ if (!gimple_assign_single_p (stmt)
+ || gimple_has_volatile_ops (stmt))
+ continue;
+
+ tree ref;
+ unsigned HOST_WIDE_INT off;
+
+ /* Lower a bitfield read. */
+ ref = gimple_assign_rhs1 (stmt);
+ if (bitfield_access_p (ref, &off))
+ {
+ bfaccess a(ref);
+ bfaccess *aa = bf.find (&a);
+ if (all || (aa->count > 1)) /* aa is NULL if ALL, but not dereferenced then. */
+ lower_bitfield_read (&gsi, off);
+ }
+ /* Lower a bitfield write to a read-modify-write cycle. */
+ ref = gimple_assign_lhs (stmt);
+ if (bitfield_access_p (ref, &off))
+ {
+ bfaccess a(ref);
+ bfaccess *aa = bf.find (&a);
+ if (all || (aa->count > 1))
+ lower_bitfield_write (&gsi, off);
+ }
+ }
+ }
+
+ bf.dispose ();
+}
+
/* Perform early intraprocedural SRA. */
static unsigned int
early_intra_sra (void)
{
+ lower_bitfields (false); /* First lower commonable bitfield accesses. */
sra_mode = SRA_MODE_EARLY_INTRA;
return perform_intra_sra ();
}