===================================================================
@@ -312,12 +312,13 @@ struct visibility_flags visibility_optio
static tree c_fully_fold_internal (tree expr, bool, bool *, bool *);
static tree check_case_value (tree);
static bool check_case_bounds (tree, tree, tree *, tree *);
static tree handle_packed_attribute (tree *, tree, tree, int, bool *);
+static tree handle_bitorder_attribute (tree *, tree, tree, int, bool *);
static tree handle_nocommon_attribute (tree *, tree, tree, int, bool *);
static tree handle_common_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_hot_attribute (tree *, tree, tree, int, bool *);
static tree handle_cold_attribute (tree *, tree, tree, int, bool *);
static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
@@ -589,12 +590,14 @@ const unsigned int num_c_common_reswords
const struct attribute_spec c_common_attribute_table[] =
{
/* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
affects_type_identity } */
{ "packed", 0, 0, false, false, false,
handle_packed_attribute , false},
+ { "bitorder", 0, 1, false, true, false,
+ handle_bitorder_attribute , false},
{ "nocommon", 0, 0, true, false, false,
handle_nocommon_attribute, false},
{ "common", 0, 0, true, false, false,
handle_common_attribute, false },
/* FIXME: logically, noreturn attributes should be listed as
"false, true, true" and apply to function types. But implementing this
@@ -5764,12 +5767,42 @@ handle_packed_attribute (tree *node, tre
*no_add_attrs = true;
}
return NULL_TREE;
}
+/* Handle a "bitorder" attribute; arguments as in
+   struct attribute_spec.handler.  Accepts zero arguments (meaning
+   "native") or one identifier argument: lsb, msb, native or swapped.
+   Sets *NO_ADD_ATTRS on an invalid argument.  */
+
+static tree
+handle_bitorder_attribute (tree *ARG_UNUSED (node), tree name, tree args,
+			   int ARG_UNUSED (flags), bool *no_add_attrs)
+{
+  tree bmode;
+  const char *bname;
+
+  /* Allow no arguments to mean "native".  */
+  if (args == NULL_TREE)
+    return NULL_TREE;
+
+  bmode = TREE_VALUE (args);
+
+  /* The argument must be a bare identifier; anything else (e.g. a
+     string constant) must not be handed to IDENTIFIER_POINTER.  */
+  if (TREE_CODE (bmode) != IDENTIFIER_NODE)
+    {
+      error ("%qE argument is not an identifier - use lsb, msb, native, or swapped",
+	     name);
+      *no_add_attrs = true;
+      return NULL_TREE;
+    }
+
+  bname = IDENTIFIER_POINTER (bmode);
+  if (strcmp (bname, "msb") != 0
+      && strcmp (bname, "lsb") != 0
+      && strcmp (bname, "swapped") != 0
+      && strcmp (bname, "native") != 0)
+    {
+      error ("%qE is not a valid bitorder - use lsb, msb, native, or swapped", bmode);
+      *no_add_attrs = true;
+    }
+
+  return NULL_TREE;
+}
+
/* Handle a "nocommon" attribute; arguments as in
struct attribute_spec.handler. */
static tree
handle_nocommon_attribute (tree *node, tree name,
tree ARG_UNUSED (args),
===================================================================
@@ -1716,24 +1716,82 @@ finalize_type_size (tree type)
TYPE_ALIGN (variant) = align;
TYPE_USER_ALIGN (variant) = user_align;
SET_TYPE_MODE (variant, mode);
}
}
}
+
+/* Mirror each field's DECL_FIELD_BIT_OFFSET of record RLI->T within its
+   declared type, i.e.
+     new_offset = TYPE_SIZE (type) - DECL_SIZE (field) - old_offset.
+   Used to implement a non-native "bitorder" layout; called from
+   finish_record_layout when reverse_bitfields_p holds.
+   NOTE(review): this walks every FIELD_DECL, not just bit-fields; for a
+   plain field DECL_SIZE normally equals TYPE_SIZE so its offset stays 0,
+   but confirm for over-aligned or variable-sized members.  */
+
+static void
+reverse_bitfield_layout (record_layout_info rli)
+{
+  tree field, oldtype;
+  for (field = TYPE_FIELDS (rli->t); field; field = TREE_CHAIN (field))
+    {
+      tree type = TREE_TYPE (field);
+      if (TREE_CODE (field) != FIELD_DECL)
+	continue;
+      /* Bail out of the whole walk on an erroneous field or type.  */
+      if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
+	return;
+      /* size_binop computes in sizetype; restore the offset node's
+	 original type afterwards.  */
+      oldtype = TREE_TYPE (DECL_FIELD_BIT_OFFSET (field));
+      DECL_FIELD_BIT_OFFSET (field)
+	= size_binop (MINUS_EXPR,
+		      size_binop (MINUS_EXPR, TYPE_SIZE (type),
+				  DECL_SIZE (field)),
+		      DECL_FIELD_BIT_OFFSET (field));
+      TREE_TYPE (DECL_FIELD_BIT_OFFSET (field)) = oldtype;
+    }
+}
+
+/* Return nonzero if the fields of record RLI->T should have their bit
+   offsets reversed, as requested by a "bitorder" type attribute:
+   "swapped" always reverses; "msb"/"lsb" reverse only when they differ
+   from the target's native bit order.  */
+
+static int
+reverse_bitfields_p (record_layout_info rli)
+{
+  tree st, attr, arg;
+  const char *mode;
+
+  st = rli->t;
+
+  attr = lookup_attribute ("bitorder", TYPE_ATTRIBUTES (st));
+  if (!attr)
+    return 0;
+
+  /* The attribute may legitimately have been written with no argument
+     (meaning "native"); check the argument list for NULL before taking
+     its first element, instead of dereferencing a NULL TREE_LIST.  */
+  arg = TREE_VALUE (attr);
+  if (!arg)
+    return 0;
+  arg = TREE_VALUE (arg);
+  if (!arg)
+    return 0;
+
+  mode = IDENTIFIER_POINTER (arg);
+
+  if (strcmp (mode, "swapped") == 0)
+    return 1;
+  if (BYTES_BIG_ENDIAN)
+    {
+      if (strcmp (mode, "lsb") == 0)
+	return 1;
+    }
+  else
+    {
+      if (strcmp (mode, "msb") == 0)
+	return 1;
+    }
+
+  return 0;
+}
/* Do all of the work required to layout the type indicated by RLI,
once the fields have been laid out. This function will call `free'
for RLI, unless FREE_P is false. Passing a value other than false
for FREE_P is bad practice; this option only exists to support the
G++ 3.2 ABI. */
void
finish_record_layout (record_layout_info rli, int free_p)
{
tree variant;
+ if (reverse_bitfields_p (rli))
+ reverse_bitfield_layout (rli);
+
/* Compute the final size. */
finalize_record_size (rli);
/* Compute the TYPE_MODE for the record. */
compute_record_mode (rli->t);
===================================================================
@@ -4999,18 +4999,42 @@ output_constructor_bitfield (oc_local_st
}
/* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
Generate at least SIZE bytes, padding if necessary. OUTER designates the
caller output state of relevance in recursive invocations. */
+/* Per-element record used to defer output of constructor elements so
+   they can be reordered (bit-fields sorted by bit position) before
+   being emitted.  */
+typedef struct {
+  unsigned HOST_WIDE_INT cnt;	/* Original element index.  */
+  tree val;			/* Saved local.val for this element.  */
+  tree index;			/* Saved local.index.  */
+  tree field;			/* Saved local.field.  */
+  int what_to_do;		/* One of the WHAT_* codes below.  */
+} constructor_field_list;
+
+/* Dispatch codes selecting the output helper for an element.  */
+#define WHAT_ARRAY 1
+#define WHAT_REGULAR 2
+#define WHAT_BITFIELD 3
+
+/* qsort comparator for constructor_field_list entries: true bit-fields
+   sort by their bit position, everything else keeps its original order.
+   Use explicit three-way comparisons - subtracting unsigned
+   HOST_WIDE_INT counts (or HOST_WIDE_INT bit positions) and truncating
+   the difference to int can yield the wrong sign.
+   NOTE(review): mixing the two keys may not form a strict weak ordering
+   when bit-field and non-bit-field entries interleave - confirm the
+   sorted ranges are homogeneous enough for qsort.  */
+
+static int
+constructor_field_sort (const void *va, const void *vb)
+{
+  const constructor_field_list *a = (const constructor_field_list *)va;
+  const constructor_field_list *b = (const constructor_field_list *)vb;
+  if (a->what_to_do != WHAT_BITFIELD || b->what_to_do != WHAT_BITFIELD)
+    return (a->cnt > b->cnt) - (a->cnt < b->cnt);
+  {
+    HOST_WIDE_INT pa = int_bit_position (a->field);
+    HOST_WIDE_INT pb = int_bit_position (b->field);
+    return (pa > pb) - (pa < pb);
+  }
+}
+
static unsigned HOST_WIDE_INT
output_constructor (tree exp, unsigned HOST_WIDE_INT size,
unsigned int align, oc_outer_state * outer)
{
unsigned HOST_WIDE_INT cnt;
constructor_elt *ce;
+ constructor_field_list *constructor_fields;
+ unsigned HOST_WIDE_INT constructor_field_count;
oc_local_state local;
/* Setup our local state to communicate with helpers. */
local.exp = exp;
local.size = size;
@@ -5041,12 +5065,15 @@ output_constructor (tree exp, unsigned H
more one). */
local.field = NULL_TREE;
if (TREE_CODE (local.type) == RECORD_TYPE)
local.field = TYPE_FIELDS (local.type);
+ constructor_field_count = VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp));
+ constructor_fields = XNEWVEC (constructor_field_list, constructor_field_count);
+
for (cnt = 0;
VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), cnt, ce);
cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
{
local.val = ce->value;
local.index = NULL_TREE;
@@ -5072,32 +5099,64 @@ output_constructor (tree exp, unsigned H
#endif
/* Eliminate the marker that makes a cast not be an lvalue. */
if (local.val != NULL_TREE)
STRIP_NOPS (local.val);
- /* Output the current element, using the appropriate helper ... */
+ constructor_fields[cnt].cnt = cnt;
+ constructor_fields[cnt].val = local.val;
+ constructor_fields[cnt].index = local.index;
+ constructor_fields[cnt].field = local.field;
/* For an array slice not part of an outer bitfield. */
if (!outer
&& local.index != NULL_TREE
&& TREE_CODE (local.index) == RANGE_EXPR)
- output_constructor_array_range (&local);
+ constructor_fields[cnt].what_to_do = WHAT_ARRAY;
/* For a field that is neither a true bitfield nor part of an outer one,
known to be at least byte aligned and multiple-of-bytes long. */
else if (!outer
&& (local.field == NULL_TREE
|| !CONSTRUCTOR_BITFIELD_P (local.field)))
- output_constructor_regular_field (&local);
+ constructor_fields[cnt].what_to_do = WHAT_REGULAR;
/* For a true bitfield or part of an outer one. */
else
- output_constructor_bitfield (&local, outer);
+ constructor_fields[cnt].what_to_do = WHAT_BITFIELD;
+
}
+ qsort (constructor_fields, constructor_field_count,
+ sizeof(constructor_field_list), constructor_field_sort);
+
+ for (cnt = 0;
+ cnt < constructor_field_count;
+ cnt++)
+ {
+ /* Output the current element, using the appropriate helper ... */
+ local.val = constructor_fields[cnt].val;
+ local.index = constructor_fields[cnt].index;
+ local.field = constructor_fields[cnt].field;
+
+ switch (constructor_fields[cnt].what_to_do)
+ {
+ case WHAT_ARRAY:
+ output_constructor_array_range (&local);
+ break;
+ case WHAT_REGULAR:
+ output_constructor_regular_field (&local);
+ break;
+ case WHAT_BITFIELD:
+ output_constructor_bitfield (&local, outer);
+ break;
+ }
+ }
+
+ XDELETEVEC (constructor_fields);
+
/* If we are not at toplevel, save the pending data for our caller.
Otherwise output the pending data and padding zeros as needed. */
if (outer)
outer->byte = local.byte;
else
{