@@ -252,6 +252,7 @@ make_edges (basic_block min, basic_block max, int update_p)
if (code == JUMP_INSN)
{
rtx tmp;
+ rtx_jump_table_data *table;
/* Recognize a non-local goto as a branch outside the
current function. */
@@ -259,15 +260,15 @@ make_edges (basic_block min, basic_block max, int update_p)
;
/* Recognize a tablejump and do the right thing. */
- else if (tablejump_p (insn, NULL, &tmp))
+ else if (tablejump_p (insn, NULL, &table))
{
rtvec vec;
int j;
- if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
- vec = XVEC (PATTERN (tmp), 0);
+ if (GET_CODE (PATTERN (table)) == ADDR_VEC)
+ vec = XVEC (PATTERN (table), 0);
else
- vec = XVEC (PATTERN (tmp), 1);
+ vec = XVEC (PATTERN (table), 1);
for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
make_label_edge (edge_cache, bb,
@@ -444,7 +445,7 @@ find_bb_boundaries (basic_block bb)
basic_block orig_bb = bb;
rtx insn = BB_HEAD (bb);
rtx end = BB_END (bb), x;
- rtx table;
+ rtx_jump_table_data *table;
rtx flow_transfer_insn = NULL_RTX;
edge fallthru = NULL;
@@ -704,7 +704,8 @@ static void
merge_blocks_move_successor_nojumps (basic_block a, basic_block b)
{
rtx barrier, real_b_end;
- rtx label, table;
+ rtx label;
+ rtx_jump_table_data *table;
/* If we are partitioning hot/cold basic blocks, we don't want to
mess up unconditional or indirect jumps that cross between hot
@@ -1675,7 +1676,7 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
Return true if they are identical. */
{
rtx label1, label2;
- rtx table1, table2;
+ rtx_jump_table_data *table1, *table2;
if (tablejump_p (BB_END (bb1), &label1, &table1)
&& tablejump_p (BB_END (bb2), &label2, &table2)
@@ -1978,7 +1979,7 @@ try_crossjump_to_edge (int mode, edge e1, edge e2,
so replace the references to TABLE1 by references to TABLE2. */
{
rtx label1, label2;
- rtx table1, table2;
+ rtx_jump_table_data *table1, *table2;
if (tablejump_p (BB_END (osrc1), &label1, &table1)
&& tablejump_p (BB_END (osrc2), &label2, &table2)
@@ -1100,7 +1100,8 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
else
{
rtx target_label = block_label (target);
- rtx barrier, label, table;
+ rtx barrier, label;
+ rtx_jump_table_data *table;
emit_jump_insn_after_noloc (gen_jump (target_label), insn);
JUMP_LABEL (BB_END (src)) = target_label;
@@ -1173,9 +1174,10 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
static bool
patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
{
+ rtx_jump_table_data *table;
rtx tmp;
/* Recognize a tablejump and adjust all matching cases. */
- if (tablejump_p (insn, NULL, &tmp))
+ if (tablejump_p (insn, NULL, &table))
{
rtvec vec;
int j;
@@ -1183,10 +1185,10 @@ patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
return false;
- if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
- vec = XVEC (PATTERN (tmp), 0);
+ if (GET_CODE (PATTERN (table)) == ADDR_VEC)
+ vec = XVEC (PATTERN (table), 0);
else
- vec = XVEC (PATTERN (tmp), 1);
+ vec = XVEC (PATTERN (table), 1);
for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
@@ -1608,7 +1610,10 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
/* If the old block ended with a tablejump, skip its table
by searching forward from there. Otherwise start searching
forward from the last instruction of the old block. */
- if (!tablejump_p (BB_END (e->src), NULL, &note))
+ rtx_jump_table_data *table;
+ if (tablejump_p (BB_END (e->src), NULL, &table))
+ note = table;
+ else
note = BB_END (e->src);
note = NEXT_INSN (note);
@@ -2235,12 +2240,13 @@ update_br_prob_note (basic_block bb)
rtx
get_last_bb_insn (basic_block bb)
{
+ rtx_jump_table_data *table;
rtx tmp;
rtx end = BB_END (bb);
/* Include any jump table following the basic block. */
- if (tablejump_p (end, NULL, &tmp))
- end = tmp;
+ if (tablejump_p (end, NULL, &table))
+ end = table;
/* Include any barriers that may follow the basic block. */
tmp = next_nonnote_insn_bb (end);
@@ -106,7 +106,7 @@ static const char *output_multi_immediate (rtx *, const char *, const char *,
static const char *shift_op (rtx, HOST_WIDE_INT *);
static struct machine_function *arm_init_machine_status (void);
static void thumb_exit (FILE *, int);
-static HOST_WIDE_INT get_jump_table_size (rtx);
+static HOST_WIDE_INT get_jump_table_size (rtx_jump_table_data *);
static Mnode *move_minipool_fix_forward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
static Mnode *add_minipool_forward_ref (Mfix *);
static Mnode *move_minipool_fix_backward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
@@ -16008,7 +16008,7 @@ Mfix * minipool_barrier;
#endif
static HOST_WIDE_INT
-get_jump_table_size (rtx insn)
+get_jump_table_size (rtx_jump_table_data *insn)
{
/* ADDR_VECs only take room if read-only data does into the text
section. */
@@ -16596,7 +16596,7 @@ create_fix_barrier (Mfix *fix, HOST_WIDE_INT max_address)
while (from && count < max_count)
{
- rtx tmp;
+ rtx_jump_table_data *tmp;
int new_cost;
/* This code shouldn't have been called if there was a natural barrier
@@ -17239,7 +17239,7 @@ arm_reorg (void)
push_minipool_barrier (insn, address);
else if (INSN_P (insn))
{
- rtx table;
+ rtx_jump_table_data *table;
note_invalid_constants (insn, address, true);
address += get_attr_length (insn);
@@ -7072,7 +7072,7 @@ s390_chunkify_start (void)
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- rtx table;
+ rtx_jump_table_data *table;
/* Labels marked with LABEL_PRESERVE_P can be target
of non-local jumps, so we have to mark them.
@@ -2104,7 +2104,7 @@ spu_emit_branch_hint (rtx before, rtx branch, rtx target,
rtx branch_label = 0;
rtx hint;
rtx insn;
- rtx table;
+ rtx_jump_table_data *table;
if (before == 0 || branch == 0 || target == 0)
return;
@@ -2291,14 +2291,16 @@ create_trace_edges (rtx insn)
if (JUMP_P (insn))
{
+ rtx_jump_table_data *table;
+
if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
return;
- if (tablejump_p (insn, NULL, &tmp))
+ if (tablejump_p (insn, NULL, &table))
{
rtvec vec;
- tmp = PATTERN (tmp);
+ tmp = PATTERN (table);
vec = XVEC (tmp, GET_CODE (tmp) == ADDR_DIFF_VEC);
n = GET_NUM_ELEM (vec);
@@ -1285,7 +1285,8 @@ delete_related_insns (rtx insn)
if (jump_to_label_p (insn))
{
- rtx lab = JUMP_LABEL (insn), lab_next;
+ rtx lab = JUMP_LABEL (insn);
+ rtx_jump_table_data *lab_next;
if (LABEL_NUSES (lab) == 0)
/* This can delete NEXT or PREV,
@@ -2576,7 +2576,7 @@ extern int inequality_comparisons_p (const_rtx);
extern rtx replace_rtx (rtx, rtx, rtx);
extern int replace_label (rtx *, void *);
extern int rtx_referenced_p (rtx, rtx);
-extern bool tablejump_p (const_rtx, rtx *, rtx *);
+extern bool tablejump_p (const_rtx, rtx *, rtx_jump_table_data **);
extern int computed_jump_p (const_rtx);
typedef int (*rtx_function) (rtx *, void *);
@@ -2778,7 +2778,7 @@ rtx_referenced_p (rtx x, rtx body)
*LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
bool
-tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
+tablejump_p (const_rtx insn, rtx *labelp, rtx_jump_table_data **tablep)
{
rtx label, table;
@@ -2793,7 +2793,7 @@ tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
if (labelp)
*labelp = label;
if (tablep)
- *tablep = table;
+ *tablep = as_a <rtx_jump_table_data *> (table);
return true;
}
return false;
@@ -3794,14 +3794,15 @@ bool
label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
{
rtx tmp = JUMP_LABEL (jump_insn);
+ rtx_jump_table_data *table;
if (label == tmp)
return true;
- if (tablejump_p (jump_insn, NULL, &tmp))
+ if (tablejump_p (jump_insn, NULL, &table))
{
- rtvec vec = XVEC (PATTERN (tmp),
- GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
+ rtvec vec = XVEC (PATTERN (table),
+ GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
int i, veclen = GET_NUM_ELEM (vec);
for (i = 0; i < veclen; ++i)