@@ -5833,7 +5833,7 @@ pass_expand::execute (function *fun)
if (var_ret_seq)
{
- rtx after = return_label;
+ rtx_insn *after = return_label;
rtx_insn *next = NEXT_INSN (after);
if (next && NOTE_INSN_BASIC_BLOCK_P (next))
after = next;
@@ -1604,6 +1604,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
{
+ rtx_insn *note;
gcov_type count = e->count;
int probability = e->probability;
/* Create the new structures. */
@@ -513,13 +513,13 @@ target_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1,
reg_stat vector is made larger if the splitter creates a new
register. */
-static rtx
+static rtx_insn *
combine_split_insns (rtx pattern, rtx insn)
{
- rtx ret;
+ rtx_insn *ret;
unsigned int nregs;
- ret = split_insns (pattern, insn);
+ ret = as_a_nullable <rtx_insn *> (split_insns (pattern, insn));
nregs = max_reg_num ();
if (nregs > reg_stat.length ())
reg_stat.safe_grow_cleared (nregs);
@@ -2266,8 +2266,9 @@ likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
static int
likely_spilled_retval_p (rtx_insn *insn)
{
- rtx use = BB_END (this_basic_block);
- rtx reg, p;
+ rtx_insn *use = BB_END (this_basic_block);
+ rtx reg;
+ rtx_insn *p;
unsigned regno, nregs;
/* We assume here that no machine mode needs more than
32 hard registers when the value overlaps with a register
@@ -3305,13 +3306,14 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
&& asm_noperands (newpat) < 0)
{
- rtx parallel, m_split, *split;
+ rtx parallel, *split;
+ rtx_insn *m_split_insn;
/* See if the MD file can split NEWPAT. If it can't, see if letting it
use I2DEST as a scratch register will help. In the latter case,
convert I2DEST to the mode of the source of NEWPAT if we can. */
- m_split = combine_split_insns (newpat, i3);
+ m_split_insn = combine_split_insns (newpat, i3);
/* We can only use I2DEST as a scratch reg if it doesn't overlap any
inputs of NEWPAT. */
@@ -3320,7 +3322,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
possible to try that as a scratch reg. This would require adding
more code to make it work though. */
- if (m_split == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
+ if (m_split_insn == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
{
enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
@@ -3330,11 +3332,11 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
gen_rtvec (2, newpat,
gen_rtx_CLOBBER (VOIDmode,
i2dest)));
- m_split = combine_split_insns (parallel, i3);
+ m_split_insn = combine_split_insns (parallel, i3);
/* If that didn't work, try changing the mode of I2DEST if
we can. */
- if (m_split == 0
+ if (m_split_insn == 0
&& new_mode != GET_MODE (i2dest)
&& new_mode != VOIDmode
&& can_change_dest_mode (i2dest, added_sets_2, new_mode))
@@ -3355,9 +3357,9 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
gen_rtvec (2, newpat,
gen_rtx_CLOBBER (VOIDmode,
ni2dest))));
- m_split = combine_split_insns (parallel, i3);
+ m_split_insn = combine_split_insns (parallel, i3);
- if (m_split == 0
+ if (m_split_insn == 0
&& REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
{
struct undo *buf;
@@ -3370,34 +3372,34 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
}
}
- i2scratch = m_split != 0;
+ i2scratch = m_split_insn != 0;
}
/* If recog_for_combine has discarded clobbers, try to use them
again for the split. */
- if (m_split == 0 && newpat_vec_with_clobbers)
+ if (m_split_insn == 0 && newpat_vec_with_clobbers)
{
parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
- m_split = combine_split_insns (parallel, i3);
+ m_split_insn = combine_split_insns (parallel, i3);
}
- if (m_split && NEXT_INSN (m_split) == NULL_RTX)
+ if (m_split_insn && NEXT_INSN (m_split_insn) == NULL_RTX)
{
- m_split = PATTERN (m_split);
- insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
+ rtx m_split_pat = PATTERN (m_split_insn);
+ insn_code_number = recog_for_combine (&m_split_pat, i3, &new_i3_notes);
if (insn_code_number >= 0)
- newpat = m_split;
+ newpat = m_split_pat;
}
- else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
+ else if (m_split_insn && NEXT_INSN (NEXT_INSN (m_split_insn)) == NULL_RTX
&& (next_nonnote_nondebug_insn (i2) == i3
- || ! use_crosses_set_p (PATTERN (m_split), DF_INSN_LUID (i2))))
+ || ! use_crosses_set_p (PATTERN (m_split_insn), DF_INSN_LUID (i2))))
{
rtx i2set, i3set;
- rtx newi3pat = PATTERN (NEXT_INSN (m_split));
- newi2pat = PATTERN (m_split);
+ rtx newi3pat = PATTERN (NEXT_INSN (m_split_insn));
+ newi2pat = PATTERN (m_split_insn);
- i3set = single_set (NEXT_INSN (m_split));
- i2set = single_set (m_split);
+ i3set = single_set (NEXT_INSN (m_split_insn));
+ i2set = single_set (m_split_insn);
i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
@@ -4506,9 +4508,9 @@ find_split_point (rtx *loc, rtx_insn *insn, bool set_src)
MEM_ADDR_SPACE (x)))
{
rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
- rtx seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
- XEXP (x, 0)),
- subst_insn);
+ rtx_insn *seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
+ XEXP (x, 0)),
+ subst_insn);
/* This should have produced two insns, each of which sets our
placeholder. If the source of the second is a valid address,
@@ -2664,8 +2664,8 @@ spu_machine_dependent_reorg (void)
label because GCC expects it at the beginning of the block. */
rtx unspec = SET_SRC (XVECEXP (PATTERN (insn), 0, 0));
rtx label_ref = XVECEXP (unspec, 0, 0);
- rtx label = XEXP (label_ref, 0);
- rtx branch;
+ rtx_insn *label = as_a <rtx_insn *> (XEXP (label_ref, 0));
+ rtx_insn *branch;
int offset = 0;
for (branch = NEXT_INSN (label);
!JUMP_P (branch) && !CALL_P (branch);
@@ -812,7 +812,7 @@ free_store_info (insn_info_t insn_info)
typedef struct
{
- rtx first, current;
+ rtx_insn *first, *current;
regset fixed_regs_live;
bool failure;
} note_add_store_info;
@@ -823,7 +823,7 @@ typedef struct
static void
note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
{
- rtx insn;
+ rtx_insn *insn;
note_add_store_info *info = (note_add_store_info *) data;
int r, n;
@@ -864,7 +864,7 @@ emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
rtx dest, rtx src, rtx srcoff, void *arg)
{
insn_info_t insn_info = (insn_info_t) arg;
- rtx insn = insn_info->insn, new_insn, cur;
+ rtx_insn *insn = insn_info->insn, *new_insn, *cur;
note_add_store_info info;
/* We can reuse all operands without copying, because we are about
@@ -877,7 +877,7 @@ emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
end_sequence ();
}
else
- new_insn = gen_move_insn (dest, src);
+ new_insn = as_a <rtx_insn *> (gen_move_insn (dest, src));
info.first = new_insn;
info.fixed_regs_live = insn_info->fixed_regs_live;
info.failure = false;
@@ -1741,7 +1741,8 @@ find_shift_sequence (int access_size,
GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
new_mode = GET_MODE_WIDER_MODE (new_mode))
{
- rtx target, new_reg, shift_seq, insn, new_lhs;
+ rtx target, new_reg, new_lhs;
+ rtx_insn *shift_seq, *insn;
int cost;
/* If a constant was stored into memory, try to simplify it here,
@@ -1961,7 +1962,8 @@ replace_read (store_info_t store_info, insn_info_t store_insn,
{
enum machine_mode store_mode = GET_MODE (store_info->mem);
enum machine_mode read_mode = GET_MODE (read_info->mem);
- rtx insns, this_insn, read_reg;
+ rtx_insn *insns, *this_insn;
+ rtx read_reg;
basic_block bb;
if (!dbg_cnt (dse))
@@ -113,7 +113,7 @@ typedef struct
HOST_WIDE_INT beg_delay_args_size, end_delay_args_size;
/* The first EH insn in the trace, where beg_delay_args_size must be set. */
- rtx eh_head;
+ rtx_insn *eh_head;
/* The following variables contain data used in interpreting frame related
expressions. These are not part of the "real" row state as defined by
@@ -876,7 +876,7 @@ notice_args_size (rtx insn)
data within the trace related to EH insns and args_size. */
static void
-notice_eh_throw (rtx insn)
+notice_eh_throw (rtx_insn *insn)
{
HOST_WIDE_INT args_size;
@@ -2577,10 +2577,10 @@ create_cfi_notes (void)
/* Return the insn before the first NOTE_INSN_CFI after START. */
-static rtx
-before_next_cfi_note (rtx start)
+static rtx_insn *
+before_next_cfi_note (rtx_insn *start)
{
- rtx prev = start;
+ rtx_insn *prev = start;
while (start)
{
if (NOTE_P (start) && NOTE_KIND (start) == NOTE_INSN_CFI)
@@ -2675,7 +2675,7 @@ connect_traces (void)
if (dump_file && add_cfi_insn != ti->head)
{
- rtx note;
+ rtx_insn *note;
fprintf (dump_file, "Fixup between trace %u and %u:\n",
prev_ti->id, ti->id);
@@ -2741,7 +2741,7 @@ reset_insn_used_flags (rtx insn)
static void
reset_all_used_flags (void)
{
- rtx p;
+ rtx_insn *p;
for (p = get_insns (); p; p = NEXT_INSN (p))
if (INSN_P (p))
@@ -2776,7 +2776,7 @@ verify_insn_sharing (rtx insn)
DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
- rtx p;
+ rtx_insn *p;
timevar_push (TV_VERIFY_RTL_SHARING);
@@ -2802,7 +2802,7 @@ verify_rtl_sharing (void)
Assumes the mark bits are cleared at entry. */
void
-unshare_all_rtl_in_chain (rtx insn)
+unshare_all_rtl_in_chain (rtx_insn *insn)
{
for (; insn; insn = NEXT_INSN (insn))
if (INSN_P (insn))
@@ -3130,7 +3130,7 @@ get_last_insn_anywhere (void)
rtx
get_first_nonnote_insn (void)
{
- rtx insn = get_insns ();
+ rtx_insn *insn = get_insns ();
if (insn)
{
@@ -3143,7 +3143,7 @@ get_first_nonnote_insn (void)
{
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
+ insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
}
}
@@ -3156,7 +3156,7 @@ get_first_nonnote_insn (void)
rtx
get_last_nonnote_insn (void)
{
- rtx insn = get_last_insn ();
+ rtx_insn *insn = get_last_insn ();
if (insn)
{
@@ -3167,10 +3167,9 @@ get_last_nonnote_insn (void)
continue;
else
{
- if (NONJUMP_INSN_P (insn)
- && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0,
- XVECLEN (PATTERN (insn), 0) - 1);
+ if (NONJUMP_INSN_P (insn))
+ if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
+ insn = seq->insn (seq->len () - 1);
}
}
@@ -3202,42 +3201,45 @@ get_max_insn_count (void)
of the sequence. */
/* Return the insn following UNCAST_INSN, looking through a SEQUENCE:
   if the following insn wraps a SEQUENCE, return the first insn held
   inside it.  Returns NULL at the end of the chain (or for NULL input).  */

rtx_insn *
next_insn (rtx uncast_insn)
{
  rtx_insn *cur = as_a_nullable <rtx_insn *> (uncast_insn);
  if (!cur)
    return NULL;

  cur = NEXT_INSN (cur);
  if (cur != NULL
      && NONJUMP_INSN_P (cur)
      && GET_CODE (PATTERN (cur)) == SEQUENCE)
    cur = as_a <rtx_sequence *> (PATTERN (cur))->insn (0);

  return cur;
}
/* Return the previous insn. If it is a SEQUENCE, return the last insn
of the sequence. */
/* Return the insn preceding UNCAST_INSN, looking through a SEQUENCE:
   if the preceding insn wraps a SEQUENCE, return the last insn held
   inside it.  Returns NULL at the start of the chain (or for NULL input).  */

rtx_insn *
previous_insn (rtx uncast_insn)
{
  rtx_insn *cur = as_a_nullable <rtx_insn *> (uncast_insn);
  if (!cur)
    return NULL;

  cur = PREV_INSN (cur);
  if (cur != NULL && NONJUMP_INSN_P (cur))
    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (cur)))
      cur = seq->insn (seq->len () - 1);

  return cur;
}
/* Return the next insn after INSN that is not a NOTE. This routine does not
look inside SEQUENCEs. */
rtx_insn *
-next_nonnote_insn (rtx insn)
+next_nonnote_insn (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
while (insn)
{
insn = NEXT_INSN (insn);
@@ -3245,7 +3247,7 @@ next_nonnote_insn (rtx insn)
break;
}
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
/* Return the next insn after INSN that is not a NOTE, but stop the
@@ -3253,8 +3255,10 @@ next_nonnote_insn (rtx insn)
look inside SEQUENCEs. */
rtx_insn *
-next_nonnote_insn_bb (rtx insn)
+next_nonnote_insn_bb (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = NEXT_INSN (insn);
@@ -3264,15 +3268,17 @@ next_nonnote_insn_bb (rtx insn)
return NULL;
}
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
/* Return the previous insn before INSN that is not a NOTE. This routine does
not look inside SEQUENCEs. */
rtx_insn *
-prev_nonnote_insn (rtx insn)
+prev_nonnote_insn (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = PREV_INSN (insn);
@@ -3280,7 +3286,7 @@ prev_nonnote_insn (rtx insn)
break;
}
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
/* Return the previous insn before INSN that is not a NOTE, but stop
@@ -3288,8 +3294,10 @@ prev_nonnote_insn (rtx insn)
not look inside SEQUENCEs. */
rtx_insn *
-prev_nonnote_insn_bb (rtx insn)
+prev_nonnote_insn_bb (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = PREV_INSN (insn);
@@ -3299,15 +3307,17 @@ prev_nonnote_insn_bb (rtx insn)
return NULL;
}
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
/* Return the next insn after INSN that is not a DEBUG_INSN. This
routine does not look inside SEQUENCEs. */
rtx_insn *
-next_nondebug_insn (rtx insn)
+next_nondebug_insn (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = NEXT_INSN (insn);
@@ -3315,15 +3325,17 @@ next_nondebug_insn (rtx insn)
break;
}
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
/* Return the previous insn before INSN that is not a DEBUG_INSN.
This routine does not look inside SEQUENCEs. */
rtx_insn *
-prev_nondebug_insn (rtx insn)
+prev_nondebug_insn (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = PREV_INSN (insn);
@@ -3331,15 +3343,17 @@ prev_nondebug_insn (rtx insn)
break;
}
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
This routine does not look inside SEQUENCEs. */
rtx_insn *
-next_nonnote_nondebug_insn (rtx insn)
+next_nonnote_nondebug_insn (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = NEXT_INSN (insn);
@@ -3347,15 +3361,17 @@ next_nonnote_nondebug_insn (rtx insn)
break;
}
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
This routine does not look inside SEQUENCEs. */
rtx_insn *
-prev_nonnote_nondebug_insn (rtx insn)
+prev_nonnote_nondebug_insn (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = PREV_INSN (insn);
@@ -3363,7 +3379,7 @@ prev_nonnote_nondebug_insn (rtx insn)
break;
}
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
@@ -3371,8 +3387,10 @@ prev_nonnote_nondebug_insn (rtx insn)
SEQUENCEs. */
rtx_insn *
-next_real_insn (rtx insn)
+next_real_insn (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = NEXT_INSN (insn);
@@ -3380,7 +3398,7 @@ next_real_insn (rtx insn)
break;
}
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
@@ -3388,8 +3406,10 @@ next_real_insn (rtx insn)
SEQUENCEs. */
rtx_insn *
-prev_real_insn (rtx insn)
+prev_real_insn (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = PREV_INSN (insn);
@@ -3397,7 +3417,7 @@ prev_real_insn (rtx insn)
break;
}
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
/* Return the last CALL_INSN in the current list, or 0 if there is none.
@@ -3432,8 +3452,10 @@ active_insn_p (const_rtx insn)
}
rtx_insn *
-next_active_insn (rtx insn)
+next_active_insn (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = NEXT_INSN (insn);
@@ -3441,7 +3463,7 @@ next_active_insn (rtx insn)
break;
}
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
/* Find the last insn before INSN that really does something. This routine
@@ -3449,8 +3471,10 @@ next_active_insn (rtx insn)
standalone USE and CLOBBER insn. */
rtx_insn *
-prev_active_insn (rtx insn)
+prev_active_insn (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
while (insn)
{
insn = PREV_INSN (insn);
@@ -3458,7 +3482,7 @@ prev_active_insn (rtx insn)
break;
}
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
#ifdef HAVE_cc0
@@ -3472,8 +3496,10 @@ prev_active_insn (rtx insn)
Return 0 if we can't find the insn. */
rtx_insn *
-next_cc0_user (rtx insn)
+next_cc0_user (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
if (note)
@@ -3481,10 +3507,10 @@ next_cc0_user (rtx insn)
insn = next_nonnote_insn (insn);
if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
+ insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
return 0;
}
@@ -3493,8 +3519,10 @@ next_cc0_user (rtx insn)
note, it is the previous insn. */
rtx_insn *
-prev_cc0_setter (rtx insn)
+prev_cc0_setter (rtx uncast_insn)
{
+ rtx_insn *insn = as_a_nullable <rtx_insn *> (uncast_insn);
+
rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
if (note)
@@ -3503,7 +3531,7 @@ prev_cc0_setter (rtx insn)
insn = prev_nonnote_insn (insn);
gcc_assert (sets_cc0_p (PATTERN (insn)));
- return as_a_nullable <rtx_insn *> (insn);
+ return insn;
}
#endif
@@ -3573,27 +3601,29 @@ mark_label_nuses (rtx x)
returns TRIAL. If the insn to be returned can be split, it will be. */
rtx_insn *
-try_split (rtx pat, rtx trial, int last)
+try_split (rtx pat, rtx uncast_trial, int last)
{
+ rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
rtx_insn *before = PREV_INSN (trial);
rtx_insn *after = NEXT_INSN (trial);
int has_barrier = 0;
- rtx note, seq, tem;
+ rtx note;
+ rtx_insn *seq, *tem;
int probability;
- rtx insn_last, insn;
+ rtx_insn *insn_last, *insn;
int njumps = 0;
rtx call_insn = NULL_RTX;
/* We're not good at redistributing frame information. */
if (RTX_FRAME_RELATED_P (trial))
- return as_a <rtx_insn *> (trial);
+ return trial;
if (any_condjump_p (trial)
&& (note = find_reg_note (trial, REG_BR_PROB, 0)))
split_branch_probability = XINT (note, 0);
probability = split_branch_probability;
- seq = split_insns (pat, trial);
+ seq = as_a_nullable <rtx_insn *> (split_insns (pat, trial));
split_branch_probability = -1;
@@ -3606,7 +3636,7 @@ try_split (rtx pat, rtx trial, int last)
}
if (!seq)
- return as_a <rtx_insn *> (trial);
+ return trial;
/* Avoid infinite loop if any insn of the result matches
the original pattern. */
@@ -3615,7 +3645,7 @@ try_split (rtx pat, rtx trial, int last)
{
if (INSN_P (insn_last)
&& rtx_equal_p (PATTERN (insn_last), pat))
- return as_a <rtx_insn *> (trial);
+ return trial;
if (!NEXT_INSN (insn_last))
break;
insn_last = NEXT_INSN (insn_last);
@@ -3655,7 +3685,8 @@ try_split (rtx pat, rtx trial, int last)
for (insn = insn_last; insn ; insn = PREV_INSN (insn))
if (CALL_P (insn))
{
- rtx next, *p;
+ rtx_insn *next;
+ rtx *p;
gcc_assert (call_insn == NULL_RTX);
call_insn = insn;
@@ -4089,8 +4120,9 @@ set_insn_deleted (rtx insn)
To really delete an insn and related DF information, use delete_insn. */
void
-remove_insn (rtx insn)
+remove_insn (rtx uncast_insn)
{
+ rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
rtx_insn *next = NEXT_INSN (insn);
rtx_insn *prev = PREV_INSN (insn);
basic_block bb;
@@ -4626,9 +4658,10 @@ emit_note_before (enum insn_note subtype, rtx uncast_before)
MAKE_RAW indicates how to turn PATTERN into a real insn. */
static rtx_insn *
-emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
+emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
rtx_insn *(*make_raw) (rtx))
{
+ rtx_insn *after = as_a_nullable <rtx_insn *> (uncast_after);
rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
if (pattern == NULL_RTX || !loc)
@@ -4651,10 +4684,11 @@ emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
any DEBUG_INSNs. */
static rtx_insn *
-emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
+emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
rtx_insn *(*make_raw) (rtx))
{
- rtx prev = after;
+ rtx_insn *after = as_a_nullable <rtx_insn *> (uncast_after);
+ rtx_insn *prev = after;
if (skip_debug_insns)
while (DEBUG_INSN_P (prev))
@@ -4729,16 +4763,17 @@ emit_debug_insn_after (rtx pattern, rtx after)
CALL_INSN, etc. */
static rtx_insn *
-emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
+emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
rtx_insn *(*make_raw) (rtx))
{
- rtx first = PREV_INSN (before);
- rtx last = emit_pattern_before_noloc (pattern, before,
- insnp ? before : NULL_RTX,
- NULL, make_raw);
+ rtx_insn *before = as_a <rtx_insn *> (uncast_before);
+ rtx_insn *first = PREV_INSN (before);
+ rtx_insn *last = emit_pattern_before_noloc (pattern, before,
+ insnp ? before : NULL_RTX,
+ NULL, make_raw);
if (pattern == NULL_RTX || !loc)
- return as_a_nullable <rtx_insn *> (last);
+ return last;
if (!first)
first = get_insns ();
@@ -4752,7 +4787,7 @@ emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
break;
first = NEXT_INSN (first);
}
- return as_a_nullable <rtx_insn *> (last);
+ return last;
}
/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
@@ -4761,10 +4796,11 @@ emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
static rtx_insn *
-emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
+emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
bool insnp, rtx_insn *(*make_raw) (rtx))
{
- rtx next = before;
+ rtx_insn *before = as_a_nullable <rtx_insn *> (uncast_before);
+ rtx_insn *next = before;
if (skip_debug_insns)
while (DEBUG_INSN_P (next))
@@ -1753,9 +1753,10 @@ insn_could_throw_p (const_rtx insn)
to look for a note, or the note itself. */
void
-copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
+copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
{
- rtx insn, note = note_or_insn;
+ rtx_insn *insn;
+ rtx note = note_or_insn;
if (INSN_P (note_or_insn))
{
@@ -1774,9 +1775,10 @@ copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
/* Likewise, but iterate backward. */
void
-copy_reg_eh_region_note_backward (rtx note_or_insn, rtx last, rtx first)
+copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
{
- rtx insn, note = note_or_insn;
+ rtx_insn *insn;
+ rtx note = note_or_insn;
if (INSN_P (note_or_insn))
{
@@ -3931,11 +3931,12 @@ find_args_size_adjust (rtx insn)
}
int
-fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
+fixup_args_size_notes (rtx prev, rtx uncast_last, int end_args_size)
{
+ rtx_insn *last = as_a_nullable <rtx_insn *> (uncast_last);
int args_size = end_args_size;
bool saw_unknown = false;
- rtx insn;
+ rtx_insn *insn;
for (insn = last; insn != prev; insn = PREV_INSN (insn))
{
@@ -120,7 +120,7 @@ static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
can always export `prologue_epilogue_contains'. */
-static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
+static void record_insns (rtx_insn *, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
@@ -4952,9 +4952,9 @@ do_warn_unused_parameter (tree fn)
/* Set the location of the insn chain starting at INSN to LOC. */
static void
-set_insn_locations (rtx insn, int loc)
+set_insn_locations (rtx_insn *insn, int loc)
{
- while (insn != NULL_RTX)
+ while (insn != NULL)
{
if (INSN_P (insn))
INSN_LOCATION (insn) = loc;
@@ -5254,9 +5254,9 @@ get_arg_pointer_save_area (void)
for the first time. */
static void
-record_insns (rtx insns, rtx end, htab_t *hashp)
+record_insns (rtx_insn *insns, rtx end, htab_t *hashp)
{
- rtx tmp;
+ rtx_insn *tmp;
htab_t hash = *hashp;
if (hash == NULL)
@@ -5394,8 +5394,9 @@ set_return_jump_label (rtx returnjump)
#if defined (HAVE_return) || defined (HAVE_simple_return)
/* Return true if there are any active insns between HEAD and TAIL. */
bool
-active_insn_between (rtx head, rtx tail)
+active_insn_between (rtx head, rtx uncast_tail)
{
+ rtx_insn *tail = as_a_nullable <rtx_insn *> (uncast_tail);
while (tail)
{
if (active_insn_p (tail))
@@ -5585,9 +5586,8 @@ thread_prologue_and_epilogue_insns (void)
bitmap_head bb_flags;
#endif
rtx_insn *returnjump;
- rtx seq ATTRIBUTE_UNUSED;
rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
- rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
+ rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
edge_iterator ei;
@@ -5596,7 +5596,6 @@ thread_prologue_and_epilogue_insns (void)
rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
inserted = false;
- seq = NULL_RTX;
epilogue_end = NULL;
returnjump = NULL;
@@ -5607,7 +5606,7 @@ thread_prologue_and_epilogue_insns (void)
entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
orig_entry_edge = entry_edge;
- split_prologue_seq = NULL_RTX;
+ split_prologue_seq = NULL;
if (flag_split_stack
&& (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
== NULL))
@@ -5627,12 +5626,12 @@ thread_prologue_and_epilogue_insns (void)
#endif
}
- prologue_seq = NULL_RTX;
+ prologue_seq = NULL;
#ifdef HAVE_prologue
if (HAVE_prologue)
{
start_sequence ();
- seq = gen_prologue ();
+ rtx_insn *seq = as_a <rtx_insn *> (gen_prologue ());
emit_insn (seq);
/* Insert an explicit USE for the frame pointer
@@ -5769,7 +5768,7 @@ thread_prologue_and_epilogue_insns (void)
{
start_sequence ();
epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
- seq = gen_epilogue ();
+ rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
if (seq)
emit_jump_insn (seq);
@@ -5870,7 +5869,7 @@ epilogue_done:
start_sequence ();
emit_note (NOTE_INSN_EPILOGUE_BEG);
emit_insn (ep_seq);
- seq = get_insns ();
+ rtx_insn *seq = get_insns ();
end_sequence ();
/* Retain a map of the epilogue insns. Used in life analysis to
@@ -2156,7 +2156,7 @@ process_insert_insn (struct expr *expr)
static void
insert_insn_end_basic_block (struct expr *expr, basic_block bb)
{
- rtx insn = BB_END (bb);
+ rtx_insn *insn = BB_END (bb);
rtx_insn *new_insn;
rtx reg = expr->reaching_reg;
int regno = REGNO (reg);
@@ -2183,7 +2183,7 @@ insert_insn_end_basic_block (struct expr *expr, basic_block bb)
if cc0 isn't set. */
rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
if (note)
- insn = XEXP (note, 0);
+ insn = as_a_nullable <rtx_insn *> (XEXP (note, 0));
else
{
rtx_insn *maybe_cc0_setter = prev_nonnote_insn (insn);
@@ -1037,7 +1037,7 @@ static void
initiate_bb_reg_pressure_info (basic_block bb)
{
unsigned int i ATTRIBUTE_UNUSED;
- rtx insn;
+ rtx_insn *insn;
if (current_nr_blocks > 1)
FOR_BB_INSNS (bb, insn)
@@ -1604,7 +1604,7 @@ priority (rtx_insn *insn)
this_priority = insn_cost (insn);
else
{
- rtx prev_first, twin;
+ rtx_insn *prev_first, *twin;
basic_block rec;
/* For recovery check instructions we calculate priority slightly
@@ -2997,7 +2997,7 @@ update_register_pressure (rtx_insn *insn)
meaning in sched-int.h::_haifa_insn_data) for all current BB insns
after insn AFTER. */
static void
-setup_insn_max_reg_pressure (rtx after, bool update_p)
+setup_insn_max_reg_pressure (rtx_insn *after, bool update_p)
{
int i, p;
bool eq_p;
@@ -3060,7 +3060,7 @@ update_reg_and_insn_max_reg_pressure (rtx_insn *insn)
insns starting after insn AFTER. Set up also max register pressure
for all insns of the basic block. */
void
-sched_setup_bb_reg_pressure_info (basic_block bb, rtx after)
+sched_setup_bb_reg_pressure_info (basic_block bb, rtx_insn *after)
{
gcc_assert (sched_pressure == SCHED_PRESSURE_WEIGHTED);
initiate_bb_reg_pressure_info (bb);
@@ -4780,7 +4780,7 @@ get_ebb_head_tail (basic_block beg, basic_block end,
/* Return nonzero if there are no real insns in the range [ HEAD, TAIL ]. */
int
-no_real_insns_p (const_rtx head, const_rtx tail)
+no_real_insns_p (const rtx_insn *head, const rtx_insn *tail)
{
while (head != NEXT_INSN (tail))
{
@@ -5923,7 +5923,7 @@ schedule_block (basic_block *target_bb, state_t init_state)
/* Head/tail info for this block. */
rtx_insn *prev_head = current_sched_info->prev_head;
- rtx next_tail = current_sched_info->next_tail;
+ rtx_insn *next_tail = current_sched_info->next_tail;
rtx_insn *head = NEXT_INSN (prev_head);
rtx_insn *tail = PREV_INSN (next_tail);
@@ -88,10 +88,11 @@ static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
static rtx_insn *first_active_insn (basic_block);
static rtx_insn *last_active_insn (basic_block, int);
-static rtx find_active_insn_before (basic_block, rtx);
-static rtx find_active_insn_after (basic_block, rtx);
+static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
+static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
static basic_block block_fallthru (basic_block);
-static int cond_exec_process_insns (ce_if_block *, rtx, rtx, rtx, int, int);
+static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
+ int);
static rtx cond_exec_get_condition (rtx);
static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
static int noce_operand_ok (const_rtx);
@@ -257,11 +258,11 @@ last_active_insn (basic_block bb, int skip_use_p)
/* Return the active insn before INSN inside basic block CURR_BB. */
-static rtx
-find_active_insn_before (basic_block curr_bb, rtx insn)
+static rtx_insn *
+find_active_insn_before (basic_block curr_bb, rtx_insn *insn)
{
if (!insn || insn == BB_HEAD (curr_bb))
- return NULL_RTX;
+ return NULL;
while ((insn = PREV_INSN (insn)) != NULL_RTX)
{
@@ -270,7 +271,7 @@ find_active_insn_before (basic_block curr_bb, rtx insn)
/* No other active insn all the way to the start of the basic block. */
if (insn == BB_HEAD (curr_bb))
- return NULL_RTX;
+ return NULL;
}
return insn;
@@ -278,11 +279,11 @@ find_active_insn_before (basic_block curr_bb, rtx insn)
/* Return the active insn after INSN inside basic block CURR_BB. */
-static rtx
-find_active_insn_after (basic_block curr_bb, rtx insn)
+static rtx_insn *
+find_active_insn_after (basic_block curr_bb, rtx_insn *insn)
{
if (!insn || insn == BB_END (curr_bb))
- return NULL_RTX;
+ return NULL;
while ((insn = NEXT_INSN (insn)) != NULL_RTX)
{
@@ -291,7 +292,7 @@ find_active_insn_after (basic_block curr_bb, rtx insn)
/* No other active insn all the way to the end of the basic block. */
if (insn == BB_END (curr_bb))
- return NULL_RTX;
+ return NULL;
}
return insn;
@@ -313,14 +314,14 @@ block_fallthru (basic_block bb)
static int
cond_exec_process_insns (ce_if_block *ce_info ATTRIBUTE_UNUSED,
- /* if block information */rtx start,
+ /* if block information */rtx_insn *start,
/* first insn to look at */rtx end,
/* last insn to look at */rtx test,
/* conditional execution test */int prob_val,
/* probability of branch taken. */int mod_ok)
{
int must_be_last = FALSE;
- rtx insn;
+ rtx_insn *insn;
rtx xtest;
rtx pattern;
@@ -445,10 +446,10 @@ cond_exec_process_if_block (ce_if_block * ce_info,
basic_block then_bb = ce_info->then_bb; /* THEN */
basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
- rtx then_start; /* first insn in THEN block */
- rtx then_end; /* last insn + 1 in THEN block */
- rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
- rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
+ rtx_insn *then_start; /* first insn in THEN block */
+ rtx_insn *then_end; /* last insn + 1 in THEN block */
+ rtx_insn *else_start = NULL; /* first insn in ELSE block or NULL */
+ rtx_insn *else_end = NULL; /* last insn + 1 in ELSE block */
int max; /* max # of insns to convert. */
int then_mod_ok; /* whether conditional mods are ok in THEN */
rtx true_expr; /* test for else block insns */
@@ -513,9 +514,9 @@ cond_exec_process_if_block (ce_if_block * ce_info,
&then_first_tail, &else_first_tail,
NULL);
if (then_first_tail == BB_HEAD (then_bb))
- then_start = then_end = NULL_RTX;
+ then_start = then_end = NULL;
if (else_first_tail == BB_HEAD (else_bb))
- else_start = else_end = NULL_RTX;
+ else_start = else_end = NULL;
if (n_matching > 0)
{
@@ -541,7 +542,7 @@ cond_exec_process_if_block (ce_if_block * ce_info,
if (n_matching > 0)
{
- rtx insn;
+ rtx_insn *insn;
/* We won't pass the insns in the head sequence to
cond_exec_process_insns, so we need to test them here
@@ -556,9 +557,9 @@ cond_exec_process_if_block (ce_if_block * ce_info,
}
if (then_last_head == then_end)
- then_start = then_end = NULL_RTX;
+ then_start = then_end = NULL;
if (else_last_head == else_end)
- else_start = else_end = NULL_RTX;
+ else_start = else_end = NULL;
if (n_matching > 0)
{
@@ -620,7 +621,7 @@ cond_exec_process_if_block (ce_if_block * ce_info,
do
{
- rtx start, end;
+ rtx_insn *start, *end;
rtx t, f;
enum rtx_code f_code;
@@ -722,7 +723,7 @@ cond_exec_process_if_block (ce_if_block * ce_info,
that the remaining one is executed first for both branches. */
if (then_first_tail)
{
- rtx from = then_first_tail;
+ rtx_insn *from = then_first_tail;
if (!INSN_P (from))
from = find_active_insn_after (then_bb, from);
delete_insn_chain (from, BB_END (then_bb), false);
@@ -2475,7 +2476,7 @@ noce_process_if_block (struct noce_if_info *if_info)
basic_block then_bb = if_info->then_bb; /* THEN */
basic_block else_bb = if_info->else_bb; /* ELSE or NULL */
basic_block join_bb = if_info->join_bb; /* JOIN */
- rtx jump = if_info->jump;
+ rtx_insn *jump = if_info->jump;
rtx cond = if_info->cond;
rtx_insn *insn_a, *insn_b;
rtx set_a, set_b;
@@ -3184,7 +3185,7 @@ merge_if_block (struct ce_if_block * ce_info)
if (EDGE_COUNT (then_bb->succs) == 0
&& EDGE_COUNT (combo_bb->succs) > 1)
{
- rtx end = NEXT_INSN (BB_END (then_bb));
+ rtx_insn *end = NEXT_INSN (BB_END (then_bb));
while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
end = NEXT_INSN (end);
@@ -3207,7 +3208,7 @@ merge_if_block (struct ce_if_block * ce_info)
if (EDGE_COUNT (else_bb->succs) == 0
&& EDGE_COUNT (combo_bb->succs) > 1)
{
- rtx end = NEXT_INSN (BB_END (else_bb));
+ rtx_insn *end = NEXT_INSN (BB_END (else_bb));
while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
end = NEXT_INSN (end);
@@ -3551,7 +3552,7 @@ cond_exec_find_if_block (struct ce_if_block * ce_info)
{
if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
{
- rtx last_insn = BB_END (then_bb);
+ rtx_insn *last_insn = BB_END (then_bb);
while (last_insn
&& NOTE_P (last_insn)
@@ -1242,8 +1242,9 @@ mark_jump_label_asm (rtx asmop, rtx insn)
subsequent cfg_cleanup pass to delete unreachable code if needed. */
rtx_insn *
-delete_related_insns (rtx insn)
+delete_related_insns (rtx uncast_insn)
{
+ rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
int was_code_label = (LABEL_P (insn));
rtx note;
rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);
@@ -1271,7 +1272,7 @@ delete_related_insns (rtx insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE
&& CALL_P (XVECEXP (PATTERN (insn), 0, 0))))
{
- rtx p;
+ rtx_insn *p;
for (p = next && INSN_DELETED_P (next) ? NEXT_INSN (next) : next;
p && NOTE_P (p);
@@ -4574,7 +4574,7 @@ inherit_reload_reg (bool def_p, int original_regno,
" Rejecting inheritance %d->%d "
"as it results in 2 or more insns:\n",
original_regno, REGNO (new_reg));
- dump_rtl_slim (lra_dump_file, new_insns, NULL_RTX, -1, 0);
+ dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
fprintf (lra_dump_file,
" >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
}
@@ -4836,7 +4836,7 @@ split_reg (bool before_p, int original_regno, rtx_insn *insn,
(lra_dump_file,
" Rejecting split %d->%d resulting in > 2 %s save insns:\n",
original_regno, REGNO (new_reg), call_save_p ? "call" : "");
- dump_rtl_slim (lra_dump_file, save, NULL_RTX, -1, 0);
+ dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
fprintf (lra_dump_file,
" ))))))))))))))))))))))))))))))))))))))))))))))))\n");
}
@@ -4852,7 +4852,7 @@ split_reg (bool before_p, int original_regno, rtx_insn *insn,
" Rejecting split %d->%d "
"resulting in > 2 %s restore insns:\n",
original_regno, REGNO (new_reg), call_save_p ? "call" : "");
- dump_rtl_slim (lra_dump_file, restore, NULL_RTX, -1, 0);
+ dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
fprintf (lra_dump_file,
" ))))))))))))))))))))))))))))))))))))))))))))))))\n");
}
@@ -1880,12 +1880,12 @@ lra_process_new_insns (rtx_insn *insn, rtx_insn *before, rtx_insn *after,
if (before != NULL_RTX)
{
fprintf (lra_dump_file," %s before:\n", title);
- dump_rtl_slim (lra_dump_file, before, NULL_RTX, -1, 0);
+ dump_rtl_slim (lra_dump_file, before, NULL, -1, 0);
}
if (after != NULL_RTX)
{
fprintf (lra_dump_file, " %s after:\n", title);
- dump_rtl_slim (lra_dump_file, after, NULL_RTX, -1, 0);
+ dump_rtl_slim (lra_dump_file, after, NULL, -1, 0);
}
fprintf (lra_dump_file, "\n");
}
@@ -212,7 +212,7 @@ static int compute_split_row (sbitmap, int, int, int, ddg_node_ptr);
static int sms_order_nodes (ddg_ptr, int, int *, int *);
static void set_node_sched_params (ddg_ptr);
static partial_schedule_ptr sms_schedule_by_order (ddg_ptr, int, int, int *);
-static void permute_partial_schedule (partial_schedule_ptr, rtx);
+static void permute_partial_schedule (partial_schedule_ptr, rtx_insn *);
static void generate_prolog_epilog (partial_schedule_ptr, struct loop *,
rtx, rtx);
static int calculate_stage_count (partial_schedule_ptr, int);
@@ -876,7 +876,7 @@ reset_sched_times (partial_schedule_ptr ps, int amount)
row ii-1, and position them right before LAST. This schedules
the insns of the loop kernel. */
static void
-permute_partial_schedule (partial_schedule_ptr ps, rtx last)
+permute_partial_schedule (partial_schedule_ptr ps, rtx_insn *last)
{
int ii = ps->ii;
int row;
@@ -177,9 +177,10 @@ optab_libfunc (optab optab, enum machine_mode mode)
try again, ensuring that TARGET is not one of the operands. */
static int
-add_equal_note (rtx insns, rtx target, enum rtx_code code, rtx op0, rtx op1)
+add_equal_note (rtx_insn *insns, rtx target, enum rtx_code code, rtx op0, rtx op1)
{
- rtx last_insn, set;
+ rtx_insn *last_insn;
+ rtx set;
rtx note;
gcc_assert (insns && INSN_P (insns) && NEXT_INSN (insns));
@@ -1502,8 +1503,9 @@ expand_binop_directly (enum machine_mode mode, optab binoptab,
/* If PAT is composed of more than one insn, try to add an appropriate
REG_EQUAL note to it. If we can't because TEMP conflicts with an
operand, call expand_binop again, this time without a target. */
- if (INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX
- && ! add_equal_note (pat, ops[0].value, optab_to_code (binoptab),
+ if (INSN_P (pat) && NEXT_INSN (as_a <rtx_insn *> (pat)) != NULL_RTX
+ && ! add_equal_note (as_a <rtx_insn *> (pat), ops[0].value,
+ optab_to_code (binoptab),
ops[1].value, ops[2].value))
{
delete_insns_since (last);
@@ -3025,8 +3027,9 @@ expand_unop_direct (enum machine_mode mode, optab unoptab, rtx op0, rtx target,
pat = maybe_gen_insn (icode, 2, ops);
if (pat)
{
- if (INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX
- && ! add_equal_note (pat, ops[0].value, optab_to_code (unoptab),
+ if (INSN_P (pat) && NEXT_INSN (as_a <rtx_insn *> (pat)) != NULL_RTX
+ && ! add_equal_note (as_a <rtx_insn *> (pat), ops[0].value,
+ optab_to_code (unoptab),
ops[1].value, NULL_RTX))
{
delete_insns_since (last);
@@ -3826,8 +3829,10 @@ maybe_emit_unop_insn (enum insn_code icode, rtx target, rtx op0,
if (!pat)
return false;
- if (INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX && code != UNKNOWN)
- add_equal_note (pat, ops[0].value, code, ops[1].value, NULL_RTX);
+ if (INSN_P (pat) && NEXT_INSN (as_a <rtx_insn *> (pat)) != NULL_RTX
+ && code != UNKNOWN)
+ add_equal_note (as_a <rtx_insn *> (pat), ops[0].value, code, ops[1].value,
+ NULL_RTX);
emit_insn (pat);
@@ -3350,7 +3350,8 @@ static rtx
peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
{
int i;
- rtx last, eh_note, as_note, before_try, x;
+ rtx_insn *last, *before_try, *x;
+ rtx eh_note, as_note;
rtx old_insn, new_insn;
bool was_call = false;
@@ -213,7 +213,7 @@ static rtx_insn *delete_from_delay_slot (rtx_insn *);
static void delete_scheduled_jump (rtx);
static void note_delay_statistics (int, int);
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
-static rtx optimize_skip (rtx);
+static rtx_insn_list *optimize_skip (rtx_insn *);
#endif
static int get_jump_flags (rtx, rtx);
static int mostly_true_jump (rtx);
@@ -765,12 +765,12 @@ note_delay_statistics (int slots_filled, int index)
This should be expanded to skip over N insns, where N is the number
of delay slots required. */
-static rtx
-optimize_skip (rtx insn)
+static rtx_insn_list *
+optimize_skip (rtx_insn *insn)
{
- rtx trial = next_nonnote_insn (insn);
- rtx next_trial = next_active_insn (trial);
- rtx delay_list = 0;
+ rtx_insn *trial = next_nonnote_insn (insn);
+ rtx_insn *next_trial = next_active_insn (trial);
+ rtx_insn_list *delay_list = 0;
int flags;
flags = get_jump_flags (insn, JUMP_LABEL (insn));
@@ -803,7 +803,7 @@ optimize_skip (rtx insn)
return 0;
}
- delay_list = add_to_delay_list (trial, NULL_RTX);
+ delay_list = add_to_delay_list (trial, NULL);
next_trial = next_active_insn (trial);
update_block (trial, trial);
delete_related_insns (trial);
@@ -79,10 +79,10 @@ static HARD_REG_SET pending_dead_regs;
static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx, int);
-static rtx next_insn_no_annul (rtx);
-static rtx find_dead_or_set_registers (rtx, struct resources*,
- rtx*, int, struct resources,
- struct resources);
+static rtx_insn *next_insn_no_annul (rtx_insn *);
+static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
+ rtx_insn **, int, struct resources,
+ struct resources);
/* Utility function called from mark_target_live_regs via note_stores.
It deadens any CLOBBERed registers and livens any SET registers. */
@@ -163,8 +163,8 @@ find_basic_block (rtx insn, int search_limit)
/* Similar to next_insn, but ignores insns in the delay slots of
an annulled branch. */
-static rtx
-next_insn_no_annul (rtx insn)
+static rtx_insn *
+next_insn_no_annul (rtx_insn *insn)
{
if (insn)
{
@@ -187,7 +187,7 @@ next_insn_no_annul (rtx insn)
insn = NEXT_INSN (insn);
if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
+ insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
}
return insn;
@@ -308,7 +308,7 @@ mark_referenced_resources (rtx x, struct resources *res,
However, we may have moved some of the parameter loading insns
into the delay slot of this CALL. If so, the USE's for them
don't count and should be skipped. */
- rtx_insn *insn = PREV_INSN (x);
+ rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
rtx_sequence *sequence = 0;
int seq_size = 0;
int i;
@@ -420,19 +420,19 @@ mark_referenced_resources (rtx x, struct resources *res,
Stop after passing a few conditional jumps, and/or a small
number of unconditional branches. */
-static rtx
-find_dead_or_set_registers (rtx target, struct resources *res,
- rtx *jump_target, int jump_count,
+static rtx_insn *
+find_dead_or_set_registers (rtx_insn *target, struct resources *res,
+ rtx_insn **jump_target, int jump_count,
struct resources set, struct resources needed)
{
HARD_REG_SET scratch;
- rtx insn, next;
- rtx jump_insn = 0;
+ rtx_insn *insn, *next;
+ rtx_insn *jump_insn = 0;
int i;
for (insn = target; insn; insn = next)
{
- rtx this_jump_insn = insn;
+ rtx_insn *this_jump_insn = insn;
next = NEXT_INSN (insn);
@@ -480,7 +480,7 @@ find_dead_or_set_registers (rtx target, struct resources *res,
of a call, so search for a JUMP_INSN in any position. */
for (i = 0; i < seq->len (); i++)
{
- this_jump_insn = seq->element (i);
+ this_jump_insn = seq->insn (i);
if (JUMP_P (this_jump_insn))
break;
}
@@ -497,14 +497,14 @@ find_dead_or_set_registers (rtx target, struct resources *res,
if (any_uncondjump_p (this_jump_insn)
|| ANY_RETURN_P (PATTERN (this_jump_insn)))
{
- next = JUMP_LABEL (this_jump_insn);
+ next = JUMP_LABEL_AS_INSN (this_jump_insn);
if (ANY_RETURN_P (next))
- next = NULL_RTX;
+ next = NULL;
if (jump_insn == 0)
{
jump_insn = insn;
if (jump_target)
- *jump_target = JUMP_LABEL (this_jump_insn);
+ *jump_target = JUMP_LABEL_AS_INSN (this_jump_insn);
}
}
else if (any_condjump_p (this_jump_insn))
@@ -569,7 +569,7 @@ find_dead_or_set_registers (rtx target, struct resources *res,
AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
if (!ANY_RETURN_P (JUMP_LABEL (this_jump_insn)))
- find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
+ find_dead_or_set_registers (JUMP_LABEL_AS_INSN (this_jump_insn),
&target_res, 0, jump_count,
target_set, needed);
find_dead_or_set_registers (next,
@@ -880,14 +880,14 @@ return_insn_p (const_rtx insn)
init_resource_info () was invoked before we are called. */
void
-mark_target_live_regs (rtx insns, rtx target, struct resources *res)
+mark_target_live_regs (rtx_insn *insns, rtx_insn *target, struct resources *res)
{
int b = -1;
unsigned int i;
struct target_info *tinfo = NULL;
- rtx insn;
+ rtx_insn *insn;
rtx jump_insn = 0;
- rtx jump_target;
+ rtx_insn *jump_target;
HARD_REG_SET scratch;
struct resources set, needed;
@@ -965,7 +965,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
if (b != -1)
{
regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
- rtx start_insn, stop_insn;
+ rtx_insn *start_insn, *stop_insn;
/* Compute hard regs live at start of block. */
REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
@@ -978,7 +978,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
if (NONJUMP_INSN_P (start_insn)
&& GET_CODE (PATTERN (start_insn)) == SEQUENCE)
- start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
+ start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);
if (NONJUMP_INSN_P (stop_insn)
&& GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
@@ -1122,7 +1122,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
if (jump_insn)
{
struct resources new_resources;
- rtx stop_insn = next_active_insn (jump_insn);
+ rtx_insn *stop_insn = next_active_insn (jump_insn);
if (!ANY_RETURN_P (jump_target))
jump_target = next_active_insn (jump_target);
@@ -44,7 +44,7 @@ enum mark_resource_type
MARK_SRC_DEST_CALL = 1
};
-extern void mark_target_live_regs (rtx, rtx, struct resources *);
+extern void mark_target_live_regs (rtx_insn *, rtx_insn *, struct resources *);
extern void mark_set_resources (rtx, struct resources *, int,
enum mark_resource_type);
extern void mark_referenced_resources (rtx, struct resources *, bool);
@@ -2782,8 +2782,8 @@ extern bool can_throw_external (const_rtx);
extern bool insn_could_throw_p (const_rtx);
extern bool insn_nothrow_p (const_rtx);
extern bool can_nonlocal_goto (const_rtx);
-extern void copy_reg_eh_region_note_forward (rtx, rtx, rtx);
-extern void copy_reg_eh_region_note_backward (rtx, rtx, rtx);
+extern void copy_reg_eh_region_note_forward (rtx, rtx_insn *, rtx);
+extern void copy_reg_eh_region_note_backward (rtx, rtx_insn *, rtx);
extern int inequality_comparisons_p (const_rtx);
extern rtx replace_rtx (rtx, rtx, rtx);
extern int replace_label (rtx *, void *);
@@ -3268,7 +3268,7 @@ extern void pop_topmost_sequence (void);
extern void set_new_first_and_last_insn (rtx_insn *, rtx_insn *);
extern unsigned int unshare_all_rtl (void);
extern void unshare_all_rtl_again (rtx_insn *);
-extern void unshare_all_rtl_in_chain (rtx);
+extern void unshare_all_rtl_in_chain (rtx_insn *);
extern void verify_rtl_sharing (void);
extern void add_insn (rtx_insn *);
extern void add_insn_before (rtx, rtx, basic_block);
@@ -3327,7 +3327,8 @@ extern void print_inline_rtx (FILE *, const_rtx, int);
by the scheduler anymore but for all "slim" RTL dumping. */
extern void dump_value_slim (FILE *, const_rtx, int);
extern void dump_insn_slim (FILE *, const_rtx);
-extern void dump_rtl_slim (FILE *, const_rtx, const_rtx, int, int);
+extern void dump_rtl_slim (FILE *, const rtx_insn *, const rtx_insn *,
+ int, int);
extern void print_value (pretty_printer *, const_rtx, int);
extern void print_pattern (pretty_printer *, const_rtx, int);
extern void print_insn (pretty_printer *, const_rtx, int);
@@ -3840,10 +3840,10 @@ delete_dep_nodes_in_back_deps (rtx insn, bool resolved_p)
/* Delete (RESOLVED_P) dependencies between HEAD and TAIL together with
deps_lists. */
void
-sched_free_deps (rtx head, rtx tail, bool resolved_p)
+sched_free_deps (rtx_insn *head, rtx_insn *tail, bool resolved_p)
{
- rtx insn;
- rtx next_tail = NEXT_INSN (tail);
+ rtx_insn *insn;
+ rtx_insn *next_tail = NEXT_INSN (tail);
/* We make two passes since some insns may be scheduled before their
dependencies are resolved. */
@@ -116,8 +116,8 @@ static void
init_ready_list (void)
{
int n = 0;
- rtx prev_head = current_sched_info->prev_head;
- rtx next_tail = current_sched_info->next_tail;
+ rtx_insn *prev_head = current_sched_info->prev_head;
+ rtx_insn *next_tail = current_sched_info->next_tail;
rtx_insn *insn;
sched_rgn_n_insns = 0;
@@ -189,7 +189,7 @@ begin_move_insn (rtx_insn *insn, rtx_insn *last)
else
{
/* Create an empty unreachable block after the INSN. */
- rtx next = NEXT_INSN (insn);
+ rtx_insn *next = NEXT_INSN (insn);
if (next && BARRIER_P (next))
next = NEXT_INSN (next);
bb = create_basic_block (next, NULL_RTX, last_bb);
@@ -1328,7 +1328,7 @@ extern void deps_start_bb (struct deps_desc *, rtx);
extern enum reg_note ds_to_dt (ds_t);
extern bool deps_pools_are_empty_p (void);
-extern void sched_free_deps (rtx, rtx, bool);
+extern void sched_free_deps (rtx_insn *, rtx_insn *, bool);
extern void extend_dependency_caches (int, bool);
extern void debug_ds (ds_t);
@@ -1342,14 +1342,14 @@ extern void free_global_sched_pressure_data (void);
extern int haifa_classify_insn (const_rtx);
extern void get_ebb_head_tail (basic_block, basic_block,
rtx_insn **, rtx_insn **);
-extern int no_real_insns_p (const_rtx, const_rtx);
+extern int no_real_insns_p (const rtx_insn *, const rtx_insn *);
extern int insn_cost (rtx_insn *);
extern int dep_cost_1 (dep_t, dw_t);
extern int dep_cost (dep_t);
extern int set_priorities (rtx_insn *, rtx_insn *);
-extern void sched_setup_bb_reg_pressure_info (basic_block, rtx);
+extern void sched_setup_bb_reg_pressure_info (basic_block, rtx_insn *);
extern bool schedule_block (basic_block *, state_t);
extern int cycle_issued_insns;
@@ -2107,8 +2107,8 @@ schedule_more_p (void)
static void
init_ready_list (void)
{
- rtx prev_head = current_sched_info->prev_head;
- rtx next_tail = current_sched_info->next_tail;
+ rtx_insn *prev_head = current_sched_info->prev_head;
+ rtx_insn *next_tail = current_sched_info->next_tail;
int bb_src;
rtx_insn *insn;
@@ -807,14 +807,14 @@ dump_insn_slim (FILE *f, const_rtx x)
If COUNT < 0 it will stop only at LAST or NULL rtx. */
void
-dump_rtl_slim (FILE *f, const_rtx first, const_rtx last,
+dump_rtl_slim (FILE *f, const rtx_insn *first, const rtx_insn *last,
int count, int flags ATTRIBUTE_UNUSED)
{
- const_rtx insn, tail;
+ const rtx_insn *insn, *tail;
pretty_printer rtl_slim_pp;
rtl_slim_pp.buffer->stream = f;
- tail = last ? NEXT_INSN (last) : NULL_RTX;
+ tail = last ? NEXT_INSN (last) : NULL;
for (insn = first;
(insn != NULL) && (insn != tail) && (count != 0);
insn = NEXT_INSN (insn))
@@ -833,7 +833,7 @@ dump_rtl_slim (FILE *f, const_rtx first, const_rtx last,
void
rtl_dump_bb_for_graph (pretty_printer *pp, basic_block bb)
{
- rtx insn;
+ rtx_insn *insn;
bool first = true;
/* TODO: inter-bb stuff. */
@@ -873,9 +873,11 @@ debug_insn_slim (const_rtx x)
}
/* Same as above, but using dump_rtl_slim. */
-extern void debug_rtl_slim (FILE *, const_rtx, const_rtx, int, int);
+extern void debug_rtl_slim (FILE *, const rtx_insn *, const rtx_insn *,
+ int, int);
DEBUG_FUNCTION void
-debug_rtl_slim (const_rtx first, const_rtx last, int count, int flags)
+debug_rtl_slim (const rtx_insn *first, const rtx_insn *last, int count,
+ int flags)
{
dump_rtl_slim (stderr, first, last, count, flags);
}
@@ -428,13 +428,13 @@ dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx_insn *before,
void
try_shrink_wrapping (edge *entry_edge, edge orig_entry_edge,
- bitmap_head *bb_flags, rtx prologue_seq)
+ bitmap_head *bb_flags, rtx_insn *prologue_seq)
{
edge e;
edge_iterator ei;
bool nonempty_prologue = false;
unsigned max_grow_size;
- rtx seq;
+ rtx_insn *seq;
for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
@@ -449,7 +449,7 @@ try_shrink_wrapping (edge *entry_edge, edge orig_entry_edge,
{
HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
struct hard_reg_set_container set_up_by_prologue;
- rtx p_insn;
+ rtx_insn *p_insn;
vec<basic_block> vec;
basic_block bb;
bitmap_head bb_antic_flags;
@@ -831,7 +831,7 @@ get_unconverted_simple_return (edge exit_fallthru_edge, bitmap_head bb_flags,
void
convert_to_simple_return (edge entry_edge, edge orig_entry_edge,
- bitmap_head bb_flags, rtx returnjump,
+ bitmap_head bb_flags, rtx_insn *returnjump,
vec<edge> unconverted_simple_returns)
{
edge e;
@@ -40,11 +40,12 @@ extern void dup_block_and_redirect (basic_block bb, basic_block copy_bb,
rtx_insn *before,
bitmap_head *need_prologue);
extern void try_shrink_wrapping (edge *entry_edge, edge orig_entry_edge,
- bitmap_head *bb_flags, rtx prologue_seq);
+ bitmap_head *bb_flags, rtx_insn *prologue_seq);
extern edge get_unconverted_simple_return (edge, bitmap_head,
vec<edge> *, rtx_insn **);
extern void convert_to_simple_return (edge entry_edge, edge orig_entry_edge,
- bitmap_head bb_flags, rtx returnjump,
+ bitmap_head bb_flags,
+ rtx_insn *returnjump,
vec<edge> unconverted_simple_returns);
#endif
@@ -177,7 +177,7 @@ propagate_for_debug_subst (rtx from, const_rtx old_rtx, void *data)
of THIS_BASIC_BLOCK. */
void
-propagate_for_debug (rtx_insn *insn, rtx last, rtx dest, rtx src,
+propagate_for_debug (rtx_insn *insn, rtx_insn *last, rtx dest, rtx src,
basic_block this_basic_block)
{
rtx_insn *next, *end = NEXT_INSN (BB_END (this_basic_block));
@@ -149,7 +149,7 @@ extern int dead_debug_insert_temp (struct dead_debug_local *,
unsigned int uregno, rtx insn,
enum debug_temp_where);
-extern void propagate_for_debug (rtx_insn *, rtx, rtx, rtx, basic_block);
+extern void propagate_for_debug (rtx_insn *, rtx_insn *, rtx, rtx, basic_block);
#endif /* GCC_VALTRACK_H */