===================================================================
@@ -2522,8 +2522,7 @@ scan_insn (bb_info_t bb_info, rtx insn)
/* Cselib clears the table for this case, so we have to essentially
do the same. */
if (NONJUMP_INSN_P (insn)
- && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
- && MEM_VOLATILE_P (PATTERN (insn)))
+ && volatile_insn_p (PATTERN (insn)))
{
add_wild_read (bb_info);
insn_info->cannot_delete = true;
===================================================================
@@ -2083,8 +2083,8 @@ remove_node_from_expr_list (const_rtx no
/* Nonzero if X contains any volatile instructions. These are instructions
which may cause unpredictable machine state instructions, and thus no
- instructions should be moved or combined across them. This includes
- only volatile asms and UNSPEC_VOLATILE instructions. */
+ instructions or register uses should be moved or combined across them.
+ This includes only volatile asms and UNSPEC_VOLATILE instructions. */
int
volatile_insn_p (const_rtx x)
===================================================================
@@ -963,7 +963,8 @@ expand_builtin_setjmp_receiver (rtx rece
/* We must not allow the code we just generated to be reordered by
scheduling. Specifically, the update of the frame pointer must
- happen immediately, not later. */
+ happen immediately, not later. Similarly, we must block
+ (frame-related) register values from being used across this code. */
emit_insn (gen_blockage ());
}
===================================================================
@@ -2607,13 +2607,12 @@ cselib_process_insn (rtx insn)
cselib_current_insn = insn;
- /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
+ /* Forget everything at a CODE_LABEL, a volatile insn, or a setjmp. */
if (LABEL_P (insn)
|| (CALL_P (insn)
&& find_reg_note (insn, REG_SETJMP, NULL))
|| (NONJUMP_INSN_P (insn)
- && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
- && MEM_VOLATILE_P (PATTERN (insn))))
+ && volatile_insn_p (PATTERN (insn))))
{
cselib_reset_table (next_uid);
cselib_current_insn = NULL_RTX;
===================================================================
@@ -5660,10 +5660,9 @@ cse_insn (rtx insn)
invalidate (XEXP (dest, 0), GET_MODE (dest));
}
- /* A volatile ASM invalidates everything. */
+ /* A volatile ASM or an UNSPEC_VOLATILE invalidates everything. */
if (NONJUMP_INSN_P (insn)
- && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
- && MEM_VOLATILE_P (PATTERN (insn)))
+ && volatile_insn_p (PATTERN (insn)))
flush_hash_table ();
/* Don't cse over a call to setjmp; on some machines (eg VAX)
===================================================================
@@ -364,8 +364,8 @@ get_reg_attrs (tree decl, int offset)
#if !HAVE_blockage
-/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
- across this insn. */
+/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
+ and to block register equivalences from being seen across this insn. */
rtx
gen_blockage (void)
===================================================================
@@ -5832,8 +5832,9 @@ the values of operands 1 and 2.
@item @samp{blockage}
This pattern defines a pseudo insn that prevents the instruction
-scheduler from moving instructions across the boundary defined by the
-blockage insn. Normally an UNSPEC_VOLATILE pattern.
+scheduler and other passes from moving instructions and using register
+equivalences across the boundary defined by the blockage insn.
+This needs to be an UNSPEC_VOLATILE pattern or a volatile ASM.
@cindex @code{memory_barrier} instruction pattern
@item @samp{memory_barrier}