===================================================================
@@ -0,0 +1,1 @@
+BOOT_CFLAGS := -Og $(filter-out -O%, $(BOOT_CFLAGS))
===================================================================
@@ -39,6 +39,12 @@ int optimize_debug
Variable
int optimize_fast
+; True if debug bind stmts and insns are allowed to have an effect on code
+; generation and be treated for code-generation purposes in a similar way
+; to function calls.
+Variable
+bool flag_tangible_debug = false
+
; True if this is the lto front end. This is used to disable gimple
; generation and lowering passes that are normally run on the output
; of a front end. These passes must be bypassed for lto since they
===================================================================
@@ -591,12 +591,7 @@ default_options_optimization (struct gcc
{
case OPT_O:
if (*opt->arg == '\0')
- {
- opts->x_optimize = 1;
- opts->x_optimize_size = 0;
- opts->x_optimize_fast = 0;
- opts->x_optimize_debug = 0;
- }
+ opts->x_optimize = 1;
else
{
const int optimize_val = integral_argument (opt->arg);
@@ -608,11 +603,12 @@ default_options_optimization (struct gcc
opts->x_optimize = optimize_val;
if ((unsigned int) opts->x_optimize > 255)
opts->x_optimize = 255;
- opts->x_optimize_size = 0;
- opts->x_optimize_fast = 0;
- opts->x_optimize_debug = 0;
}
}
+ opts->x_optimize_size = 0;
+ opts->x_optimize_fast = 0;
+ opts->x_optimize_debug = 0;
+ opts->x_flag_tangible_debug = 0;
break;
case OPT_Os:
@@ -622,6 +618,7 @@ default_options_optimization (struct gcc
opts->x_optimize = 2;
opts->x_optimize_fast = 0;
opts->x_optimize_debug = 0;
+ opts->x_flag_tangible_debug = 0;
break;
case OPT_Ofast:
@@ -630,6 +627,7 @@ default_options_optimization (struct gcc
opts->x_optimize = 3;
opts->x_optimize_fast = 1;
opts->x_optimize_debug = 0;
+ opts->x_flag_tangible_debug = 0;
break;
case OPT_Og:
@@ -638,6 +636,7 @@ default_options_optimization (struct gcc
opts->x_optimize = 1;
opts->x_optimize_fast = 0;
opts->x_optimize_debug = 1;
+ opts->x_flag_tangible_debug = 1;
break;
case OPT_fopenacc:
===================================================================
@@ -1537,8 +1537,12 @@ process_options (void)
/* We know which debug output will be used so we can set flag_var_tracking
and flag_var_tracking_uninit if the user has not specified them. */
- if (debug_info_level < DINFO_LEVEL_NORMAL
- || debug_hooks->var_location == do_nothing_debug_hooks.var_location)
+ if ((debug_info_level < DINFO_LEVEL_NORMAL
+ || debug_hooks->var_location == do_nothing_debug_hooks.var_location)
+ /* If we're treating debug binds as "tangible", we need to generate
+ them even if we're not going to emit debug info, so that we get
+ the same code with and without debugging enabled. */
+ && !flag_tangible_debug)
{
if (flag_var_tracking == 1
|| flag_var_tracking_uninit == 1)
===================================================================
@@ -1150,6 +1150,10 @@ #define MAY_HAVE_DEBUG_MARKER_STMTS debu
/* Nonzero if gimple_debug_bind_p() (and thus
gimple_debug_source_bind_p()) may possibly hold. */
#define MAY_HAVE_DEBUG_BIND_STMTS flag_var_tracking_assignments
+/* Nonzero if in addition, such debug bind statements are shadow
+ statements. */
+#define MAY_HAVE_SHADOW_DEBUG_BIND_STMTS \
+ (MAY_HAVE_DEBUG_BIND_STMTS && !flag_tangible_debug)
/* Nonzero if is_gimple_debug() may possibly hold. */
#define MAY_HAVE_DEBUG_STMTS \
(MAY_HAVE_DEBUG_MARKER_STMTS || MAY_HAVE_DEBUG_BIND_STMTS)
===================================================================
@@ -4842,6 +4842,17 @@ gimple_debug_nonbind_marker_p (const gim
return false;
}
+/* Return true if S participates in and has an effect on code generation. */
+
+static inline bool
+tangible_stmt_p (const gimple *s)
+{
+ return (!is_gimple_debug (s)
+ || (flag_tangible_debug
+ && (s->subcode == GIMPLE_DEBUG_BIND
+ || s->subcode == GIMPLE_DEBUG_SOURCE_BIND)));
+}
+
/* Return the line number for EXPR, or return -1 if we have no line
number information for it. */
static inline int
===================================================================
@@ -20,6 +20,7 @@ Software Foundation; either version 3, o
#include "config.h"
#include "system.h"
#include "coretypes.h"
+#include "options.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
===================================================================
@@ -846,6 +846,9 @@ #define NONDEBUG_INSN_P(X) (NONJUMP_INSN
#define MAY_HAVE_DEBUG_MARKER_INSNS debug_nonbind_markers_p
/* Nonzero if DEBUG_BIND_INSN_P may possibly hold. */
#define MAY_HAVE_DEBUG_BIND_INSNS flag_var_tracking_assignments
+/* Nonzero if in addition, such DEBUG_BIND_INSNs are shadow instructions. */
+#define MAY_HAVE_SHADOW_DEBUG_BIND_INSNS \
+ (MAY_HAVE_DEBUG_BIND_INSNS && !flag_tangible_debug)
/* Nonzero if DEBUG_INSN_P may possibly hold. */
#define MAY_HAVE_DEBUG_INSNS \
(MAY_HAVE_DEBUG_MARKER_INSNS || MAY_HAVE_DEBUG_BIND_INSNS)
@@ -1760,6 +1763,20 @@ #define DEBUG_IMPLICIT_PTR_DECL(RTX) XCT
/* PARM_DECL DEBUG_PARAMETER_REF references. */
#define DEBUG_PARAMETER_REF_DECL(RTX) XCTREE (RTX, 0, DEBUG_PARAMETER_REF)
+/* True if X is an insn that affects or takes part in code generation. */
+inline bool
+tangible_insn_p (const_rtx x)
+{
+ return NONDEBUG_INSN_P (x) || (DEBUG_BIND_INSN_P (x) && flag_tangible_debug);
+}
+
+/* True if X is an insn that must never affect code generation. */
+inline bool
+shadow_insn_p (const_rtx x)
+{
+ return DEBUG_INSN_P (x) && (!DEBUG_BIND_INSN_P (x) || !flag_tangible_debug);
+}
+
/* Codes that appear in the NOTE_KIND field for kinds of notes
that are not line numbers. These codes are all negative.
===================================================================
@@ -1043,6 +1043,11 @@ make_node (enum tree_code code MEM_STAT_
enum tree_code_class type = TREE_CODE_CLASS (code);
size_t length = tree_code_size (code);
+ /* We must never create DEBUG_EXPR_DECLs when debug stmts are tangible.
+ If we want to create temporaries, we should do so using executable
+ code. */
+ gcc_checking_assert (code != DEBUG_EXPR_DECL || !flag_tangible_debug);
+
record_node_allocation_statistics (code, length);
t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
===================================================================
@@ -310,6 +310,12 @@ insert_debug_temp_for_var_def (gimple_st
if (!MAY_HAVE_DEBUG_BIND_STMTS)
return;
+ /* Uses in debug stmts are first-class uses for flag_tangible_debug,
+ so the caller must take them into account in the same way as for
+ executable code. */
+ if (flag_tangible_debug)
+ return;
+
/* If this name has already been registered for replacement, do nothing
as anything that uses this name isn't in SSA form. */
if (name_registered_for_update_p (var))
===================================================================
@@ -598,7 +598,7 @@ release_ssa_name_fn (struct function *fn
int saved_ssa_name_version = SSA_NAME_VERSION (var);
use_operand_p imm = &(SSA_NAME_IMM_USE_NODE (var));
- if (MAY_HAVE_DEBUG_BIND_STMTS)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_STMTS)
insert_debug_temp_for_var_def (NULL, var);
if (flag_checking)
===================================================================
@@ -387,7 +387,7 @@ has_zero_uses (const_tree var)
const ssa_use_operand_t *ptr;
for (ptr = head->next; ptr != head; ptr = ptr->next)
- if (USE_STMT (ptr) && !is_gimple_debug (USE_STMT (ptr)))
+ if (USE_STMT (ptr) && tangible_stmt_p (USE_STMT (ptr)))
return false;
return true;
@@ -402,7 +402,7 @@ has_single_use (const_tree var)
bool single = false;
for (ptr = head->next; ptr != head; ptr = ptr->next)
- if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
+ if (USE_STMT (ptr) && tangible_stmt_p (USE_STMT (ptr)))
{
if (single)
return false;
@@ -432,7 +432,7 @@ single_imm_use (const_tree var, use_oper
/* If there's a single use, check that it's not a debug stmt. */
if (ptr == ptr->next->next)
{
- if (USE_STMT (ptr->next) && !is_gimple_debug (USE_STMT (ptr->next)))
+ if (USE_STMT (ptr->next) && tangible_stmt_p (USE_STMT (ptr->next)))
{
*use_p = ptr->next;
*stmt = ptr->next->loc.stmt;
@@ -453,7 +453,7 @@ num_imm_uses (const_tree var)
const ssa_use_operand_t *ptr;
unsigned int num = 0;
- if (!MAY_HAVE_DEBUG_BIND_STMTS)
+ if (!MAY_HAVE_SHADOW_DEBUG_BIND_STMTS)
{
for (ptr = start->next; ptr != start; ptr = ptr->next)
if (USE_STMT (ptr))
===================================================================
@@ -1330,7 +1330,7 @@ single_imm_use_1 (const ssa_use_operand_
ssa_use_operand_t *ptr, *single_use = 0;
for (ptr = head->next; ptr != head; ptr = ptr->next)
- if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
+ if (USE_STMT (ptr) && tangible_stmt_p (USE_STMT (ptr)))
{
if (single_use)
{
===================================================================
@@ -3500,13 +3500,14 @@ next_nonnote_insn (rtx_insn *insn)
/* Return the next insn after INSN that is not a DEBUG_INSN. This
routine does not look inside SEQUENCEs. */
+/* FIXME: Rename. */
rtx_insn *
next_nondebug_insn (rtx_insn *insn)
{
while (insn)
{
insn = NEXT_INSN (insn);
- if (insn == 0 || !DEBUG_INSN_P (insn))
+ if (insn == 0 || !shadow_insn_p (insn))
break;
}
@@ -3532,13 +3533,14 @@ prev_nonnote_insn (rtx_insn *insn)
/* Return the previous insn before INSN that is not a DEBUG_INSN.
This routine does not look inside SEQUENCEs. */
+/* FIXME: Rename. */
rtx_insn *
prev_nondebug_insn (rtx_insn *insn)
{
while (insn)
{
insn = PREV_INSN (insn);
- if (insn == 0 || !DEBUG_INSN_P (insn))
+ if (insn == 0 || !shadow_insn_p (insn))
break;
}
@@ -3548,13 +3550,14 @@ prev_nondebug_insn (rtx_insn *insn)
/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
This routine does not look inside SEQUENCEs. */
+/* FIXME: Rename. */
rtx_insn *
next_nonnote_nondebug_insn (rtx_insn *insn)
{
while (insn)
{
insn = NEXT_INSN (insn);
- if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
+ if (insn == 0 || (!NOTE_P (insn) && !shadow_insn_p (insn)))
break;
}
@@ -3565,6 +3568,7 @@ next_nonnote_nondebug_insn (rtx_insn *in
but stop the search before we enter another basic block. This
routine does not look inside SEQUENCEs. */
+/* FIXME: Rename. */
rtx_insn *
next_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
@@ -3573,7 +3577,7 @@ next_nonnote_nondebug_insn_bb (rtx_insn
insn = NEXT_INSN (insn);
if (insn == 0)
break;
- if (DEBUG_INSN_P (insn))
+ if (shadow_insn_p (insn))
continue;
if (!NOTE_P (insn))
break;
@@ -3587,13 +3591,14 @@ next_nonnote_nondebug_insn_bb (rtx_insn
/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
This routine does not look inside SEQUENCEs. */
+/* FIXME: Rename. */
rtx_insn *
prev_nonnote_nondebug_insn (rtx_insn *insn)
{
while (insn)
{
insn = PREV_INSN (insn);
- if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
+ if (insn == 0 || (!NOTE_P (insn) && !shadow_insn_p (insn)))
break;
}
@@ -3604,6 +3609,7 @@ prev_nonnote_nondebug_insn (rtx_insn *in
DEBUG_INSN, but stop the search before we enter another basic
block. This routine does not look inside SEQUENCEs. */
+/* FIXME: Rename. */
rtx_insn *
prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
@@ -3612,7 +3618,7 @@ prev_nonnote_nondebug_insn_bb (rtx_insn
insn = PREV_INSN (insn);
if (insn == 0)
break;
- if (DEBUG_INSN_P (insn))
+ if (shadow_insn_p (insn))
continue;
if (!NOTE_P (insn))
break;
@@ -3661,6 +3667,7 @@ prev_real_insn (rtx_insn *insn)
or 0, if there is none. This routine does not look inside
SEQUENCEs. */
+/* FIXME: Rename. */
rtx_insn *
next_real_nondebug_insn (rtx uncast_insn)
{
@@ -3669,7 +3676,7 @@ next_real_nondebug_insn (rtx uncast_insn
while (insn)
{
insn = NEXT_INSN (insn);
- if (insn == 0 || NONDEBUG_INSN_P (insn))
+ if (insn == 0 || tangible_insn_p (insn))
break;
}
@@ -3680,13 +3687,14 @@ next_real_nondebug_insn (rtx uncast_insn
or 0, if there is none. This routine does not look inside
SEQUENCEs. */
+/* FIXME: Rename. */
rtx_insn *
prev_real_nondebug_insn (rtx_insn *insn)
{
while (insn)
{
insn = PREV_INSN (insn);
- if (insn == 0 || NONDEBUG_INSN_P (insn))
+ if (insn == 0 || tangible_insn_p (insn))
break;
}
@@ -4884,17 +4892,17 @@ emit_pattern_after_setloc (rtx pattern,
}
/* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
- into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
- any DEBUG_INSNs. */
+ into a real insn. SKIP_SHADOW_INSNS indicates whether to insert after
+ any shadow insns. */
static rtx_insn *
-emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
+emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_shadow_insns,
rtx_insn *(*make_raw) (rtx))
{
rtx_insn *prev = after;
- if (skip_debug_insns)
- while (DEBUG_INSN_P (prev))
+ if (skip_shadow_insns)
+ while (shadow_insn_p (prev))
prev = PREV_INSN (prev);
if (INSN_P (prev))
@@ -4997,18 +5005,18 @@ emit_pattern_before_setloc (rtx pattern,
}
/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
- into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
- before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
+ into a real insn. SKIP_SHADOW_INSNS indicates whether to insert
+ before any shadow insns. INSNP indicates if PATTERN is meant for an
INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
static rtx_insn *
-emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
+emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_shadow_insns,
bool insnp, rtx_insn *(*make_raw) (rtx))
{
rtx_insn *next = before;
- if (skip_debug_insns)
- while (DEBUG_INSN_P (next))
+ if (skip_shadow_insns)
+ while (shadow_insn_p (next))
next = PREV_INSN (next);
if (INSN_P (next))
===================================================================
@@ -4162,7 +4162,14 @@ avoid_deep_ter_for_debug (gimple *stmt,
gimple *g = get_gimple_for_ssa_name (use);
if (g == NULL)
continue;
- if (depth > 6 && !stmt_ends_bb_p (g))
+ /* Use the same depth as avoid_complex_debug_insns for
+ flag_tangible_debug, since we want to enforce it here instead.
+ Also avoid folding any reads from memory, and instead force the
+ result of the read to be available in executable code. */
+ if (flag_tangible_debug
+ && (depth >= 4 || gimple_vuse (g)))
+ bitmap_clear_bit (SA.values, SSA_NAME_VERSION (use));
+ else if (depth > 6 && !stmt_ends_bb_p (g))
{
if (deep_ter_debug_map == NULL)
deep_ter_debug_map = new hash_map<tree, tree>;
@@ -5479,9 +5486,16 @@ expand_debug_locations (void)
}
INSN_VAR_LOCATION_LOC (insn) = val;
- prev_insn = PREV_INSN (insn);
- for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
- avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
+ /* Rely on avoid_deep_ter_for_debug for flag_tangible_debug. */
+ if (!flag_tangible_debug)
+ {
+ prev_insn = PREV_INSN (insn);
+ for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
+ {
+ rtx *loc = &INSN_VAR_LOCATION_LOC (insn2);
+ avoid_complex_debug_insns (insn2, loc, 0);
+ }
+ }
}
flag_strict_aliasing = save_strict_alias;
@@ -5683,7 +5697,7 @@ expand_gimple_basic_block (basic_block b
a_2 = ...
#DEBUG ... => #D1
*/
- if (MAY_HAVE_DEBUG_BIND_INSNS
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS
&& SA.values
&& !is_gimple_debug (stmt))
{
@@ -5854,11 +5868,18 @@ expand_gimple_basic_block (basic_block b
delink_debug_stmt:
/* In order not to generate too many debug temporaries,
- we delink all uses of debug statements we already expanded.
- Therefore debug statements between definition and real
- use of TERed SSA names will continue to use the SSA name,
- and not be replaced with debug temps. */
- delink_stmt_imm_use (stmt);
+ we delink all uses of shadow debug statements we
+ already expanded. Therefore shadow debug statements
+ between the definition and real use of TERed SSA names
+ will continue to use the SSA name, and not be replaced
+ with debug temps.
+
+ Uses in tangible debug stmts are first-class uses,
+ so we shouldn't remove them prematurely. In particular,
+ we don't want an SSA name to appear to have zero uses
+ if it is actually used by tangible debug stmts. */
+ if (!flag_tangible_debug)
+ delink_stmt_imm_use (stmt);
gsi = nsi;
gsi_next (&nsi);
===================================================================
@@ -686,7 +686,7 @@ find_single_use (rtx dest, rtx_insn *ins
for (next = NEXT_INSN (insn);
next && BLOCK_FOR_INSN (next) == bb;
next = NEXT_INSN (next))
- if (NONDEBUG_INSN_P (next) && dead_or_set_p (next, dest))
+ if (tangible_insn_p (next) && dead_or_set_p (next, dest))
{
FOR_EACH_LOG_LINK (link, next)
if (link->insn == insn && link->regno == REGNO (dest))
@@ -1072,7 +1072,7 @@ create_log_links (void)
{
FOR_BB_INSNS_REVERSE (bb, insn)
{
- if (!NONDEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
continue;
/* Log links are created only once. */
@@ -1161,7 +1161,7 @@ combine_instructions (rtx_insn *f, unsig
int new_direct_jump_p = 0;
- for (first = f; first && !NONDEBUG_INSN_P (first); )
+ for (first = f; first && !tangible_insn_p (first); )
first = NEXT_INSN (first);
if (!first)
return 0;
@@ -1279,11 +1279,11 @@ combine_instructions (rtx_insn *f, unsig
insn = next ? next : NEXT_INSN (insn))
{
next = 0;
- if (!NONDEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
continue;
while (last_combined_insn
- && (!NONDEBUG_INSN_P (last_combined_insn)
+ && (!tangible_insn_p (last_combined_insn)
|| last_combined_insn->deleted ()))
last_combined_insn = PREV_INSN (last_combined_insn);
if (last_combined_insn == NULL_RTX
@@ -2336,7 +2336,12 @@ cant_combine_insn_p (rtx_insn *insn)
/* If this isn't really an insn, we can't do anything.
This can occur when flow deletes an insn that it has merged into an
- auto-increment address. */
+ auto-increment address.
+
+ Also, prevent combines into tangible debug insns for now, since we
+ don't have any equivalent of recog to keep the result simple and sane.
+ Combine would be useful on debug insns if that changed, in which case
+ the condition should be !tangible_insn_p instead. */
if (!NONDEBUG_INSN_P (insn))
return 1;
@@ -3847,7 +3852,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2,
/* *SPLIT may be part of I2SRC, so make sure we have the
original expression around for later debug processing.
We should not need I2SRC any more in other cases. */
- if (MAY_HAVE_DEBUG_BIND_INSNS)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS)
i2src = copy_rtx (i2src);
else
i2src = NULL;
@@ -4221,7 +4226,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2,
return 0;
}
- if (MAY_HAVE_DEBUG_BIND_INSNS)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS)
{
struct undo *undo;
@@ -4366,7 +4371,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2,
for (rtx_insn *insn = NEXT_INSN (i3);
!done
&& insn
- && NONDEBUG_INSN_P (insn)
+ && tangible_insn_p (insn)
&& BLOCK_FOR_INSN (insn) == this_basic_block;
insn = NEXT_INSN (insn))
{
@@ -4512,7 +4517,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2,
&& (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|| BB_HEAD (this_basic_block) != temp_insn);
temp_insn = NEXT_INSN (temp_insn))
- if (temp_insn != i3 && NONDEBUG_INSN_P (temp_insn))
+ if (temp_insn != i3 && tangible_insn_p (temp_insn))
FOR_EACH_LOG_LINK (link, temp_insn)
if (link->insn == i2)
link->insn = i3;
@@ -4536,7 +4541,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2,
if (newi2pat)
{
- if (MAY_HAVE_DEBUG_BIND_INSNS && i2scratch)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS && i2scratch)
propagate_for_debug (i2, last_combined_insn, i2dest, i2src,
this_basic_block);
INSN_CODE (i2) = i2_code_number;
@@ -4544,7 +4549,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2,
}
else
{
- if (MAY_HAVE_DEBUG_BIND_INSNS && i2src)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS && i2src)
propagate_for_debug (i2, last_combined_insn, i2dest, i2src,
this_basic_block);
SET_INSN_DELETED (i2);
@@ -4554,7 +4559,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2,
{
LOG_LINKS (i1) = NULL;
REG_NOTES (i1) = 0;
- if (MAY_HAVE_DEBUG_BIND_INSNS)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS)
propagate_for_debug (i1, last_combined_insn, i1dest, i1src,
this_basic_block);
SET_INSN_DELETED (i1);
@@ -4564,7 +4569,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2,
{
LOG_LINKS (i0) = NULL;
REG_NOTES (i0) = 0;
- if (MAY_HAVE_DEBUG_BIND_INSNS)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS)
propagate_for_debug (i0, last_combined_insn, i0dest, i0src,
this_basic_block);
SET_INSN_DELETED (i0);
@@ -14603,7 +14608,7 @@ distribute_notes (rtx notes, rtx_insn *f
for (tem_insn = PREV_INSN (tem_insn); place == 0; tem_insn = PREV_INSN (tem_insn))
{
- if (!NONDEBUG_INSN_P (tem_insn))
+ if (!tangible_insn_p (tem_insn))
{
if (tem_insn == BB_HEAD (bb))
break;
@@ -14805,7 +14810,7 @@ distribute_notes (rtx notes, rtx_insn *f
for (tem_insn = PREV_INSN (place); ;
tem_insn = PREV_INSN (tem_insn))
{
- if (!NONDEBUG_INSN_P (tem_insn))
+ if (!tangible_insn_p (tem_insn))
{
if (tem_insn == BB_HEAD (bb))
break;
@@ -14931,7 +14936,7 @@ distribute_links (struct insn_link *link
(insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|| BB_HEAD (this_basic_block->next_bb) != insn));
insn = NEXT_INSN (insn))
- if (DEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
continue;
else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
{
@@ -15025,7 +15030,7 @@ make_more_copies (void)
FOR_BB_INSNS (bb, insn)
{
- if (!NONDEBUG_INSN_P (insn))
+ if (!NONJUMP_INSN_P (insn))
continue;
rtx set = single_set (insn);
===================================================================
@@ -4243,7 +4243,7 @@ try_back_substitute_reg (rtx set, rtx_in
{
prev = PREV_INSN (prev);
}
- while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
+ while (prev != bb_head && (NOTE_P (prev) || shadow_insn_p (prev)));
/* Do not swap the registers around if the previous instruction
attaches a REG_EQUIV note to REG1.
@@ -6639,7 +6639,7 @@ cse_extended_basic_block (struct cse_bas
FIXME: This is a real kludge and needs to be done some other
way. */
- if (NONDEBUG_INSN_P (insn)
+ if (tangible_insn_p (insn)
&& num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
{
flush_hash_table ();
@@ -6916,6 +6916,8 @@ count_reg_usage (rtx x, int *counts, rtx
return;
case DEBUG_INSN:
+ if (tangible_insn_p (x))
+ count_reg_usage (INSN_VAR_LOCATION_LOC (x), counts, NULL_RTX, incr);
return;
case CALL_INSN:
@@ -7148,7 +7150,7 @@ delete_trivially_dead_insns (rtx_insn *i
timevar_push (TV_DELETE_TRIVIALLY_DEAD);
/* First count the number of times each register is used. */
- if (MAY_HAVE_DEBUG_BIND_INSNS)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS)
{
counts = XCNEWVEC (int, nreg * 3);
for (insn = insns; insn; insn = NEXT_INSN (insn))
@@ -7209,7 +7211,7 @@ delete_trivially_dead_insns (rtx_insn *i
if (! live_insn && dbg_cnt (delete_trivial_dead))
{
- if (DEBUG_INSN_P (insn))
+ if (shadow_insn_p (insn))
{
if (DEBUG_BIND_INSN_P (insn))
count_reg_usage (INSN_VAR_LOCATION_LOC (insn), counts + nreg,
@@ -7218,7 +7220,7 @@ delete_trivially_dead_insns (rtx_insn *i
else
{
rtx set;
- if (MAY_HAVE_DEBUG_BIND_INSNS
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS
&& (set = single_set (insn)) != NULL_RTX
&& is_dead_reg (SET_DEST (set), counts)
/* Used at least once in some DEBUG_INSN. */
@@ -7258,7 +7260,7 @@ delete_trivially_dead_insns (rtx_insn *i
}
}
- if (MAY_HAVE_DEBUG_BIND_INSNS)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS)
{
for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
if (DEBUG_BIND_INSN_P (insn))
===================================================================
@@ -552,6 +552,7 @@ reset_unmarked_insns_debug_uses (void)
{
df_ref use;
+ gcc_assert (shadow_insn_p (insn));
FOR_EACH_INSN_USE (use, insn)
{
struct df_link *defs;
@@ -684,7 +685,7 @@ prescan_insns_for_dce (bool fast)
FOR_EACH_BB_FN (bb, cfun)
{
FOR_BB_INSNS_REVERSE_SAFE (bb, insn, prev)
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
{
/* Don't mark argument stores now. They will be marked
if needed when the associated CALL is marked. */
@@ -737,7 +738,7 @@ mark_reg_dependencies (rtx_insn *insn)
struct df_link *defs;
df_ref use;
- if (DEBUG_INSN_P (insn))
+ if (shadow_insn_p (insn))
return;
FOR_EACH_INSN_USE (use, insn)
@@ -820,7 +821,7 @@ rest_of_handle_ud_dce (void)
}
worklist.release ();
- if (MAY_HAVE_DEBUG_BIND_INSNS)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS)
reset_unmarked_insns_debug_uses ();
/* Before any insns are deleted, we must remove the chains since
@@ -919,7 +920,7 @@ word_dce_process_block (basic_block bb,
dead_debug_local_init (&debug, NULL, global_debug);
FOR_BB_INSNS_REVERSE (bb, insn)
- if (DEBUG_INSN_P (insn))
+ if (shadow_insn_p (insn))
{
df_ref use;
FOR_EACH_INSN_USE (use, insn)
@@ -1020,7 +1021,7 @@ dce_process_block (basic_block bb, bool
dead_debug_local_init (&debug, NULL, global_debug);
FOR_BB_INSNS_REVERSE (bb, insn)
- if (DEBUG_INSN_P (insn))
+ if (shadow_insn_p (insn))
{
df_ref use;
FOR_EACH_INSN_USE (use, insn)
===================================================================
@@ -835,7 +835,7 @@ df_lr_bb_local_compute (unsigned int bb_
FOR_BB_INSNS_REVERSE (bb, insn)
{
- if (!NONDEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
continue;
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
@@ -2863,7 +2863,7 @@ df_word_lr_bb_local_compute (unsigned in
FOR_BB_INSNS_REVERSE (bb, insn)
{
- if (!NONDEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
continue;
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
@@ -3245,7 +3245,7 @@ df_remove_dead_eq_notes (rtx_insn *insn,
static inline void
df_set_note (enum reg_note note_type, rtx_insn *insn, rtx reg)
{
- gcc_checking_assert (!DEBUG_INSN_P (insn));
+ gcc_checking_assert (tangible_insn_p (insn));
add_reg_note (insn, note_type, reg);
}
@@ -3502,9 +3502,9 @@ df_note_bb_compute (unsigned int bb_inde
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_mw_hardreg *mw;
- int debug_insn;
+ int shadow_insn;
- debug_insn = DEBUG_INSN_P (insn);
+ shadow_insn = shadow_insn_p (insn);
bitmap_clear (do_not_gen);
df_remove_dead_and_unused_notes (insn);
@@ -3570,21 +3570,21 @@ df_note_bb_compute (unsigned int bb_inde
if (DF_MWS_REG_USE_P (mw)
&& !df_ignore_stack_reg (mw->start_regno))
{
- bool really_add_notes = debug_insn != 0;
+ bool really_add_notes = shadow_insn != 0;
df_set_dead_notes_for_mw (insn, mw, live, do_not_gen,
artificial_uses,
&really_add_notes);
if (really_add_notes)
- debug_insn = -1;
+ shadow_insn = -1;
}
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
unsigned int uregno = DF_REF_REGNO (use);
- if (REG_DEAD_DEBUGGING && dump_file && !debug_insn)
+ if (REG_DEAD_DEBUGGING && dump_file && !shadow_insn)
{
fprintf (dump_file, " regular looking at use ");
df_ref_debug (use, dump_file);
@@ -3592,9 +3592,9 @@ df_note_bb_compute (unsigned int bb_inde
if (!bitmap_bit_p (live, uregno))
{
- if (debug_insn)
+ if (shadow_insn)
{
- if (debug_insn > 0)
+ if (shadow_insn > 0)
{
/* We won't add REG_UNUSED or REG_DEAD notes for
these, so we don't have to mess with them in
@@ -3630,7 +3630,7 @@ df_note_bb_compute (unsigned int bb_inde
df_remove_dead_eq_notes (insn, live);
- if (debug_insn == -1)
+ if (shadow_insn == -1)
{
/* ??? We could probably do better here, replacing dead
registers with their definitions. */
@@ -3808,7 +3808,7 @@ df_simulate_uses (rtx_insn *insn, bitmap
{
df_ref use;
- if (DEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
return;
FOR_EACH_INSN_USE (use, insn)
@@ -3867,7 +3867,7 @@ df_simulate_initialize_backwards (basic_
void
df_simulate_one_insn_backwards (basic_block bb, rtx_insn *insn, bitmap live)
{
- if (!NONDEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
return;
df_simulate_defs (insn, live);
@@ -4068,9 +4068,9 @@ can_move_insns_across (rtx_insn *from, r
*pmove_upto = NULL;
/* Find real bounds, ignoring debug insns. */
- while (!NONDEBUG_INSN_P (from) && from != to)
+ while (!tangible_insn_p (from) && from != to)
from = NEXT_INSN (from);
- while (!NONDEBUG_INSN_P (to) && from != to)
+ while (!tangible_insn_p (to) && from != to)
to = PREV_INSN (to);
for (insn = across_to; ; insn = next)
@@ -4088,7 +4088,7 @@ can_move_insns_across (rtx_insn *from, r
mem_sets_in_across |= MEMREF_VOLATILE;
}
}
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
{
if (volatile_insn_p (PATTERN (insn)))
return false;
@@ -4125,7 +4125,7 @@ can_move_insns_across (rtx_insn *from, r
df_simulate_initialize_backwards (merge_bb, test_use);
for (insn = across_to; ; insn = next)
{
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
{
df_simulate_find_defs (insn, test_set);
df_simulate_defs (insn, test_use);
@@ -4147,7 +4147,7 @@ can_move_insns_across (rtx_insn *from, r
break;
if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
break;
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
{
if (may_trap_or_fault_p (PATTERN (insn))
&& (trapping_insns_in_across
@@ -4230,7 +4230,7 @@ can_move_insns_across (rtx_insn *from, r
bitmap_and_into (local_merge_live, merge_set);
for (;;)
{
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
{
if (!bitmap_intersect_p (test_set, local_merge_live)
&& (!HAVE_cc0 || !sets_cc0_p (insn)))
===================================================================
@@ -681,7 +681,7 @@ df_install_ref_incremental (df_ref ref)
/* By adding the ref directly, df_insn_rescan my not find any
differences even though the block will have changed. So we need
to mark the block dirty ourselves. */
- if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
+ if (tangible_insn_p (DF_REF_INSN (ref)))
df_set_bb_dirty (bb);
}
@@ -956,7 +956,7 @@ df_insn_delete (rtx_insn *insn)
rescanning time and the mark would blow up.
DEBUG_INSNs do not make a block's data flow solution dirty (at
worst the LUIDs are no longer contiguous). */
- if (bb != NULL && NONDEBUG_INSN_P (insn))
+ if (bb != NULL && tangible_insn_p (insn))
df_set_bb_dirty (bb);
/* The client has deferred rescanning. */
@@ -1090,7 +1090,7 @@ df_insn_rescan (rtx_insn *insn)
}
df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
- if (!DEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
df_set_bb_dirty (bb);
return true;
===================================================================
@@ -2415,7 +2415,8 @@ scan_insn (bb_info_t bb_info, rtx_insn *
if (DEBUG_INSN_P (insn))
{
insn_info->cannot_delete = true;
- return;
+ if (shadow_insn_p (insn))
+ return;
}
/* Look at all of the uses in the insn. */
===================================================================
@@ -68,7 +68,7 @@ initialize_uninitialized_regs (void)
FOR_BB_INSNS (bb, insn)
{
df_ref use;
- if (!NONDEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
continue;
FOR_EACH_INSN_USE (use, insn)
===================================================================
@@ -1919,7 +1919,7 @@ create_bb_allocnos (ira_loop_tree_node_t
curr_bb = bb = bb_node->bb;
ira_assert (bb != NULL);
FOR_BB_INSNS_REVERSE (bb, insn)
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
create_insn_allocnos (PATTERN (insn), NULL, false);
/* It might be a allocno living through from one subloop to
another. */
===================================================================
@@ -427,7 +427,7 @@ add_copies (ira_loop_tree_node_t loop_tr
if (bb == NULL)
return;
FOR_BB_INSNS (bb, insn)
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
add_insn_allocno_copies (insn);
}
===================================================================
@@ -1025,7 +1025,7 @@ find_call_crossed_cheap_reg (rtx_insn *i
while (prev && !(INSN_P (prev)
&& BLOCK_FOR_INSN (prev) != bb))
{
- if (NONDEBUG_INSN_P (prev))
+ if (tangible_insn_p (prev))
{
rtx set = single_set (prev);
@@ -1178,7 +1178,7 @@ process_bb_node_lives (ira_loop_tree_nod
df_ref def, use;
bool call_p;
- if (!NONDEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
continue;
if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
===================================================================
@@ -3262,7 +3262,7 @@ memref_used_between_p (rtx memref, rtx_i
insn && insn != NEXT_INSN (end);
insn = NEXT_INSN (insn))
{
- if (!NONDEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
continue;
if (memref_referenced_p (memref, PATTERN (insn), false))
@@ -3378,7 +3378,7 @@ def_dominates_uses (int regno)
if (use_info)
{
rtx_insn *use_insn = DF_REF_INSN (use);
- if (!DEBUG_INSN_P (use_insn))
+ if (tangible_insn_p (use_insn))
{
basic_block use_bb = BLOCK_FOR_INSN (use_insn);
if (use_bb != def_bb
@@ -3413,7 +3413,7 @@ update_equiv_regs (void)
prevent access beyond allocated memory for paradoxical memory subreg. */
FOR_EACH_BB_FN (bb, cfun)
FOR_BB_INSNS (bb, insn)
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
set_paradoxical_subreg (insn);
/* Scan the insns and find which registers have equivalences. Do this
@@ -3759,7 +3759,7 @@ combine_and_move_insns (void)
use = DF_REF_NEXT_REG (use))
if (DF_REF_INSN_INFO (use))
{
- if (DEBUG_INSN_P (DF_REF_INSN (use)))
+ if (!tangible_insn_p (DF_REF_INSN (use)))
continue;
gcc_assert (!use_insn);
use_insn = DF_REF_INSN (use);
@@ -3890,7 +3890,7 @@ combine_and_move_insns (void)
}
/* Last pass - adjust debug insns referencing cleared regs. */
- if (MAY_HAVE_DEBUG_BIND_INSNS)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS)
for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (DEBUG_BIND_INSN_P (insn))
{
@@ -4288,7 +4288,7 @@ build_insn_chain (void)
bitmap_and_compl_into (live_relevant_regs, elim_regset);
bitmap_copy (&c->live_throughout, live_relevant_regs);
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
unsigned int regno = DF_REF_REGNO (use);
@@ -4587,7 +4587,7 @@ find_moveable_pseudos (void)
bitmap_clear (used);
bitmap_clear (set);
FOR_BB_INSNS (bb, insn)
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
{
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref def, use;
@@ -4633,7 +4633,7 @@ find_moveable_pseudos (void)
rtx_insn *insn;
FOR_BB_INSNS (bb, insn)
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
{
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
rtx_insn *def_insn;
@@ -4687,7 +4687,7 @@ find_moveable_pseudos (void)
break;
}
insn = DF_REF_INSN (use);
- if (DEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
continue;
if (BLOCK_FOR_INSN (insn) != BLOCK_FOR_INSN (def_insn))
all_local = false;
@@ -5043,7 +5043,7 @@ split_live_ranges_for_shrink_wrap (void)
rtx_insn *uin = DF_REF_INSN (use);
next = DF_REF_NEXT_REG (use);
- if (DEBUG_INSN_P (uin))
+ if (!tangible_insn_p (uin))
continue;
basic_block ubb = BLOCK_FOR_INSN (uin);
===================================================================
@@ -5186,7 +5186,7 @@ add_next_usage_insn (int regno, rtx_insn
if (usage_insns[regno].check == curr_usage_insns_check
&& (next_usage_insns = usage_insns[regno].insns) != NULL_RTX
- && DEBUG_INSN_P (insn))
+ && shadow_insn_p (insn))
{
/* Check that we did not add the debug insn yet. */
if (next_usage_insns != insn
@@ -5195,7 +5195,7 @@ add_next_usage_insn (int regno, rtx_insn
usage_insns[regno].insns = gen_rtx_INSN_LIST (VOIDmode, insn,
next_usage_insns);
}
- else if (NONDEBUG_INSN_P (insn))
+ else if (tangible_insn_p (insn))
setup_next_usage_insn (regno, insn, reloads_num, false);
else
usage_insns[regno].check = 0;
@@ -5385,13 +5385,13 @@ inherit_reload_reg (bool def_p, int orig
if (GET_CODE (next_usage_insns) != INSN_LIST)
{
usage_insn = next_usage_insns;
- lra_assert (NONDEBUG_INSN_P (usage_insn));
+ lra_assert (tangible_insn_p (usage_insn));
next_usage_insns = NULL;
}
else
{
usage_insn = XEXP (next_usage_insns, 0);
- lra_assert (DEBUG_INSN_P (usage_insn));
+ lra_assert (shadow_insn_p (usage_insn));
next_usage_insns = XEXP (next_usage_insns, 1);
}
lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
@@ -5727,7 +5727,7 @@ split_reg (bool before_p, int original_r
break;
}
usage_insn = XEXP (next_usage_insns, 0);
- lra_assert (DEBUG_INSN_P (usage_insn));
+ lra_assert (shadow_insn_p (usage_insn));
next_usage_insns = XEXP (next_usage_insns, 1);
lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
true);
@@ -5740,7 +5740,7 @@ split_reg (bool before_p, int original_r
}
}
}
- lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
+ lra_assert (NOTE_P (usage_insn) || tangible_insn_p (usage_insn));
lra_assert (usage_insn != insn || (after_p && before_p));
lra_process_new_insns (as_a <rtx_insn *> (usage_insn),
after_p ? NULL : restore,
@@ -6052,7 +6052,7 @@ update_ebb_live_info (rtx_insn *head, rt
prev_bb = curr_bb;
bitmap_and (&live_regs, &check_only_regs, df_get_live_out (curr_bb));
}
- if (! NONDEBUG_INSN_P (curr_insn))
+ if (! tangible_insn_p (curr_insn))
continue;
curr_id = lra_get_insn_recog_data (curr_insn);
curr_static_id = curr_id->insn_static_data;
@@ -6148,7 +6148,7 @@ get_last_insertion_point (basic_block bb
rtx_insn *insn;
FOR_BB_INSNS_REVERSE (bb, insn)
- if (NONDEBUG_INSN_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
+ if (tangible_insn_p (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
return insn;
gcc_unreachable ();
}
@@ -6490,7 +6490,7 @@ inherit_in_ebb (rtx_insn *head, rtx_insn
if (usage_insns[src_regno].check == curr_usage_insns_check
&& (next_usage_insns
= usage_insns[src_regno].insns) != NULL_RTX
- && NONDEBUG_INSN_P (curr_insn))
+ && tangible_insn_p (curr_insn))
add_to_inherit (src_regno, next_usage_insns);
else if (usage_insns[src_regno].check
!= -(int) INSN_UID (curr_insn))
@@ -6506,7 +6506,7 @@ inherit_in_ebb (rtx_insn *head, rtx_insn
before_p = (JUMP_P (curr_insn)
|| (CALL_P (curr_insn) && reg->type == OP_IN));
- if (NONDEBUG_INSN_P (curr_insn)
+ if (tangible_insn_p (curr_insn)
&& (! JUMP_P (curr_insn) || reg->type == OP_IN)
&& split_if_necessary (src_regno, reg->biggest_mode,
potential_reload_hard_regs,
@@ -6520,7 +6520,7 @@ inherit_in_ebb (rtx_insn *head, rtx_insn
if (before_p)
use_insn = PREV_INSN (curr_insn);
}
- if (NONDEBUG_INSN_P (curr_insn))
+ if (tangible_insn_p (curr_insn))
{
if (src_regno < FIRST_PSEUDO_REGISTER)
add_to_hard_reg_set (&live_hard_regs,
@@ -6557,8 +6557,7 @@ inherit_in_ebb (rtx_insn *head, rtx_insn
setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
}
}
- if (update_reloads_num_p
- && NONDEBUG_INSN_P (curr_insn) && curr_set != NULL_RTX)
+ if (update_reloads_num_p && curr_set != NULL_RTX)
{
int regno = -1;
if ((REG_P (SET_DEST (curr_set))
@@ -6577,6 +6576,7 @@ inherit_in_ebb (rtx_insn *head, rtx_insn
reg_class_contents[cl]);
}
}
+ /* Debug insns never set registers, regardless of tangibility. */
if (NONDEBUG_INSN_P (curr_insn))
{
int regno;
@@ -6763,7 +6763,7 @@ delete_move_and_clobber (rtx_insn *insn,
lra_set_insn_deleted (insn);
lra_assert (dregno >= 0);
- if (prev_insn != NULL && NONDEBUG_INSN_P (prev_insn)
+ if (prev_insn != NULL && NONJUMP_INSN_P (prev_insn)
&& GET_CODE (PATTERN (prev_insn)) == CLOBBER
&& dregno == get_regno (XEXP (PATTERN (prev_insn), 0)))
lra_set_insn_deleted (prev_insn);
@@ -6810,7 +6810,7 @@ remove_inheritance_pseudos (bitmap remov
continue;
done_p = false;
sregno = dregno = -1;
- if (change_p && NONDEBUG_INSN_P (curr_insn)
+ if (change_p && NONJUMP_INSN_P (curr_insn)
&& (set = single_set (curr_insn)) != NULL_RTX)
{
dregno = get_regno (SET_DEST (set));
@@ -6887,7 +6887,7 @@ remove_inheritance_pseudos (bitmap remov
change the current insn onto:
original_pseudo <- inherit_or_split_pseudo2. */
for (prev_insn = PREV_INSN (curr_insn);
- prev_insn != NULL_RTX && ! NONDEBUG_INSN_P (prev_insn);
+ prev_insn != NULL_RTX && ! tangible_insn_p (prev_insn);
prev_insn = PREV_INSN (prev_insn))
;
if (prev_insn != NULL_RTX && BLOCK_FOR_INSN (prev_insn) == bb
@@ -6969,7 +6969,7 @@ remove_inheritance_pseudos (bitmap remov
kept_regs_p = true;
}
}
- if (NONDEBUG_INSN_P (curr_insn) && kept_regs_p)
+ if (tangible_insn_p (curr_insn) && kept_regs_p)
{
/* The instruction has changed since the previous
constraints pass. */
===================================================================
@@ -703,7 +703,7 @@ process_bb_lives (basic_block bb, int &c
rtx set;
struct lra_insn_reg *reg, *hr;
- if (!NONDEBUG_INSN_P (curr_insn))
+ if (!tangible_insn_p (curr_insn))
continue;
curr_id = lra_get_insn_recog_data (curr_insn);
===================================================================
@@ -444,7 +444,7 @@ create_cands (void)
/* Create candidates. */
regno_potential_cand = XCNEWVEC (struct potential_cand, max_reg_num ());
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
{
lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
int keep_regno = -1;
@@ -646,7 +646,7 @@ calculate_local_reg_remat_bb_data (void)
FOR_EACH_BB_FN (bb, cfun)
FOR_BB_INSNS (bb, insn)
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
set_bb_regs (bb, insn);
}
@@ -1080,7 +1080,7 @@ do_remat (void)
bitmap_copy (active_cands, avail_cands);
FOR_BB_INSNS (bb, insn)
{
- if (!NONDEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
continue;
lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
===================================================================
@@ -552,7 +552,7 @@ new_insn_reg (rtx_insn *insn, int regno,
lra_insn_reg *ir = lra_insn_reg_pool.allocate ();
ir->type = type;
ir->biggest_mode = mode;
- if (NONDEBUG_INSN_P (insn)
+ if (tangible_insn_p (insn)
&& partial_subreg_p (lra_reg_info[regno].biggest_mode, mode))
lra_reg_info[regno].biggest_mode = mode;
ir->subreg_p = subreg_p;
@@ -1576,6 +1576,8 @@ add_regs_to_insn_regno_info (lra_insn_re
static int
get_insn_freq (rtx_insn *insn)
{
+ if (DEBUG_INSN_P (insn))
+ return 0;
basic_block bb = BLOCK_FOR_INSN (insn);
gcc_checking_assert (bb != NULL);
@@ -1589,19 +1591,19 @@ invalidate_insn_data_regno_info (lra_ins
int freq)
{
int uid;
- bool debug_p;
unsigned int i;
struct lra_insn_reg *ir, *next_ir;
uid = INSN_UID (insn);
- debug_p = DEBUG_INSN_P (insn);
+ bool shadow_p = shadow_insn_p (insn);
+ gcc_assert (!DEBUG_INSN_P (insn) || freq == 0);
for (ir = data->regs; ir != NULL; ir = next_ir)
{
i = ir->regno;
next_ir = ir->next;
lra_insn_reg_pool.remove (ir);
bitmap_clear_bit (&lra_reg_info[i].insn_bitmap, uid);
- if (i >= FIRST_PSEUDO_REGISTER && ! debug_p)
+ if (i >= FIRST_PSEUDO_REGISTER && ! shadow_p)
{
lra_reg_info[i].nrefs--;
lra_reg_info[i].freq -= freq;
@@ -1651,7 +1653,7 @@ lra_update_insn_regno_info (rtx_insn *in
return;
data = lra_get_insn_recog_data (insn);
static_data = data->insn_static_data;
- freq = NONDEBUG_INSN_P (insn) ? get_insn_freq (insn) : 0;
+ freq = get_insn_freq (insn);
invalidate_insn_data_regno_info (data, insn, freq);
for (i = static_data->n_operands - 1; i >= 0; i--)
add_regs_to_insn_regno_info (data, *data->operand_loc[i], insn,
@@ -1679,7 +1681,7 @@ lra_update_insn_regno_info (rtx_insn *in
add_regs_to_insn_regno_info (data, XEXP (XEXP (link, 0), 0), insn,
code == USE ? OP_IN : OP_OUT, false, 0);
}
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
setup_insn_reg_info (data, freq);
}
@@ -2306,7 +2308,7 @@ update_inc_notes (void)
FOR_EACH_BB_FN (bb, cfun)
FOR_BB_INSNS (bb, insn)
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
{
pnote = &REG_NOTES (insn);
while (*pnote != 0)
===================================================================
@@ -3059,12 +3059,23 @@ convert_regs_1 (basic_block block)
{
subst_all_stack_regs_in_debug_insn (insn, &regstack);
- /* Nothing must ever die at a debug insn. If something
- is referenced in it that becomes dead, it should have
- died before and the reference in the debug insn
- should have been removed so as to avoid changing code
- generation. */
- gcc_assert (!find_reg_note (insn, REG_DEAD, NULL));
+ for (rtx note = REG_NOTES (insn); note; note = XEXP (note, 1))
+ if (REG_NOTE_KIND (note) == REG_DEAD)
+ {
+ /* Registers must never die at a shadow insn. If something
+ is referenced in it that becomes dead, it should have
+ died before and the reference in the shadow insn
+ should have been removed so as to avoid changing code
+ generation. */
+ gcc_assert (!shadow_insn_p (insn));
+ rtx reg = XEXP (note, 0);
+ if (STACK_REG_P (reg))
+ {
+ gcc_assert (TEST_HARD_REG_BIT (regstack.reg_set,
+ REGNO (reg)));
emit_pop_insn (insn, &regstack, reg, EMIT_AFTER);
+ }
+ }
}
}
else if (stack_regs_mentioned (insn)
===================================================================
@@ -54,7 +54,7 @@ regstat_init_n_sets_and_refs (void)
regstat_n_sets_and_refs = XNEWVEC (struct regstat_n_sets_and_refs_t, max_regno);
- if (MAY_HAVE_DEBUG_BIND_INSNS)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS)
for (i = 0; i < max_regno; i++)
{
int use_count;
@@ -141,7 +141,7 @@ regstat_bb_compute_ri (basic_block bb, b
bitmap_iterator bi;
rtx link;
- if (!NONDEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
continue;
link = REG_NOTES (insn);
@@ -327,7 +327,7 @@ regstat_bb_compute_calls_crossed (unsign
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
unsigned int regno;
- if (!NONDEBUG_INSN_P (insn))
+ if (!tangible_insn_p (insn))
continue;
/* Process the defs. */
===================================================================
@@ -1127,7 +1127,7 @@ reg_used_between_p (const_rtx reg, const
return 0;
for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
- if (NONDEBUG_INSN_P (insn)
+ if (tangible_insn_p (insn)
&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
|| (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
return 1;
===================================================================
@@ -309,7 +309,7 @@ move_insn_for_shrink_wrap (basic_block b
move it as far as we can. */
do
{
- if (MAY_HAVE_DEBUG_BIND_INSNS)
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_INSNS)
{
FOR_BB_INSNS_REVERSE (bb, dinsn)
if (DEBUG_BIND_INSN_P (dinsn))
@@ -456,7 +456,7 @@ prepare_shrink_wrap (basic_block entry_b
CLEAR_HARD_REG_SET (defs);
FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
- if (NONDEBUG_INSN_P (insn)
+ if (tangible_insn_p (insn)
&& !move_insn_for_shrink_wrap (entry_block, insn, uses, defs,
&split_p, &debug))
{
@@ -682,7 +682,7 @@ try_shrink_wrapping (edge *entry_edge, r
CLEAR_HARD_REG_SET (prologue_clobbered);
CLEAR_HARD_REG_SET (prologue_used);
for (rtx_insn *insn = prologue_seq; insn; insn = NEXT_INSN (insn))
- if (NONDEBUG_INSN_P (insn))
+ if (tangible_insn_p (insn))
{
HARD_REG_SET this_used;
CLEAR_HARD_REG_SET (this_used);
@@ -731,7 +731,7 @@ try_shrink_wrapping (edge *entry_edge, r
{
rtx_insn *insn;
FOR_BB_INSNS (bb, insn)
- if (NONDEBUG_INSN_P (insn)
+ if (tangible_insn_p (insn)
&& requires_stack_frame_p (insn, prologue_used,
set_up_by_prologue.set))
{
===================================================================
@@ -2796,7 +2796,7 @@ first_stmt (basic_block bb)
gimple_stmt_iterator i = gsi_start_bb (bb);
gimple *stmt = NULL;
- while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
+ while (!gsi_end_p (i) && !tangible_stmt_p ((stmt = gsi_stmt (i))))
{
gsi_next (&i);
stmt = NULL;
@@ -2823,7 +2823,7 @@ last_stmt (basic_block bb)
gimple_stmt_iterator i = gsi_last_bb (bb);
gimple *stmt = NULL;
- while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
+ while (!gsi_end_p (i) && !tangible_stmt_p ((stmt = gsi_stmt (i))))
{
gsi_prev (&i);
stmt = NULL;
@@ -6114,7 +6114,7 @@ gimple_empty_block_p (basic_block bb)
while (!gsi_end_p (gsi))
{
gimple *stmt = gsi_stmt (gsi);
- if (is_gimple_debug (stmt))
+ if (!tangible_stmt_p (stmt))
;
else if (gimple_code (stmt) == GIMPLE_NOP
|| gimple_code (stmt) == GIMPLE_PREDICT)
===================================================================
@@ -377,6 +377,8 @@ tree_forwarder_block_p (basic_block bb,
/* ??? For now, hope there's a corresponding debug
assignment at the destination. */
case GIMPLE_DEBUG:
+ if (tangible_stmt_p (stmt))
+ return false;
break;
default:
===================================================================
@@ -194,7 +194,7 @@ remap_ssa_name (tree name, copy_body_dat
if (n)
return unshare_expr (*n);
- if (processing_debug_stmt)
+ if (processing_debug_stmt && !flag_tangible_debug)
{
if (SSA_NAME_IS_DEFAULT_DEF (name)
&& TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
@@ -344,6 +344,9 @@ remap_decl (tree decl, copy_body_data *i
n = id->decl_map->get (decl);
+ /* This applies even for flag_tangible_debug. If the executable code
+ doesn't do anything with the decl, we should reset the debug stmt
+ rather than make it refer to an uninitialized decl. */
if (!n && processing_debug_stmt)
{
processing_debug_stmt = -1;
@@ -2447,7 +2450,7 @@ copy_edges_for_bb (basic_block bb, profi
{
if (!gsi_end_p (si))
{
- while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
+ while (!gsi_end_p (si) && !tangible_stmt_p (gsi_stmt (si)))
gsi_next (&si);
if (gsi_end_p (si))
need_debug_cleanup = true;
===================================================================
@@ -652,7 +652,7 @@ mark_def_sites (basic_block bb, gimple *
set_register_defs (stmt, false);
set_rewrite_uses (stmt, false);
- if (is_gimple_debug (stmt))
+ if (!tangible_stmt_p (stmt))
{
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
@@ -1257,6 +1257,7 @@ rewrite_debug_stmt_uses (gimple *stmt)
if (TREE_CODE (var) == PARM_DECL
&& single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
{
+ gcc_assert (!flag_tangible_debug);
gimple_stmt_iterator gsi
=
gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
@@ -1367,7 +1368,7 @@ rewrite_stmt (gimple_stmt_iterator *si)
/* Step 1. Rewrite USES in the statement. */
if (rewrite_uses_p (stmt))
{
- if (is_gimple_debug (stmt))
+ if (!tangible_stmt_p (stmt))
rewrite_debug_stmt_uses (stmt);
else
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
@@ -2006,7 +2007,7 @@ rewrite_update_stmt (gimple *stmt, gimpl
symbol is marked for renaming. */
if (rewrite_uses_p (stmt))
{
- if (is_gimple_debug (stmt))
+ if (!tangible_stmt_p (stmt))
{
bool failed = false;
@@ -2572,7 +2573,7 @@ mark_use_interesting (tree var, gimple *
{
set_rewrite_uses (stmt, true);
- if (is_gimple_debug (stmt))
+ if (!tangible_stmt_p (stmt))
return;
}
===================================================================
@@ -910,7 +910,7 @@ build_ssa_conflict_graph (tree_live_info
&& TREE_CODE (rhs1) == SSA_NAME)
live_track_clear_var (live, rhs1);
}
- else if (is_gimple_debug (stmt))
+ else if (!tangible_stmt_p (stmt))
continue;
/* For stmts with more than one SSA_NAME definition pretend all the
@@ -1090,9 +1090,6 @@ create_coalesce_list_for_region (var_map
{
stmt = gsi_stmt (gsi);
- if (is_gimple_debug (stmt))
- continue;
-
/* Check for copy coalesces. */
switch (gimple_code (stmt))
{
===================================================================
@@ -114,6 +114,26 @@ #define STMT_NECESSARY GF_PLF_1
/* When non-NULL holds map from basic block index into the postorder. */
static int *bb_postorder;
+/* Return true if we'll turn STMT into a debug bind if it turns out
+ to be dead. */
+
+static bool
+replace_with_debug_bind_p (gimple *stmt)
+{
+ gcc_checking_assert (MAY_HAVE_DEBUG_BIND_STMTS);
+ if (gimple_assign_single_p (stmt)
+ && is_gimple_val (gimple_assign_rhs1 (stmt)))
+ {
+ tree lhs = gimple_assign_lhs (stmt);
+ if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
+ && !DECL_IGNORED_P (lhs)
+ && is_gimple_reg_type (TREE_TYPE (lhs))
+ && !is_global_var (lhs)
+ && !DECL_HAS_VALUE_EXPR_P (lhs))
+ return true;
+ }
+ return false;
+}
/* If STMT is not already marked necessary, mark it, and add it to the
worklist if ADD_TO_WORKLIST is true. */
@@ -136,7 +156,7 @@ mark_stmt_necessary (gimple *stmt, bool
gimple_set_plf (stmt, STMT_NECESSARY, true);
if (add_to_worklist)
worklist.safe_push (stmt);
- if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
+ if (add_to_worklist && bb_contains_live_stmts && tangible_stmt_p (stmt))
bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
@@ -270,7 +290,7 @@ mark_stmt_if_obviously_necessary (gimple
|| !gimple_debug_bind_p (stmt)
|| gimple_debug_bind_has_value_p (stmt)
|| TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
- mark_stmt_necessary (stmt, false);
+ mark_stmt_necessary (stmt, tangible_stmt_p (stmt));
return;
case GIMPLE_GOTO:
@@ -311,6 +331,17 @@ mark_stmt_if_obviously_necessary (gimple
return;
}
+ /* If debug stmts are tangible, and if we'd replace the stmt with a
+ debug stmt when deleting it, make sure that its operands are
+ available even then. */
+ if (MAY_HAVE_DEBUG_BIND_STMTS
+ && flag_tangible_debug
+ && replace_with_debug_bind_p (stmt))
+ {
+ worklist.safe_push (stmt);
+ return;
+ }
+
return;
}
@@ -1072,22 +1103,12 @@ remove_dead_stmt (gimple_stmt_iterator *
/* If this is a store into a variable that is being optimized away,
add a debug bind stmt if possible. */
- if (MAY_HAVE_DEBUG_BIND_STMTS
- && gimple_assign_single_p (stmt)
- && is_gimple_val (gimple_assign_rhs1 (stmt)))
+ if (MAY_HAVE_DEBUG_BIND_STMTS && replace_with_debug_bind_p (stmt))
{
tree lhs = gimple_assign_lhs (stmt);
- if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
- && !DECL_IGNORED_P (lhs)
- && is_gimple_reg_type (TREE_TYPE (lhs))
- && !is_global_var (lhs)
- && !DECL_HAS_VALUE_EXPR_P (lhs))
- {
- tree rhs = gimple_assign_rhs1 (stmt);
- gdebug *note
- = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
- gsi_insert_after (i, note, GSI_SAME_STMT);
- }
+ tree rhs = gimple_assign_rhs1 (stmt);
+ gdebug *note = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
+ gsi_insert_after (i, note, GSI_SAME_STMT);
}
unlink_stmt_vdef (stmt);
@@ -1299,7 +1320,7 @@ eliminate_unnecessary_stmts (void)
continue;
}
}
- if (!is_gimple_debug (stmt))
+ if (tangible_stmt_p (stmt))
something_changed = true;
remove_dead_stmt (&gsi, bb, to_remove_edges);
continue;
===================================================================
@@ -762,7 +762,7 @@ remove_unused_locals (void)
entry point marker as used, this would be a good spot to
do it. If the block is not otherwise used, the stmt will
be cleaned up in clean_unused_block_pointer. */
- if (is_gimple_debug (stmt))
+ if (!tangible_stmt_p (stmt))
continue;
if (gimple_clobber_p (stmt))
@@ -1083,9 +1083,7 @@ set_var_live_on_entry (tree ssa_name, tr
if (e->src != def_bb && region_contains_p (live->map, e->src))
add_block = e->src;
}
- else if (is_gimple_debug (use_stmt))
- continue;
- else
+ else if (tangible_stmt_p (use_stmt))
{
/* If its not defined in this block, its live on entry. */
basic_block use_bb = gimple_bb (use_stmt);
===================================================================
@@ -2701,7 +2701,7 @@ find_interesting_uses (struct ivopts_dat
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
find_interesting_uses_stmt (data, gsi_stmt (bsi));
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
- if (!is_gimple_debug (gsi_stmt (bsi)))
+ if (tangible_stmt_p (gsi_stmt (bsi)))
find_interesting_uses_stmt (data, gsi_stmt (bsi));
}
free (body);
@@ -7308,7 +7308,7 @@ remove_unused_ivs (struct ivopts_data *d
tree def = info->iv->ssa_name;
- if (MAY_HAVE_DEBUG_BIND_STMTS && SSA_NAME_DEF_STMT (def))
+ if (MAY_HAVE_SHADOW_DEBUG_BIND_STMTS && SSA_NAME_DEF_STMT (def))
{
imm_use_iterator imm_iter;
use_operand_p use_p;
===================================================================
@@ -409,7 +409,7 @@ find_uses_to_rename_stmt (gimple *stmt,
tree var;
basic_block bb = gimple_bb (stmt);
- if (is_gimple_debug (stmt))
+ if (!tangible_stmt_p (stmt))
return;
/* FOR_EACH_SSA_TREE_OPERAND iterator does not allows SSA_OP_VIRTUAL_USES
@@ -494,7 +494,7 @@ find_uses_to_rename_def (tree def, bitma
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, def)
{
- if (is_gimple_debug (use_stmt))
+ if (!tangible_stmt_p (use_stmt))
continue;
basic_block use_bb = gimple_bb (use_stmt);
@@ -699,7 +699,7 @@ check_loop_closed_ssa_def (basic_block d
imm_use_iterator iterator;
FOR_EACH_IMM_USE_FAST (use_p, iterator, def)
{
- if (is_gimple_debug (USE_STMT (use_p)))
+ if (!tangible_stmt_p (USE_STMT (use_p)))
continue;
basic_block use_bb = gimple_bb (USE_STMT (use_p));
===================================================================
@@ -110,7 +110,7 @@ all_immediate_uses_same_place (def_opera
gimple *firstuse = NULL;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
{
- if (is_gimple_debug (USE_STMT (use_p)))
+ if (!tangible_stmt_p (USE_STMT (use_p)))
continue;
if (firstuse == NULL)
firstuse = USE_STMT (use_p);
@@ -146,7 +146,7 @@ nearest_common_dominator_of_uses (def_op
useblock = gimple_phi_arg_edge (phi, idx)->src;
}
- else if (is_gimple_debug (usestmt))
+ else if (!tangible_stmt_p (usestmt))
{
*debug_stmts = true;
continue;
@@ -425,7 +425,7 @@ statement_sink_location (gimple *stmt, b
{
FOR_EACH_IMM_USE_FAST (one_use, imm_iter, DEF_FROM_PTR (def_p))
{
- if (is_gimple_debug (USE_STMT (one_use)))
+ if (!tangible_stmt_p (USE_STMT (one_use)))
continue;
break;
}
===================================================================
@@ -585,7 +585,7 @@ find_replaceable_in_bb (temp_expr_table
{
stmt = gsi_stmt (bsi);
- if (is_gimple_debug (stmt))
+ if (!tangible_stmt_p (stmt))
continue;
stmt_replaceable = ter_is_replaceable_p (stmt);
===================================================================
@@ -3373,7 +3373,13 @@ gfc_get_array_descr_info (const_tree typ
base_decl = GFC_TYPE_ARRAY_BASE_DECL (type, indirect);
if (!base_decl)
{
+ /* This DEBUG_EXPR_DECL is just a dummy decl that has the nice
+ property of not affecting the DECL_UIDs of real decls. It's
+ therefore OK to create it even for flag_tangible_debug. */
+ bool save_flag_tangible_debug = flag_tangible_debug;
+ flag_tangible_debug = false;
base_decl = make_node (DEBUG_EXPR_DECL);
+ flag_tangible_debug = save_flag_tangible_debug;
DECL_ARTIFICIAL (base_decl) = 1;
TREE_TYPE (base_decl) = indirect ? build_pointer_type (ptype) : ptype;
SET_DECL_MODE (base_decl, TYPE_MODE (TREE_TYPE (base_decl)));
===================================================================
@@ -0,0 +1,19 @@
+/* { dg-do run } */
+/* { dg-options "-g" } */
+
+int x = 42;
+
+void __attribute__ ((noipa))
+foo (int *ptr)
+{
+ int c = *ptr;
+ ptr[0] = 1;
+ int a = c;
+ ptr[1] = 2; /* { dg-final { gdb-test . a "42" } } */
+}
+
+int
+main (void)
+{
+ foo (&x);
+}
===================================================================
@@ -81,8 +81,22 @@ if {[check_guality "
return 0;
}
"] || 1} {
- gcc-dg-runtest [lsort [glob $srcdir/$subdir/*.c]] "" ""
- gcc-dg-runtest [lsort [glob $srcdir/c-c++-common/guality/*.c]] "" "-Wc++-compat"
+ set general [list]
+ set Og [list]
+ foreach file [lsort [glob $srcdir/c-c++-common/guality/*.c]] {
+ switch -glob -- [file tail $file] {
+ Og-* { lappend Og $file }
+ * { lappend general $file }
+ }
+ }
+
+ gcc-dg-runtest [lsort [glob $srcdir/$subdir/*.c]] "" ""
+ gcc-dg-runtest $general "" "-Wc++-compat"
+ set-torture-options \
+ [list "-O0" "-Og"] \
+ [list {}] \
+ [list "-Og -flto"]
+ gcc-dg-runtest $Og "" "-Wc++-compat"
}
if [info exists guality_gdb_name] {
===================================================================
@@ -66,8 +66,22 @@ if {[check_guality "
return 0;
}
"] || 1} {
- gcc-dg-runtest [lsort [glob $srcdir/$subdir/*.C]] "" ""
- gcc-dg-runtest [lsort [glob $srcdir/c-c++-common/guality/*.c]] "" ""
+ set general [list]
+ set Og [list]
+ foreach file [lsort [glob $srcdir/c-c++-common/guality/*.c]] {
+ switch -glob -- [file tail $file] {
+ Og-* { lappend Og $file }
+ * { lappend general $file }
+ }
+ }
+
+ gcc-dg-runtest [lsort [glob $srcdir/$subdir/*.C]] "" ""
+ gcc-dg-runtest $general "" ""
+ set-torture-options \
+ [list "-O0" "-Og"] \
+ [list {}] \
+ [list "-Og -flto"]
+ gcc-dg-runtest $Og "" ""
}
if [info exists guality_gdb_name] {