@@ -1,5 +1,66 @@
2014-11-06 David Malcolm <dmalcolm@redhat.com>
+ * tree-ssa-loop-ch.c (pass_ch::execute): Add checked cast.
+ * tree-ssa-loop-im.c (movement_possibility): Add checked casts.
+ (stmt_cost): Likewise.
+ (simple_mem_ref_in_stmt): Capture result of
+ gimple_assign_single_p as a new local gassign * "assign_stmt",
+ using it in place of "stmt" for typesafety.
+ (rewrite_reciprocal): Strengthen return type from gimple to
+ gassign *.
+ (rewrite_bittest): Likewise. Replace check against GIMPLE_ASSIGN
+ with a dyn_cast, introducing local gassign * "assign_stmt1", using
+ it in place of "stmt1" for typesafety. More tightly-scope "stmt2".
+ (invariantness_dom_walker::before_dom_children): Replace an
+ is_gimple_assign with a dyn_cast, introducing local gassign *
+ "assign_stmt" and using it in place of "stmt" for typesafety.
+	(sm_set_flag_if_changed::operator ()): Add a checked cast.
+ * tree-ssa-loop-ivcanon.c (tree_estimate_loop_size): Introduce
+ local gassign * "assign_stmt" via a dyn_cast, using it to replace
+ a pair of checks against GIMPLE_ASSIGN, and in place of "stmt" for
+ typesafety.
+ (propagate_into_all_uses): Replace an is_gimple_assign with a
+ dyn_cast, introducing local gassign * "use_assign", using it in
+ place of "use_stmt" for typesafety.
+ (propagate_constants_for_unrolling): Strengthen local "stmt" from
+ gimple to gassign * by replacing an is_gimple_assign with a
+ dyn_cast.
+ * tree-ssa-loop-ivopts.c (find_givs_in_stmt_scev): Replace a check
+ against GIMPLE_ASSIGN with a dyn_cast, introducing local gassign *
+ "assign_stmt", using it in place of "stmt" for typesafety.
+	(find_givs_in_stmt, extract_cond_operands): Add a checked cast.
+ (find_interesting_uses_stmt): Replace an is_gimple_assign with a
+ dyn_cast, introducing local "assign_stmt" and using it in place of
+ "stmt" for typesafety.
+ (difference_cannot_overflow_p): Strengthen local "stmt" from
+ gimple to gassign * by replacing a check against GIMPLE_ASSIGN
+ with a dyn_cast.
+ (rewrite_use_nonlinear_expr): Introduce local gassign *
+ "use_assign" via an as_a, and use in place of use->stmt for
+ typesafety. Add an as_a within case GIMPLE_ASSIGN.
+ (adjust_iv_update_pos): Add an as_a.
+ * tree-ssa-loop-niter.c (expand_simple_operations): Replace a
+ check against GIMPLE_ASSIGN with a dyn_cast, introducing local
+ "assign_stmt", using it in place of "stmt" for typesafety.
+ (chain_of_csts_start): Likewise.
+ (get_val_for): Introduce local gassign * "assign_stmt" via an
+ as_a, using it in place of "stmt" for typesafety.
+ (derive_constant_upper_bound_assign): Strengthen param "stmt" from
+ gimple to gassign *.
+	(derive_constant_upper_bound_ops): Add checked casts.
+ (infer_loop_bounds_from_array): Replace is_gimple_assign with a
+ dyn_cast, introducing a local "assign_stmt", using it in place of
+ "stmt" for typesafety.
+ (infer_loop_bounds_from_pointer_arith): Likewise.
+ (infer_loop_bounds_from_signedness): Likewise, replacing a check
+ against GIMPLE_ASSIGN.
+ * tree-ssa-loop-prefetch.c (gather_memory_references): Introduce
+ local gassign * "assign_stmt" via an as_a, using it in place of
+ "stmt" for typesafety.
+ (mark_nontemporal_store): Add an as_a.
+
+2014-11-06 David Malcolm <dmalcolm@redhat.com>
+
* tree-ssa-math-opts.c (is_division_by): Replace is_gimple_assign
with dyn_cast, introducing local gassign * "use_assign", using it
in place of "use_stmt" for typesafety.
@@ -273,7 +273,8 @@ pass_ch::execute (function *fun)
gimple_set_no_warning (stmt, true);
else if (is_gimple_assign (stmt))
{
- enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
+ enum tree_code rhs_code =
+ gimple_assign_rhs_code (as_a <gassign *> (stmt));
if (TREE_CODE_CLASS (rhs_code) == tcc_comparison)
gimple_set_no_warning (stmt, true);
}
@@ -346,7 +346,7 @@ movement_possibility (gimple stmt)
lhs = gimple_call_lhs (stmt);
}
else if (is_gimple_assign (stmt))
- lhs = gimple_assign_lhs (stmt);
+ lhs = gimple_assign_lhs (as_a <gassign *> (stmt));
else
return MOVE_IMPOSSIBLE;
@@ -365,7 +365,7 @@ movement_possibility (gimple stmt)
&& gimple_in_transaction (stmt)
&& gimple_assign_single_p (stmt))
{
- tree rhs = gimple_assign_rhs1 (stmt);
+ tree rhs = gimple_assign_rhs1 (as_a <gassign *> (stmt));
if (DECL_P (rhs) && is_global_var (rhs))
{
if (dump_file)
@@ -503,7 +503,7 @@ stmt_cost (gimple stmt)
if (gimple_code (stmt) != GIMPLE_ASSIGN)
return 1;
- switch (gimple_assign_rhs_code (stmt))
+ switch (gimple_assign_rhs_code (as_a <gassign *> (stmt)))
{
case MULT_EXPR:
case WIDEN_MULT_EXPR:
@@ -535,7 +535,7 @@ stmt_cost (gimple stmt)
case CONSTRUCTOR:
/* Make vector construction cost proportional to the number
of elements. */
- return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
+ return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (as_a <gassign *> (stmt)));
case SSA_NAME:
case PAREN_EXPR:
@@ -584,11 +584,12 @@ simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
tree *lhs, *rhs;
/* Recognize SSA_NAME = MEM and MEM = (SSA_NAME | invariant) patterns. */
- if (!gimple_assign_single_p (stmt))
+ gassign *assign_stmt = gimple_assign_single_p (stmt);
+ if (!assign_stmt)
return NULL;
- lhs = gimple_assign_lhs_ptr (stmt);
- rhs = gimple_assign_rhs1_ptr (stmt);
+ lhs = gimple_assign_lhs_ptr (assign_stmt);
+ rhs = gimple_assign_rhs1_ptr (assign_stmt);
if (TREE_CODE (*lhs) == SSA_NAME && gimple_vuse (stmt))
{
@@ -883,7 +884,7 @@ nonpure_call_p (gimple stmt)
/* Rewrite a/b to a*(1/b). Return the invariant stmt to process. */
-static gimple
+static gassign *
rewrite_reciprocal (gimple_stmt_iterator *bsi)
{
gassign *stmt, *stmt1, *stmt2;
@@ -918,12 +919,11 @@ rewrite_reciprocal (gimple_stmt_iterator *bsi)
/* Check if the pattern at *BSI is a bittest of the form
(A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0. */
-static gimple
+static gassign *
rewrite_bittest (gimple_stmt_iterator *bsi)
{
gassign *stmt;
gimple stmt1;
- gassign *stmt2;
gimple use_stmt;
gcond *cond_stmt;
tree lhs, name, t, a, b;
@@ -947,32 +947,34 @@ rewrite_bittest (gimple_stmt_iterator *bsi)
/* Get at the operands of the shift. The rhs is TMP1 & 1. */
stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
- if (gimple_code (stmt1) != GIMPLE_ASSIGN)
+ gassign *assign_stmt1 = dyn_cast <gassign *> (stmt1);
+ if (!assign_stmt1)
return stmt;
/* There is a conversion in between possibly inserted by fold. */
- if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt1)))
+ if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (assign_stmt1)))
{
- t = gimple_assign_rhs1 (stmt1);
+ t = gimple_assign_rhs1 (assign_stmt1);
if (TREE_CODE (t) != SSA_NAME
|| !has_single_use (t))
return stmt;
- stmt1 = SSA_NAME_DEF_STMT (t);
- if (gimple_code (stmt1) != GIMPLE_ASSIGN)
+ assign_stmt1 = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (t));
+ if (!assign_stmt1)
return stmt;
}
/* Verify that B is loop invariant but A is not. Verify that with
all the stmt walking we are still in the same loop. */
- if (gimple_assign_rhs_code (stmt1) != RSHIFT_EXPR
- || loop_containing_stmt (stmt1) != loop_containing_stmt (stmt))
+ if (gimple_assign_rhs_code (assign_stmt1) != RSHIFT_EXPR
+ || loop_containing_stmt (assign_stmt1) != loop_containing_stmt (stmt))
return stmt;
- a = gimple_assign_rhs1 (stmt1);
- b = gimple_assign_rhs2 (stmt1);
+ a = gimple_assign_rhs1 (assign_stmt1);
+ b = gimple_assign_rhs2 (assign_stmt1);
- if (outermost_invariant_loop (b, loop_containing_stmt (stmt1)) != NULL
- && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL)
+ if (outermost_invariant_loop (b, loop_containing_stmt (assign_stmt1)) != NULL
+ && outermost_invariant_loop (a,
+ loop_containing_stmt (assign_stmt1)) == NULL)
{
gimple_stmt_iterator rsi;
@@ -980,12 +982,12 @@ rewrite_bittest (gimple_stmt_iterator *bsi)
t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
build_int_cst (TREE_TYPE (a), 1), b);
name = make_temp_ssa_name (TREE_TYPE (a), NULL, "shifttmp");
- stmt1 = gimple_build_assign (name, t);
+ gassign *stmt1 = gimple_build_assign (name, t);
/* A & (1 << B) */
t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
name = make_temp_ssa_name (TREE_TYPE (a), NULL, "shifttmp");
- stmt2 = gimple_build_assign (name, t);
+ gassign *stmt2 = gimple_build_assign (name, t);
/* Replace the SSA_NAME we compare against zero. Adjust
the type of zero accordingly. */
@@ -1102,34 +1104,37 @@ invariantness_dom_walker::before_dom_children (basic_block bb)
continue;
}
- if (is_gimple_assign (stmt)
- && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+ gassign *assign_stmt;
+ if ((assign_stmt = dyn_cast <gassign *> (stmt))
+ && (get_gimple_rhs_class (gimple_assign_rhs_code (assign_stmt))
== GIMPLE_BINARY_RHS))
{
- tree op0 = gimple_assign_rhs1 (stmt);
- tree op1 = gimple_assign_rhs2 (stmt);
+ tree op0 = gimple_assign_rhs1 (assign_stmt);
+ tree op1 = gimple_assign_rhs2 (assign_stmt);
struct loop *ol1 = outermost_invariant_loop (op1,
loop_containing_stmt (stmt));
/* If divisor is invariant, convert a/b to a*(1/b), allowing reciprocal
to be hoisted out of loop, saving expensive divide. */
if (pos == MOVE_POSSIBLE
- && gimple_assign_rhs_code (stmt) == RDIV_EXPR
+ && gimple_assign_rhs_code (assign_stmt) == RDIV_EXPR
&& flag_unsafe_math_optimizations
&& !flag_trapping_math
&& ol1 != NULL
&& outermost_invariant_loop (op0, ol1) == NULL)
- stmt = rewrite_reciprocal (&bsi);
+ assign_stmt = rewrite_reciprocal (&bsi);
/* If the shift count is invariant, convert (A >> B) & 1 to
A & (1 << B) allowing the bit mask to be hoisted out of the loop
saving an expensive shift. */
if (pos == MOVE_POSSIBLE
- && gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
+ && gimple_assign_rhs_code (assign_stmt) == BIT_AND_EXPR
&& integer_onep (op1)
&& TREE_CODE (op0) == SSA_NAME
&& has_single_use (op0))
- stmt = rewrite_bittest (&bsi);
+ assign_stmt = rewrite_bittest (&bsi);
+
+ stmt = assign_stmt;
}
lim_data = init_lim_data (stmt);
@@ -1927,7 +1932,7 @@ sm_set_flag_if_changed::operator () (mem_ref_loc_p loc)
{
/* Only set the flag for writes. */
if (is_gimple_assign (loc->stmt)
- && gimple_assign_lhs_ptr (loc->stmt) == loc->ref)
+ && gimple_assign_lhs_ptr (as_a <gassign *> (loc->stmt)) == loc->ref)
{
gimple_stmt_iterator gsi = gsi_for_stmt (loc->stmt);
gassign *stmt = gimple_build_assign (flag, boolean_true_node);
@@ -261,6 +261,7 @@ tree_estimate_loop_size (struct loop *loop, edge exit, edge edge_to_cancel, stru
for (gsi = gsi_start_bb (body[i]); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
+ gassign *assign_stmt = dyn_cast <gassign *> (stmt);
int num = estimate_num_insns (stmt, &eni_size_weights);
bool likely_eliminated = false;
bool likely_eliminated_last = false;
@@ -294,8 +295,9 @@ tree_estimate_loop_size (struct loop *loop, edge exit, edge edge_to_cancel, stru
likely_eliminated_last = true;
}
/* Sets of IV variables */
- else if (gimple_code (stmt) == GIMPLE_ASSIGN
- && constant_after_peeling (gimple_assign_lhs (stmt), stmt, loop))
+ else if (assign_stmt
+ && constant_after_peeling (gimple_assign_lhs (assign_stmt),
+ assign_stmt, loop))
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Induction variable computation will"
@@ -303,12 +305,15 @@ tree_estimate_loop_size (struct loop *loop, edge exit, edge edge_to_cancel, stru
likely_eliminated = true;
}
/* Assignments of IV variables. */
- else if (gimple_code (stmt) == GIMPLE_ASSIGN
- && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
- && constant_after_peeling (gimple_assign_rhs1 (stmt), stmt, loop)
- && (gimple_assign_rhs_class (stmt) != GIMPLE_BINARY_RHS
- || constant_after_peeling (gimple_assign_rhs2 (stmt),
- stmt, loop)))
+ else if (assign_stmt
+ && TREE_CODE (gimple_assign_lhs (assign_stmt)) == SSA_NAME
+ && constant_after_peeling (gimple_assign_rhs1 (assign_stmt),
+ assign_stmt, loop)
+ && ((gimple_assign_rhs_class (assign_stmt)
+ != GIMPLE_BINARY_RHS)
+ || constant_after_peeling (gimple_assign_rhs2 (
+ assign_stmt),
+ assign_stmt, loop)))
{
size->constant_iv = true;
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -1195,15 +1200,15 @@ propagate_into_all_uses (tree ssa_name, tree val)
FOR_EACH_IMM_USE_ON_STMT (use, iter)
SET_USE (use, val);
- if (is_gimple_assign (use_stmt)
- && get_gimple_rhs_class (gimple_assign_rhs_code (use_stmt))
- == GIMPLE_SINGLE_RHS)
- {
- tree rhs = gimple_assign_rhs1 (use_stmt);
+ if (gassign *use_assign = dyn_cast <gassign *> (use_stmt))
+ if (get_gimple_rhs_class (gimple_assign_rhs_code (use_assign))
+ == GIMPLE_SINGLE_RHS)
+ {
+ tree rhs = gimple_assign_rhs1 (use_assign);
- if (TREE_CODE (rhs) == ADDR_EXPR)
- recompute_tree_invariant_for_addr_expr (rhs);
- }
+ if (TREE_CODE (rhs) == ADDR_EXPR)
+ recompute_tree_invariant_for_addr_expr (rhs);
+ }
fold_stmt_inplace (&use_stmt_gsi);
update_stmt (use_stmt);
@@ -1236,10 +1241,11 @@ propagate_constants_for_unrolling (basic_block bb)
/* Look for assignments to SSA names with constant RHS. */
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gassign *stmt;
tree lhs;
- if (is_gimple_assign (stmt)
+ stmt = dyn_cast <gassign *> (gsi_stmt (gsi));
+ if (stmt
&& gimple_assign_rhs_code (stmt) == INTEGER_CST
&& (lhs = gimple_assign_lhs (stmt), TREE_CODE (lhs) == SSA_NAME)
&& !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
@@ -1163,14 +1163,15 @@ find_givs_in_stmt_scev (struct ivopts_data *data, gimple stmt, affine_iv *iv)
iv->base = NULL_TREE;
iv->step = NULL_TREE;
- if (gimple_code (stmt) != GIMPLE_ASSIGN)
+ gassign *assign_stmt = dyn_cast <gassign *> (stmt);
+ if (!assign_stmt)
return false;
- lhs = gimple_assign_lhs (stmt);
+ lhs = gimple_assign_lhs (assign_stmt);
if (TREE_CODE (lhs) != SSA_NAME)
return false;
- if (!simple_iv (loop, loop_containing_stmt (stmt), lhs, iv, true))
+ if (!simple_iv (loop, loop_containing_stmt (assign_stmt), lhs, iv, true))
return false;
iv->base = expand_simple_operations (iv->base);
@@ -1178,10 +1179,11 @@ find_givs_in_stmt_scev (struct ivopts_data *data, gimple stmt, affine_iv *iv)
|| contains_abnormal_ssa_name_p (iv->step))
return false;
- /* If STMT could throw, then do not consider STMT as defining a GIV.
+ /* If ASSIGN_STMT could throw, then do not consider ASSIGN_STMT as defining
+ a GIV.
While this will suppress optimizations, we can not safely delete this
GIV and associated statements, even if it appears it is not used. */
- if (stmt_could_throw_p (stmt))
+ if (stmt_could_throw_p (assign_stmt))
return false;
return true;
@@ -1197,7 +1199,7 @@ find_givs_in_stmt (struct ivopts_data *data, gimple stmt)
if (!find_givs_in_stmt_scev (data, stmt, &iv))
return;
- set_iv (data, gimple_assign_lhs (stmt), iv.base, iv.step);
+ set_iv (data, gimple_assign_lhs (as_a <gassign *> (stmt)), iv.base, iv.step);
}
/* Finds general ivs in basic block BB. */
@@ -1395,8 +1397,9 @@ extract_cond_operands (struct ivopts_data *data, gimple stmt,
}
else
{
- op0 = gimple_assign_rhs1_ptr (stmt);
- op1 = gimple_assign_rhs2_ptr (stmt);
+ gassign *assign_stmt = as_a <gassign *> (stmt);
+ op0 = gimple_assign_rhs1_ptr (assign_stmt);
+ op1 = gimple_assign_rhs2_ptr (assign_stmt);
}
zero = integer_zero_node;
@@ -1931,10 +1934,10 @@ find_interesting_uses_stmt (struct ivopts_data *data, gimple stmt)
return;
}
- if (is_gimple_assign (stmt))
+ if (gassign *assign_stmt = dyn_cast <gassign *> (stmt))
{
- lhs = gimple_assign_lhs_ptr (stmt);
- rhs = gimple_assign_rhs1_ptr (stmt);
+ lhs = gimple_assign_lhs_ptr (assign_stmt);
+ rhs = gimple_assign_rhs1_ptr (assign_stmt);
if (TREE_CODE (*lhs) == SSA_NAME)
{
@@ -1947,23 +1950,23 @@ find_interesting_uses_stmt (struct ivopts_data *data, gimple stmt)
return;
}
- code = gimple_assign_rhs_code (stmt);
+ code = gimple_assign_rhs_code (assign_stmt);
if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
&& (REFERENCE_CLASS_P (*rhs)
|| is_gimple_val (*rhs)))
{
if (REFERENCE_CLASS_P (*rhs))
- find_interesting_uses_address (data, stmt, rhs);
+ find_interesting_uses_address (data, assign_stmt, rhs);
else
find_interesting_uses_op (data, *rhs);
if (REFERENCE_CLASS_P (*lhs))
- find_interesting_uses_address (data, stmt, lhs);
+ find_interesting_uses_address (data, assign_stmt, lhs);
return;
}
else if (TREE_CODE_CLASS (code) == tcc_comparison)
{
- find_interesting_uses_cond (data, stmt);
+ find_interesting_uses_cond (data, assign_stmt);
return;
}
@@ -4482,9 +4485,8 @@ difference_cannot_overflow_p (struct ivopts_data *data, tree base, tree offset)
if (TREE_CODE (base) == SSA_NAME)
{
- gimple stmt = SSA_NAME_DEF_STMT (base);
-
- if (gimple_code (stmt) != GIMPLE_ASSIGN)
+ gassign *stmt = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (base));
+ if (!stmt)
return false;
code = gimple_assign_rhs_code (stmt);
@@ -6230,24 +6232,23 @@ rewrite_use_nonlinear_expr (struct ivopts_data *data,
&& cand->incremented_at == use->stmt)
{
enum tree_code stmt_code;
-
- gcc_assert (is_gimple_assign (use->stmt));
- gcc_assert (gimple_assign_lhs (use->stmt) == cand->var_after);
+ gassign *use_assign = as_a <gassign *> (use->stmt);
+ gcc_assert (gimple_assign_lhs (use_assign) == cand->var_after);
/* Check whether we may leave the computation unchanged.
This is the case only if it does not rely on other
computations in the loop -- otherwise, the computation
we rely upon may be removed in remove_unused_ivs,
thus leading to ICE. */
- stmt_code = gimple_assign_rhs_code (use->stmt);
+ stmt_code = gimple_assign_rhs_code (use_assign);
if (stmt_code == PLUS_EXPR
|| stmt_code == MINUS_EXPR
|| stmt_code == POINTER_PLUS_EXPR)
{
- if (gimple_assign_rhs1 (use->stmt) == cand->var_before)
- op = gimple_assign_rhs2 (use->stmt);
- else if (gimple_assign_rhs2 (use->stmt) == cand->var_before)
- op = gimple_assign_rhs1 (use->stmt);
+ if (gimple_assign_rhs1 (use_assign) == cand->var_before)
+ op = gimple_assign_rhs2 (use_assign);
+ else if (gimple_assign_rhs2 (use_assign) == cand->var_before)
+ op = gimple_assign_rhs1 (use_assign);
else
op = NULL_TREE;
}
@@ -6274,7 +6275,7 @@ rewrite_use_nonlinear_expr (struct ivopts_data *data,
break;
case GIMPLE_ASSIGN:
- tgt = gimple_assign_lhs (use->stmt);
+ tgt = gimple_assign_lhs (as_a <gassign *> (use->stmt));
bsi = gsi_for_stmt (use->stmt);
break;
@@ -6380,7 +6381,7 @@ adjust_iv_update_pos (struct iv_cand *cand, struct iv_use *use)
if (stmt != use->stmt)
return;
- if (TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
+ if (TREE_CODE (gimple_assign_lhs (as_a <gassign *> (stmt))) != SSA_NAME)
return;
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -1610,18 +1610,19 @@ expand_simple_operations (tree expr)
return expand_simple_operations (e);
}
- if (gimple_code (stmt) != GIMPLE_ASSIGN)
+ gassign *assign_stmt = dyn_cast <gassign *> (stmt);
+ if (!assign_stmt)
return expr;
/* Avoid expanding to expressions that contain SSA names that need
to take part in abnormal coalescing. */
ssa_op_iter iter;
- FOR_EACH_SSA_TREE_OPERAND (e, stmt, iter, SSA_OP_USE)
+ FOR_EACH_SSA_TREE_OPERAND (e, assign_stmt, iter, SSA_OP_USE)
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (e))
return expr;
- e = gimple_assign_rhs1 (stmt);
- code = gimple_assign_rhs_code (stmt);
+ e = gimple_assign_rhs1 (assign_stmt);
+ code = gimple_assign_rhs_code (assign_stmt);
if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
{
if (is_gimple_min_invariant (e))
@@ -1647,7 +1648,7 @@ expand_simple_operations (tree expr)
/* Fallthru. */
case POINTER_PLUS_EXPR:
/* And increments and decrements by a constant are simple. */
- e1 = gimple_assign_rhs2 (stmt);
+ e1 = gimple_assign_rhs2 (assign_stmt);
if (!is_gimple_min_invariant (e1))
return expr;
@@ -2188,18 +2189,19 @@ chain_of_csts_start (struct loop *loop, tree x)
return NULL;
}
- if (gimple_code (stmt) != GIMPLE_ASSIGN
- || gimple_assign_rhs_class (stmt) == GIMPLE_TERNARY_RHS)
+ gassign *assign_stmt = dyn_cast <gassign *> (stmt);
+ if (!assign_stmt
+ || gimple_assign_rhs_class (assign_stmt) == GIMPLE_TERNARY_RHS)
return NULL;
- code = gimple_assign_rhs_code (stmt);
- if (gimple_references_memory_p (stmt)
+ code = gimple_assign_rhs_code (assign_stmt);
+ if (gimple_references_memory_p (assign_stmt)
|| TREE_CODE_CLASS (code) == tcc_reference
|| (code == ADDR_EXPR
- && !is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
+ && !is_gimple_min_invariant (gimple_assign_rhs1 (assign_stmt))))
return NULL;
- use = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
+ use = SINGLE_SSA_TREE_OPERAND (assign_stmt, SSA_OP_USE);
if (use == NULL_TREE)
return NULL;
@@ -2266,32 +2268,32 @@ get_val_for (tree x, tree base)
if (gimple_code (stmt) == GIMPLE_PHI)
return base;
- gcc_checking_assert (is_gimple_assign (stmt));
+ gassign *assign_stmt = as_a <gassign *> (stmt);
/* STMT must be either an assignment of a single SSA name or an
expression involving an SSA name and a constant. Try to fold that
expression using the value for the SSA name. */
if (gassign *assign = gimple_assign_ssa_name_copy_p (stmt))
return get_val_for (gimple_assign_rhs1 (assign), base);
- else if (gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
- && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
+ else if (gimple_assign_rhs_class (assign_stmt) == GIMPLE_UNARY_RHS
+ && TREE_CODE (gimple_assign_rhs1 (assign_stmt)) == SSA_NAME)
{
- return fold_build1 (gimple_assign_rhs_code (stmt),
- gimple_expr_type (stmt),
- get_val_for (gimple_assign_rhs1 (stmt), base));
+ return fold_build1 (gimple_assign_rhs_code (assign_stmt),
+ gimple_expr_type (assign_stmt),
+ get_val_for (gimple_assign_rhs1 (assign_stmt), base));
}
- else if (gimple_assign_rhs_class (stmt) == GIMPLE_BINARY_RHS)
+ else if (gimple_assign_rhs_class (assign_stmt) == GIMPLE_BINARY_RHS)
{
- tree rhs1 = gimple_assign_rhs1 (stmt);
- tree rhs2 = gimple_assign_rhs2 (stmt);
+ tree rhs1 = gimple_assign_rhs1 (assign_stmt);
+ tree rhs2 = gimple_assign_rhs2 (assign_stmt);
if (TREE_CODE (rhs1) == SSA_NAME)
rhs1 = get_val_for (rhs1, base);
else if (TREE_CODE (rhs2) == SSA_NAME)
rhs2 = get_val_for (rhs2, base);
else
gcc_unreachable ();
- return fold_build2 (gimple_assign_rhs_code (stmt),
- gimple_expr_type (stmt), rhs1, rhs2);
+ return fold_build2 (gimple_assign_rhs_code (assign_stmt),
+ gimple_expr_type (assign_stmt), rhs1, rhs2);
}
else
gcc_unreachable ();
@@ -2451,7 +2453,7 @@ static widest_int derive_constant_upper_bound_ops (tree, tree,
an assignment statement STMT. */
static widest_int
-derive_constant_upper_bound_assign (gimple stmt)
+derive_constant_upper_bound_assign (gassign *stmt)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
tree op0 = gimple_assign_rhs1 (stmt);
@@ -2605,9 +2607,9 @@ derive_constant_upper_bound_ops (tree type, tree op0,
case SSA_NAME:
stmt = SSA_NAME_DEF_STMT (op0);
if (gimple_code (stmt) != GIMPLE_ASSIGN
- || gimple_assign_lhs (stmt) != op0)
+ || gimple_assign_lhs (as_a <gassign *> (stmt)) != op0)
return max;
- return derive_constant_upper_bound_assign (stmt);
+ return derive_constant_upper_bound_assign (as_a <gassign *> (stmt));
default:
return max;
@@ -2903,18 +2905,18 @@ infer_loop_bounds_from_ref (struct loop *loop, gimple stmt, tree ref)
static void
infer_loop_bounds_from_array (struct loop *loop, gimple stmt)
{
- if (is_gimple_assign (stmt))
+ if (gassign *assign_stmt = dyn_cast <gassign *> (stmt))
{
- tree op0 = gimple_assign_lhs (stmt);
- tree op1 = gimple_assign_rhs1 (stmt);
+ tree op0 = gimple_assign_lhs (assign_stmt);
+ tree op1 = gimple_assign_rhs1 (assign_stmt);
/* For each memory access, analyze its access function
and record a bound on the loop iteration domain. */
if (REFERENCE_CLASS_P (op0))
- infer_loop_bounds_from_ref (loop, stmt, op0);
+ infer_loop_bounds_from_ref (loop, assign_stmt, op0);
if (REFERENCE_CLASS_P (op1))
- infer_loop_bounds_from_ref (loop, stmt, op1);
+ infer_loop_bounds_from_ref (loop, assign_stmt, op1);
}
else if (is_gimple_call (stmt))
{
@@ -2943,11 +2945,12 @@ infer_loop_bounds_from_pointer_arith (struct loop *loop, gimple stmt)
tree def, base, step, scev, type, low, high;
tree var, ptr;
- if (!is_gimple_assign (stmt)
- || gimple_assign_rhs_code (stmt) != POINTER_PLUS_EXPR)
+ gassign *assign_stmt = dyn_cast <gassign *> (stmt);
+ if (!assign_stmt
+ || gimple_assign_rhs_code (assign_stmt) != POINTER_PLUS_EXPR)
return;
- def = gimple_assign_lhs (stmt);
+ def = gimple_assign_lhs (assign_stmt);
if (TREE_CODE (def) != SSA_NAME)
return;
@@ -2955,11 +2958,11 @@ infer_loop_bounds_from_pointer_arith (struct loop *loop, gimple stmt)
if (!nowrap_type_p (type))
return;
- ptr = gimple_assign_rhs1 (stmt);
+ ptr = gimple_assign_rhs1 (assign_stmt);
if (!expr_invariant_in_loop_p (loop, ptr))
return;
- var = gimple_assign_rhs2 (stmt);
+ var = gimple_assign_rhs2 (assign_stmt);
if (TYPE_PRECISION (type) != TYPE_PRECISION (TREE_TYPE (var)))
return;
@@ -2989,7 +2992,8 @@ infer_loop_bounds_from_pointer_arith (struct loop *loop, gimple stmt)
if (flag_delete_null_pointer_checks && int_cst_value (low) == 0)
low = build_int_cstu (TREE_TYPE (low), TYPE_ALIGN_UNIT (TREE_TYPE (type)));
- record_nonwrapping_iv (loop, base, step, stmt, low, high, false, true);
+ record_nonwrapping_iv (loop, base, step, assign_stmt, low, high, false,
+ true);
}
/* Determine information about number of iterations of a LOOP from the fact
@@ -3000,10 +3004,11 @@ infer_loop_bounds_from_signedness (struct loop *loop, gimple stmt)
{
tree def, base, step, scev, type, low, high;
- if (gimple_code (stmt) != GIMPLE_ASSIGN)
+ gassign *assign_stmt = dyn_cast <gassign *> (stmt);
+ if (!assign_stmt)
return;
- def = gimple_assign_lhs (stmt);
+ def = gimple_assign_lhs (assign_stmt);
if (TREE_CODE (def) != SSA_NAME)
return;
@@ -3029,7 +3034,7 @@ infer_loop_bounds_from_signedness (struct loop *loop, gimple stmt)
low = lower_bound_in_type (type, type);
high = upper_bound_in_type (type, type);
- record_nonwrapping_iv (loop, base, step, stmt, low, high, false, true);
+ record_nonwrapping_iv (loop, base, step, assign_stmt, low, high, false, true);
}
/* The following analyzers are extracting informations on the bounds
@@ -651,20 +651,21 @@ gather_memory_references (struct loop *loop, bool *no_other_refs, unsigned *ref_
*no_other_refs = false;
continue;
}
+ gassign *assign_stmt = as_a <gassign *> (stmt);
- lhs = gimple_assign_lhs (stmt);
- rhs = gimple_assign_rhs1 (stmt);
+ lhs = gimple_assign_lhs (assign_stmt);
+ rhs = gimple_assign_rhs1 (assign_stmt);
if (REFERENCE_CLASS_P (rhs))
{
*no_other_refs &= gather_memory_references_ref (loop, &refs,
- rhs, false, stmt);
+ rhs, false, assign_stmt);
*ref_count += 1;
}
if (REFERENCE_CLASS_P (lhs))
{
*no_other_refs &= gather_memory_references_ref (loop, &refs,
- lhs, true, stmt);
+ lhs, true, assign_stmt);
*ref_count += 1;
}
}
@@ -1240,7 +1241,7 @@ mark_nontemporal_store (struct mem_ref *ref)
fprintf (dump_file, "Marked reference %p as a nontemporal store.\n",
(void *) ref);
- gimple_assign_set_nontemporal_move (ref->stmt, true);
+ gimple_assign_set_nontemporal_move (as_a <gassign *> (ref->stmt), true);
ref->storent_p = true;
return true;