@@ -52,7 +52,8 @@ extern tree ubsan_create_data (const cha
extern tree ubsan_type_descriptor (tree, enum ubsan_print_style = UBSAN_PRINT_NORMAL);
extern tree ubsan_encode_value (tree, bool = false);
extern bool is_ubsan_builtin_p (tree);
-extern tree ubsan_build_overflow_builtin (tree_code, location_t, tree, tree, tree);
+extern tree ubsan_build_overflow_builtin (tree_code, location_t, tree, tree,
+ tree, tree *);
extern tree ubsan_instrument_float_cast (location_t, tree, tree);
extern tree ubsan_get_source_location_type (void);
@@ -513,7 +513,7 @@ expand_ubsan_result_store (rtx target, r
static void
expand_addsub_overflow (location_t loc, tree_code code, tree lhs,
tree arg0, tree arg1, bool unsr_p, bool uns0_p,
- bool uns1_p, bool is_ubsan)
+ bool uns1_p, bool is_ubsan, tree *datap)
{
rtx res, target = NULL_RTX;
tree fn;
@@ -929,7 +929,7 @@ expand_addsub_overflow (location_t loc,
/* Expand the ubsan builtin call. */
push_temp_slots ();
fn = ubsan_build_overflow_builtin (code, loc, TREE_TYPE (arg0),
- arg0, arg1);
+ arg0, arg1, datap);
expand_normal (fn);
pop_temp_slots ();
do_pending_stack_adjust ();
@@ -958,7 +958,8 @@ expand_addsub_overflow (location_t loc,
/* Add negate overflow checking to the statement STMT. */
static void
-expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan)
+expand_neg_overflow (location_t loc, tree lhs, tree arg1, bool is_ubsan,
+ tree *datap)
{
rtx res, op1;
tree fn;
@@ -1024,7 +1025,7 @@ expand_neg_overflow (location_t loc, tre
/* Expand the ubsan builtin call. */
push_temp_slots ();
fn = ubsan_build_overflow_builtin (NEGATE_EXPR, loc, TREE_TYPE (arg1),
- arg1, NULL_TREE);
+ arg1, NULL_TREE, datap);
expand_normal (fn);
pop_temp_slots ();
do_pending_stack_adjust ();
@@ -1048,7 +1049,8 @@ expand_neg_overflow (location_t loc, tre
static void
expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
- bool unsr_p, bool uns0_p, bool uns1_p, bool is_ubsan)
+ bool unsr_p, bool uns0_p, bool uns1_p, bool is_ubsan,
+ tree *datap)
{
rtx res, op0, op1;
tree fn, type;
@@ -1685,7 +1687,7 @@ expand_mul_overflow (location_t loc, tre
/* Expand the ubsan builtin call. */
push_temp_slots ();
fn = ubsan_build_overflow_builtin (MULT_EXPR, loc, TREE_TYPE (arg0),
- arg0, arg1);
+ arg0, arg1, datap);
expand_normal (fn);
pop_temp_slots ();
do_pending_stack_adjust ();
@@ -1734,6 +1736,59 @@ expand_mul_overflow (location_t loc, tre
}
}
+static void
+expand_vector_ubsan_overflow (location_t loc, enum tree_code code, tree lhs,
+			      tree arg0, tree arg1)
+{
+  int cnt = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
+  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
+  tree sz = TYPE_SIZE (eltype);
+  tree data = NULL_TREE;
+
+  /* Check each element; DATA is shared by all per-element checks.  */
+  for (int i = 0; i < cnt; i++)
+    {
+      tree bitpos = bitsize_int (tree_to_uhwi (sz) * i);
+      tree op0 = fold_build3 (BIT_FIELD_REF, eltype, arg0, sz, bitpos);
+      tree op1 = fold_build3 (BIT_FIELD_REF, eltype, arg1, sz, bitpos);
+      switch (code)
+	{
+	case PLUS_EXPR:
+	  expand_addsub_overflow (loc, PLUS_EXPR, NULL_TREE, op0, op1,
+				  false, false, false, true, &data);
+	  break;
+	case MINUS_EXPR:
+	  if (integer_zerop (op0))
+	    expand_neg_overflow (loc, NULL_TREE, op1, true, &data);
+	  else
+	    expand_addsub_overflow (loc, MINUS_EXPR, NULL_TREE, op0, op1,
+				    false, false, false, true, &data);
+	  break;
+	case MULT_EXPR:
+	  expand_mul_overflow (loc, NULL_TREE, op0, op1, false, false, false,
+			       true, &data);
+	  break;
+	default:
+	  gcc_unreachable ();
+	}
+    }
+  if (lhs)
+    {
+      struct separate_ops ops;
+      ops.code = code;
+      ops.type = TREE_TYPE (arg0);
+      ops.op0 = arg0;
+      ops.op1 = arg1;
+      ops.op2 = NULL_TREE;
+      ops.location = loc;
+      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
+      rtx ret = expand_expr_real_2 (&ops, target, TYPE_MODE (TREE_TYPE (arg0)),
+				    EXPAND_NORMAL);
+      if (ret != target)
+	emit_move_insn (target, ret);
+    }
+}
+
/* Expand UBSAN_CHECK_ADD call STMT. */
static void
@@ -1743,8 +1820,11 @@ expand_UBSAN_CHECK_ADD (internal_fn, gca
tree lhs = gimple_call_lhs (stmt);
tree arg0 = gimple_call_arg (stmt, 0);
tree arg1 = gimple_call_arg (stmt, 1);
- expand_addsub_overflow (loc, PLUS_EXPR, lhs, arg0, arg1,
- false, false, false, true);
+ if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
+ expand_vector_ubsan_overflow (loc, PLUS_EXPR, lhs, arg0, arg1);
+ else
+ expand_addsub_overflow (loc, PLUS_EXPR, lhs, arg0, arg1,
+ false, false, false, true, NULL);
}
/* Expand UBSAN_CHECK_SUB call STMT. */
@@ -1756,11 +1836,13 @@ expand_UBSAN_CHECK_SUB (internal_fn, gca
tree lhs = gimple_call_lhs (stmt);
tree arg0 = gimple_call_arg (stmt, 0);
tree arg1 = gimple_call_arg (stmt, 1);
- if (integer_zerop (arg0))
- expand_neg_overflow (loc, lhs, arg1, true);
+ if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
+ expand_vector_ubsan_overflow (loc, MINUS_EXPR, lhs, arg0, arg1);
+ else if (integer_zerop (arg0))
+ expand_neg_overflow (loc, lhs, arg1, true, NULL);
else
expand_addsub_overflow (loc, MINUS_EXPR, lhs, arg0, arg1,
- false, false, false, true);
+ false, false, false, true, NULL);
}
/* Expand UBSAN_CHECK_MUL call STMT. */
@@ -1772,7 +1854,11 @@ expand_UBSAN_CHECK_MUL (internal_fn, gca
tree lhs = gimple_call_lhs (stmt);
tree arg0 = gimple_call_arg (stmt, 0);
tree arg1 = gimple_call_arg (stmt, 1);
- expand_mul_overflow (loc, lhs, arg0, arg1, false, false, false, true);
+ if (VECTOR_TYPE_P (TREE_TYPE (arg0)))
+    expand_vector_ubsan_overflow (loc, MULT_EXPR, lhs, arg0, arg1);
+ else
+ expand_mul_overflow (loc, lhs, arg0, arg1, false, false, false, true,
+ NULL);
}
/* Helper function for {ADD,SUB,MUL}_OVERFLOW call stmt expansion. */
@@ -1864,17 +1950,17 @@ expand_arith_overflow (enum tree_code co
case MINUS_EXPR:
if (integer_zerop (arg0) && !unsr_p)
{
- expand_neg_overflow (loc, lhs, arg1, false);
+ expand_neg_overflow (loc, lhs, arg1, false, NULL);
return;
}
/* FALLTHRU */
case PLUS_EXPR:
- expand_addsub_overflow (loc, code, lhs, arg0, arg1,
- unsr_p, unsr_p, unsr_p, false);
+ expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
+ unsr_p, unsr_p, false, NULL);
return;
case MULT_EXPR:
- expand_mul_overflow (loc, lhs, arg0, arg1,
- unsr_p, unsr_p, unsr_p, false);
+ expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
+ unsr_p, unsr_p, false, NULL);
return;
default:
gcc_unreachable ();
@@ -1916,10 +2002,10 @@ expand_arith_overflow (enum tree_code co
arg1 = fold_convert_loc (loc, types[uns1_p], arg1);
if (code != MULT_EXPR)
expand_addsub_overflow (loc, code, lhs, arg0, arg1, unsr_p,
- uns0_p, uns1_p, false);
+ uns0_p, uns1_p, false, NULL);
else
expand_mul_overflow (loc, lhs, arg0, arg1, unsr_p,
- uns0_p, uns1_p, false);
+ uns0_p, uns1_p, false, NULL);
return;
}
@@ -1219,14 +1219,20 @@ instrument_null (gimple_stmt_iterator gs
tree
ubsan_build_overflow_builtin (tree_code code, location_t loc, tree lhstype,
- tree op0, tree op1)
+ tree op0, tree op1, tree *datap)
{
if (flag_sanitize_undefined_trap_on_error)
return build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
- tree data = ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
- ubsan_type_descriptor (lhstype), NULL_TREE,
- NULL_TREE);
+ tree data;
+ if (datap && *datap)
+ data = *datap;
+ else
+ data = ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
+ ubsan_type_descriptor (lhstype), NULL_TREE,
+ NULL_TREE);
+ if (datap)
+ *datap = data;
enum built_in_function fn_code;
switch (code)
@@ -1272,14 +1278,15 @@ instrument_si_overflow (gimple_stmt_iter
tree_code code = gimple_assign_rhs_code (stmt);
tree lhs = gimple_assign_lhs (stmt);
tree lhstype = TREE_TYPE (lhs);
+ tree lhsinner = VECTOR_TYPE_P (lhstype) ? TREE_TYPE (lhstype) : lhstype;
tree a, b;
gimple *g;
/* If this is not a signed operation, don't instrument anything here.
Also punt on bit-fields. */
- if (!INTEGRAL_TYPE_P (lhstype)
- || TYPE_OVERFLOW_WRAPS (lhstype)
- || GET_MODE_BITSIZE (TYPE_MODE (lhstype)) != TYPE_PRECISION (lhstype))
+ if (!INTEGRAL_TYPE_P (lhsinner)
+ || TYPE_OVERFLOW_WRAPS (lhsinner)
+ || GET_MODE_BITSIZE (TYPE_MODE (lhsinner)) != TYPE_PRECISION (lhsinner))
return;
switch (code)
@@ -1316,7 +1323,7 @@ instrument_si_overflow (gimple_stmt_iter
into
_N = UBSAN_CHECK_SUB (0, u);
i = ABS_EXPR<_N>; */
- a = build_int_cst (lhstype, 0);
+ a = build_zero_cst (lhstype);
b = gimple_assign_rhs1 (stmt);
g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
a = make_ssa_name (lhstype);
@@ -10023,7 +10023,11 @@ simplify_internal_call_using_ranges (gim
tree op1 = gimple_call_arg (stmt, 1);
tree type;
if (is_ubsan)
- type = TREE_TYPE (op0);
+ {
+ type = TREE_TYPE (op0);
+ if (VECTOR_TYPE_P (type))
+ return false;
+ }
else if (gimple_call_lhs (stmt) == NULL_TREE)
return false;
else