@@ -31,10 +31,14 @@ extern rtx hwasan_with_tag (rtx, poly_int64);
extern void hwasan_tag_init ();
extern rtx hwasan_create_untagged_base (rtx);
extern rtx hwasan_extract_tag (rtx tagged_pointer);
+extern rtx hwasan_base ();
extern void hwasan_emit_prologue (rtx *, rtx *, poly_int64 *, uint8_t *, size_t);
extern rtx_insn *hwasan_emit_uncolour_frame (rtx, rtx, rtx_insn *);
+extern bool hwasan_expand_check_ifn (gimple_stmt_iterator *, bool);
+extern bool hwasan_expand_mark_ifn (gimple_stmt_iterator *);
extern bool memory_tagging_p (void);
extern bool hwasan_sanitize_stack_p (void);
+extern bool gate_hwasan (void);
extern rtx_insn *asan_emit_stack_protection (rtx, rtx, unsigned int,
HOST_WIDE_INT *, tree *, int);
extern rtx_insn *asan_emit_allocas_unpoison (rtx, rtx, rtx_insn *);
@@ -131,6 +135,13 @@ enum asan_mark_flags
#undef DEF
};
+enum hwasan_mark_flags
+{
+#define DEF(X) HWASAN_MARK_##X
+ IFN_ASAN_MARK_FLAGS
+#undef DEF
+};
+
/* Return true if STMT is ASAN_MARK with FLAG as first argument. */
extern bool asan_mark_p (gimple *stmt, enum asan_mark_flags flag);
@@ -180,6 +191,9 @@ extern hash_set<tree> *asan_handled_variables;
static inline bool
asan_intercepted_p (enum built_in_function fcode)
{
+ if (memory_tagging_p ())
+ return false;
+
return fcode == BUILT_IN_INDEX
|| fcode == BUILT_IN_MEMCHR
|| fcode == BUILT_IN_MEMCMP
@@ -208,7 +222,8 @@ asan_intercepted_p (enum built_in_function fcode)
static inline bool
asan_sanitize_use_after_scope (void)
{
- return (flag_sanitize_address_use_after_scope && asan_sanitize_stack_p ());
+ return (flag_sanitize_address_use_after_scope
+ && (asan_sanitize_stack_p () || hwasan_sanitize_stack_p ()));
}
/* Return true if DECL should be guarded on the stack. */
@@ -53,6 +53,7 @@ along with GCC; see the file COPYING3. If not see
#include "dojump.h"
#include "explow.h"
#include "expr.h"
+#include "except.h"
#include "output.h"
#include "langhooks.h"
#include "cfgloop.h"
@@ -579,15 +580,28 @@ get_last_alloca_addr ()
static void
handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
{
- if (!iter || !asan_sanitize_allocas_p ())
+ if (!iter
+ || !(asan_sanitize_allocas_p () || hwasan_sanitize_stack_p ()))
return;
- tree last_alloca = get_last_alloca_addr ();
tree restored_stack = gimple_call_arg (call, 0);
- tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
- gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack);
- gsi_insert_before (iter, g, GSI_SAME_STMT);
- g = gimple_build_assign (last_alloca, restored_stack);
+
+ gimple *g;
+
+ if (hwasan_sanitize_stack_p ())
+ {
+ tree fn = builtin_decl_implicit (BUILT_IN_HWASAN_HANDLE_LONGJMP);
+ g = gimple_build_call (fn, 1, restored_stack);
+ }
+ else
+ {
+ tree last_alloca = get_last_alloca_addr ();
+ tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
+ g = gimple_build_call (fn, 2, last_alloca, restored_stack);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ g = gimple_build_assign (last_alloca, restored_stack);
+ }
+
gsi_insert_before (iter, g, GSI_SAME_STMT);
}
@@ -617,14 +631,12 @@ handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
static void
handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
{
- if (!iter || !asan_sanitize_allocas_p ())
+ if (!iter
+ || !(asan_sanitize_allocas_p () || hwasan_sanitize_stack_p ()))
return;
gassign *g;
gcall *gg;
- const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
-
- tree last_alloca = get_last_alloca_addr ();
tree callee = gimple_call_fndecl (call);
tree old_size = gimple_call_arg (call, 0);
tree ptr_type = gimple_call_lhs (call) ? TREE_TYPE (gimple_call_lhs (call))
@@ -634,6 +646,85 @@ handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
= DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
? 0 : tree_to_uhwi (gimple_call_arg (call, 1));
+ if (hwasan_sanitize_stack_p ())
+ {
+ /*
+ HWASAN needs a different expansion.
+
+ addr = __builtin_alloca (size, align);
+
+ should be replaced by
+
+ new_size = size rounded up to HWASAN_TAG_GRANULE_SIZE byte alignment;
+ untagged_addr = __builtin_alloca (new_size, align);
+ colour = __hwasan_choose_alloca_colour ();
+ addr = __hwasan_tag_pointer (untagged_addr, colour);
+ __hwasan_tag_memory (addr, colour, new_size);
+ */
+ /* Ensure alignment at least HWASAN_TAG_GRANULE_SIZE bytes so we start on
+ a tag granule. */
+ align = align > HWASAN_TAG_GRANULE_SIZE ? align : HWASAN_TAG_GRANULE_SIZE;
+
+  /* new_size = (old_size + tg_mask) & ~tg_mask.  Note OLD_SIZE is already
+     bound above from gimple_call_arg (call, 0); do not shadow it here.  */
+  uint8_t tg_mask = HWASAN_TAG_GRANULE_SIZE - 1;
+  tree tree_mask = build_int_cst (size_type_node, tg_mask);
+  g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
+			   old_size, tree_mask);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ tree oversize = gimple_assign_lhs (g);
+
+ g = gimple_build_assign (make_ssa_name (size_type_node), BIT_NOT_EXPR,
+ tree_mask);
+ tree mask = gimple_assign_lhs (g);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+
+ g = gimple_build_assign (make_ssa_name (size_type_node), BIT_AND_EXPR,
+ oversize, mask);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ tree new_size = gimple_assign_lhs (g);
+
+ /* emit the alloca call */
+ tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
+ gg = gimple_build_call (fn, 2, new_size,
+ build_int_cst (size_type_node, align));
+ tree untagged_addr = make_ssa_name (ptr_type, gg);
+ gimple_call_set_lhs (gg, untagged_addr);
+ gsi_insert_before (iter, gg, GSI_SAME_STMT);
+
+ /* Insert code choosing the tag.
+ Here we use an internal function so we can choose the colour at expand
+ time. We need the decision to be made after stack variables have been
+ assigned their colour (i.e. once the tag_offset variable has been set
+ to one after the last stack variables tag). */
+
+ gg = gimple_build_call_internal (IFN_HWASAN_CHOOSE_COLOUR, 0);
+ tree colour = make_ssa_name (unsigned_char_type_node, gg);
+ gimple_call_set_lhs (gg, colour);
+ gsi_insert_before (iter, gg, GSI_SAME_STMT);
+
+ /* Insert code adding tag to pointer. */
+ fn = builtin_decl_implicit (BUILT_IN_HWASAN_TAG_PTR);
+ gg = gimple_build_call (fn, 2, untagged_addr, colour);
+ tree addr = make_ssa_name (ptr_type, gg);
+ gimple_call_set_lhs (gg, addr);
+ gsi_insert_before (iter, gg, GSI_SAME_STMT);
+
+ /* Insert code colouring shadow memory.
+	 NOTE: the libhwasan API requires the untagged UNTAGGED_ADDR here.  */
+ fn = builtin_decl_implicit (BUILT_IN_HWASAN_TAG_MEM);
+ gg = gimple_build_call (fn, 3, untagged_addr, colour, new_size);
+ gsi_insert_before (iter, gg, GSI_SAME_STMT);
+
+ /* Finally, replace old alloca ptr with NEW_ALLOCA. */
+ replace_call_with_value (iter, addr);
+ return;
+ }
+
+ tree last_alloca = get_last_alloca_addr ();
+ const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
+
+
/* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
bytes of allocated space. Otherwise, align alloca to ASAN_RED_ZONE_SIZE
manually. */
@@ -786,6 +877,31 @@ get_mem_refs_of_builtin_call (gcall *call,
break;
case BUILT_IN_STRLEN:
+ /* Special case strlen here since its length is taken from its return
+ value.
+
+ The approach taken by the sanitizers is to check a memory access
+ before it's taken. For ASAN strlen is intercepted by libasan, so no
+ check is inserted by the compiler.
+
+ This function still returns `true` and provides a length to the rest
+ of the ASAN pass in order to record what areas have been checked,
+ avoiding superfluous checks later on.
+
+ HWASAN does not intercept any of these internal functions.
+ This means that checks for memory accesses must be inserted by the
+ compiler.
+ strlen is a special case, because we can tell the length from the
+ return of the function, but that is not known until after the function
+ has returned.
+
+ Hence we can't check the memory access before it happens.
+ We could check the memory access after it has already happened, but
+ for now I'm choosing to just ignore `strlen` calls.
+ This decision was simply made because that means the special case is
+ limited to this one case of this one function. */
+ if (memory_tagging_p ())
+ return false;
source0 = gimple_call_arg (call, 0);
len = gimple_call_lhs (call);
break;
@@ -1848,6 +1964,8 @@ static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
int *nargs)
{
+ gcc_assert (!memory_tagging_p ());
+
static enum built_in_function report[2][2][6]
= { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
@@ -2184,7 +2302,13 @@ build_check_stmt (location_t loc, tree base, tree len,
if (is_scalar_access)
flags |= ASAN_CHECK_SCALAR_ACCESS;
- g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
+ enum internal_fn fn;
+ if (memory_tagging_p ())
+ fn = IFN_HWASAN_CHECK;
+ else
+ fn = IFN_ASAN_CHECK;
+
+ g = gimple_build_call_internal (fn, 4,
build_int_cst (integer_type_node, flags),
base, len,
build_int_cst (integer_type_node,
@@ -2208,10 +2332,13 @@ static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
location_t location, bool is_store)
{
- if (is_store && !ASAN_INSTRUMENT_WRITES)
- return;
- if (!is_store && !ASAN_INSTRUMENT_READS)
- return;
+ if (! memory_tagging_p ())
+ {
+ if (is_store && !ASAN_INSTRUMENT_WRITES)
+ return;
+ if (!is_store && !ASAN_INSTRUMENT_READS)
+ return;
+ }
tree type, base;
HOST_WIDE_INT size_in_bytes;
@@ -2271,7 +2398,8 @@ instrument_derefs (gimple_stmt_iterator *iter, tree t,
{
if (DECL_THREAD_LOCAL_P (inner))
return;
- if (!ASAN_GLOBALS && is_global_var (inner))
+ if ((memory_tagging_p () || !ASAN_GLOBALS)
+ && is_global_var (inner))
return;
if (!TREE_STATIC (inner))
{
@@ -2500,10 +2628,13 @@ maybe_instrument_call (gimple_stmt_iterator *iter)
break;
}
}
- tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
- gimple *g = gimple_build_call (decl, 0);
- gimple_set_location (g, gimple_location (stmt));
- gsi_insert_before (iter, g, GSI_SAME_STMT);
+ if (! memory_tagging_p ())
+ {
+ tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
+ gimple *g = gimple_build_call (decl, 0);
+ gimple_set_location (g, gimple_location (stmt));
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ }
}
bool instrumented = false;
@@ -2902,6 +3033,9 @@ initialize_sanitizer_builtins (void)
= build_function_type_list (void_type_node, uint64_type_node,
ptr_type_node, NULL_TREE);
+ tree BT_FN_PTR_CONST_PTR_UINT8
+ = build_function_type_list (ptr_type_node, const_ptr_type_node,
+ unsigned_char_type_node, NULL_TREE);
tree BT_FN_VOID_PTR_UINT8_SIZE
= build_function_type_list (void_type_node, ptr_type_node,
unsigned_char_type_node, size_type_node,
@@ -3237,6 +3371,23 @@ asan_expand_mark_ifn (gimple_stmt_iterator *iter)
unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
gcc_assert (size_in_bytes);
+ if (memory_tagging_p ())
+ {
+ gcc_assert (HWASAN_STACK);
+ /* Here we swap ASAN_MARK calls for HWASAN_MARK.
+ This is because we are using the approach of using ASAN_MARK as a
+ synonym until here.
+ That approach means we don't yet have to duplicate all the special
+ cases for ASAN_MARK and ASAN_POISON with the exact same handling but
+ called HWASAN_MARK etc. */
+ gimple *hw_poison_call
+ = gimple_build_call_internal (IFN_HWASAN_MARK, 3,
+ gimple_call_arg (g, 0),
+ base, len);
+ gsi_replace (iter, hw_poison_call, false);
+ return false;
+ }
+
g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
NOP_EXPR, base);
gimple_set_location (g, loc);
@@ -3298,6 +3449,7 @@ asan_expand_mark_ifn (gimple_stmt_iterator *iter)
bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
+ gcc_assert (!memory_tagging_p ());
gimple *g = gsi_stmt (*iter);
location_t loc = gimple_location (g);
bool recover_p;
@@ -3571,11 +3723,37 @@ asan_expand_poison_ifn (gimple_stmt_iterator *iter,
int nargs;
bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
- tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
- &nargs);
-
- gcall *call = gimple_build_call (fun, 1,
- build_fold_addr_expr (shadow_var));
+  gcall *call;
+  if (memory_tagging_p ())
+    {
+      tree fun = builtin_decl_implicit (BUILT_IN_HWASAN_TAG_MISMATCH4);
+      /* NOTE: hwasan has no __hwasan_report_* functions like asan does.
+	 Instead call __hwasan_tag_mismatch4 (which takes exactly four
+	 arguments) with an encoding of the access size and direction.  */
+      unsigned HOST_WIDE_INT size_in_bytes = tree_to_uhwi (size);
+      unsigned size_indicator
+	= (size_in_bytes > 16 || !pow2p_hwi (size_in_bytes))
+	? 0xf : (unsigned) exact_log2 (size_in_bytes);
+      unsigned access_info = (0x20 * recover_p)
+	+ (0x10 * store_p)
+	+ (size_indicator);
+      tree long_pointer_type
+	= build_pointer_type (long_long_unsigned_type_node);
+      call = gimple_build_call (fun, 4,
+				build_fold_addr_expr (shadow_var),
+				build_int_cst (long_long_unsigned_type_node,
+					       access_info),
+				build_int_cst (long_pointer_type,
+					       0),
+				size);
+    }
+  else
+    {
+      tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
+				    &nargs);
+      call = gimple_build_call (fun, 1,
+				build_fold_addr_expr (shadow_var));
+    }
gimple_set_location (call, gimple_location (use));
gimple *call_to_insert = call;
@@ -3709,6 +3887,16 @@ make_pass_asan_O0 (gcc::context *ctxt)
return new pass_asan_O0 (ctxt);
}
+/* HWASAN */
+static unsigned int
+hwasan_instrument (void)
+{
+ transform_statements ();
+ last_alloca_addr = NULL_TREE;
+
+ return 0;
+}
+
void
hwasan_record_base (rtx base)
{
@@ -3727,6 +3915,15 @@ hwasan_current_tag ()
return tag_offset;
}
+rtx
+hwasan_base ()
+{
+ if (! hwasan_base_ptr)
+ hwasan_record_base (gen_reg_rtx (Pmode));
+
+ return hwasan_base_ptr;
+}
+
void
hwasan_increment_tag ()
{
@@ -3858,6 +4055,12 @@ hwasan_emit_uncolour_frame (rtx dynamic, rtx vars, rtx_insn *before)
do_pending_stack_adjust ();
rtx_insn *insns = get_insns ();
end_sequence ();
+
+ /* Clear the hash_map recording which variables are handled by HWASAN_MARK.
+ The only use in HWASAN is to decide which variables need to be coloured in
+ the prologue and which don't. */
+ delete asan_handled_variables;
+ asan_handled_variables = NULL;
return insns;
}
@@ -3894,4 +4097,214 @@ hwasan_finish_file (void)
flag_sanitize |= SANITIZE_HWADDRESS;
}
+/* Construct a function tree for __hwasan_{load,store}{1,2,4,8,16,_n}.
+ IS_STORE is either 1 (for a store) or 0 (for a load). */
+
+static tree
+hwasan_check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
+ int *nargs)
+{
+ static enum built_in_function check[2][2][6]
+ = { { { BUILT_IN_HWASAN_LOAD1, BUILT_IN_HWASAN_LOAD2,
+ BUILT_IN_HWASAN_LOAD4, BUILT_IN_HWASAN_LOAD8,
+ BUILT_IN_HWASAN_LOAD16, BUILT_IN_HWASAN_LOADN },
+ { BUILT_IN_HWASAN_STORE1, BUILT_IN_HWASAN_STORE2,
+ BUILT_IN_HWASAN_STORE4, BUILT_IN_HWASAN_STORE8,
+ BUILT_IN_HWASAN_STORE16, BUILT_IN_HWASAN_STOREN } },
+ { { BUILT_IN_HWASAN_LOAD1_NOABORT,
+ BUILT_IN_HWASAN_LOAD2_NOABORT,
+ BUILT_IN_HWASAN_LOAD4_NOABORT,
+ BUILT_IN_HWASAN_LOAD8_NOABORT,
+ BUILT_IN_HWASAN_LOAD16_NOABORT,
+ BUILT_IN_HWASAN_LOADN_NOABORT },
+ { BUILT_IN_HWASAN_STORE1_NOABORT,
+ BUILT_IN_HWASAN_STORE2_NOABORT,
+ BUILT_IN_HWASAN_STORE4_NOABORT,
+ BUILT_IN_HWASAN_STORE8_NOABORT,
+ BUILT_IN_HWASAN_STORE16_NOABORT,
+ BUILT_IN_HWASAN_STOREN_NOABORT } } };
+ if (size_in_bytes == -1)
+ {
+ *nargs = 2;
+ return builtin_decl_implicit (check[recover_p][is_store][5]);
+ }
+ *nargs = 1;
+ int size_log2 = exact_log2 (size_in_bytes);
+ return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
+}
+
+
+bool
+hwasan_expand_check_ifn (gimple_stmt_iterator *iter, bool)
+{
+ gimple *g = gsi_stmt (*iter);
+ location_t loc = gimple_location (g);
+ bool recover_p;
+ if (flag_sanitize & SANITIZE_USER_HWADDRESS)
+ recover_p = (flag_sanitize_recover & SANITIZE_USER_HWADDRESS) != 0;
+ else
+ recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_HWADDRESS) != 0;
+
+ HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
+ gcc_assert (flags < ASAN_CHECK_LAST);
+ bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
+ bool is_store = (flags & ASAN_CHECK_STORE) != 0;
+ bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
+
+ tree base = gimple_call_arg (g, 1);
+ tree len = gimple_call_arg (g, 2);
+
+ /* `align` is unused for HWASAN_CHECK, but I pass the argument anyway
+ since that way the arguments match ASAN_CHECK. */
+ /* HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3)); */
+
+ unsigned HOST_WIDE_INT size_in_bytes
+ = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
+
+ gimple_stmt_iterator gsi = *iter;
+
+ if (!is_non_zero_len)
+ {
+ /* So, the length of the memory area to hwasan-protect is
+ non-constant. Let's guard the generated instrumentation code
+ like:
+
+ if (len != 0)
+ {
+ // hwasan instrumentation code goes here.
+ }
+	 // fallthrough instructions, starting with *ITER.  */
+
+ g = gimple_build_cond (NE_EXPR,
+ len,
+ build_int_cst (TREE_TYPE (len), 0),
+ NULL_TREE, NULL_TREE);
+ gimple_set_location (g, loc);
+
+ basic_block then_bb, fallthrough_bb;
+ insert_if_then_before_iter (as_a <gcond *> (g), iter,
+ /*then_more_likely_p=*/true,
+ &then_bb, &fallthrough_bb);
+ /* Note that fallthrough_bb starts with the statement that was
+ pointed to by ITER. */
+
+      /* The 'then block' of the 'if (len != 0)' condition is where
+ we'll generate the hwasan instrumentation code now. */
+ gsi = gsi_last_bb (then_bb);
+ }
+
+ g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ NOP_EXPR, base);
+ gimple_set_location (g, loc);
+ gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+ tree base_addr = gimple_assign_lhs (g);
+
+ int nargs = 0;
+ tree fun = hwasan_check_func (is_store, recover_p, size_in_bytes, &nargs);
+ if (nargs == 1)
+ g = gimple_build_call (fun, 1, base_addr);
+ else
+ {
+ g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ NOP_EXPR, len);
+ gimple_set_location (g, loc);
+ gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+ tree sz_arg = gimple_assign_lhs (g);
+ g = gimple_build_call (fun, nargs, base_addr, sz_arg);
+ }
+
+ gimple_set_location (g, loc);
+ gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+ gsi_remove (iter, true);
+ *iter = gsi;
+ return false;
+}
+
+bool
+hwasan_expand_mark_ifn (gimple_stmt_iterator *)
+{
+ /* HWASAN_MARK should only ever be available after the sanopt pass. */
+ gcc_unreachable ();
+}
+
+bool
+gate_hwasan ()
+{
+ return memory_tagging_p ();
+}
+
+namespace {
+
+const pass_data pass_data_hwasan =
+{
+ GIMPLE_PASS, /* type */
+ "hwasan", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ TV_NONE, /* tv_id */
+ ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_update_ssa, /* todo_flags_finish */
+};
+
+class pass_hwasan : public gimple_opt_pass
+{
+public:
+ pass_hwasan (gcc::context *ctxt)
+ : gimple_opt_pass (pass_data_hwasan, ctxt)
+ {}
+
+ /* opt_pass methods: */
+ opt_pass * clone () { return new pass_hwasan (m_ctxt); }
+ virtual bool gate (function *) { return gate_hwasan (); }
+ virtual unsigned int execute (function *) { return hwasan_instrument (); }
+
+}; /* class pass_hwasan */
+
+} /* anon namespace */
+
+gimple_opt_pass *
+make_pass_hwasan (gcc::context *ctxt)
+{
+ return new pass_hwasan (ctxt);
+}
+
+namespace {
+
+const pass_data pass_data_hwasan_O0 =
+{
+ GIMPLE_PASS, /* type */
+ "hwasan_O0", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ TV_NONE, /* tv_id */
+ ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_update_ssa, /* todo_flags_finish */
+};
+
+class pass_hwasan_O0 : public gimple_opt_pass
+{
+public:
+ pass_hwasan_O0 (gcc::context *ctxt)
+ : gimple_opt_pass (pass_data_hwasan_O0, ctxt)
+ {}
+
+ /* opt_pass methods: */
+ opt_pass * clone () { return new pass_hwasan_O0 (m_ctxt); }
+ virtual bool gate (function *) { return !optimize && gate_hwasan (); }
+ virtual unsigned int execute (function *) { return hwasan_instrument (); }
+
+}; // class pass_hwasan_O0
+
+} // anon namespace
+
+gimple_opt_pass *
+make_pass_hwasan_O0 (gcc::context *ctxt)
+{
+ return new pass_hwasan_O0 (ctxt);
+}
+
#include "gt-asan.h"
@@ -493,6 +493,7 @@ DEF_FUNCTION_TYPE_2 (BT_FN_INT_FEXCEPT_T_PTR_INT, BT_INT, BT_FEXCEPT_T_PTR,
BT_INT)
DEF_FUNCTION_TYPE_2 (BT_FN_INT_CONST_FEXCEPT_T_PTR_INT, BT_INT,
BT_CONST_FEXCEPT_T_PTR, BT_INT)
+DEF_FUNCTION_TYPE_2 (BT_FN_PTR_CONST_PTR_UINT8, BT_PTR, BT_CONST_PTR, BT_UINT8)
DEF_POINTER_TYPE (BT_PTR_FN_VOID_PTR_PTR, BT_FN_VOID_PTR_PTR)
@@ -750,6 +750,7 @@ dump_gimple_call_args (pretty_printer *buffer, gcall *gs, dump_flags_t flags)
limit = ARRAY_SIZE (reduction_args);
break;
+ case IFN_HWASAN_MARK:
case IFN_ASAN_MARK:
#define DEF(X) #X
static const char *const asan_mark_args[] = {IFN_ASAN_MARK_FLAGS};
@@ -1230,8 +1230,11 @@ asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
/* It's necessary to have all stack variables aligned to ASAN granularity
bytes. */
- if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
- SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
+ gcc_assert (!memory_tagging_p () || hwasan_sanitize_stack_p ());
+ unsigned shadow_granularity
+ = memory_tagging_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
+ if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
+ SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);
HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
@@ -14520,7 +14523,7 @@ gimplify_function_tree (tree fndecl)
&& !needs_to_live_in_memory (ret))
DECL_GIMPLE_REG_P (ret) = 1;
- if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
+ if (asan_sanitize_use_after_scope ())
asan_poisoned_variables = new hash_set<tree> ();
bind = gimplify_body (fndecl, true);
if (asan_poisoned_variables)
@@ -457,6 +457,74 @@ expand_UBSAN_OBJECT_SIZE (internal_fn, gcall *)
/* This should get expanded in the sanopt pass. */
static void
+expand_HWASAN_CHECK (internal_fn, gcall *)
+{
+ gcc_unreachable ();
+}
+
+static void
+expand_HWASAN_CHOOSE_COLOUR (internal_fn, gcall *gc)
+{
+ tree colour = gimple_call_lhs (gc);
+ rtx target = expand_expr (colour, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ machine_mode mode = GET_MODE (target);
+ gcc_assert (mode == QImode);
+
+ rtx base_tag = hwasan_extract_tag (hwasan_base ());
+ gcc_assert (base_tag);
+ rtx tag_offset = GEN_INT (hwasan_current_tag ());
+ rtx chosen_tag = expand_simple_binop (QImode, PLUS, base_tag, tag_offset,
+ target, /* unsignedp = */1,
+ OPTAB_WIDEN);
+
+ gcc_assert (chosen_tag);
+ /* Really need to put the tag into the `target` RTX. */
+ if (chosen_tag != target)
+ {
+ rtx temp = chosen_tag;
+ machine_mode ret_mode = GET_MODE (chosen_tag);
+ if (ret_mode != mode)
+ temp = simplify_gen_unary (TRUNCATE, mode, chosen_tag, ret_mode);
+
+ emit_move_insn (target, temp);
+ }
+
+ hwasan_increment_tag ();
+}
+
+static void
+expand_HWASAN_MARK (internal_fn, gcall *gc)
+{
+ HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (gc, 0));
+ bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
+
+ tree base = gimple_call_arg (gc, 1);
+ gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
+ rtx base_rtx = expand_normal (base);
+
+ rtx tag = is_poison ? const0_rtx : hwasan_extract_tag (base_rtx);
+ rtx address = hwasan_create_untagged_base (base_rtx);
+
+ tree len = gimple_call_arg (gc, 2);
+ gcc_assert (tree_fits_shwi_p (len));
+ unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
+ uint8_t tg_mask = HWASAN_TAG_GRANULE_SIZE - 1;
+ gcc_assert (size_in_bytes);
+ size_in_bytes = (size_in_bytes + tg_mask) & ~tg_mask;
+ rtx size = gen_int_mode (size_in_bytes, Pmode);
+
+ rtx func = init_one_libfunc ("__hwasan_tag_memory");
+ emit_library_call (func,
+ LCT_NORMAL,
+ VOIDmode,
+ address, ptr_mode,
+ tag, QImode,
+ size, ptr_mode);
+}
+
+/* This should get expanded in the sanopt pass. */
+
+static void
expand_ASAN_CHECK (internal_fn, gcall *)
{
gcc_unreachable ();
@@ -301,6 +301,9 @@ DEF_INTERNAL_FN (UBSAN_PTR, ECF_LEAF | ECF_NOTHROW, ".R.")
DEF_INTERNAL_FN (UBSAN_OBJECT_SIZE, ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (ABNORMAL_DISPATCHER, ECF_NORETURN, NULL)
DEF_INTERNAL_FN (BUILTIN_EXPECT, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
+DEF_INTERNAL_FN (HWASAN_CHOOSE_COLOUR, ECF_LEAF | ECF_NOTHROW, ".")
+DEF_INTERNAL_FN (HWASAN_CHECK, ECF_TM_PURE | ECF_LEAF | ECF_NOTHROW, "..R..")
+DEF_INTERNAL_FN (HWASAN_MARK, ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (ASAN_CHECK, ECF_TM_PURE | ECF_LEAF | ECF_NOTHROW, "..R..")
DEF_INTERNAL_FN (ASAN_MARK, ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (ASAN_POISON, ECF_LEAF | ECF_NOTHROW | ECF_NOVOPS, NULL)
@@ -246,6 +246,7 @@ along with GCC; see the file COPYING3. If not see
NEXT_PASS (pass_sink_code);
NEXT_PASS (pass_sancov);
NEXT_PASS (pass_asan);
+ NEXT_PASS (pass_hwasan);
NEXT_PASS (pass_tsan);
NEXT_PASS (pass_dce);
/* Pass group that runs when 1) enabled, 2) there are loops
@@ -364,6 +365,7 @@ along with GCC; see the file COPYING3. If not see
NEXT_PASS (pass_dce);
NEXT_PASS (pass_sancov);
NEXT_PASS (pass_asan);
+ NEXT_PASS (pass_hwasan);
NEXT_PASS (pass_tsan);
/* ??? We do want some kind of loop invariant motion, but we possibly
need to adjust LIM to be more friendly towards preserving accurate
@@ -389,6 +391,7 @@ along with GCC; see the file COPYING3. If not see
NEXT_PASS (pass_sancov_O0);
NEXT_PASS (pass_lower_switch_O0);
NEXT_PASS (pass_asan_O0);
+ NEXT_PASS (pass_hwasan_O0);
NEXT_PASS (pass_tsan_O0);
NEXT_PASS (pass_sanopt);
NEXT_PASS (pass_cleanup_eh);
@@ -183,6 +183,60 @@ DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_POINTER_SUBTRACT, "__sanitizer_ptr_sub",
/* Hardware Address Sanitizer. */
DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_INIT, "__hwasan_init",
BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOAD1, "__hwasan_load1",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOAD2, "__hwasan_load2",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOAD4, "__hwasan_load4",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOAD8, "__hwasan_load8",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOAD16, "__hwasan_load16",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOADN, "__hwasan_loadN",
+ BT_FN_VOID_PTR_PTRMODE, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STORE1, "__hwasan_store1",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STORE2, "__hwasan_store2",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STORE4, "__hwasan_store4",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STORE8, "__hwasan_store8",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STORE16, "__hwasan_store16",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STOREN, "__hwasan_storeN",
+ BT_FN_VOID_PTR_PTRMODE, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOAD1_NOABORT, "__hwasan_load1_noabort",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOAD2_NOABORT, "__hwasan_load2_noabort",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOAD4_NOABORT, "__hwasan_load4_noabort",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOAD8_NOABORT, "__hwasan_load8_noabort",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOAD16_NOABORT, "__hwasan_load16_noabort",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOADN_NOABORT, "__hwasan_loadN_noabort",
+ BT_FN_VOID_PTR_PTRMODE, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STORE1_NOABORT, "__hwasan_store1_noabort",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STORE2_NOABORT, "__hwasan_store2_noabort",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STORE4_NOABORT, "__hwasan_store4_noabort",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STORE8_NOABORT, "__hwasan_store8_noabort",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STORE16_NOABORT, "__hwasan_store16_noabort",
+ BT_FN_VOID_PTR, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STOREN_NOABORT, "__hwasan_storeN_noabort",
+ BT_FN_VOID_PTR_PTRMODE, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_TAG_MISMATCH4, "__hwasan_tag_mismatch4",
+ BT_FN_VOID_PTR, ATTR_NOTHROW_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_HANDLE_LONGJMP, "__hwasan_handle_longjmp",
+ BT_FN_VOID_CONST_PTR, ATTR_NOTHROW_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_TAG_PTR, "__hwasan_tag_pointer",
+ BT_FN_PTR_CONST_PTR_UINT8, ATTR_TMPURE_NOTHROW_LEAF_LIST)
DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_TAG_MEM, "__hwasan_tag_memory",
BT_FN_VOID_PTR_UINT8_SIZE, ATTR_NOTHROW_LIST)
@@ -773,7 +773,8 @@ sanopt_optimize_walker (basic_block bb, class sanopt_ctx *ctx)
basic_block son;
gimple_stmt_iterator gsi;
sanopt_info *info = (sanopt_info *) bb->aux;
- bool asan_check_optimize = (flag_sanitize & SANITIZE_ADDRESS) != 0;
+ bool asan_check_optimize
+ = ((flag_sanitize & SANITIZE_ADDRESS) != 0) || memory_tagging_p ();
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
@@ -803,6 +804,7 @@ sanopt_optimize_walker (basic_block bb, class sanopt_ctx *ctx)
if (asan_check_optimize
&& gimple_call_builtin_p (stmt, BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT))
{
+ gcc_assert (!memory_tagging_p ());
use_operand_p use;
gimple *use_stmt;
if (single_imm_use (gimple_vdef (stmt), &use, &use_stmt))
@@ -831,6 +833,7 @@ sanopt_optimize_walker (basic_block bb, class sanopt_ctx *ctx)
case IFN_UBSAN_PTR:
remove = maybe_optimize_ubsan_ptr_ifn (ctx, stmt);
break;
+ case IFN_HWASAN_CHECK:
case IFN_ASAN_CHECK:
if (asan_check_optimize)
remove = maybe_optimize_asan_check_ifn (ctx, stmt);
@@ -1256,6 +1259,10 @@ sanitize_rewrite_addressable_params (function *fun)
unsigned int
pass_sanopt::execute (function *fun)
{
+ /* n.b. ASAN_MARK is used for both HWASAN and ASAN.
+ asan_num_accesses is hence used to count either HWASAN_CHECK or ASAN_CHECK
+ stuff. This is fine because you can only have one of these active at a
+ time. */
basic_block bb;
int asan_num_accesses = 0;
bool contains_asan_mark = false;
@@ -1263,10 +1270,10 @@ pass_sanopt::execute (function *fun)
/* Try to remove redundant checks. */
if (optimize
&& (flag_sanitize
- & (SANITIZE_NULL | SANITIZE_ALIGNMENT
+ & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_HWADDRESS
| SANITIZE_ADDRESS | SANITIZE_VPTR | SANITIZE_POINTER_OVERFLOW)))
asan_num_accesses = sanopt_optimize (fun, &contains_asan_mark);
- else if (flag_sanitize & SANITIZE_ADDRESS)
+ else if (flag_sanitize & (SANITIZE_ADDRESS | SANITIZE_HWADDRESS))
{
gimple_stmt_iterator gsi;
FOR_EACH_BB_FN (bb, fun)
@@ -1286,7 +1293,7 @@ pass_sanopt::execute (function *fun)
sanitize_asan_mark_poison ();
}
- if (asan_sanitize_stack_p ())
+ if (asan_sanitize_stack_p () || hwasan_sanitize_stack_p ())
sanitize_rewrite_addressable_params (fun);
bool use_calls = ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD < INT_MAX
@@ -1328,6 +1335,9 @@ pass_sanopt::execute (function *fun)
case IFN_UBSAN_VPTR:
no_next = ubsan_expand_vptr_ifn (&gsi);
break;
+ case IFN_HWASAN_CHECK:
+ no_next = hwasan_expand_check_ifn (&gsi, use_calls);
+ break;
case IFN_ASAN_CHECK:
no_next = asan_expand_check_ifn (&gsi, use_calls);
break;
@@ -1339,6 +1349,9 @@ pass_sanopt::execute (function *fun)
&need_commit_edge_insert,
shadow_vars_mapping);
break;
+ case IFN_HWASAN_MARK:
+ no_next = hwasan_expand_mark_ifn (&gsi);
+ break;
default:
break;
}
@@ -508,7 +508,7 @@ compile_file (void)
if (flag_sanitize & SANITIZE_THREAD)
tsan_finish_file ();
- if (flag_sanitize & SANITIZE_HWADDRESS)
+ if (gate_hwasan ())
hwasan_finish_file ();
omp_finish_file ();
@@ -341,6 +341,8 @@ extern void register_pass (opt_pass* pass, pass_positioning_ops pos,
extern gimple_opt_pass *make_pass_asan (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_asan_O0 (gcc::context *ctxt);
+extern gimple_opt_pass *make_pass_hwasan (gcc::context *ctxt);
+extern gimple_opt_pass *make_pass_hwasan_O0 (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_tsan (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_tsan_O0 (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_sancov (gcc::context *ctxt);