@@ -30,6 +30,7 @@ extern void hwasan_increment_tag ();
extern rtx hwasan_with_tag (rtx, poly_int64);
extern void hwasan_tag_init ();
extern rtx hwasan_create_untagged_base (rtx);
+extern rtx hwasan_base ();
extern void hwasan_emit_prologue (rtx *, rtx *, poly_int64 *, uint8_t *, size_t);
extern rtx_insn *hwasan_emit_uncolour_frame (rtx, rtx, rtx_insn *);
extern bool hwasan_expand_check_ifn (gimple_stmt_iterator *, bool);
@@ -579,15 +579,28 @@ get_last_alloca_addr ()
static void
handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
{
- if (!iter || !asan_sanitize_allocas_p ())
+ if (!iter
+ || !(asan_sanitize_allocas_p () || memory_tagging_p ()))
return;
- tree last_alloca = get_last_alloca_addr ();
tree restored_stack = gimple_call_arg (call, 0);
- tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
- gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack);
- gsi_insert_before (iter, g, GSI_SAME_STMT);
- g = gimple_build_assign (last_alloca, restored_stack);
+
+ gimple *g;
+
+ if (memory_tagging_p ())
+ {
+ tree fn = builtin_decl_implicit (BUILT_IN_HWASAN_HANDLE_LONGJMP);
+ g = gimple_build_call (fn, 1, restored_stack);
+ }
+ else
+ {
+ tree last_alloca = get_last_alloca_addr ();
+ tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
+ g = gimple_build_call (fn, 2, last_alloca, restored_stack);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ g = gimple_build_assign (last_alloca, restored_stack);
+ }
+
gsi_insert_before (iter, g, GSI_SAME_STMT);
}
@@ -617,14 +630,12 @@ handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
static void
handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
{
- if (!iter || !asan_sanitize_allocas_p ())
+ if (!iter
+ || !(asan_sanitize_allocas_p () || memory_tagging_p ()))
return;
gassign *g;
gcall *gg;
- const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
-
- tree last_alloca = get_last_alloca_addr ();
tree callee = gimple_call_fndecl (call);
tree old_size = gimple_call_arg (call, 0);
tree ptr_type = gimple_call_lhs (call) ? TREE_TYPE (gimple_call_lhs (call))
@@ -634,6 +645,86 @@ handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
= DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
? 0 : tree_to_uhwi (gimple_call_arg (call, 1));
+ if (memory_tagging_p ())
+ {
+ /*
+ HWASAN needs a different expansion.
+
+ addr = __builtin_alloca (size, align);
+
+ should be replaced by
+
+ new_size = size rounded up to HWASAN_TAG_GRANULE_SIZE byte alignment;
+ untagged_addr = __builtin_alloca (new_size, align);
+ colour = __hwasan_choose_alloca_colour ();
+ addr = __hwasan_tag_pointer (untagged_addr, colour);
+ __hwasan_tag_memory (addr, colour, new_size);
+ */
+ /* Ensure alignment at least HWASAN_TAG_GRANULE_SIZE bytes so we start on
+ a tag granule. aarch64 already has an alignment of 16 bytes by
+ default which is the same as HWASAN_TAG_GRANULE_SIZE at the moment. */
+ align = align > HWASAN_TAG_GRANULE_SIZE ? align : HWASAN_TAG_GRANULE_SIZE;
+
+ /* new_size = (old_size + tg_mask) & ~tg_mask, i.e. round up to a granule. */
+ uint8_t tg_mask = HWASAN_TAG_GRANULE_SIZE - 1;
+ tree old_size = gimple_call_arg (call, 0);
+ tree tree_mask = build_int_cst (size_type_node, tg_mask);
+ g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
+ old_size, tree_mask);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ tree oversize = gimple_assign_lhs (g);
+
+ g = gimple_build_assign (make_ssa_name (size_type_node), BIT_NOT_EXPR,
+ tree_mask);
+ tree mask = gimple_assign_lhs (g);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+
+ g = gimple_build_assign (make_ssa_name (size_type_node), BIT_AND_EXPR,
+ oversize, mask);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ tree new_size = gimple_assign_lhs (g);
+
+ /* emit the alloca call */
+ tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
+ gg = gimple_build_call (fn, 2, new_size,
+ build_int_cst (size_type_node, align));
+ tree untagged_addr = make_ssa_name (ptr_type, gg);
+ gimple_call_set_lhs (gg, untagged_addr);
+ gsi_insert_before (iter, gg, GSI_SAME_STMT);
+
+ /* Insert code choosing the tag.
+ Here we use an internal function so we can choose the colour at expand
+ time. We want this so the decision is made after stack variables have
+ been assigned their colour (i.e. once the tag_offset variable has been
+ set to one past the last stack variable's tag). */
+
+ gg = gimple_build_call_internal (IFN_HWASAN_CHOOSE_COLOUR, 0);
+ tree colour = make_ssa_name (unsigned_char_type_node, gg);
+ gimple_call_set_lhs (gg, colour);
+ gsi_insert_before (iter, gg, GSI_SAME_STMT);
+
+ /* Insert code adding tag to pointer. */
+ fn = builtin_decl_implicit (BUILT_IN_HWASAN_TAG_PTR);
+ gg = gimple_build_call (fn, 2, untagged_addr, colour);
+ tree addr = make_ssa_name (ptr_type, gg);
+ gimple_call_set_lhs (gg, addr);
+ gsi_insert_before (iter, gg, GSI_SAME_STMT);
+
+ /* Insert code colouring shadow memory.
+ NOTE: __hwasan_tag_memory requires UNTAGGED_ADDR here, not ADDR. */
+ fn = builtin_decl_implicit (BUILT_IN_HWASAN_TAG_MEM);
+ gg = gimple_build_call (fn, 3, untagged_addr, colour, new_size);
+ gsi_insert_before (iter, gg, GSI_SAME_STMT);
+
+ /* Finally, replace the old alloca pointer with ADDR. */
+ replace_call_with_value (iter, addr);
+ return;
+ }
+
+ tree last_alloca = get_last_alloca_addr ();
+ const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
+
+
/* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
bytes of allocated space. Otherwise, align alloca to ASAN_RED_ZONE_SIZE
manually. */
@@ -786,6 +877,33 @@ get_mem_refs_of_builtin_call (gcall *call,
break;
case BUILT_IN_STRLEN:
+ /*
+ Special case strlen here because its length is taken from its return
+ value.
+
+ The approach taken by the sanitizers is to check a memory access
+ before it's taken. For ASAN strlen is intercepted by libasan, so no
+ check is inserted by the compiler.
+
+ This function still returns `true` and provides a length to the rest
+ of the ASAN pass in order to record what areas have been checked,
+ avoiding superfluous checks later on.
+
+ HWASAN does not intercept any of these internal functions.
+ This means that checks for memory accesses must be inserted by the
+ compiler.
+ strlen is a special case, because we can tell the length from the
+ return of the function, but that is not known until after the function
+ has returned.
+
+ Hence we can't check the memory access before it happens.
+ We could check the memory access after it has already happened, but
+ for now we simply ignore `strlen` calls.
+ Ignoring the call keeps the special case limited to this one
+ function rather than spreading through the rest of the pass.
+ */
+ if (memory_tagging_p ())
+ return false;
source0 = gimple_call_arg (call, 0);
len = gimple_call_lhs (call);
break;
@@ -2493,8 +2611,6 @@ maybe_instrument_assignment (gimple_stmt_iterator *iter)
static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
- if (memory_tagging_p ())
- return false;
gimple *stmt = gsi_stmt (*iter);
bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
@@ -2516,10 +2632,13 @@ maybe_instrument_call (gimple_stmt_iterator *iter)
break;
}
}
- tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
- gimple *g = gimple_build_call (decl, 0);
- gimple_set_location (g, gimple_location (stmt));
- gsi_insert_before (iter, g, GSI_SAME_STMT);
+ if (! memory_tagging_p ())
+ {
+ tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
+ gimple *g = gimple_build_call (decl, 0);
+ gimple_set_location (g, gimple_location (stmt));
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ }
}
bool instrumented = false;
@@ -2918,6 +3037,9 @@ initialize_sanitizer_builtins (void)
= build_function_type_list (void_type_node, uint64_type_node,
ptr_type_node, NULL_TREE);
+ tree BT_FN_PTR_CONST_PTR_UINT8
+ = build_function_type_list (ptr_type_node, const_ptr_type_node,
+ unsigned_char_type_node, NULL_TREE);
tree BT_FN_VOID_PTR_UINT8_SIZE
= build_function_type_list (void_type_node, ptr_type_node,
unsigned_char_type_node, size_type_node,
@@ -3749,6 +3871,14 @@ hwasan_record_base (rtx base)
}
uint8_t hwasan_current_tag () { return tag_offset; }
+rtx
+hwasan_base ()
+{
+ if (! hwasan_base_ptr)
+ hwasan_record_base (gen_reg_rtx (Pmode));
+
+ return hwasan_base_ptr;
+}
void
hwasan_increment_tag ()
@@ -493,6 +493,7 @@ DEF_FUNCTION_TYPE_2 (BT_FN_INT_FEXCEPT_T_PTR_INT, BT_INT, BT_FEXCEPT_T_PTR,
BT_INT)
DEF_FUNCTION_TYPE_2 (BT_FN_INT_CONST_FEXCEPT_T_PTR_INT, BT_INT,
BT_CONST_FEXCEPT_T_PTR, BT_INT)
+DEF_FUNCTION_TYPE_2 (BT_FN_PTR_CONST_PTR_UINT8, BT_PTR, BT_CONST_PTR, BT_UINT8)
DEF_POINTER_TYPE (BT_PTR_FN_VOID_PTR_PTR, BT_FN_VOID_PTR_PTR)
@@ -462,6 +462,42 @@ expand_HWASAN_CHECK (internal_fn, gcall *)
}
static void
+expand_HWASAN_CHOOSE_COLOUR (internal_fn, gcall *gc)
+{
+ /* TODO Use shared function somewhere so that MTE can use the same basic
+ functionality when it needs to get a tag for alloca. */
+ tree colour = gimple_call_lhs (gc);
+ rtx target = expand_expr (colour, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+ machine_mode mode = GET_MODE (target);
+ gcc_assert (mode == QImode);
+
+ rtx base_tag = expand_simple_binop (Pmode, LSHIFTRT, hwasan_base (),
+ HWASAN_SHIFT_RTX,
+ NULL_RTX, /* unsignedp = */0,
+ OPTAB_DIRECT);
+
+ gcc_assert (base_tag);
+ rtx tag_offset = const_int_rtx[MAX_SAVED_CONST_INT + hwasan_current_tag ()];
+ rtx chosen_tag = expand_simple_binop (QImode, PLUS, base_tag, tag_offset,
+ target, /* unsignedp = */1,
+ OPTAB_WIDEN);
+
+ gcc_assert (chosen_tag);
+ /* If the binop widened the result, truncate back to TARGET's QImode. */
+ if (chosen_tag != target)
+ {
+ rtx temp = chosen_tag;
+ machine_mode ret_mode = GET_MODE (chosen_tag);
+ if (ret_mode != mode)
+ temp = simplify_gen_unary (TRUNCATE, mode, chosen_tag, ret_mode);
+
+ emit_move_insn (target, temp);
+ }
+
+ hwasan_increment_tag ();
+}
+
+static void
expand_ASAN_CHECK (internal_fn, gcall *)
{
gcc_unreachable ();
@@ -288,6 +288,7 @@ DEF_INTERNAL_FN (UBSAN_PTR, ECF_LEAF | ECF_NOTHROW, ".R.")
DEF_INTERNAL_FN (UBSAN_OBJECT_SIZE, ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (ABNORMAL_DISPATCHER, ECF_NORETURN, NULL)
DEF_INTERNAL_FN (BUILTIN_EXPECT, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
+DEF_INTERNAL_FN (HWASAN_CHOOSE_COLOUR, ECF_LEAF | ECF_NOTHROW, ".")
DEF_INTERNAL_FN (HWASAN_CHECK, ECF_TM_PURE | ECF_LEAF | ECF_NOTHROW, "..R..")
DEF_INTERNAL_FN (ASAN_CHECK, ECF_TM_PURE | ECF_LEAF | ECF_NOTHROW, "..R..")
DEF_INTERNAL_FN (ASAN_MARK, ECF_LEAF | ECF_NOTHROW, NULL)
@@ -187,6 +187,10 @@ DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_LOADN, "__hwasan_loadN",
BT_FN_VOID_PTR_PTRMODE, ATTR_TMPURE_NOTHROW_LEAF_LIST)
DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_STOREN, "__hwasan_storeN",
BT_FN_VOID_PTR_PTRMODE, ATTR_TMPURE_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_HANDLE_LONGJMP, "__hwasan_handle_longjmp",
+ BT_FN_VOID_CONST_PTR, ATTR_NOTHROW_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_TAG_PTR, "__hwasan_tag_pointer",
+ BT_FN_PTR_CONST_PTR_UINT8, ATTR_TMPURE_NOTHROW_LEAF_LIST)
DEF_SANITIZER_BUILTIN(BUILT_IN_HWASAN_TAG_MEM, "__hwasan_tag_memory",
BT_FN_VOID_PTR_UINT8_SIZE, ATTR_NOTHROW_LIST)