gcc/ChangeLog:
2015-02-25 Marat Zakirov <m.zakirov@samsung.com>
* asan.c (asan_emit_stack_protection): Support misaligned accesses.
(asan_expand_check_ifn): Likewise.
* params.def: New option asan-catch-misaligned.
* params.h: New param ASAN_CATCH_MISALIGNED.
* doc/invoke.texi: New asan param description.
gcc/testsuite/ChangeLog:
2015-02-25 Marat Zakirov <m.zakirov@samsung.com>
* c-c++-common/asan/misalign-catch.c: New test.
@@ -1050,7 +1050,6 @@ asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
rtx_code_label *lab;
rtx_insn *insns;
char buf[30];
- unsigned char shadow_bytes[4];
HOST_WIDE_INT base_offset = offsets[length - 1];
HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
@@ -1059,6 +1058,8 @@ asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
tree str_cst, decl, id;
int use_after_return_class = -1;
+ bool misalign = (flag_sanitize & SANITIZE_KERNEL_ADDRESS)
+ || ASAN_CATCH_MISALIGNED;
if (shadow_ptr_types[0] == NULL_TREE)
asan_init_shadow_ptr_types ();
@@ -1193,11 +1194,37 @@ asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
if (STRICT_ALIGNMENT)
set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
prev_offset = base_offset;
+
+ vec<rtx> shadow_mems;
+ vec<unsigned char> shadow_bytes;
+
+ shadow_mems.create (0);
+ shadow_bytes.create (0);
+
for (l = length; l; l -= 2)
{
if (l == 2)
cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
offset = offsets[l - 1];
+ if (l != length && misalign)
+ {
+ HOST_WIDE_INT aoff
+ = base_offset + ((offset - base_offset)
+ & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
+ - ASAN_RED_ZONE_SIZE;
+ if (aoff > prev_offset)
+ {
+ shadow_mem = adjust_address (shadow_mem, VOIDmode,
+ (aoff - prev_offset)
+ >> ASAN_SHADOW_SHIFT);
+ prev_offset = aoff;
+ shadow_bytes.safe_push (0);
+ shadow_bytes.safe_push (0);
+ shadow_bytes.safe_push (0);
+ shadow_bytes.safe_push (0);
+ shadow_mems.safe_push (shadow_mem);
+ }
+ }
if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
{
int i;
@@ -1212,13 +1239,13 @@ asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
if (aoff < offset)
{
if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
- shadow_bytes[i] = 0;
+ shadow_bytes.safe_push (0);
else
- shadow_bytes[i] = offset - aoff;
+ shadow_bytes.safe_push (offset - aoff);
}
else
- shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
- emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
+ shadow_bytes.safe_push (ASAN_STACK_MAGIC_PARTIAL);
+ shadow_mems.safe_push (shadow_mem);
offset = aoff;
}
while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
@@ -1227,12 +1254,21 @@ asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
(offset - prev_offset)
>> ASAN_SHADOW_SHIFT);
prev_offset = offset;
- memset (shadow_bytes, cur_shadow_byte, 4);
- emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
+ shadow_bytes.safe_push (cur_shadow_byte);
+ shadow_bytes.safe_push (cur_shadow_byte);
+ shadow_bytes.safe_push (cur_shadow_byte);
+ shadow_bytes.safe_push (cur_shadow_byte);
+ shadow_mems.safe_push (shadow_mem);
offset += ASAN_RED_ZONE_SIZE;
}
cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
}
+ for (unsigned i = 0; misalign && i < shadow_bytes.length () - 1; i++)
+ if (shadow_bytes[i] == 0 && shadow_bytes[i + 1] > 0)
+ shadow_bytes[i] = 8 + (shadow_bytes[i + 1] > 7 ? 0 : shadow_bytes[i + 1]);
+ for (unsigned i = 0; i < shadow_mems.length (); i++)
+ emit_move_insn (shadow_mems[i], asan_shadow_cst (&shadow_bytes[i * 4]));
+
do_pending_stack_adjust ();
/* Construct epilogue sequence. */
@@ -1285,33 +1321,15 @@ asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
if (STRICT_ALIGNMENT)
set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
- prev_offset = base_offset;
- last_offset = base_offset;
- last_size = 0;
- for (l = length; l; l -= 2)
+ for (unsigned i = 0; i < shadow_mems.length (); i++)
{
- offset = base_offset + ((offsets[l - 1] - base_offset)
- & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
- if (last_offset + last_size != offset)
+ if (shadow_bytes[i*4+3] == ASAN_STACK_MAGIC_PARTIAL)
{
- shadow_mem = adjust_address (shadow_mem, VOIDmode,
- (last_offset - prev_offset)
- >> ASAN_SHADOW_SHIFT);
- prev_offset = last_offset;
- asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
- last_offset = offset;
- last_size = 0;
+ asan_clear_shadow (shadow_mems[i], 8);
+ i++;
}
- last_size += base_offset + ((offsets[l - 2] - base_offset)
- & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
- - offset;
- }
- if (last_size)
- {
- shadow_mem = adjust_address (shadow_mem, VOIDmode,
- (last_offset - prev_offset)
- >> ASAN_SHADOW_SHIFT);
- asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
+ else
+ asan_clear_shadow (shadow_mems[i], 4);
}
do_pending_stack_adjust ();
@@ -2546,6 +2564,8 @@ asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
gimple g = gsi_stmt (*iter);
location_t loc = gimple_location (g);
+ bool misalign = (flag_sanitize & SANITIZE_KERNEL_ADDRESS)
+ || ASAN_CATCH_MISALIGNED;
bool recover_p
= (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
@@ -2643,7 +2663,7 @@ asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
tree base_addr = gimple_assign_lhs (g);
tree t = NULL_TREE;
- if (real_size_in_bytes >= 8)
+ if (real_size_in_bytes >= 8 && !misalign)
{
tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
shadow_ptr_type);
@@ -2662,7 +2682,7 @@ asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
/* Aligned (>= 8 bytes) can test just
(real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
to be 0. */
- if (align < 8)
+ if (align < 8 || misalign)
{
gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
base_addr, 7));
@@ -10965,6 +10965,12 @@ is greater or equal to this number, use callbacks instead of inline checks.
E.g. to disable inline code use
@option{--param asan-instrumentation-with-call-threshold=0}.
+@item asan-catch-misaligned
+Catch invalid unaligned memory accesses.
+This option is needed to prevent potential ASan false positives caused by
+memory accesses that are not aligned to the type size in some applications
+such as the Linux kernel.  It can be disabled with @option{--param asan-catch-misaligned=0}.
+
@item chkp-max-ctor-size
Static constructors generated by Pointer Bounds Checker may become very
large and significantly increase compile time at optimization level
@@ -1151,6 +1151,11 @@ DEFPARAM (PARAM_ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD,
"in function becomes greater or equal to this number",
7000, 0, INT_MAX)
+DEFPARAM (PARAM_ASAN_CATCH_MISALIGNED,
+ "asan-catch-misaligned",
+ "catch unaligned access",
+ 0, 1, 1)
+
DEFPARAM (PARAM_UNINIT_CONTROL_DEP_ATTEMPTS,
"uninit-control-dep-attempts",
"Maximum number of nested calls to search for control dependencies "
@@ -240,5 +240,7 @@ extern void init_param_values (int *params);
PARAM_VALUE (PARAM_ASAN_USE_AFTER_RETURN)
#define ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD \
PARAM_VALUE (PARAM_ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD)
+#define ASAN_CATCH_MISALIGNED \
+ PARAM_VALUE (PARAM_ASAN_CATCH_MISALIGNED)
#endif /* ! GCC_PARAMS_H */
new file mode 100644
@@ -0,0 +1,21 @@
+/* { dg-do run } */
+/* { dg-options "--param asan-catch-misaligned=1" } */
+/* { dg-shouldfail "asan" } */
+
+long long *ptr;
+
+__attribute__((noinline))
+void foo () {
+ ptr = ((long long int *)(((char *)ptr) + 1));
+ *ptr = 1;
+}
+
+int main ()
+{
+ long long int local[9];
+ ptr = (long long *)&local[8];
+ foo ();
+ return 0;
+}
+
+/* { dg-output "ERROR: AddressSanitizer: stack-buffer-overflow.*(\n|\r\n|\r)" } */