
[asan] Protection of stack vars (take 2)

Message ID 20121016103119.GG584@tucnak.redhat.com
State New

Commit Message

Jakub Jelinek Oct. 16, 2012, 10:31 a.m. UTC
Hi!

Here is an updated patch, which also emits the shadow clearing sequence
at the end of the function in the right spot.
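
For reference, the emitted code relies on the mapping documented in
asan.h: shadow = (address >> ASAN_SHADOW_SHIFT)
+ targetm.asan_shadow_offset ().  Below is a minimal standalone sketch
of that mapping in plain C, not the emitted RTL; shadow_byte,
poison_redzone and shadow_offset are hypothetical names used only for
illustration:

#include <stdint.h>
#include <string.h>

#define ASAN_SHADOW_SHIFT 3	/* One shadow byte covers 8 frame bytes.  */

static uintptr_t shadow_offset;	/* Stands in for targetm.asan_shadow_offset ().  */

/* Return the shadow byte describing the 8 frame bytes at ADDR.  */

static unsigned char *
shadow_byte (const void *addr)
{
  return (unsigned char *) (((uintptr_t) addr >> ASAN_SHADOW_SHIFT)
			    + shadow_offset);
}

/* Poison SIZE bytes starting at START with shadow value MAGIC.
   SIZE is a multiple of 1 << ASAN_SHADOW_SHIFT for redzones.  */

static void
poison_redzone (void *start, size_t size, unsigned char magic)
{
  memset (shadow_byte (start), magic, size >> ASAN_SHADOW_SHIFT);
}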

2012-10-16  Jakub Jelinek  <jakub@redhat.com>

	* Makefile.in (asan.o): Depend on $(EXPR_H) $(OPTABS_H).
	(cfgexpand.o): Depend on asan.h.
	* asan.c: Include expr.h and optabs.h.
	(asan_shadow_set): New variable.
	(asan_shadow_cst, asan_emit_stack_protection): New functions.
	(asan_init_shadow_ptr_types): Initialize also asan_shadow_set.
	* cfgexpand.c: Include asan.h.  Define HOST_WIDE_INT heap vector.
	(struct stack_vars_data): New type.
	(expand_stack_vars): Add DATA argument.  Fill it in and add needed
	padding in between variables if -fasan.
	(defer_stack_allocation): Defer everything for flag_asan.
	(expand_used_vars): Return var destruction sequence.  Call
	asan_emit_stack_protection if expand_stack_vars added anything
	to the vectors.
	(expand_gimple_basic_block): Add disable_tail_calls argument.
	(gimple_expand_cfg): Pass true to it if expand_used_vars returned
	non-NULL.  Emit the sequence returned by expand_used_vars after
	return_label.
	* asan.h (asan_emit_stack_protection): New prototype.
	(asan_shadow_set): New decl.
	(ASAN_RED_ZONE_SIZE, ASAN_STACK_MAGIC_LEFT, ASAN_STACK_MAGIC_MIDDLE,
	ASAN_STACK_MAGIC_RIGHT, ASAN_STACK_FRAME_MAGIC): Define.
	* toplev.c (process_options): Also disable -fasan on
	!FRAME_GROWS_DOWNWARD targets.


	Jakub
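
To make the shadow encoding concrete: when a variable's end is not
ASAN_RED_ZONE_SIZE-aligned, asan_emit_stack_protection in the patch
emits a group of four shadow bytes in which each byte is 0 (all 8
frame bytes addressable), 1-7 (that many bytes addressable), or the
partial-redzone magic.  A standalone sketch of that computation,
mirroring the loop in the patch (the driver around it is hypothetical):

#include <stdio.h>

#define ASAN_SHADOW_SHIFT 3
#define ASAN_RED_ZONE_SIZE 32
#define ASAN_STACK_MAGIC_PARTIAL 0xf4

int
main (void)
{
  /* A variable ending 13 bytes into its 32-byte aligned group.  */
  long base_offset = 0, offset = 13;
  long aoff = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - 1L));
  unsigned char shadow_bytes[4];
  int i;
  for (i = 0; i < 4; i++, aoff += 1 << ASAN_SHADOW_SHIFT)
    if (aoff < offset)
      shadow_bytes[i] = (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
			? 0 : (unsigned char) (offset - aoff);
    else
      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
  for (i = 0; i < 4; i++)
    printf ("%#x ", shadow_bytes[i]);	/* Prints: 0 0x5 0xf4 0xf4  */
  printf ("\n");
  return 0;
}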

Comments

Xinliang David Li Oct. 16, 2012, 8:56 p.m. UTC | #1
Looks good except for the following:

1) I am not sure the stack slot sharing is handled correctly.  If I
read the code correctly, the redzone will only be created for the
representative variable in a partition -- will this lead to false
negatives?  (A concrete sketch of the concern follows below.)  As I
asked before, should stack slot sharing even be turned on?
2) Performance tuning -- it is probably better to skip those variables
that are compiler-generated -- is there any point in guarding them?

thanks,

David
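
A concrete sketch of the sharing concern in 1) (a hypothetical example,
not code from this thread): if SMALL is placed into BIG's 32-byte slot,
redzones surround only the representative's bounds, so an out-of-bounds
access that stays inside the shared slot would be missed:

extern void use (char *);

char
f (int i)
{
  {
    char big[32];
    use (big);
  }
  {
    char small[8];
    use (small);
    return small[i];	/* i == 8 is out of bounds for SMALL, yet it
			   still lands inside the shared 32-byte slot.  */
  }
}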

On Tue, Oct 16, 2012 at 3:31 AM, Jakub Jelinek <jakub@redhat.com> wrote:
> Hi!
>
> Here is an updated patch, which also emits the shadow clearing sequence
> at the end of the function in the right spot.
Jakub Jelinek Oct. 16, 2012, 9:52 p.m. UTC | #2
On Tue, Oct 16, 2012 at 01:56:46PM -0700, Xinliang David Li wrote:
> Looks good except for the following:
> 
> 1) I am not sure the stack slot sharing is handled correctly.  If I
> read the code correctly, the redzone will only be created for the
> representative variable in a partition -- will this lead to false
> negatives?  As I asked before, should stack slot sharing even be turned
> on?

I thought we don't merge stack slots if the vars have different sizes;
apparently I've been wrong about that.  So I'll make sure those aren't
shared with flag_asan.  When the sizes are the same, there should be no
false negatives.

> 2) Performance tuning -- it is probably better to skip those variables
> that are compiler-generated -- is there any point in guarding them?

In my (admittedly very limited) testing, only at -O0 did some
compiler-generated vars (SSA_NAMEs) get guards.  The trouble here is the
-fasan -fstack-protector combination.  I wonder if there can be any
DECL_ARTIFICIAL (or non-VAR_DECL/RESULT_DECL) decls for which
stack_protect_decl_phase returns 1 or 2.  If that is unlikely, the best
approach might be to protect all phase 1 and 2 vars, then if flag_asan
call expand_stack_vars once more for partitions where any of the
protected vars are user vars, and finally the current expand_stack_vars
(NULL), which would not do any asan protection (sketched below).

	Jakub
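
A sketch of the phase ordering Jakub describes above; asan_decl_phase_3
is a hypothetical predicate (partitions containing at least one user
var), not code from the patch:

  if (has_protected_decls)
    {
      /* Phase 1 contains only character arrays.  */
      expand_stack_vars (stack_protect_decl_phase_1, &data);

      /* Phase 2 contains other kinds of arrays.  */
      if (flag_stack_protect == 2)
	expand_stack_vars (stack_protect_decl_phase_2, &data);
    }

  if (flag_asan)
    /* Phase 3: partitions where at least one var is a user var.  */
    expand_stack_vars (asan_decl_phase_3, &data);

  /* Everything else, with no asan redzones added.  */
  expand_stack_vars (NULL, &data);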
Xinliang David Li Oct. 16, 2012, 10:56 p.m. UTC | #3
On Tue, Oct 16, 2012 at 2:52 PM, Jakub Jelinek <jakub@redhat.com> wrote:
> On Tue, Oct 16, 2012 at 01:56:46PM -0700, Xinliang David Li wrote:
>> Looks good except for the following:
>>
>> 1) I am not sure the stack slot sharing is handled correctly.  If I
>> read the code correctly, the redzone will only be created for the
>> representative variable in a partition -- will this lead to false
>> negatives?  As I asked before, should stack slot sharing even be turned
>> on?
>
> I thought we don't merge stack slots if the vars have different sizes;
> apparently I've been wrong about that.  So I'll make sure those aren't
> shared with flag_asan.  When the sizes are the same, there should be no
> false negatives.
>
>> 2) Performance tuning -- it is probably better to skip those variables
>> that are compiler-generated -- is there any point in guarding them?
>
> In my (admittedly very limited) testing, only at -O0 did some
> compiler-generated vars (SSA_NAMEs) get guards.  The trouble here is the
> -fasan -fstack-protector combination.  I wonder if there can be any
> DECL_ARTIFICIAL (or non-VAR_DECL/RESULT_DECL) decls for which
> stack_protect_decl_phase returns 1 or 2.  If that is unlikely, the best
> approach might be to protect all phase 1 and 2 vars, then if flag_asan
> call expand_stack_vars once more for partitions where any of the
> protected vars are user vars, and finally the current expand_stack_vars
> (NULL), which would not do any asan protection.
>

Do we expect people to use -fasan together with -fstack-protector?  Is
it enough to just skip the DECL_ARTIFICIAL ones?  Most of the ssa_name
var decls are skipped already.

David

>         Jakub
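
Skipping artificial decls as David suggests would amount to something
like the following in expand_stack_vars -- a hypothetical variant of the
patch's flag_asan branch, shown only for illustration:

	  if (flag_asan && !DECL_ARTIFICIAL (decl))
	    {
	      HOST_WIDE_INT prev_offset = frame_offset;
	      offset
		= alloc_stack_frame_space (stack_vars[i].size
					   + ASAN_RED_ZONE_SIZE,
					   MAX (alignb, ASAN_RED_ZONE_SIZE));
	      VEC_safe_push (HOST_WIDE_INT, heap, data->asan_vec,
			     prev_offset);
	      VEC_safe_push (HOST_WIDE_INT, heap, data->asan_vec,
			     offset + stack_vars[i].size);
	      VEC_safe_push (tree, heap, data->asan_decl_vec, decl);
	    }
	  else
	    offset = alloc_stack_frame_space (stack_vars[i].size, alignb);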

Patch

--- gcc/Makefile.in.jj	2012-10-12 23:30:31.635325772 +0200
+++ gcc/Makefile.in	2012-10-15 09:40:40.287706610 +0200
@@ -2204,7 +2204,7 @@  stor-layout.o : stor-layout.c $(CONFIG_H
 asan.o : asan.c asan.h $(CONFIG_H) pointer-set.h \
    $(SYSTEM_H) $(TREE_H) $(GIMPLE_H) \
    output.h $(DIAGNOSTIC_H) coretypes.h $(TREE_DUMP_H) $(FLAGS_H) \
-   tree-pretty-print.h $(TARGET_H)
+   tree-pretty-print.h $(TARGET_H) $(EXPR_H) $(OPTABS_H)
 tree-ssa-tail-merge.o: tree-ssa-tail-merge.c \
    $(SYSTEM_H) $(CONFIG_H) coretypes.h $(TM_H) $(BITMAP_H) \
    $(FLAGS_H) $(TM_P_H) $(BASIC_BLOCK_H) \
@@ -3074,7 +3074,7 @@  cfgexpand.o : cfgexpand.c $(TREE_FLOW_H)
    $(DIAGNOSTIC_H) toplev.h $(DIAGNOSTIC_CORE_H) $(BASIC_BLOCK_H) $(FLAGS_H) debug.h $(PARAMS_H) \
    value-prof.h $(TREE_INLINE_H) $(TARGET_H) $(SSAEXPAND_H) $(REGS_H) \
    $(GIMPLE_PRETTY_PRINT_H) $(BITMAP_H) sbitmap.h \
-   $(INSN_ATTR_H) $(CFGLOOP_H)
+   $(INSN_ATTR_H) $(CFGLOOP_H) asan.h
 cfgrtl.o : cfgrtl.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_ERROR_H) \
    $(FLAGS_H) insn-config.h $(BASIC_BLOCK_H) $(REGS_H) hard-reg-set.h \
    $(FUNCTION_H) $(EXCEPT_H) $(TM_P_H) $(INSN_ATTR_H) \
--- gcc/toplev.c.jj	2012-10-12 23:30:31.717325313 +0200
+++ gcc/toplev.c	2012-10-15 09:40:03.540923935 +0200
@@ -1544,7 +1544,9 @@  process_options (void)
     }
 
   /* Address Sanitizer needs porting to each target architecture.  */
-  if (flag_asan && targetm.asan_shadow_offset == NULL)
+  if (flag_asan
+      && (targetm.asan_shadow_offset == NULL
+	  || !FRAME_GROWS_DOWNWARD))
     {
       warning (0, "-fasan not supported for this target");
       flag_asan = 0;
--- gcc/asan.c.jj	2012-10-12 23:31:55.423856689 +0200
+++ gcc/asan.c	2012-10-16 12:18:41.414029844 +0200
@@ -43,6 +43,8 @@  along with GCC; see the file COPYING3.
 #include "asan.h"
 #include "gimple-pretty-print.h"
 #include "target.h"
+#include "expr.h"
+#include "optabs.h"
 
 /*
  AddressSanitizer finds out-of-bounds and use-after-free bugs 
@@ -79,10 +81,195 @@  along with GCC; see the file COPYING3.
  to create redzones for stack and global object and poison them.
 */
 
+alias_set_type asan_shadow_set = -1;
+
 /* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
    alias set is used for all shadow memory accesses.  */
 static GTY(()) tree shadow_ptr_types[2];
 
+/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */
+
+static rtx
+asan_shadow_cst (unsigned char shadow_bytes[4])
+{
+  int i;
+  unsigned HOST_WIDE_INT val = 0;
+  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
+  for (i = 0; i < 4; i++)
+    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
+	   << (BITS_PER_UNIT * i);
+  return GEN_INT (trunc_int_for_mode (val, SImode));
+}
+
+/* Insert code to protect stack vars.  The prologue sequence should be emitted
+   directly, the epilogue sequence returned.  BASE is the register holding the
+   stack base, to which the offsets in the OFFSETS array are relative.  The
+   OFFSETS array contains pairs of offsets in reverse order: always the end
+   offset of some gap that needs protection followed by its start offset,
+   and DECLS is an array of representative decls for each var partition.
+   LENGTH is the length of the OFFSETS array; the DECLS array is
+   LENGTH / 2 - 1 elements long (OFFSETS includes the gap before the first
+   variable as well as gaps after each stack variable).  */
+
+rtx
+asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
+			    int length)
+{
+  rtx shadow_base, shadow_mem, ret, mem;
+  unsigned char shadow_bytes[4];
+  HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
+  HOST_WIDE_INT last_offset, last_size;
+  int l;
+  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
+  static pretty_printer pp;
+  static bool pp_initialized;
+  const char *buf;
+  size_t len;
+  tree str_cst;
+
+  /* First of all, prepare the description string.  */
+  if (!pp_initialized)
+    {
+      pp_construct (&pp, /* prefix */NULL, /* line-width */0);
+      pp_initialized = true;
+    }
+  pp_clear_output_area (&pp);
+  if (DECL_NAME (current_function_decl))
+    pp_base_tree_identifier (&pp, DECL_NAME (current_function_decl));
+  else
+    pp_string (&pp, "<unknown>");
+  pp_space (&pp);
+  pp_decimal_int (&pp, length / 2 - 1);
+  pp_space (&pp);
+  for (l = length - 2; l; l -= 2)
+    {
+      tree decl = decls[l / 2 - 1];
+      pp_wide_integer (&pp, offsets[l] - base_offset);
+      pp_space (&pp);
+      pp_wide_integer (&pp, offsets[l - 1] - offsets[l]);
+      pp_space (&pp);
+      if (DECL_P (decl) && DECL_NAME (decl))
+	{
+	  pp_decimal_int (&pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
+	  pp_space (&pp);
+	  pp_base_tree_identifier (&pp, DECL_NAME (decl));
+	}
+      else
+	pp_string (&pp, "9 <unknown>");
+      pp_space (&pp);
+    }
+  buf = pp_base_formatted_text (&pp);
+  len = strlen (buf);
+  str_cst = build_string (len + 1, buf);
+  TREE_TYPE (str_cst)
+    = build_array_type (char_type_node, build_index_type (size_int (len)));
+  TREE_READONLY (str_cst) = 1;
+  TREE_STATIC (str_cst) = 1;
+  str_cst = build1 (ADDR_EXPR, build_pointer_type (char_type_node), str_cst);
+
+  /* Emit the prologue sequence.  */
+  base = expand_binop (Pmode, add_optab, base, GEN_INT (base_offset),
+		       NULL_RTX, 1, OPTAB_DIRECT);
+  mem = gen_rtx_MEM (ptr_mode, base);
+  emit_move_insn (mem, GEN_INT (ASAN_STACK_FRAME_MAGIC));
+  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
+  emit_move_insn (mem, expand_normal (str_cst));
+  shadow_base = expand_binop (Pmode, lshr_optab, base,
+			      GEN_INT (ASAN_SHADOW_SHIFT),
+			      NULL_RTX, 1, OPTAB_DIRECT);
+  shadow_base = expand_binop (Pmode, add_optab, shadow_base,
+			      GEN_INT (targetm.asan_shadow_offset ()),
+			      NULL_RTX, 1, OPTAB_DIRECT);
+  gcc_assert (asan_shadow_set != -1
+	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
+  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
+  set_mem_alias_set (shadow_mem, asan_shadow_set);
+  prev_offset = base_offset;
+  for (l = length; l; l -= 2)
+    {
+      if (l == 2)
+	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
+      offset = offsets[l - 1];
+      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
+	{
+	  int i;
+	  HOST_WIDE_INT aoff
+	    = base_offset + ((offset - base_offset)
+			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
+	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
+				       (aoff - prev_offset)
+				       >> ASAN_SHADOW_SHIFT);
+	  prev_offset = aoff;
+	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
+	    if (aoff < offset)
+	      {
+		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
+		  shadow_bytes[i] = 0;
+		else
+		  shadow_bytes[i] = offset - aoff;
+	      }
+	    else
+	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
+	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
+	  offset = aoff;
+	}
+      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
+	{
+	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
+				       (offset - prev_offset)
+				       >> ASAN_SHADOW_SHIFT);
+	  prev_offset = offset;
+	  memset (shadow_bytes, cur_shadow_byte, 4);
+	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
+	  offset += ASAN_RED_ZONE_SIZE;
+	}
+      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
+    }
+  do_pending_stack_adjust ();
+
+  /* Construct epilogue sequence.  */
+  start_sequence ();
+
+  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
+  set_mem_alias_set (shadow_mem, asan_shadow_set);
+  prev_offset = base_offset;
+  last_offset = base_offset;
+  last_size = 0;
+  for (l = length; l; l -= 2)
+    {
+      offset = base_offset + ((offsets[l - 1] - base_offset)
+			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
+      if (last_offset + last_size != offset)
+	{
+	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
+				       (last_offset - prev_offset)
+				       >> ASAN_SHADOW_SHIFT);
+	  prev_offset = last_offset;
+	  clear_storage (shadow_mem, GEN_INT (last_size >> ASAN_SHADOW_SHIFT),
+			 BLOCK_OP_NORMAL);
+	  last_offset = offset;
+	  last_size = 0;
+	}
+      last_size += base_offset + ((offsets[l - 2] - base_offset)
+				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
+		   - offset;
+    }
+  if (last_size)
+    {
+      shadow_mem = adjust_address (shadow_mem, VOIDmode,
+				   (last_offset - prev_offset)
+				   >> ASAN_SHADOW_SHIFT);
+      clear_storage (shadow_mem, GEN_INT (last_size >> ASAN_SHADOW_SHIFT),
+		     BLOCK_OP_NORMAL);
+    }
+
+  do_pending_stack_adjust ();
+
+  ret = get_insns ();
+  end_sequence ();
+  return ret;
+}
+
 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
    IS_STORE is either 1 (for a store) or 0 (for a load).
    SIZE_IN_BYTES is one of 1, 2, 4, 8, 16.  */
@@ -401,12 +588,12 @@  asan_finish_file (void)
 static void
 asan_init_shadow_ptr_types (void)
 {
-  alias_set_type set = new_alias_set ();
+  asan_shadow_set = new_alias_set ();
   shadow_ptr_types[0] = build_distinct_type_copy (unsigned_char_type_node);
-  TYPE_ALIAS_SET (shadow_ptr_types[0]) = set;
+  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
   shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
   shadow_ptr_types[1] = build_distinct_type_copy (short_unsigned_type_node);
-  TYPE_ALIAS_SET (shadow_ptr_types[1]) = set;
+  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
   shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
 }
 
--- gcc/asan.h.jj	2012-10-12 23:30:31.689325469 +0200
+++ gcc/asan.h	2012-10-15 09:40:03.532923984 +0200
@@ -21,10 +21,31 @@  along with GCC; see the file COPYING3.
 #ifndef TREE_ASAN
 #define TREE_ASAN
 
-extern void asan_finish_file(void);
+extern void asan_finish_file (void);
+extern rtx asan_emit_stack_protection (rtx, HOST_WIDE_INT *, tree *, int);
+
+/* Alias set for accessing the shadow memory.  */
+extern alias_set_type asan_shadow_set;
 
 /* Shadow memory is found at
    (address >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset ().  */
 #define ASAN_SHADOW_SHIFT	3
 
+/* Red zone size; stack and global variables are padded by between
+   ASAN_RED_ZONE_SIZE and 2 * ASAN_RED_ZONE_SIZE - 1 bytes.  */
+#define ASAN_RED_ZONE_SIZE	32
+
+/* Shadow memory values for stack protection.  Left is below the protected
+   vars; the first pointer-sized word on the stack at that offset contains
+   the ASAN_STACK_FRAME_MAGIC word, the second a pointer to a string
+   describing the frame.  Middle is for padding in between variables, right
+   is above the last protected variable, and partial marks the bytes
+   immediately after a variable up to the next ASAN_RED_ZONE_SIZE
+   boundary.  */
+#define ASAN_STACK_MAGIC_LEFT		0xf1
+#define ASAN_STACK_MAGIC_MIDDLE		0xf2
+#define ASAN_STACK_MAGIC_RIGHT		0xf3
+#define ASAN_STACK_MAGIC_PARTIAL	0xf4
+
+#define ASAN_STACK_FRAME_MAGIC	0x41b58ab3
+
 #endif /* TREE_ASAN */
--- gcc/cfgexpand.c.jj	2012-10-12 23:30:31.772325005 +0200
+++ gcc/cfgexpand.c	2012-10-16 12:25:31.756750811 +0200
@@ -47,6 +47,7 @@  along with GCC; see the file COPYING3.
 #include "cfgloop.h"
 #include "regs.h" /* For reg_renumber.  */
 #include "insn-attr.h" /* For INSN_SCHEDULING.  */
+#include "asan.h"
 
 /* This variable holds information helping the rewriting of SSA trees
    into RTL.  */
@@ -835,12 +836,26 @@  expand_one_stack_var_at (tree decl, rtx
   set_rtl (decl, x);
 }
 
+DEF_VEC_I(HOST_WIDE_INT);
+DEF_VEC_ALLOC_I(HOST_WIDE_INT,heap);
+
+struct stack_vars_data
+{
+  /* Vector of offset pairs, always the end of some padding followed
+     by the start of the padding that needs Address Sanitizer protection.
+     The vector is in reversed order: highest offset pairs come first.  */
+  VEC(HOST_WIDE_INT, heap) *asan_vec;
+
+  /* Vector of partition representative decls in between the paddings.  */
+  VEC(tree, heap) *asan_decl_vec;
+};
+
 /* A subroutine of expand_used_vars.  Give each partition representative
    a unique location within the stack frame.  Update each partition member
    with that location.  */
 
 static void
-expand_stack_vars (bool (*pred) (tree))
+expand_stack_vars (bool (*pred) (tree), struct stack_vars_data *data)
 {
   size_t si, i, j, n = stack_vars_num;
   HOST_WIDE_INT large_size = 0, large_alloc = 0;
@@ -917,7 +932,21 @@  expand_stack_vars (bool (*pred) (tree))
       alignb = stack_vars[i].alignb;
       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
 	{
-	  offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
+	  if (flag_asan)
+	    {
+	      HOST_WIDE_INT prev_offset = frame_offset;
+	      offset
+		= alloc_stack_frame_space (stack_vars[i].size
+					   + ASAN_RED_ZONE_SIZE,
+					   MAX (alignb, ASAN_RED_ZONE_SIZE));
+	      VEC_safe_push (HOST_WIDE_INT, heap, data->asan_vec,
+			     prev_offset);
+	      VEC_safe_push (HOST_WIDE_INT, heap, data->asan_vec,
+			     offset + stack_vars[i].size);
+	      VEC_safe_push (tree, heap, data->asan_decl_vec, decl);
+	    }
+	  else
+	    offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
 	  base = virtual_stack_vars_rtx;
 	  base_align = crtl->max_used_stack_slot_alignment;
 	}
@@ -1055,8 +1084,9 @@  static bool
 defer_stack_allocation (tree var, bool toplevel)
 {
   /* If stack protection is enabled, *all* stack variables must be deferred,
-     so that we can re-order the strings to the top of the frame.  */
-  if (flag_stack_protect)
+     so that we can re-order the strings to the top of the frame.
+     Similarly for Address Sanitizer.  */
+  if (flag_stack_protect || flag_asan)
     return true;
 
   /* We handle "large" alignment via dynamic allocation.  We want to handle
@@ -1446,11 +1476,12 @@  estimated_stack_frame_size (struct cgrap
 
 /* Expand all variables used in the function.  */
 
-static void
+static rtx
 expand_used_vars (void)
 {
   tree var, outer_block = DECL_INITIAL (current_function_decl);
   VEC(tree,heap) *maybe_local_decls = NULL;
+  rtx var_end_seq = NULL_RTX;
   struct pointer_map_t *ssa_name_decls;
   unsigned i;
   unsigned len;
@@ -1601,6 +1632,11 @@  expand_used_vars (void)
   /* Assign rtl to each variable based on these partitions.  */
   if (stack_vars_num > 0)
     {
+      struct stack_vars_data data;
+
+      data.asan_vec = NULL;
+      data.asan_decl_vec = NULL;
+
       /* Reorder decls to be protected by iterating over the variables
 	 array multiple times, and allocating out of each phase in turn.  */
       /* ??? We could probably integrate this into the qsort we did
@@ -1609,14 +1645,35 @@  expand_used_vars (void)
       if (has_protected_decls)
 	{
 	  /* Phase 1 contains only character arrays.  */
-	  expand_stack_vars (stack_protect_decl_phase_1);
+	  expand_stack_vars (stack_protect_decl_phase_1, &data);
 
 	  /* Phase 2 contains other kinds of arrays.  */
 	  if (flag_stack_protect == 2)
-	    expand_stack_vars (stack_protect_decl_phase_2);
+	    expand_stack_vars (stack_protect_decl_phase_2, &data);
 	}
 
-      expand_stack_vars (NULL);
+      expand_stack_vars (NULL, &data);
+
+      if (!VEC_empty (HOST_WIDE_INT, data.asan_vec))
+	{
+	  HOST_WIDE_INT prev_offset = frame_offset;
+	  HOST_WIDE_INT offset
+	    = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
+				       ASAN_RED_ZONE_SIZE);
+	  VEC_safe_push (HOST_WIDE_INT, heap, data.asan_vec, prev_offset);
+	  VEC_safe_push (HOST_WIDE_INT, heap, data.asan_vec, offset);
+
+	  var_end_seq
+	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
+					  VEC_address (HOST_WIDE_INT,
+						       data.asan_vec),
+					  VEC_address (tree,
+						       data.asan_decl_vec),
+					  VEC_length (HOST_WIDE_INT,
+						      data.asan_vec));
+	}
+      VEC_free (HOST_WIDE_INT, heap, data.asan_vec);
+      VEC_free (tree, heap, data.asan_decl_vec);
     }
 
   fini_vars_expansion ();
@@ -1643,6 +1700,8 @@  expand_used_vars (void)
 	frame_offset += align - 1;
       frame_offset &= -align;
     }
+
+  return var_end_seq;
 }
 
 
@@ -3639,7 +3698,7 @@  expand_debug_locations (void)
 /* Expand basic block BB from GIMPLE trees to RTL.  */
 
 static basic_block
-expand_gimple_basic_block (basic_block bb)
+expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
 {
   gimple_stmt_iterator gsi;
   gimple_seq stmts;
@@ -3927,6 +3986,11 @@  expand_gimple_basic_block (basic_block b
 	}
       else
 	{
+	  if (is_gimple_call (stmt)
+	      && gimple_call_tail_p (stmt)
+	      && disable_tail_calls)
+	    gimple_call_set_tail (stmt, false);
+
 	  if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
 	    {
 	      bool can_fallthru;
@@ -4286,7 +4350,7 @@  gimple_expand_cfg (void)
   sbitmap blocks;
   edge_iterator ei;
   edge e;
-  rtx var_seq;
+  rtx var_seq, var_ret_seq;
   unsigned i;
 
   timevar_push (TV_OUT_OF_SSA);
@@ -4346,7 +4410,7 @@  gimple_expand_cfg (void)
   timevar_push (TV_VAR_EXPAND);
   start_sequence ();
 
-  expand_used_vars ();
+  var_ret_seq = expand_used_vars ();
 
   var_seq = get_insns ();
   end_sequence ();
@@ -4472,7 +4536,7 @@  gimple_expand_cfg (void)
 
   lab_rtx_for_bb = pointer_map_create ();
   FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
-    bb = expand_gimple_basic_block (bb);
+    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
 
   if (MAY_HAVE_DEBUG_INSNS)
     expand_debug_locations ();
@@ -4500,6 +4564,9 @@  gimple_expand_cfg (void)
   construct_exit_block ();
   insn_locations_finalize ();
 
+  if (var_ret_seq)
+    emit_insn_after (var_ret_seq, return_label);
+
   /* Zap the tree EH table.  */
   set_eh_throw_stmt_table (cfun, NULL);