
Patch ping (stage1-ish patches)

Message ID 20131128082230.GL892@tucnak.redhat.com
State New

Commit Message

Jakub Jelinek Nov. 28, 2013, 8:22 a.m. UTC
On Wed, Nov 27, 2013 at 01:06:06PM -0700, Jeff Law wrote:
> +	  HOST_WIDE_INT offset, sz;
> +	  sz = ASAN_RED_ZONE_SIZE;
> +	  sz = data.asan_vec[0] - prev_offset;
> 
> Seems to me like the first assignment to sz is dead.  Clearly
> something isn't right here.

Thanks for catching that; yeah, the above came from reusing
the sz variable both for the red zone size (what is being computed)
and as a helper temporary for the total size of the asan stack frame
so far, which is needed 3 times in the computation.

I've used a new redzonesz var for the former to make it clearer.
Here is what I've committed in the end after retesting it on x86_64-linux.
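
For concreteness, a quick standalone sketch (plain C, not the GCC
sources; it only assumes the usual 32-byte ASAN_RED_ZONE_SIZE) of what
the new computation does: when the largest protected var needs more
alignment than one red zone, the trailing red zone is enlarged so that
the protected area plus that red zone becomes a multiple of that
alignment.

#include <stdio.h>

#define ASAN_RED_ZONE_SIZE 32

/* Size of the trailing red zone for a protected area of SZ bytes whose
   most-aligned var needs ALIGNB bytes of alignment: normally one red
   zone, enlarged so SZ plus the red zone is a multiple of ALIGNB.  */
static long long
redzone_size (long long sz, unsigned int alignb)
{
  long long redzonesz = ASAN_RED_ZONE_SIZE;
  if (alignb > ASAN_RED_ZONE_SIZE
      && alignb <= 4096
      && sz + ASAN_RED_ZONE_SIZE >= alignb)
    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + alignb - 1)
                 & ~((long long) alignb - 1)) - sz;
  return redzonesz;
}

int
main (void)
{
  /* 144 bytes of vars with a 64-byte-aligned one: 144 + 32 rounds up
     to 192, so the red zone grows from 32 to 48 bytes.  */
  printf ("%lld\n", redzone_size (144, 64));
  /* Alignment not above the red zone size: plain 32-byte red zone.  */
  printf ("%lld\n", redzone_size (144, 16));
  return 0;
}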

2013-11-28  Jakub Jelinek  <jakub@redhat.com>

	* cfgexpand.c (struct stack_vars_data): Add asan_base and asan_alignb
	fields.
	(expand_stack_vars): For -fsanitize=address, use (and set initially)
	data->asan_base as base for vars and update asan_alignb.
	(expand_used_vars): Initialize data.asan_base and data.asan_alignb.
	Pass them to asan_emit_stack_protection.
	* asan.c (asan_detect_stack_use_after_return): New variable.
	(asan_emit_stack_protection): Add pbase and alignb arguments.
	Implement use after return sanitization.
	* asan.h (asan_emit_stack_protection): Adjust prototype.
	(ASAN_STACK_MAGIC_USE_AFTER_RET, ASAN_STACK_RETIRED_MAGIC): Define.



	Jakub
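
For readers skimming the patch below: with -fsanitize=address the
emitted prologue now tests __asan_option_detect_stack_use_after_return
and, when it is set, asks __asan_stack_malloc_N for a "fake stack"
block and uses its return value as the frame base; the epilogue
compares that base against the real one and, if a fake block was used,
stamps it with ASAN_STACK_RETIRED_MAGIC and poisons its whole shadow
with 0xf5 (inline or through __asan_stack_free_N), so later accesses
through dangling pointers are reported.  How the class N and the extra
alignment bias are picked from the frame size is shown in this small
standalone sketch (again plain C, not the GCC sources, assuming a
32-byte ASAN_RED_ZONE_SIZE):

#include <stdio.h>

#define ASAN_RED_ZONE_SIZE 32

/* Pick the __asan_stack_malloc_N / __asan_stack_free_N class for an
   asan frame of SIZE bytes whose most-aligned protected var needs
   ALIGNB bytes of alignment; return -1 if the real stack must be used.
   *BIAS gets the extra padding (base_align_bias in the patch) used to
   keep an over-aligned var aligned inside the fake-stack block.  */
static int
uar_class (unsigned long long size, unsigned int alignb,
           unsigned long long *bias)
{
  int klass;
  *bias = 0;
  if (size <= 32 || size > 65536)
    return -1;
  /* floor_log2 (size - 1) - 5: class 0 covers 33..64 bytes,
     class 1 covers 65..128, ..., class 10 covers up to 65536.  */
  klass = 63 - __builtin_clzll (size - 1) - 5;
  /* __asan_stack_malloc_N only guarantees (64 << N), or 4096, bytes
     of alignment; give up if some var needs more.  */
  if (alignb > (klass < 6 ? (64U << klass) : 4096U))
    return -1;
  if (alignb > ASAN_RED_ZONE_SIZE && (size & (alignb - 1)))
    *bias = ((size + alignb - 1) & ~((unsigned long long) alignb - 1))
            - size;
  return klass;
}

int
main (void)
{
  unsigned long long bias;
  /* A 96-byte frame, nothing aligned above 32: __asan_stack_malloc_1.  */
  printf ("%d %llu\n", uar_class (96, 32, &bias), bias);
  /* A 160-byte frame with a 64-byte-aligned var: class 2, bias 32.  */
  printf ("%d %llu\n", uar_class (160, 64, &bias), bias);
  return 0;
}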

Patch

--- gcc/asan.c.jj	2013-11-27 18:02:47.984814523 +0100
+++ gcc/asan.c	2013-11-28 08:36:28.740704722 +0100
@@ -237,6 +237,9 @@  alias_set_type asan_shadow_set = -1;
    alias set is used for all shadow memory accesses.  */
 static GTY(()) tree shadow_ptr_types[2];
 
+/* Decl for __asan_option_detect_stack_use_after_return.  */
+static GTY(()) tree asan_detect_stack_use_after_return;
+
 /* Hashtable support for memory references used by gimple
    statements.  */
 
@@ -950,20 +953,26 @@  asan_function_start (void)
    and DECLS is an array of representative decls for each var partition.
    LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
    elements long (OFFSETS include gap before the first variable as well
-   as gaps after each stack variable).  */
+   as gaps after each stack variable).  PBASE is, if non-NULL, some pseudo
+   register which stack vars DECL_RTLs are based on.  Either BASE should be
+   assigned to PBASE, when not doing use after return protection, or
+   corresponding address based on __asan_stack_malloc* return value.  */
 
 rtx
-asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
-			    int length)
+asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
+			    HOST_WIDE_INT *offsets, tree *decls, int length)
 {
-  rtx shadow_base, shadow_mem, ret, mem;
+  rtx shadow_base, shadow_mem, ret, mem, orig_base, lab;
   char buf[30];
   unsigned char shadow_bytes[4];
-  HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
+  HOST_WIDE_INT base_offset = offsets[length - 1];
+  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
+  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
   HOST_WIDE_INT last_offset, last_size;
   int l;
   unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
   tree str_cst, decl, id;
+  int use_after_return_class = -1;
 
   if (shadow_ptr_types[0] == NULL_TREE)
     asan_init_shadow_ptr_types ();
@@ -993,10 +1002,67 @@  asan_emit_stack_protection (rtx base, HO
   str_cst = asan_pp_string (&asan_pp);
 
   /* Emit the prologue sequence.  */
+  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase)
+    {
+      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
+      /* __asan_stack_malloc_N guarantees alignment
+         N < 6 ? (64 << N) : 4096 bytes.  */
+      if (alignb > (use_after_return_class < 6
+		    ? (64U << use_after_return_class) : 4096U))
+	use_after_return_class = -1;
+      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
+	base_align_bias = ((asan_frame_size + alignb - 1)
+			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
+    }
+  if (use_after_return_class == -1 && pbase)
+    emit_move_insn (pbase, base);
   base = expand_binop (Pmode, add_optab, base,
-		       gen_int_mode (base_offset, Pmode),
+		       gen_int_mode (base_offset - base_align_bias, Pmode),
 		       NULL_RTX, 1, OPTAB_DIRECT);
+  orig_base = NULL_RTX;
+  if (use_after_return_class != -1)
+    {
+      if (asan_detect_stack_use_after_return == NULL_TREE)
+	{
+	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
+	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
+			     integer_type_node);
+	  SET_DECL_ASSEMBLER_NAME (decl, id);
+	  TREE_ADDRESSABLE (decl) = 1;
+	  DECL_ARTIFICIAL (decl) = 1;
+	  DECL_IGNORED_P (decl) = 1;
+	  DECL_EXTERNAL (decl) = 1;
+	  TREE_STATIC (decl) = 1;
+	  TREE_PUBLIC (decl) = 1;
+	  TREE_USED (decl) = 1;
+	  asan_detect_stack_use_after_return = decl;
+	}
+      orig_base = gen_reg_rtx (Pmode);
+      emit_move_insn (orig_base, base);
+      ret = expand_normal (asan_detect_stack_use_after_return);
+      lab = gen_label_rtx ();
+      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
+      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
+			       VOIDmode, 0, lab, very_likely);
+      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
+		use_after_return_class);
+      ret = init_one_libfunc (buf);
+      rtx addr = convert_memory_address (ptr_mode, base);
+      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
+				     GEN_INT (asan_frame_size
+					      + base_align_bias),
+				     TYPE_MODE (pointer_sized_int_node),
+				     addr, ptr_mode);
+      ret = convert_memory_address (Pmode, ret);
+      emit_move_insn (base, ret);
+      emit_label (lab);
+      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
+					   gen_int_mode (base_align_bias
+							 - base_offset, Pmode),
+					   NULL_RTX, 1, OPTAB_DIRECT));
+    }
   mem = gen_rtx_MEM (ptr_mode, base);
+  mem = adjust_address (mem, VOIDmode, base_align_bias);
   emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
   mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
   emit_move_insn (mem, expand_normal (str_cst));
@@ -1020,10 +1086,10 @@  asan_emit_stack_protection (rtx base, HO
   shadow_base = expand_binop (Pmode, lshr_optab, base,
 			      GEN_INT (ASAN_SHADOW_SHIFT),
 			      NULL_RTX, 1, OPTAB_DIRECT);
-  shadow_base = expand_binop (Pmode, add_optab, shadow_base,
-			      gen_int_mode (targetm.asan_shadow_offset (),
-					    Pmode),
-			      NULL_RTX, 1, OPTAB_DIRECT);
+  shadow_base
+    = plus_constant (Pmode, shadow_base,
+		     targetm.asan_shadow_offset ()
+		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
   gcc_assert (asan_shadow_set != -1
 	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
   shadow_mem = gen_rtx_MEM (SImode, shadow_base);
@@ -1074,6 +1140,47 @@  asan_emit_stack_protection (rtx base, HO
   /* Construct epilogue sequence.  */
   start_sequence ();
 
+  lab = NULL_RTX;  
+  if (use_after_return_class != -1)
+    {
+      rtx lab2 = gen_label_rtx ();
+      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
+      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
+      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
+			       VOIDmode, 0, lab2, very_likely);
+      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
+      set_mem_alias_set (shadow_mem, asan_shadow_set);
+      mem = gen_rtx_MEM (ptr_mode, base);
+      mem = adjust_address (mem, VOIDmode, base_align_bias);
+      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
+      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
+      if (use_after_return_class < 5
+	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
+				  BITS_PER_UNIT, true))
+	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
+			 BITS_PER_UNIT, true, 0);
+      else if (use_after_return_class >= 5
+	       || !set_storage_via_setmem (shadow_mem,
+					   GEN_INT (sz),
+					   gen_int_mode (c, QImode),
+					   BITS_PER_UNIT, BITS_PER_UNIT,
+					   -1, sz, sz, sz))
+	{
+	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
+		    use_after_return_class);
+	  ret = init_one_libfunc (buf);
+	  rtx addr = convert_memory_address (ptr_mode, base);
+	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
+	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
+			     GEN_INT (asan_frame_size + base_align_bias),
+			     TYPE_MODE (pointer_sized_int_node),
+			     orig_addr, ptr_mode);
+	}
+      lab = gen_label_rtx ();
+      emit_jump (lab);
+      emit_label (lab2);
+    }
+
   shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
   set_mem_alias_set (shadow_mem, asan_shadow_set);
   prev_offset = base_offset;
@@ -1106,6 +1213,8 @@  asan_emit_stack_protection (rtx base, HO
     }
 
   do_pending_stack_adjust ();
+  if (lab)
+    emit_label (lab);
 
   ret = get_insns ();
   end_sequence ();
--- gcc/cfgexpand.c.jj	2013-11-27 18:02:48.159813630 +0100
+++ gcc/cfgexpand.c	2013-11-28 08:34:36.960279675 +0100
@@ -890,6 +890,12 @@  struct stack_vars_data
 
   /* Vector of partition representative decls in between the paddings.  */
   vec<tree> asan_decl_vec;
+
+  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
+  rtx asan_base;
+
+  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
+  unsigned int asan_alignb;
 };
 
 /* A subroutine of expand_used_vars.  Give each partition representative
@@ -974,6 +980,7 @@  expand_stack_vars (bool (*pred) (size_t)
       alignb = stack_vars[i].alignb;
       if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
 	{
+	  base = virtual_stack_vars_rtx;
 	  if ((flag_sanitize & SANITIZE_ADDRESS) && pred)
 	    {
 	      HOST_WIDE_INT prev_offset = frame_offset;
@@ -1002,10 +1009,13 @@  expand_stack_vars (bool (*pred) (size_t)
 	      if (repr_decl == NULL_TREE)
 		repr_decl = stack_vars[i].decl;
 	      data->asan_decl_vec.safe_push (repr_decl);
+	      data->asan_alignb = MAX (data->asan_alignb, alignb);
+	      if (data->asan_base == NULL)
+		data->asan_base = gen_reg_rtx (Pmode);
+	      base = data->asan_base;
 	    }
 	  else
 	    offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
-	  base = virtual_stack_vars_rtx;
 	  base_align = crtl->max_used_stack_slot_alignment;
 	}
       else
@@ -1792,6 +1802,8 @@  expand_used_vars (void)
 
       data.asan_vec = vNULL;
       data.asan_decl_vec = vNULL;
+      data.asan_base = NULL_RTX;
+      data.asan_alignb = 0;
 
       /* Reorder decls to be protected by iterating over the variables
 	 array multiple times, and allocating out of each phase in turn.  */
@@ -1816,16 +1828,25 @@  expand_used_vars (void)
       if (!data.asan_vec.is_empty ())
 	{
 	  HOST_WIDE_INT prev_offset = frame_offset;
-	  HOST_WIDE_INT offset
-	    = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
-				       ASAN_RED_ZONE_SIZE);
+	  HOST_WIDE_INT offset, sz, redzonesz;
+	  redzonesz = ASAN_RED_ZONE_SIZE;
+	  sz = data.asan_vec[0] - prev_offset;
+	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
+	      && data.asan_alignb <= 4096
+	      && sz + ASAN_RED_ZONE_SIZE >= data.asan_alignb)
+	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
+			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
+	  offset
+	    = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
 	  data.asan_vec.safe_push (prev_offset);
 	  data.asan_vec.safe_push (offset);
 
 	  var_end_seq
 	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
+					  data.asan_base,
+					  data.asan_alignb,
 					  data.asan_vec.address (),
-					  data.asan_decl_vec. address (),
+					  data.asan_decl_vec.address (),
 					  data.asan_vec.length ());
 	}
 
--- gcc/asan.h.jj	2013-11-22 21:03:50.784671402 +0100
+++ gcc/asan.h	2013-11-28 08:29:48.115771827 +0100
@@ -23,7 +23,8 @@  along with GCC; see the file COPYING3.
 
 extern void asan_function_start (void);
 extern void asan_finish_file (void);
-extern rtx asan_emit_stack_protection (rtx, HOST_WIDE_INT *, tree *, int);
+extern rtx asan_emit_stack_protection (rtx, rtx, unsigned int, HOST_WIDE_INT *,
+				       tree *, int);
 extern bool asan_protect_global (tree);
 extern void initialize_sanitizer_builtins (void);
 extern tree asan_dynamic_init_call (bool);
@@ -49,8 +50,10 @@  extern alias_set_type asan_shadow_set;
 #define ASAN_STACK_MAGIC_MIDDLE		0xf2
 #define ASAN_STACK_MAGIC_RIGHT		0xf3
 #define ASAN_STACK_MAGIC_PARTIAL	0xf4
+#define ASAN_STACK_MAGIC_USE_AFTER_RET	0xf5
 
-#define ASAN_STACK_FRAME_MAGIC	0x41b58ab3
+#define ASAN_STACK_FRAME_MAGIC		0x41b58ab3
+#define ASAN_STACK_RETIRED_MAGIC	0x45e0360e
 
 /* Return true if DECL should be guarded on the stack.  */