
Introduce base class for bb_vec_info and loop_vec_info

Message ID alpine.LSU.2.11.1510060947270.6516@zhemvz.fhfr.qr

Commit Message

Richard Biener Oct. 6, 2015, 7:51 a.m. UTC
This is something I have long wanted to do, and now it is required so that
pushing/popping of the vectorizer state (which is what I'm going to work
on) doesn't become awkward.  It's mostly a 1:1 conversion everywhere, so
followup TLC is certainly possible.  At least it shows that the current
scheme of passing either a loop_vec_info or a bb_vec_info is already quite
awkward:

   else
-    {
-      loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
-      bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
-      void *target_cost_data;
-
-      if (loop_vinfo)
-       target_cost_data = LOOP_VINFO_TARGET_COST_DATA (loop_vinfo);
-      else
-       target_cost_data = BB_VINFO_TARGET_COST_DATA (bb_vinfo);
-
-      return add_stmt_cost (target_cost_data, count, kind, stmt_info,
-                           misalign, where);
-    }
+    return add_stmt_cost (stmt_info->vinfo->target_cost_data,
+                         count, kind, stmt_info, misalign, where);

You may notice I didn't bother to introduce all kinds of macros to
access the base class fields.  Instead I'll be working towards removing
most of the screaming (the all-caps accessor macros) in the vectorizer's
code.
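
The dispatch in the touched files now goes through the is-a.h helpers
(is_a, as_a, dyn_cast) keyed off the new kind member, with the common
state reached directly through the vec_info pointer.  A hypothetical
sketch of the resulting idiom (example_get_vf is made up for
illustration, it is not part of the patch):

  static unsigned
  example_get_vf (vec_info *vinfo)
  {
    /* Loop-only state still needs the derived type ...  */
    if (loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo))
      return LOOP_VINFO_VECT_FACTOR (loop_vinfo);
    /* ... while common state like vinfo->datarefs needs no cast, and
       the BB variant implicitly has a vectorization factor of one.  */
    return 1;
  }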

The patch moves the common members into the base class but doesn't yet
attempt further semantic unification (e.g. the vectorization factor could
be shared by both, being always one in the BB variant).
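
For reference, here is a rough sketch of the new layout in
tree-vectorizer.h, reconstructed from the members the patch touches; the
exact field list, types and names in the committed header may differ:

  /* Common vectorization state (reconstructed sketch).  */
  struct vec_info {
    enum vec_kind { bb, loop };
    vec_kind kind;                   /* vec_info::bb or vec_info::loop, set by
                                        new_bb_vec_info resp. new_loop_vec_info.  */
    vec<data_reference_p> datarefs;
    vec<ddr_p> ddrs;
    vec<gimple *> grouped_stores;
    vec<slp_instance> slp_instances;
    void *target_cost_data;          /* Target cost model state.  */
  };

  struct _loop_vec_info : public vec_info {
    struct loop *loop;
    int vectorization_factor;
    vec<gimple *> reductions;
    vec<gimple *> reduction_chains;
    /* ... loop-only members ...  */
  };
  typedef struct _loop_vec_info *loop_vec_info;

  struct _bb_vec_info : public vec_info {
    basic_block bb;
    /* ... BB-only members ...  */
  };
  typedef struct _bb_vec_info *bb_vec_info;

Each stmt_vec_info in turn carries a vinfo back-pointer to its owning
vec_info (see the stmt_info->vinfo uses throughout the patch), which is
what makes the record_stmt_cost simplification quoted above possible.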

Bootstrapped and tested on x86_64-unknown-linux-gnu, applied.

Richard.

2015-10-06  Richard Biener  <rguenther@suse.de>

	* tree-vectorizer.h (vec_info): New base class for...
	(_loop_vec_info): ... this and ...
	(_bb_vec_info): ... this.
	(vect_is_simple_use, vect_is_simple_use_1, new_stmt_vec_info,
	vect_analyze_data_refs_alignment, vect_verify_datarefs_alignment,
	vect_analyze_data_ref_accesses, vect_analyze_data_refs,
	vect_schedule_slp, vect_analyze_slp, vect_pattern_recog,
	vect_destroy_datarefs): Adjust interface to take a vec_info *
	rather than both a loop_vec_info and a bb_vec_info argument.
	* tree-vect-data-refs.c (vect_compute_data_refs_alignment,
	vect_verify_datarefs_alignment, vect_enhance_data_refs_alignment,
	vect_analyze_data_refs_alignment, vect_analyze_data_ref_accesses,
	vect_analyze_data_refs, vect_create_data_ref_ptr): Adjust
	accordingly.
	* tree-vect-loop.c (new_loop_vec_info): Initialize base class.
	(destroy_loop_vec_info, vect_analyze_loop_2,
	vect_is_simple_reduction_1, get_initial_def_for_induction,
	vect_create_epilog_for_reduction, vectorizable_reduction,
	vectorizable_live_operation, vect_transform_loop): Adjust.
	* tree-vect-patterns.c (type_conversion_p,
	vect_recog_widen_mult_pattern, vect_recog_widen_shift_pattern,
	vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern,
	vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern,
	check_bool_pattern, vect_recog_bool_pattern,
	vect_mark_pattern_stmts, vect_pattern_recog): Likewise.
	* tree-vect-slp.c (vect_get_and_check_slp_defs,
	vect_build_slp_tree_1, vect_build_slp_tree, vect_analyze_slp_cost_1,
	vect_analyze_slp_instance, vect_analyze_slp, destroy_bb_vec_info,
	vect_slp_analyze_bb_1, vect_schedule_slp): Likewise.
	(new_bb_vec_info): Initialize base class.
	* tree-vect-stmts.c (record_stmt_cost, process_use,
	vect_get_vec_def_for_operand, vect_finish_stmt_generation,
	vectorizable_mask_load_store, vectorizable_call,
	vectorizable_simd_clone_call, vectorizable_conversion,
	vectorizable_assignment, vectorizable_shift,
	vectorizable_operation, vectorizable_store,
	vectorizable_load, vect_is_simple_cond, vectorizable_condition,
	new_stmt_vec_info, vect_is_simple_use, vect_is_simple_use_1): Likewise.
	* tree-vectorizer.c (vect_destroy_datarefs): Likewise.

Patch

Index: gcc/tree-vect-data-refs.c
===================================================================
--- gcc/tree-vect-data-refs.c	(revision 228482)
+++ gcc/tree-vect-data-refs.c	(working copy)
@@ -784,23 +784,17 @@  vect_compute_data_ref_alignment (struct
    Return FALSE if a data reference is found that cannot be vectorized.  */
 
 static bool
-vect_compute_data_refs_alignment (loop_vec_info loop_vinfo,
-                                  bb_vec_info bb_vinfo)
+vect_compute_data_refs_alignment (vec_info *vinfo)
 {
-  vec<data_reference_p> datarefs;
+  vec<data_reference_p> datarefs = vinfo->datarefs;
   struct data_reference *dr;
   unsigned int i;
 
-  if (loop_vinfo)
-    datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
-  else
-    datarefs = BB_VINFO_DATAREFS (bb_vinfo);
-
   FOR_EACH_VEC_ELT (datarefs, i, dr)
     if (STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (DR_STMT (dr)))
         && !vect_compute_data_ref_alignment (dr))
       {
-        if (bb_vinfo)
+        if (is_a <bb_vec_info> (vinfo))
           {
             /* Mark unsupported statement as unvectorizable.  */
             STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (DR_STMT (dr))) = false;
@@ -879,18 +873,13 @@  vect_update_misalignment_for_peel (struc
    handled with respect to alignment.  */
 
 bool
-vect_verify_datarefs_alignment (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
+vect_verify_datarefs_alignment (vec_info *vinfo)
 {
-  vec<data_reference_p> datarefs;
+  vec<data_reference_p> datarefs = vinfo->datarefs;
   struct data_reference *dr;
   enum dr_alignment_support supportable_dr_alignment;
   unsigned int i;
 
-  if (loop_vinfo)
-    datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
-  else
-    datarefs = BB_VINFO_DATAREFS (bb_vinfo);
-
   FOR_EACH_VEC_ELT (datarefs, i, dr)
     {
       gimple *stmt = DR_STMT (dr);
@@ -1679,7 +1668,7 @@  vect_enhance_data_refs_alignment (loop_v
 
       if (do_peeling && known_alignment_for_access_p (dr0) && npeel == 0)
         {
-          stat = vect_verify_datarefs_alignment (loop_vinfo, NULL);
+          stat = vect_verify_datarefs_alignment (loop_vinfo);
           if (!stat)
             do_peeling = false;
           else
@@ -1758,7 +1747,7 @@  vect_enhance_data_refs_alignment (loop_v
 	     Drop the body_cst_vec on the floor here.  */
 	  body_cost_vec.release ();
 
-	  stat = vect_verify_datarefs_alignment (loop_vinfo, NULL);
+	  stat = vect_verify_datarefs_alignment (loop_vinfo);
 	  gcc_assert (stat);
           return stat;
         }
@@ -1875,7 +1864,7 @@  vect_enhance_data_refs_alignment (loop_v
       /* Peeling and versioning can't be done together at this time.  */
       gcc_assert (! (do_peeling && do_versioning));
 
-      stat = vect_verify_datarefs_alignment (loop_vinfo, NULL);
+      stat = vect_verify_datarefs_alignment (loop_vinfo);
       gcc_assert (stat);
       return stat;
     }
@@ -1883,7 +1872,7 @@  vect_enhance_data_refs_alignment (loop_v
   /* This point is reached if neither peeling nor versioning is being done.  */
   gcc_assert (! (do_peeling || do_versioning));
 
-  stat = vect_verify_datarefs_alignment (loop_vinfo, NULL);
+  stat = vect_verify_datarefs_alignment (loop_vinfo);
   return stat;
 }
 
@@ -1967,8 +1956,7 @@  vect_find_same_alignment_drs (struct dat
    Return FALSE if a data reference is found that cannot be vectorized.  */
 
 bool
-vect_analyze_data_refs_alignment (loop_vec_info loop_vinfo,
-                                  bb_vec_info bb_vinfo)
+vect_analyze_data_refs_alignment (vec_info *vinfo)
 {
   if (dump_enabled_p ())
     dump_printf_loc (MSG_NOTE, vect_location,
@@ -1976,17 +1964,17 @@  vect_analyze_data_refs_alignment (loop_v
 
   /* Mark groups of data references with same alignment using
      data dependence information.  */
-  if (loop_vinfo)
+  if (is_a <loop_vec_info> (vinfo))
     {
-      vec<ddr_p> ddrs = LOOP_VINFO_DDRS (loop_vinfo);
+      vec<ddr_p> ddrs = vinfo->ddrs;
       struct data_dependence_relation *ddr;
       unsigned int i;
 
       FOR_EACH_VEC_ELT (ddrs, i, ddr)
-	vect_find_same_alignment_drs (ddr, loop_vinfo);
+	vect_find_same_alignment_drs (ddr, as_a <loop_vec_info> (vinfo));
     }
 
-  if (!vect_compute_data_refs_alignment (loop_vinfo, bb_vinfo))
+  if (!vect_compute_data_refs_alignment (vinfo))
     {
       if (dump_enabled_p ())
 	dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@@ -2516,21 +2504,16 @@  dr_group_sort_cmp (const void *dra_, con
    FORNOW: handle only arrays and pointer accesses.  */
 
 bool
-vect_analyze_data_ref_accesses (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
+vect_analyze_data_ref_accesses (vec_info *vinfo)
 {
   unsigned int i;
-  vec<data_reference_p> datarefs;
+  vec<data_reference_p> datarefs = vinfo->datarefs;
   struct data_reference *dr;
 
   if (dump_enabled_p ())
     dump_printf_loc (MSG_NOTE, vect_location,
                      "=== vect_analyze_data_ref_accesses ===\n");
 
-  if (loop_vinfo)
-    datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
-  else
-    datarefs = BB_VINFO_DATAREFS (bb_vinfo);
-
   if (datarefs.is_empty ())
     return true;
 
@@ -2654,7 +2637,7 @@  vect_analyze_data_ref_accesses (loop_vec
 	  dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
 	                   "not vectorized: complicated access pattern.\n");
 
-        if (bb_vinfo)
+        if (is_a <bb_vec_info> (vinfo))
           {
             /* Mark the statement as not vectorizable.  */
             STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (DR_STMT (dr))) = false;
@@ -3217,9 +3200,7 @@  vect_check_gather_scatter (gimple *stmt,
 */
 
 bool
-vect_analyze_data_refs (loop_vec_info loop_vinfo,
-			bb_vec_info bb_vinfo,
-			int *min_vf, unsigned *n_stmts)
+vect_analyze_data_refs (vec_info *vinfo, int *min_vf, unsigned *n_stmts)
 {
   struct loop *loop = NULL;
   basic_block bb = NULL;
@@ -3232,7 +3213,7 @@  vect_analyze_data_refs (loop_vec_info lo
     dump_printf_loc (MSG_NOTE, vect_location,
                      "=== vect_analyze_data_refs ===\n");
 
-  if (loop_vinfo)
+  if (loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo))
     {
       basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
 
@@ -3304,6 +3285,7 @@  vect_analyze_data_refs (loop_vec_info lo
     }
   else
     {
+      bb_vec_info bb_vinfo = as_a <bb_vec_info> (vinfo);
       gimple_stmt_iterator gsi;
 
       bb = BB_VINFO_BB (bb_vinfo);
@@ -3381,11 +3363,11 @@  again:
 	      && !TREE_THIS_VOLATILE (DR_REF (dr))
 	      && targetm.vectorize.builtin_scatter != NULL;
 	  bool maybe_simd_lane_access
-	    = loop_vinfo && loop->simduid;
+	    = is_a <loop_vec_info> (vinfo) && loop->simduid;
 
 	  /* If target supports vector gather loads or scatter stores, or if
 	     this might be a SIMD lane access, see if they can't be used.  */
-	  if (loop_vinfo
+	  if (is_a <loop_vec_info> (vinfo)
 	      && (maybe_gather || maybe_scatter || maybe_simd_lane_access)
 	      && !nested_in_vect_loop_p (loop, stmt))
 	    {
@@ -3468,7 +3450,7 @@  again:
                   dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
 		}
 
-	      if (bb_vinfo)
+	      if (is_a <bb_vec_info> (vinfo))
 		break;
 
 	      return false;
@@ -3482,7 +3464,7 @@  again:
                              "not vectorized: base addr of dr is a "
                              "constant\n");
 
-          if (bb_vinfo)
+          if (is_a <bb_vec_info> (vinfo))
 	    break;
 
 	  if (gatherscatter != SG_NONE || simd_lane_access)
@@ -3500,7 +3482,7 @@  again:
               dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
             }
 
-          if (bb_vinfo)
+          if (is_a <bb_vec_info> (vinfo))
 	    break;
 
           return false;
@@ -3517,7 +3499,7 @@  again:
               dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
             }
 
-          if (bb_vinfo)
+          if (is_a <bb_vec_info> (vinfo))
 	    break;
 
 	  if (gatherscatter != SG_NONE || simd_lane_access)
@@ -3537,7 +3519,7 @@  again:
               dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
             }
 
-          if (bb_vinfo)
+          if (is_a <bb_vec_info> (vinfo))
 	    break;
 
 	  if (gatherscatter != SG_NONE || simd_lane_access)
@@ -3562,7 +3544,7 @@  again:
 	      dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
 	    }
 
-	  if (bb_vinfo)
+	  if (is_a <bb_vec_info> (vinfo))
 	    break;
 
 	  if (gatherscatter != SG_NONE || simd_lane_access)
@@ -3700,7 +3682,7 @@  again:
               dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
             }
 
-          if (bb_vinfo)
+          if (is_a <bb_vec_info> (vinfo))
 	    break;
 
 	  if (gatherscatter != SG_NONE || simd_lane_access)
@@ -3733,7 +3715,7 @@  again:
               dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
             }
 
-          if (bb_vinfo)
+          if (is_a <bb_vec_info> (vinfo))
 	    break;
 
 	  if (gatherscatter != SG_NONE || simd_lane_access)
@@ -3766,7 +3748,8 @@  again:
       if (gatherscatter != SG_NONE)
 	{
 	  tree off;
-	  if (!vect_check_gather_scatter (stmt, loop_vinfo, NULL, &off, NULL)
+	  if (!vect_check_gather_scatter (stmt, as_a <loop_vec_info> (vinfo),
+					  NULL, &off, NULL)
 	      || get_vectype_for_scalar_type (TREE_TYPE (off)) == NULL_TREE)
 	    {
 	      STMT_VINFO_DATA_REF (stmt_info) = NULL;
@@ -3789,7 +3772,7 @@  again:
 	  STMT_VINFO_GATHER_SCATTER_P (stmt_info) = gatherscatter;
 	}
 
-      else if (loop_vinfo
+      else if (is_a <loop_vec_info> (vinfo)
 	       && TREE_CODE (DR_STEP (dr)) != INTEGER_CST)
 	{
 	  if (nested_in_vect_loop_p (loop, stmt))
@@ -3814,7 +3797,7 @@  again:
      avoids spending useless time in analyzing their dependence.  */
   if (i != datarefs.length ())
     {
-      gcc_assert (bb_vinfo != NULL);
+      gcc_assert (is_a <bb_vec_info> (vinfo));
       for (unsigned j = i; j < datarefs.length (); ++j)
 	{
 	  data_reference_p dr = datarefs[j];
@@ -4259,7 +4242,7 @@  vect_create_data_ref_ptr (gimple *stmt,
 		 aggr_ptr, loop, &incr_gsi, insert_after,
 		 &indx_before_incr, &indx_after_incr);
       incr = gsi_stmt (incr_gsi);
-      set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo, NULL));
+      set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo));
 
       /* Copy the points-to information if it exists. */
       if (DR_PTR_INFO (dr))
@@ -4289,7 +4272,7 @@  vect_create_data_ref_ptr (gimple *stmt,
 		 containing_loop, &incr_gsi, insert_after, &indx_before_incr,
 		 &indx_after_incr);
       incr = gsi_stmt (incr_gsi);
-      set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo, NULL));
+      set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo));
 
       /* Copy the points-to information if it exists. */
       if (DR_PTR_INFO (dr))
Index: gcc/tree-vect-loop.c
===================================================================
--- gcc/tree-vect-loop.c	(revision 228482)
+++ gcc/tree-vect-loop.c	(working copy)
@@ -897,6 +897,7 @@  new_loop_vec_info (struct loop *loop)
   unsigned int i, nbbs;
 
   res = (loop_vec_info) xcalloc (1, sizeof (struct _loop_vec_info));
+  res->kind = vec_info::loop;
   LOOP_VINFO_LOOP (res) = loop;
 
   bbs = get_loop_body (loop);
@@ -924,7 +925,7 @@  new_loop_vec_info (struct loop *loop)
               loop_vec_info inner_loop_vinfo =
                 STMT_VINFO_LOOP_VINFO (stmt_info);
               gcc_assert (loop->inner == LOOP_VINFO_LOOP (inner_loop_vinfo));
-              STMT_VINFO_LOOP_VINFO (stmt_info) = res;
+              stmt_info->vinfo = res;
             }
           for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
            {
@@ -933,7 +934,7 @@  new_loop_vec_info (struct loop *loop)
               loop_vec_info inner_loop_vinfo =
                  STMT_VINFO_LOOP_VINFO (stmt_info);
               gcc_assert (loop->inner == LOOP_VINFO_LOOP (inner_loop_vinfo));
-              STMT_VINFO_LOOP_VINFO (stmt_info) = res;
+              stmt_info->vinfo = res;
            }
         }
       else
@@ -943,14 +944,14 @@  new_loop_vec_info (struct loop *loop)
             {
 	      gimple *phi = gsi_stmt (si);
               gimple_set_uid (phi, 0);
-              set_vinfo_for_stmt (phi, new_stmt_vec_info (phi, res, NULL));
+              set_vinfo_for_stmt (phi, new_stmt_vec_info (phi, res));
             }
 
           for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
             {
 	      gimple *stmt = gsi_stmt (si);
               gimple_set_uid (stmt, 0);
-              set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, res, NULL));
+              set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, res));
             }
         }
     }
@@ -1055,7 +1056,7 @@  destroy_loop_vec_info (loop_vec_info loo
     }
 
   free (LOOP_VINFO_BBS (loop_vinfo));
-  vect_destroy_datarefs (loop_vinfo, NULL);
+  vect_destroy_datarefs (loop_vinfo);
   free_dependence_relations (LOOP_VINFO_DDRS (loop_vinfo));
   LOOP_VINFO_LOOP_NEST (loop_vinfo).release ();
   LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo).release ();
@@ -1767,7 +1768,7 @@  vect_analyze_loop_2 (loop_vec_info loop_
      FORNOW: Handle only simple, array references, which
      alignment can be forced, and aligned pointer-references.  */
 
-  ok = vect_analyze_data_refs (loop_vinfo, NULL, &min_vf, &n_stmts);
+  ok = vect_analyze_data_refs (loop_vinfo, &min_vf, &n_stmts);
   if (!ok)
     {
       if (dump_enabled_p ())
@@ -1781,14 +1782,14 @@  vect_analyze_loop_2 (loop_vec_info loop_
 
   vect_analyze_scalar_cycles (loop_vinfo);
 
-  vect_pattern_recog (loop_vinfo, NULL);
+  vect_pattern_recog (loop_vinfo);
 
   vect_fixup_scalar_cycles_with_patterns (loop_vinfo);
 
   /* Analyze the access patterns of the data-refs in the loop (consecutive,
      complex, etc.). FORNOW: Only handle consecutive access pattern.  */
 
-  ok = vect_analyze_data_ref_accesses (loop_vinfo, NULL);
+  ok = vect_analyze_data_ref_accesses (loop_vinfo);
   if (!ok)
     {
       if (dump_enabled_p ())
@@ -1840,7 +1841,7 @@  vect_analyze_loop_2 (loop_vec_info loop_
     }
 
   /* Check the SLP opportunities in the loop, analyze and build SLP trees.  */
-  ok = vect_analyze_slp (loop_vinfo, NULL, n_stmts);
+  ok = vect_analyze_slp (loop_vinfo, n_stmts);
   if (!ok)
     return false;
 
@@ -1858,7 +1859,7 @@  vect_analyze_loop_2 (loop_vec_info loop_
   /* Analyze the alignment of the data-refs in the loop.
      Fail if a data reference is found that cannot be vectorized.  */
 
-  ok = vect_analyze_data_refs_alignment (loop_vinfo, NULL);
+  ok = vect_analyze_data_refs_alignment (loop_vinfo);
   if (!ok)
     {
       if (dump_enabled_p ())
@@ -2646,7 +2647,7 @@  vect_is_simple_reduction_1 (loop_vec_inf
       gimple *negate_stmt = gimple_build_assign (negrhs, NEGATE_EXPR, rhs);
       gimple_stmt_iterator gsi = gsi_for_stmt (def_stmt);
       set_vinfo_for_stmt (negate_stmt, new_stmt_vec_info (negate_stmt, 
-							  loop_info, NULL));
+							  loop_info));
       gsi_insert_before (&gsi, negate_stmt, GSI_NEW_STMT);
       gimple_assign_set_rhs2 (def_stmt, negrhs);
       gimple_assign_set_rhs_code (def_stmt, PLUS_EXPR);
@@ -3508,7 +3509,7 @@  get_initial_def_for_induction (gimple *i
 						 new_stmt);
 	  gcc_assert (!new_bb);
 	  set_vinfo_for_stmt (new_stmt,
-			      new_stmt_vec_info (new_stmt, loop_vinfo, NULL));
+			      new_stmt_vec_info (new_stmt, loop_vinfo));
 	}
     }
   else
@@ -3610,7 +3611,7 @@  get_initial_def_for_induction (gimple *i
   vec_dest = vect_get_new_vect_var (vectype, vect_simple_var, "vec_iv_");
   induction_phi = create_phi_node (vec_dest, iv_loop->header);
   set_vinfo_for_stmt (induction_phi,
-		      new_stmt_vec_info (induction_phi, loop_vinfo, NULL));
+		      new_stmt_vec_info (induction_phi, loop_vinfo));
   induc_def = PHI_RESULT (induction_phi);
 
   /* Create the iv update inside the loop  */
@@ -3618,8 +3619,7 @@  get_initial_def_for_induction (gimple *i
   vec_def = make_ssa_name (vec_dest, new_stmt);
   gimple_assign_set_lhs (new_stmt, vec_def);
   gsi_insert_before (&si, new_stmt, GSI_SAME_STMT);
-  set_vinfo_for_stmt (new_stmt, new_stmt_vec_info (new_stmt, loop_vinfo,
-                                                   NULL));
+  set_vinfo_for_stmt (new_stmt, new_stmt_vec_info (new_stmt, loop_vinfo));
 
   /* Set the arguments of the phi node:  */
   add_phi_arg (induction_phi, vec_init, pe, UNKNOWN_LOCATION);
@@ -3684,7 +3684,7 @@  get_initial_def_for_induction (gimple *i
 	      gsi_insert_before (&si, new_stmt, GSI_SAME_STMT);
 	    }
 	  set_vinfo_for_stmt (new_stmt,
-			      new_stmt_vec_info (new_stmt, loop_vinfo, NULL));
+			      new_stmt_vec_info (new_stmt, loop_vinfo));
 	  STMT_VINFO_RELATED_STMT (prev_stmt_vinfo) = new_stmt;
 	  prev_stmt_vinfo = vinfo_for_stmt (new_stmt);
 	}
@@ -3752,7 +3752,7 @@  get_initial_def_for_induction (gimple *i
       si = gsi_after_labels (bb);
       gsi_insert_before (&si, new_stmt, GSI_SAME_STMT);
       set_vinfo_for_stmt (new_stmt,
-			  new_stmt_vec_info (new_stmt, loop_vinfo, NULL));
+			  new_stmt_vec_info (new_stmt, loop_vinfo));
       STMT_VINFO_RELATED_STMT (vinfo_for_stmt (new_stmt))
 	= STMT_VINFO_RELATED_STMT (vinfo_for_stmt (induction_phi));
     }
@@ -4177,7 +4177,7 @@  vect_create_epilog_for_reduction (vec<tr
         {
 	  tree new_def = copy_ssa_name (def);
           phi = create_phi_node (new_def, exit_bb);
-          set_vinfo_for_stmt (phi, new_stmt_vec_info (phi, loop_vinfo, NULL));
+          set_vinfo_for_stmt (phi, new_stmt_vec_info (phi, loop_vinfo));
           if (j == 0)
             new_phis.quick_push (phi);
           else
@@ -4205,7 +4205,7 @@  vect_create_epilog_for_reduction (vec<tr
 	  SET_PHI_ARG_DEF (outer_phi, single_exit (loop)->dest_idx,
 			   PHI_RESULT (phi));
 	  set_vinfo_for_stmt (outer_phi, new_stmt_vec_info (outer_phi,
-							    loop_vinfo, NULL));
+							    loop_vinfo));
 	  inner_phis.quick_push (phi);
 	  new_phis[i] = outer_phi;
 	  prev_phi_info = vinfo_for_stmt (outer_phi);
@@ -4217,7 +4217,7 @@  vect_create_epilog_for_reduction (vec<tr
 	      SET_PHI_ARG_DEF (outer_phi, single_exit (loop)->dest_idx,
 			       PHI_RESULT (phi));
 	      set_vinfo_for_stmt (outer_phi, new_stmt_vec_info (outer_phi,
-							loop_vinfo, NULL));
+								loop_vinfo));
 	      STMT_VINFO_RELATED_STMT (prev_phi_info) = outer_phi;
 	      prev_phi_info = vinfo_for_stmt (outer_phi);
 	    }
@@ -4558,8 +4558,7 @@  vect_finalize_reduction:
       if (nested_in_vect_loop)
         {
           set_vinfo_for_stmt (epilog_stmt,
-                              new_stmt_vec_info (epilog_stmt, loop_vinfo,
-                                                 NULL));
+                              new_stmt_vec_info (epilog_stmt, loop_vinfo));
           STMT_VINFO_RELATED_STMT (vinfo_for_stmt (epilog_stmt)) =
                 STMT_VINFO_RELATED_STMT (vinfo_for_stmt (new_phi));
 
@@ -4730,7 +4729,7 @@  vect_finalize_reduction:
                   /* Create vector phi node.  */
                   vect_phi = create_phi_node (vec_initial_def, bb);
                   new_phi_vinfo = new_stmt_vec_info (vect_phi,
-                                    loop_vec_info_for_loop (outer_loop), NULL);
+                                    loop_vec_info_for_loop (outer_loop));
                   set_vinfo_for_stmt (vect_phi, new_phi_vinfo);
 
                   /* Create vs0 - initial def of the double reduction phi.  */
@@ -5037,7 +5036,7 @@  vectorizable_reduction (gimple *stmt, gi
       if (i == 0 && code == COND_EXPR)
         continue;
 
-      is_simple_use = vect_is_simple_use_1 (ops[i], stmt, loop_vinfo, NULL,
+      is_simple_use = vect_is_simple_use_1 (ops[i], stmt, loop_vinfo,
 					    &def_stmt, &def, &dt, &tem);
       if (!vectype_in)
 	vectype_in = tem;
@@ -5058,7 +5057,7 @@  vectorizable_reduction (gimple *stmt, gi
         }
     }
 
-  is_simple_use = vect_is_simple_use_1 (ops[i], stmt, loop_vinfo, NULL,
+  is_simple_use = vect_is_simple_use_1 (ops[i], stmt, loop_vinfo,
 					&def_stmt, &def, &dt, &tem);
   if (!vectype_in)
     vectype_in = tem;
@@ -5387,8 +5386,7 @@  vectorizable_reduction (gimple *stmt, gi
                  operand.  */
               new_phi = create_phi_node (vec_dest, loop->header);
               set_vinfo_for_stmt (new_phi,
-                                  new_stmt_vec_info (new_phi, loop_vinfo,
-                                                     NULL));
+                                  new_stmt_vec_info (new_phi, loop_vinfo));
                if (j == 0 || slp_node)
                  phis.quick_push (new_phi);
             }
@@ -5440,14 +5438,14 @@  vectorizable_reduction (gimple *stmt, gi
 	      gimple *dummy_stmt;
               tree dummy;
 
-              vect_is_simple_use (ops[!reduc_index], stmt, loop_vinfo, NULL,
+              vect_is_simple_use (ops[!reduc_index], stmt, loop_vinfo,
                                   &dummy_stmt, &dummy, &dt);
               loop_vec_def0 = vect_get_vec_def_for_stmt_copy (dt,
                                                               loop_vec_def0);
               vec_oprnds0[0] = loop_vec_def0;
               if (op_type == ternary_op)
                 {
-                  vect_is_simple_use (op1, stmt, loop_vinfo, NULL, &dummy_stmt,
+                  vect_is_simple_use (op1, stmt, loop_vinfo, &dummy_stmt,
                                       &dummy, &dt);
                   loop_vec_def1 = vect_get_vec_def_for_stmt_copy (dt,
                                                                 loop_vec_def1);
@@ -5749,8 +5747,7 @@  vectorizable_live_operation (gimple *stm
       else
 	op = gimple_op (stmt, i + 1);
       if (op
-          && !vect_is_simple_use (op, stmt, loop_vinfo, NULL, &def_stmt, &def,
-				  &dt))
+          && !vect_is_simple_use (op, stmt, loop_vinfo, &def_stmt, &def, &dt))
         {
           if (dump_enabled_p ())
 	    dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@@ -6218,7 +6215,7 @@  vect_transform_loop (loop_vec_info loop_
 		    dump_printf_loc (MSG_NOTE, vect_location,
 				     "=== scheduling SLP instances ===\n");
 
-		  vect_schedule_slp (loop_vinfo, NULL);
+		  vect_schedule_slp (loop_vinfo);
 		}
 
 	      /* Hybrid SLP stmts must be vectorized in addition to SLP.  */
Index: gcc/tree-vect-patterns.c
===================================================================
--- gcc/tree-vect-patterns.c	(revision 228482)
+++ gcc/tree-vect-patterns.c	(working copy)
@@ -171,18 +171,14 @@  type_conversion_p (tree name, gimple *us
 {
   tree dummy;
   gimple *dummy_gimple;
-  loop_vec_info loop_vinfo;
   stmt_vec_info stmt_vinfo;
   tree type = TREE_TYPE (name);
   tree oprnd0;
   enum vect_def_type dt;
   tree def;
-  bb_vec_info bb_vinfo;
 
   stmt_vinfo = vinfo_for_stmt (use_stmt);
-  loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
-  bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
-  if (!vect_is_simple_use (name, use_stmt, loop_vinfo, bb_vinfo, def_stmt,
+  if (!vect_is_simple_use (name, use_stmt, stmt_vinfo->vinfo, def_stmt,
 			   &def, &dt))
     return false;
 
@@ -211,8 +207,8 @@  type_conversion_p (tree name, gimple *us
   else
     *promotion = false;
 
-  if (!vect_is_simple_use (oprnd0, *def_stmt, loop_vinfo,
-			   bb_vinfo, &dummy_gimple, &dummy, &dt))
+  if (!vect_is_simple_use (oprnd0, *def_stmt, stmt_vinfo->vinfo,
+			   &dummy_gimple, &dummy, &dt))
     return false;
 
   return true;
@@ -978,8 +974,6 @@  vect_recog_widen_mult_pattern (vec<gimpl
   pattern_stmt = gimple_build_assign (var, WIDEN_MULT_EXPR, oprnd0, oprnd1);
 
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
-  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
-  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
   STMT_VINFO_PATTERN_DEF_SEQ (stmt_vinfo) = NULL;
 
   /* If the original two operands have different sizes, we may need to convert
@@ -989,7 +983,7 @@  vect_recog_widen_mult_pattern (vec<gimpl
     {
       append_pattern_def_seq (stmt_vinfo, new_stmt);
       stmt_vec_info new_stmt_info
-        = new_stmt_vec_info (new_stmt, loop_vinfo, bb_vinfo);
+        = new_stmt_vec_info (new_stmt, stmt_vinfo->vinfo);
       set_vinfo_for_stmt (new_stmt, new_stmt_info);
       STMT_VINFO_VECTYPE (new_stmt_info) = vectype;
     }
@@ -1000,7 +994,7 @@  vect_recog_widen_mult_pattern (vec<gimpl
     {
       append_pattern_def_seq (stmt_vinfo, pattern_stmt);
       stmt_vec_info pattern_stmt_info
-        = new_stmt_vec_info (pattern_stmt, loop_vinfo, bb_vinfo);
+        = new_stmt_vec_info (pattern_stmt, stmt_vinfo->vinfo);
       set_vinfo_for_stmt (pattern_stmt, pattern_stmt_info);
       STMT_VINFO_VECTYPE (pattern_stmt_info) = vecitype;
       pattern_stmt = gimple_build_assign (vect_recog_temp_ssa_var (type, NULL),
@@ -1754,11 +1748,9 @@  vect_recog_widen_shift_pattern (vec<gimp
   if (wstmt)
     {
       stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
-      loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
-      bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
       new_pattern_def_seq (stmt_vinfo, wstmt);
       stmt_vec_info new_stmt_info
-	= new_stmt_vec_info (wstmt, loop_vinfo, bb_vinfo);
+	= new_stmt_vec_info (wstmt, stmt_vinfo->vinfo);
       set_vinfo_for_stmt (wstmt, new_stmt_info);
       STMT_VINFO_VECTYPE (new_stmt_info) = vectype;
     }
@@ -1807,8 +1799,7 @@  vect_recog_rotate_pattern (vec<gimple *>
   gimple *pattern_stmt, *def_stmt;
   enum tree_code rhs_code;
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
-  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
-  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
+  vec_info *vinfo = stmt_vinfo->vinfo;
   enum vect_def_type dt;
   optab optab1, optab2;
   edge ext_def = NULL;
@@ -1839,8 +1830,7 @@  vect_recog_rotate_pattern (vec<gimple *>
       || !TYPE_UNSIGNED (type))
     return NULL;
 
-  if (!vect_is_simple_use (oprnd1, last_stmt, loop_vinfo, bb_vinfo, &def_stmt,
-			   &def, &dt))
+  if (!vect_is_simple_use (oprnd1, last_stmt, vinfo, &def_stmt, &def, &dt))
     return NULL;
 
   if (dt != vect_internal_def
@@ -1859,7 +1849,7 @@  vect_recog_rotate_pattern (vec<gimple *>
       && optab_handler (optab1, TYPE_MODE (vectype)) != CODE_FOR_nothing)
     return NULL;
 
-  if (bb_vinfo != NULL || dt != vect_internal_def)
+  if (is_a <bb_vec_info> (vinfo) || dt != vect_internal_def)
     {
       optab2 = optab_for_tree_code (rhs_code, vectype, optab_scalar);
       if (optab2
@@ -1876,7 +1866,7 @@  vect_recog_rotate_pattern (vec<gimple *>
       || !optab2
       || optab_handler (optab2, TYPE_MODE (vectype)) == CODE_FOR_nothing)
     {
-      if (bb_vinfo == NULL && dt == vect_internal_def)
+      if (! is_a <bb_vec_info> (vinfo) && dt == vect_internal_def)
 	return NULL;
       optab1 = optab_for_tree_code (LSHIFT_EXPR, vectype, optab_scalar);
       optab2 = optab_for_tree_code (RSHIFT_EXPR, vectype, optab_scalar);
@@ -1894,9 +1884,9 @@  vect_recog_rotate_pattern (vec<gimple *>
 
   if (dt == vect_external_def
       && TREE_CODE (oprnd1) == SSA_NAME
-      && loop_vinfo)
+      && is_a <loop_vec_info> (vinfo))
     {
-      struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
+      struct loop *loop = as_a <loop_vec_info> (vinfo)->loop;
       ext_def = loop_preheader_edge (loop);
       if (!SSA_NAME_IS_DEFAULT_DEF (oprnd1))
 	{
@@ -1963,7 +1953,7 @@  vect_recog_rotate_pattern (vec<gimple *>
 	}
       else
 	{
-	  def_stmt_vinfo = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
+	  def_stmt_vinfo = new_stmt_vec_info (def_stmt, vinfo);
 	  set_vinfo_for_stmt (def_stmt, def_stmt_vinfo);
 	  STMT_VINFO_VECTYPE (def_stmt_vinfo) = vecstype;
 	  append_pattern_def_seq (stmt_vinfo, def_stmt);
@@ -1982,7 +1972,7 @@  vect_recog_rotate_pattern (vec<gimple *>
 	}
       else
 	{
-	  def_stmt_vinfo = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
+	  def_stmt_vinfo = new_stmt_vec_info (def_stmt, vinfo);
 	  set_vinfo_for_stmt (def_stmt, def_stmt_vinfo);
 	  STMT_VINFO_VECTYPE (def_stmt_vinfo) = vecstype;
 	  append_pattern_def_seq (stmt_vinfo, def_stmt);
@@ -2066,8 +2056,7 @@  vect_recog_vector_vector_shift_pattern (
   gimple *pattern_stmt, *def_stmt;
   enum tree_code rhs_code;
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
-  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
-  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
+  vec_info *vinfo = stmt_vinfo->vinfo;
   enum vect_def_type dt;
   tree def;
 
@@ -2101,7 +2090,7 @@  vect_recog_vector_vector_shift_pattern (
 	 != TYPE_PRECISION (TREE_TYPE (oprnd0)))
     return NULL;
 
-  if (!vect_is_simple_use (oprnd1, last_stmt, loop_vinfo, bb_vinfo, &def_stmt,
+  if (!vect_is_simple_use (oprnd1, last_stmt, vinfo, &def_stmt,
 			   &def, &dt))
     return NULL;
 
@@ -2329,8 +2318,7 @@  vect_recog_divmod_pattern (vec<gimple *>
   gimple *pattern_stmt, *def_stmt;
   enum tree_code rhs_code;
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
-  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
-  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
+  vec_info *vinfo = stmt_vinfo->vinfo;
   optab optab;
   tree q;
   int dummy_int, prec;
@@ -2435,8 +2423,7 @@  vect_recog_divmod_pattern (vec<gimple *>
 	      def_stmt = gimple_build_assign (var, COND_EXPR, cond,
 					      build_int_cst (utype, -1),
 					      build_int_cst (utype, 0));
-	      def_stmt_vinfo
-		= new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
+	      def_stmt_vinfo = new_stmt_vec_info (def_stmt, vinfo);
 	      set_vinfo_for_stmt (def_stmt, def_stmt_vinfo);
 	      STMT_VINFO_VECTYPE (def_stmt_vinfo) = vecutype;
 	      append_pattern_def_seq (stmt_vinfo, def_stmt);
@@ -2444,8 +2431,7 @@  vect_recog_divmod_pattern (vec<gimple *>
 	      def_stmt = gimple_build_assign (var, RSHIFT_EXPR,
 					      gimple_assign_lhs (def_stmt),
 					      shift);
-	      def_stmt_vinfo
-		= new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
+	      def_stmt_vinfo = new_stmt_vec_info (def_stmt, vinfo);
 	      set_vinfo_for_stmt (def_stmt, def_stmt_vinfo);
 	      STMT_VINFO_VECTYPE (def_stmt_vinfo) = vecutype;
 	      append_pattern_def_seq (stmt_vinfo, def_stmt);
@@ -2779,8 +2765,7 @@  vect_recog_mixed_size_cond_pattern (vec<
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt), def_stmt_info;
   tree type, vectype, comp_vectype, itype = NULL_TREE, vecitype;
   gimple *pattern_stmt, *def_stmt;
-  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
-  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
+  vec_info *vinfo = stmt_vinfo->vinfo;
   tree orig_type0 = NULL_TREE, orig_type1 = NULL_TREE;
   gimple *def_stmt0 = NULL, *def_stmt1 = NULL;
   bool promotion;
@@ -2885,7 +2870,7 @@  vect_recog_mixed_size_cond_pattern (vec<
 				      NOP_EXPR, gimple_assign_lhs (def_stmt));
 
   new_pattern_def_seq (stmt_vinfo, def_stmt);
-  def_stmt_info = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
+  def_stmt_info = new_stmt_vec_info (def_stmt, vinfo);
   set_vinfo_for_stmt (def_stmt, def_stmt_info);
   STMT_VINFO_VECTYPE (def_stmt_info) = vecitype;
   *type_in = vecitype;
@@ -2903,14 +2888,14 @@  vect_recog_mixed_size_cond_pattern (vec<
    true if bool VAR can be optimized that way.  */
 
 static bool
-check_bool_pattern (tree var, loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
+check_bool_pattern (tree var, vec_info *vinfo)
 {
   gimple *def_stmt;
   enum vect_def_type dt;
   tree def, rhs1;
   enum tree_code rhs_code;
 
-  if (!vect_is_simple_use (var, NULL, loop_vinfo, bb_vinfo, &def_stmt, &def,
+  if (!vect_is_simple_use (var, NULL, vinfo, &def_stmt, &def,
 			   &dt))
     return false;
 
@@ -2928,25 +2913,24 @@  check_bool_pattern (tree var, loop_vec_i
   switch (rhs_code)
     {
     case SSA_NAME:
-      return check_bool_pattern (rhs1, loop_vinfo, bb_vinfo);
+      return check_bool_pattern (rhs1, vinfo);
 
     CASE_CONVERT:
       if ((TYPE_PRECISION (TREE_TYPE (rhs1)) != 1
 	   || !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
 	  && TREE_CODE (TREE_TYPE (rhs1)) != BOOLEAN_TYPE)
 	return false;
-      return check_bool_pattern (rhs1, loop_vinfo, bb_vinfo);
+      return check_bool_pattern (rhs1, vinfo);
 
     case BIT_NOT_EXPR:
-      return check_bool_pattern (rhs1, loop_vinfo, bb_vinfo);
+      return check_bool_pattern (rhs1, vinfo);
 
     case BIT_AND_EXPR:
     case BIT_IOR_EXPR:
     case BIT_XOR_EXPR:
-      if (!check_bool_pattern (rhs1, loop_vinfo, bb_vinfo))
+      if (!check_bool_pattern (rhs1, vinfo))
 	return false;
-      return check_bool_pattern (gimple_assign_rhs2 (def_stmt), loop_vinfo,
-				 bb_vinfo);
+      return check_bool_pattern (gimple_assign_rhs2 (def_stmt), vinfo);
 
     default:
       if (TREE_CODE_CLASS (rhs_code) == tcc_comparison)
@@ -3243,8 +3227,7 @@  vect_recog_bool_pattern (vec<gimple *> *
   enum tree_code rhs_code;
   tree var, lhs, rhs, vectype;
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
-  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
-  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
+  vec_info *vinfo = stmt_vinfo->vinfo;
   gimple *pattern_stmt;
 
   if (!is_gimple_assign (last_stmt))
@@ -3268,7 +3251,7 @@  vect_recog_bool_pattern (vec<gimple *> *
       if (vectype == NULL_TREE)
 	return NULL;
 
-      if (!check_bool_pattern (var, loop_vinfo, bb_vinfo))
+      if (!check_bool_pattern (var, vinfo))
 	return NULL;
 
       rhs = adjust_bool_pattern (var, TREE_TYPE (lhs), NULL_TREE, stmts);
@@ -3306,7 +3289,7 @@  vect_recog_bool_pattern (vec<gimple *> *
       if (get_vectype_for_scalar_type (type) == NULL_TREE)
 	return NULL;
 
-      if (!check_bool_pattern (var, loop_vinfo, bb_vinfo))
+      if (!check_bool_pattern (var, vinfo))
 	return NULL;
 
       rhs = adjust_bool_pattern (var, type, NULL_TREE, stmts);
@@ -3334,7 +3317,7 @@  vect_recog_bool_pattern (vec<gimple *> *
       gcc_assert (vectype != NULL_TREE);
       if (!VECTOR_MODE_P (TYPE_MODE (vectype)))
 	return NULL;
-      if (!check_bool_pattern (var, loop_vinfo, bb_vinfo))
+      if (!check_bool_pattern (var, vinfo))
 	return NULL;
 
       rhs = adjust_bool_pattern (var, TREE_TYPE (vectype), NULL_TREE, stmts);
@@ -3347,8 +3330,7 @@  vect_recog_bool_pattern (vec<gimple *> *
 	  rhs = rhs2;
 	}
       pattern_stmt = gimple_build_assign (lhs, SSA_NAME, rhs);
-      pattern_stmt_info = new_stmt_vec_info (pattern_stmt, loop_vinfo,
-						bb_vinfo);
+      pattern_stmt_info = new_stmt_vec_info (pattern_stmt, vinfo);
       set_vinfo_for_stmt (pattern_stmt, pattern_stmt_info);
       STMT_VINFO_DATA_REF (pattern_stmt_info)
 	= STMT_VINFO_DATA_REF (stmt_vinfo);
@@ -3382,15 +3364,13 @@  vect_mark_pattern_stmts (gimple *orig_st
 {
   stmt_vec_info pattern_stmt_info, def_stmt_info;
   stmt_vec_info orig_stmt_info = vinfo_for_stmt (orig_stmt);
-  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (orig_stmt_info);
-  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (orig_stmt_info);
+  vec_info *vinfo = orig_stmt_info->vinfo;
   gimple *def_stmt;
 
   pattern_stmt_info = vinfo_for_stmt (pattern_stmt);
   if (pattern_stmt_info == NULL)
     {
-      pattern_stmt_info = new_stmt_vec_info (pattern_stmt, loop_vinfo,
-						bb_vinfo);
+      pattern_stmt_info = new_stmt_vec_info (pattern_stmt, vinfo);
       set_vinfo_for_stmt (pattern_stmt, pattern_stmt_info);
     }
   gimple_set_bb (pattern_stmt, gimple_bb (orig_stmt));
@@ -3413,8 +3393,7 @@  vect_mark_pattern_stmts (gimple *orig_st
 	  def_stmt_info = vinfo_for_stmt (def_stmt);
 	  if (def_stmt_info == NULL)
 	    {
-	      def_stmt_info = new_stmt_vec_info (def_stmt, loop_vinfo,
-						 bb_vinfo);
+	      def_stmt_info = new_stmt_vec_info (def_stmt, vinfo);
 	      set_vinfo_for_stmt (def_stmt, def_stmt_info);
 	    }
 	  gimple_set_bb (def_stmt, gimple_bb (orig_stmt));
@@ -3629,7 +3608,7 @@  vect_pattern_recog_1 (vect_recog_func_pt
    be recorded in S3.  */
 
 void
-vect_pattern_recog (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
+vect_pattern_recog (vec_info *vinfo)
 {
   struct loop *loop;
   basic_block *bbs;
@@ -3644,7 +3623,7 @@  vect_pattern_recog (loop_vec_info loop_v
     dump_printf_loc (MSG_NOTE, vect_location,
                      "=== vect_pattern_recog ===\n");
 
-  if (loop_vinfo)
+  if (loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo))
     {
       loop = LOOP_VINFO_LOOP (loop_vinfo);
       bbs = LOOP_VINFO_BBS (loop_vinfo);
@@ -3652,7 +3631,7 @@  vect_pattern_recog (loop_vec_info loop_v
     }
   else
     {
-      bbs = &BB_VINFO_BB (bb_vinfo);
+      bbs = &as_a <bb_vec_info> (vinfo)->bb;
       nbbs = 1;
     }
 
@@ -3663,7 +3642,8 @@  vect_pattern_recog (loop_vec_info loop_v
       basic_block bb = bbs[i];
       for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
         {
-	  if (bb_vinfo && (stmt = gsi_stmt (si))
+	  if (is_a <bb_vec_info> (vinfo)
+	      && (stmt = gsi_stmt (si))
 	      && vinfo_for_stmt (stmt)
 	      && !STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (stmt)))
 	   continue;
Index: gcc/tree-vect-slp.c
===================================================================
--- gcc/tree-vect-slp.c	(revision 228482)
+++ gcc/tree-vect-slp.c	(working copy)
@@ -211,7 +211,7 @@  vect_get_place_in_interleaving_chain (gi
    operation return 1, if everything is ok return 0.  */
 
 static int 
-vect_get_and_check_slp_defs (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
+vect_get_and_check_slp_defs (vec_info *vinfo,
 			     gimple *stmt, unsigned stmt_num,
                              vec<slp_oprnd_info> *oprnds_info)
 {
@@ -229,8 +229,8 @@  vect_get_and_check_slp_defs (loop_vec_in
   bool first = stmt_num == 0;
   bool second = stmt_num == 1;
 
-  if (loop_vinfo)
-    loop = LOOP_VINFO_LOOP (loop_vinfo);
+  if (is_a <loop_vec_info> (vinfo))
+    loop = LOOP_VINFO_LOOP (as_a <loop_vec_info> (vinfo));
 
   if (is_gimple_call (stmt))
     {
@@ -270,7 +270,7 @@  again:
 
       oprnd_info = (*oprnds_info)[i];
 
-      if (!vect_is_simple_use (oprnd, NULL, loop_vinfo, bb_vinfo, &def_stmt,
+      if (!vect_is_simple_use (oprnd, NULL, vinfo, &def_stmt,
 			       &def, &dt))
 	{
 	  if (dump_enabled_p ())
@@ -288,8 +288,10 @@  again:
          from the pattern.  Check that all the stmts of the node are in the
          pattern.  */
       if (def_stmt && gimple_bb (def_stmt)
-          && ((loop && flow_bb_inside_loop_p (loop, gimple_bb (def_stmt)))
-	      || (!loop && gimple_bb (def_stmt) == BB_VINFO_BB (bb_vinfo)
+          && ((is_a <loop_vec_info> (vinfo)
+	       && flow_bb_inside_loop_p (loop, gimple_bb (def_stmt)))
+	      || (is_a <bb_vec_info> (vinfo)
+		  && gimple_bb (def_stmt) == as_a <bb_vec_info> (vinfo)->bb
 		  && gimple_code (def_stmt) != GIMPLE_PHI))
           && vinfo_for_stmt (def_stmt)
           && STMT_VINFO_IN_PATTERN_P (vinfo_for_stmt (def_stmt))
@@ -448,7 +450,7 @@  again:
    carried out or the stmts will never be vectorized by SLP.  */
 
 static bool
-vect_build_slp_tree_1 (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
+vect_build_slp_tree_1 (vec_info *vinfo,
 		       vec<gimple *> stmts, unsigned int group_size,
 		       unsigned nops, unsigned int *max_nunits,
 		       unsigned int vectorization_factor, bool *matches,
@@ -551,7 +553,7 @@  vect_build_slp_tree_1 (loop_vec_info loo
 
       /* If populating the vector type requires unrolling then fail
          before adjusting *max_nunits for basic-block vectorization.  */
-      if (bb_vinfo
+      if (is_a <bb_vec_info> (vinfo)
 	  && TYPE_VECTOR_SUBPARTS (vectype) > group_size)
 	{
 	  dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location, 
@@ -566,7 +568,7 @@  vect_build_slp_tree_1 (loop_vec_info loo
       if (*max_nunits < TYPE_VECTOR_SUBPARTS (vectype))
         {
           *max_nunits = TYPE_VECTOR_SUBPARTS (vectype);
-          if (bb_vinfo)
+          if (is_a <bb_vec_info> (vinfo))
             vectorization_factor = *max_nunits;
         }
 
@@ -745,7 +747,7 @@  vect_build_slp_tree_1 (loop_vec_info loo
                  greater than the SLP group size.  */
 	      unsigned ncopies
 		= vectorization_factor / TYPE_VECTOR_SUBPARTS (vectype);
-              if (loop_vinfo
+              if (is_a <loop_vec_info> (vinfo)
 		  && GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)) == stmt
                   && ((GROUP_SIZE (vinfo_for_stmt (stmt))
 		       - GROUP_GAP (vinfo_for_stmt (stmt)))
@@ -907,7 +909,7 @@  vect_build_slp_tree_1 (loop_vec_info loo
    was found.  */
 
 static bool
-vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
+vect_build_slp_tree (vec_info *vinfo,
                      slp_tree *node, unsigned int group_size,
                      unsigned int *max_nunits,
                      vec<slp_tree> *loads,
@@ -933,7 +935,7 @@  vect_build_slp_tree (loop_vec_info loop_
     return false;
 
   bool two_operators = false;
-  if (!vect_build_slp_tree_1 (loop_vinfo, bb_vinfo,
+  if (!vect_build_slp_tree_1 (vinfo,
 			      SLP_TREE_SCALAR_STMTS (*node), group_size, nops,
 			      max_nunits, vectorization_factor, matches,
 			      &two_operators))
@@ -953,8 +955,7 @@  vect_build_slp_tree (loop_vec_info loop_
   slp_oprnd_info oprnd_info;
   FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (*node), i, stmt)
     {
-      switch (vect_get_and_check_slp_defs (loop_vinfo, bb_vinfo,
-					   stmt, i, &oprnds_info))
+      switch (vect_get_and_check_slp_defs (vinfo, stmt, i, &oprnds_info))
 	{
 	case 0:
 	  break;
@@ -999,7 +1000,7 @@  vect_build_slp_tree (loop_vec_info loop_
 	  return false;
 	}
 
-      if (vect_build_slp_tree (loop_vinfo, bb_vinfo, &child,
+      if (vect_build_slp_tree (vinfo, &child,
 			       group_size, max_nunits, loads,
 			       vectorization_factor, matches,
 			       npermutes, &this_tree_size, max_tree_size))
@@ -1046,7 +1047,7 @@  vect_build_slp_tree (loop_vec_info loop_
 	 fails (or we don't try permutation below).  Ideally we'd
 	 even compute a permutation that will end up with the maximum
 	 SLP tree size...  */
-      if (bb_vinfo
+      if (is_a <bb_vec_info> (vinfo)
 	  && !matches[0]
 	  /* ???  Rejecting patterns this way doesn't work.  We'd have to
 	     do extra work to cancel the pattern so the uses see the
@@ -1113,7 +1114,7 @@  vect_build_slp_tree (loop_vec_info loop_
 	  dump_printf (MSG_NOTE, "\n");
 	  /* And try again with scratch 'matches' ... */
 	  bool *tem = XALLOCAVEC (bool, group_size);
-	  if (vect_build_slp_tree (loop_vinfo, bb_vinfo, &child,
+	  if (vect_build_slp_tree (vinfo, &child,
 				   group_size, max_nunits, loads,
 				   vectorization_factor,
 				   tem, npermutes, &this_tree_size,
@@ -1542,9 +1543,7 @@  vect_analyze_slp_cost_1 (slp_instance in
       enum vect_def_type dt;
       if (!op || op == lhs)
 	continue;
-      if (vect_is_simple_use (op, NULL, STMT_VINFO_LOOP_VINFO (stmt_info),
-			      STMT_VINFO_BB_VINFO (stmt_info),
-			      &def_stmt, &def, &dt))
+      if (vect_is_simple_use (op, NULL, stmt_info->vinfo, &def_stmt, &def, &dt))
 	{
 	  /* Without looking at the actual initializer a vector of
 	     constants can be implemented as load from the constant pool.
@@ -1632,7 +1631,7 @@  vect_analyze_slp_cost (slp_instance inst
    Return FALSE if it's impossible to SLP any stmt in the loop.  */
 
 static bool
-vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
+vect_analyze_slp_instance (vec_info *vinfo,
 			   gimple *stmt, unsigned max_tree_size)
 {
   slp_instance new_instance;
@@ -1657,7 +1656,7 @@  vect_analyze_slp_instance (loop_vec_info
         }
       else
         {
-          gcc_assert (loop_vinfo);
+          gcc_assert (is_a <loop_vec_info> (vinfo));
           vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
         }
 
@@ -1665,9 +1664,9 @@  vect_analyze_slp_instance (loop_vec_info
     }
   else
     {
-      gcc_assert (loop_vinfo);
+      gcc_assert (is_a <loop_vec_info> (vinfo));
       vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
-      group_size = LOOP_VINFO_REDUCTIONS (loop_vinfo).length ();
+      group_size = as_a <loop_vec_info> (vinfo)->reductions.length ();
     }
 
   if (!vectype)
@@ -1684,14 +1683,14 @@  vect_analyze_slp_instance (loop_vec_info
     }
 
   nunits = TYPE_VECTOR_SUBPARTS (vectype);
-  if (loop_vinfo)
-    vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
+  if (is_a <loop_vec_info> (vinfo))
+    vectorization_factor = as_a <loop_vec_info> (vinfo)->vectorization_factor;
   else
     vectorization_factor = nunits;
 
   /* Calculate the unrolling factor.  */
   unrolling_factor = least_common_multiple (nunits, group_size) / group_size;
-  if (unrolling_factor != 1 && !loop_vinfo)
+  if (unrolling_factor != 1 && is_a <bb_vec_info> (vinfo))
     {
       if (dump_enabled_p ())
         dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@@ -1726,7 +1725,7 @@  vect_analyze_slp_instance (loop_vec_info
   else
     {
       /* Collect reduction statements.  */
-      vec<gimple *> reductions = LOOP_VINFO_REDUCTIONS (loop_vinfo);
+      vec<gimple *> reductions = as_a <loop_vec_info> (vinfo)->reductions;
       for (i = 0; reductions.iterate (i, &next); i++)
 	scalar_stmts.safe_push (next);
     }
@@ -1738,7 +1737,7 @@  vect_analyze_slp_instance (loop_vec_info
   /* Build the tree for the SLP instance.  */
   bool *matches = XALLOCAVEC (bool, group_size);
   unsigned npermutes = 0;
-  if (vect_build_slp_tree (loop_vinfo, bb_vinfo, &node, group_size,
+  if (vect_build_slp_tree (vinfo, &node, group_size,
 			   &max_nunits, &loads,
 			   vectorization_factor, matches, &npermutes, NULL,
 			   max_tree_size))
@@ -1748,7 +1747,7 @@  vect_analyze_slp_instance (loop_vec_info
         unrolling_factor = least_common_multiple (max_nunits, group_size)
                            / group_size;
 
-      if (unrolling_factor != 1 && !loop_vinfo)
+      if (unrolling_factor != 1 && is_a <bb_vec_info> (vinfo))
         {
           if (dump_enabled_p ())
             dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@@ -1819,11 +1818,7 @@  vect_analyze_slp_instance (loop_vec_info
             }
         }
 
-
-      if (loop_vinfo)
-	LOOP_VINFO_SLP_INSTANCES (loop_vinfo).safe_push (new_instance);
-      else
-        BB_VINFO_SLP_INSTANCES (bb_vinfo).safe_push (new_instance);
+      vinfo->slp_instances.safe_push (new_instance);
 
       if (dump_enabled_p ())
 	vect_print_slp_tree (MSG_NOTE, node);
@@ -1844,54 +1839,43 @@  vect_analyze_slp_instance (loop_vec_info
    trees of packed scalar stmts if SLP is possible.  */
 
 bool
-vect_analyze_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
-		  unsigned max_tree_size)
+vect_analyze_slp (vec_info *vinfo, unsigned max_tree_size)
 {
   unsigned int i;
-  vec<gimple *> grouped_stores;
-  vec<gimple *> reductions = vNULL;
-  vec<gimple *> reduc_chains = vNULL;
   gimple *first_element;
   bool ok = false;
 
   if (dump_enabled_p ())
     dump_printf_loc (MSG_NOTE, vect_location, "=== vect_analyze_slp ===\n");
 
-  if (loop_vinfo)
-    {
-      grouped_stores = LOOP_VINFO_GROUPED_STORES (loop_vinfo);
-      reduc_chains = LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo);
-      reductions = LOOP_VINFO_REDUCTIONS (loop_vinfo);
-    }
-  else
-    grouped_stores = BB_VINFO_GROUPED_STORES (bb_vinfo);
-
   /* Find SLP sequences starting from groups of grouped stores.  */
-  FOR_EACH_VEC_ELT (grouped_stores, i, first_element)
-    if (vect_analyze_slp_instance (loop_vinfo, bb_vinfo, first_element,
-				   max_tree_size))
+  FOR_EACH_VEC_ELT (vinfo->grouped_stores, i, first_element)
+    if (vect_analyze_slp_instance (vinfo, first_element, max_tree_size))
       ok = true;
 
-  if (reduc_chains.length () > 0)
+  if (loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo))
     {
-      /* Find SLP sequences starting from reduction chains.  */
-      FOR_EACH_VEC_ELT (reduc_chains, i, first_element)
-        if (vect_analyze_slp_instance (loop_vinfo, bb_vinfo, first_element,
-				       max_tree_size))
-          ok = true;
-        else
-          return false;
+      if (loop_vinfo->reduction_chains.length () > 0)
+	{
+	  /* Find SLP sequences starting from reduction chains.  */
+	  FOR_EACH_VEC_ELT (loop_vinfo->reduction_chains, i, first_element)
+	      if (vect_analyze_slp_instance (vinfo, first_element,
+					     max_tree_size))
+		ok = true;
+	      else
+		return false;
 
-      /* Don't try to vectorize SLP reductions if reduction chain was
-         detected.  */
-      return ok;
-    }
+	  /* Don't try to vectorize SLP reductions if reduction chain was
+	     detected.  */
+	  return ok;
+	}
 
-  /* Find SLP sequences starting from groups of reductions.  */
-  if (reductions.length () > 1
-      && vect_analyze_slp_instance (loop_vinfo, bb_vinfo, reductions[0],
-				    max_tree_size))
-    ok = true;
+      /* Find SLP sequences starting from groups of reductions.  */
+      if (loop_vinfo->reductions.length () > 1
+	  && vect_analyze_slp_instance (vinfo, loop_vinfo->reductions[0],
+					max_tree_size))
+	ok = true;
+    }
 
   return true;
 }
@@ -2110,13 +2094,14 @@  new_bb_vec_info (basic_block bb)
   gimple_stmt_iterator gsi;
 
   res = (bb_vec_info) xcalloc (1, sizeof (struct _bb_vec_info));
+  res->kind = vec_info::bb;
   BB_VINFO_BB (res) = bb;
 
   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
       gimple *stmt = gsi_stmt (gsi);
       gimple_set_uid (stmt, 0);
-      set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, NULL, res));
+      set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, res));
     }
 
   BB_VINFO_GROUPED_STORES (res).create (10);
@@ -2155,7 +2140,7 @@  destroy_bb_vec_info (bb_vec_info bb_vinf
         free_stmt_vec_info (stmt);
     }
 
-  vect_destroy_datarefs (NULL, bb_vinfo);
+  vect_destroy_datarefs (bb_vinfo);
   free_dependence_relations (BB_VINFO_DDRS (bb_vinfo));
   BB_VINFO_GROUPED_STORES (bb_vinfo).release ();
   slp_instances = BB_VINFO_SLP_INSTANCES (bb_vinfo);
@@ -2367,7 +2352,7 @@  vect_slp_analyze_bb_1 (basic_block bb)
   if (!bb_vinfo)
     return NULL;
 
-  if (!vect_analyze_data_refs (NULL, bb_vinfo, &min_vf, &n_stmts))
+  if (!vect_analyze_data_refs (bb_vinfo, &min_vf, &n_stmts))
     {
       if (dump_enabled_p ())
         dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@@ -2389,7 +2374,7 @@  vect_slp_analyze_bb_1 (basic_block bb)
       return NULL;
     }
 
-  if (!vect_analyze_data_ref_accesses (NULL, bb_vinfo))
+  if (!vect_analyze_data_ref_accesses (bb_vinfo))
     {
      if (dump_enabled_p ())
        dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@@ -2400,9 +2385,9 @@  vect_slp_analyze_bb_1 (basic_block bb)
       return NULL;
     }
 
-  vect_pattern_recog (NULL, bb_vinfo);
+  vect_pattern_recog (bb_vinfo);
 
-  if (!vect_analyze_data_refs_alignment (NULL, bb_vinfo))
+  if (!vect_analyze_data_refs_alignment (bb_vinfo))
     {
       if (dump_enabled_p ())
         dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@@ -2415,7 +2400,7 @@  vect_slp_analyze_bb_1 (basic_block bb)
 
   /* Check the SLP opportunities in the basic block, analyze and build SLP
      trees.  */
-  if (!vect_analyze_slp (NULL, bb_vinfo, n_stmts))
+  if (!vect_analyze_slp (bb_vinfo, n_stmts))
     {
       if (dump_enabled_p ())
 	{
@@ -2463,7 +2448,7 @@  vect_slp_analyze_bb_1 (basic_block bb)
        return NULL;
      }
 
-  if (!vect_verify_datarefs_alignment (NULL, bb_vinfo))
+  if (!vect_verify_datarefs_alignment (bb_vinfo))
     {
       if (dump_enabled_p ())
         dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@@ -3480,23 +3465,18 @@  vect_remove_slp_scalar_calls (slp_tree n
 /* Generate vector code for all SLP instances in the loop/basic block.  */
 
 bool
-vect_schedule_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
+vect_schedule_slp (vec_info *vinfo)
 {
   vec<slp_instance> slp_instances;
   slp_instance instance;
   unsigned int i, vf;
   bool is_store = false;
 
-  if (loop_vinfo)
-    {
-      slp_instances = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
-      vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
-    }
+  slp_instances = vinfo->slp_instances;
+  if (is_a <loop_vec_info> (vinfo))
+    vf = as_a <loop_vec_info> (vinfo)->vectorization_factor;
   else
-    {
-      slp_instances = BB_VINFO_SLP_INSTANCES (bb_vinfo);
-      vf = 1;
-    }
+    vf = 1;
 
   FOR_EACH_VEC_ELT (slp_instances, i, instance)
     {
@@ -3522,7 +3502,7 @@  vect_schedule_slp (loop_vec_info loop_vi
 	 ???  For BB vectorization we can as well remove scalar
 	 stmts starting from the SLP tree root if they have no
 	 uses.  */
-      if (loop_vinfo)
+      if (is_a <loop_vec_info> (vinfo))
 	vect_remove_slp_scalar_calls (root);
 
       for (j = 0; SLP_TREE_SCALAR_STMTS (root).iterate (j, &store)
@@ -3578,7 +3558,7 @@  vect_slp_transform_bb (basic_block bb)
       /* Schedule all the SLP instances when the first SLP stmt is reached.  */
       if (STMT_SLP_TYPE (stmt_info))
         {
-          vect_schedule_slp (NULL, bb_vinfo);
+          vect_schedule_slp (bb_vinfo);
           break;
         }
     }
Index: gcc/tree-vect-stmts.c
===================================================================
--- gcc/tree-vect-stmts.c	(revision 228482)
+++ gcc/tree-vect-stmts.c	(working copy)
@@ -102,19 +102,8 @@  record_stmt_cost (stmt_vector_for_cost *
 	 
     }
   else
-    {
-      loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
-      bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
-      void *target_cost_data;
-
-      if (loop_vinfo)
-	target_cost_data = LOOP_VINFO_TARGET_COST_DATA (loop_vinfo);
-      else
-	target_cost_data = BB_VINFO_TARGET_COST_DATA (bb_vinfo);
-
-      return add_stmt_cost (target_cost_data, count, kind, stmt_info,
-			    misalign, where);
-    }
+    return add_stmt_cost (stmt_info->vinfo->target_cost_data,
+			  count, kind, stmt_info, misalign, where);
 }
 
 /* Return a variable of type ELEM_TYPE[NELEMS].  */
@@ -470,7 +459,7 @@  process_use (gimple *stmt, tree use, loo
   if (!force && !exist_non_indexing_operands_for_use_p (use, stmt))
      return true;
 
-  if (!vect_is_simple_use (use, stmt, loop_vinfo, NULL, &def_stmt, &def, &dt))
+  if (!vect_is_simple_use (use, stmt, loop_vinfo, &def_stmt, &def, &dt))
     {
       if (dump_enabled_p ())
         dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@@ -1375,7 +1364,7 @@  vect_get_vec_def_for_operand (tree op, g
       dump_printf (MSG_NOTE, "\n");
     }
 
-  is_simple_use = vect_is_simple_use (op, stmt, loop_vinfo, NULL,
+  is_simple_use = vect_is_simple_use (op, stmt, loop_vinfo,
 				      &def_stmt, &def, &dt);
   gcc_assert (is_simple_use);
   if (dump_enabled_p ())
@@ -1651,8 +1640,7 @@  vect_finish_stmt_generation (gimple *stm
 			     gimple_stmt_iterator *gsi)
 {
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
-  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
-  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+  vec_info *vinfo = stmt_info->vinfo;
 
   gcc_assert (gimple_code (stmt) != GIMPLE_LABEL);
 
@@ -1684,8 +1672,7 @@  vect_finish_stmt_generation (gimple *stm
     }
   gsi_insert_before (gsi, vec_stmt, GSI_SAME_STMT);
 
-  set_vinfo_for_stmt (vec_stmt, new_stmt_vec_info (vec_stmt, loop_vinfo,
-                                                   bb_vinfo));
+  set_vinfo_for_stmt (vec_stmt, new_stmt_vec_info (vec_stmt, vinfo));
 
   if (dump_enabled_p ())
     {
@@ -1814,7 +1801,7 @@  vectorizable_mask_load_store (gimple *st
       gather_decl = vect_check_gather_scatter (stmt, loop_vinfo, &gather_base,
 				       &gather_off, &gather_scale);
       gcc_assert (gather_decl);
-      if (!vect_is_simple_use_1 (gather_off, NULL, loop_vinfo, NULL,
+      if (!vect_is_simple_use_1 (gather_off, NULL, loop_vinfo,
 				 &def_stmt, &def, &gather_dt,
 				 &gather_off_vectype))
 	{
@@ -1846,14 +1833,14 @@  vectorizable_mask_load_store (gimple *st
   if (TREE_CODE (mask) != SSA_NAME)
     return false;
 
-  if (!vect_is_simple_use (mask, stmt, loop_vinfo, NULL,
+  if (!vect_is_simple_use (mask, stmt, loop_vinfo,
 			   &def_stmt, &def, &dt))
     return false;
 
   if (is_store)
     {
       tree rhs = gimple_call_arg (stmt, 3);
-      if (!vect_is_simple_use (rhs, stmt, loop_vinfo, NULL,
+      if (!vect_is_simple_use (rhs, stmt, loop_vinfo,
 			       &def_stmt, &def, &dt))
 	return false;
     }
@@ -1971,7 +1958,7 @@  vectorizable_mask_load_store (gimple *st
 		vec_mask = vect_get_vec_def_for_operand (mask, stmt, NULL);
 	      else
 		{
-		  vect_is_simple_use (vec_mask, NULL, loop_vinfo, NULL,
+		  vect_is_simple_use (vec_mask, NULL, loop_vinfo,
 				      &def_stmt, &def, &dt);
 		  vec_mask = vect_get_vec_def_for_stmt_copy (dt, vec_mask);
 		}
@@ -2068,10 +2055,10 @@  vectorizable_mask_load_store (gimple *st
 	    }
 	  else
 	    {
-	      vect_is_simple_use (vec_rhs, NULL, loop_vinfo, NULL, &def_stmt,
+	      vect_is_simple_use (vec_rhs, NULL, loop_vinfo, &def_stmt,
 				  &def, &dt);
 	      vec_rhs = vect_get_vec_def_for_stmt_copy (dt, vec_rhs);
-	      vect_is_simple_use (vec_mask, NULL, loop_vinfo, NULL, &def_stmt,
+	      vect_is_simple_use (vec_mask, NULL, loop_vinfo, &def_stmt,
 				  &def, &dt);
 	      vec_mask = vect_get_vec_def_for_stmt_copy (dt, vec_mask);
 	      dataref_ptr = bump_vector_ptr (dataref_ptr, ptr_incr, gsi, stmt,
@@ -2121,7 +2108,7 @@  vectorizable_mask_load_store (gimple *st
 	    }
 	  else
 	    {
-	      vect_is_simple_use (vec_mask, NULL, loop_vinfo, NULL, &def_stmt,
+	      vect_is_simple_use (vec_mask, NULL, loop_vinfo, &def_stmt,
 				  &def, &dt);
 	      vec_mask = vect_get_vec_def_for_stmt_copy (dt, vec_mask);
 	      dataref_ptr = bump_vector_ptr (dataref_ptr, ptr_incr, gsi, stmt,
@@ -2192,6 +2179,7 @@  vectorizable_call (gimple *gs, gimple_st
   int nunits_out;
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+  vec_info *vinfo = stmt_info->vinfo;
   tree fndecl, new_temp, def, rhs_type;
   gimple *def_stmt;
   enum vect_def_type dt[3]
@@ -2265,7 +2253,7 @@  vectorizable_call (gimple *gs, gimple_st
       if (!rhs_type)
 	rhs_type = TREE_TYPE (op);
 
-      if (!vect_is_simple_use_1 (op, stmt, loop_vinfo, bb_vinfo,
+      if (!vect_is_simple_use_1 (op, stmt, vinfo,
 				 &def_stmt, &def, &dt[i], &opvectype))
 	{
 	  if (dump_enabled_p ())
@@ -2715,6 +2703,7 @@  vectorizable_simd_clone_call (gimple *st
   unsigned int nunits;
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+  vec_info *vinfo = stmt_info->vinfo;
   struct loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo) : NULL;
   tree fndecl, new_temp, def;
   gimple *def_stmt;
@@ -2779,7 +2768,7 @@  vectorizable_simd_clone_call (gimple *st
       thisarginfo.simd_lane_linear = false;
 
       op = gimple_call_arg (stmt, i);
-      if (!vect_is_simple_use_1 (op, stmt, loop_vinfo, bb_vinfo,
+      if (!vect_is_simple_use_1 (op, stmt, vinfo,
 				 &def_stmt, &def, &thisarginfo.dt,
 				 &thisarginfo.vectype)
 	  || thisarginfo.dt == vect_uninitialized_def)
@@ -3139,8 +3128,7 @@  vectorizable_simd_clone_call (gimple *st
 		  tree phi_res = copy_ssa_name (op);
 		  gphi *new_phi = create_phi_node (phi_res, loop->header);
 		  set_vinfo_for_stmt (new_phi,
-				      new_stmt_vec_info (new_phi, loop_vinfo,
-							 NULL));
+				      new_stmt_vec_info (new_phi, loop_vinfo));
 		  add_phi_arg (new_phi, arginfo[i].op,
 			       loop_preheader_edge (loop), UNKNOWN_LOCATION);
 		  enum tree_code code
@@ -3158,8 +3146,7 @@  vectorizable_simd_clone_call (gimple *st
 		  gimple_stmt_iterator si = gsi_after_labels (loop->header);
 		  gsi_insert_after (&si, new_stmt, GSI_NEW_STMT);
 		  set_vinfo_for_stmt (new_stmt,
-				      new_stmt_vec_info (new_stmt, loop_vinfo,
-							 NULL));
+				      new_stmt_vec_info (new_stmt, loop_vinfo));
 		  add_phi_arg (new_phi, phi_arg, loop_latch_edge (loop),
 			       UNKNOWN_LOCATION);
 		  arginfo[i].op = phi_res;
@@ -3580,6 +3567,7 @@  vectorizable_conversion (gimple *stmt, g
   vec<tree> vec_oprnds1 = vNULL;
   tree vop0;
   bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+  vec_info *vinfo = stmt_info->vinfo;
   int multi_step_cvt = 0;
   vec<tree> vec_dsts = vNULL;
   vec<tree> interm_types = vNULL;
@@ -3642,7 +3630,7 @@  vectorizable_conversion (gimple *stmt, g
     }
 
   /* Check the operands of the operation.  */
-  if (!vect_is_simple_use_1 (op0, stmt, loop_vinfo, bb_vinfo,
+  if (!vect_is_simple_use_1 (op0, stmt, vinfo,
 			     &def_stmt, &def, &dt[0], &vectype_in))
     {
       if (dump_enabled_p ())
@@ -3659,10 +3647,10 @@  vectorizable_conversion (gimple *stmt, g
       /* For WIDEN_MULT_EXPR, if OP0 is a constant, use the type of
 	 OP1.  */
       if (CONSTANT_CLASS_P (op0))
-	ok = vect_is_simple_use_1 (op1, stmt, loop_vinfo, bb_vinfo,
+	ok = vect_is_simple_use_1 (op1, stmt, vinfo,
 				   &def_stmt, &def, &dt[1], &vectype_in);
       else
-	ok = vect_is_simple_use (op1, stmt, loop_vinfo, bb_vinfo, &def_stmt,
+	ok = vect_is_simple_use (op1, stmt, vinfo, &def_stmt,
 				 &def, &dt[1]);
 
       if (!ok)
@@ -4150,6 +4138,7 @@  vectorizable_assignment (gimple *stmt, g
   vec<tree> vec_oprnds = vNULL;
   tree vop;
   bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+  vec_info *vinfo = stmt_info->vinfo;
   gimple *new_stmt = NULL;
   stmt_vec_info prev_stmt_info = NULL;
   enum tree_code code;
@@ -4193,7 +4182,7 @@  vectorizable_assignment (gimple *stmt, g
 
   gcc_assert (ncopies >= 1);
 
-  if (!vect_is_simple_use_1 (op, stmt, loop_vinfo, bb_vinfo,
+  if (!vect_is_simple_use_1 (op, stmt, vinfo,
 			     &def_stmt, &def, &dt[0], &vectype_in))
     {
       if (dump_enabled_p ())
@@ -4365,6 +4354,7 @@  vectorizable_shift (gimple *stmt, gimple
   unsigned int k;
   bool scalar_shift_arg = true;
   bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+  vec_info *vinfo = stmt_info->vinfo;
   int vf;
 
   if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
@@ -4398,7 +4388,7 @@  vectorizable_shift (gimple *stmt, gimple
     }
 
   op0 = gimple_assign_rhs1 (stmt);
-  if (!vect_is_simple_use_1 (op0, stmt, loop_vinfo, bb_vinfo,
+  if (!vect_is_simple_use_1 (op0, stmt, vinfo,
                              &def_stmt, &def, &dt[0], &vectype))
     {
       if (dump_enabled_p ())
@@ -4426,7 +4416,7 @@  vectorizable_shift (gimple *stmt, gimple
     return false;
 
   op1 = gimple_assign_rhs2 (stmt);
-  if (!vect_is_simple_use_1 (op1, stmt, loop_vinfo, bb_vinfo, &def_stmt,
+  if (!vect_is_simple_use_1 (op1, stmt, vinfo, &def_stmt,
 			     &def, &dt[1], &op1_vectype))
     {
       if (dump_enabled_p ())
@@ -4728,6 +4718,7 @@  vectorizable_operation (gimple *stmt, gi
   vec<tree> vec_oprnds2 = vNULL;
   tree vop0, vop1, vop2;
   bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+  vec_info *vinfo = stmt_info->vinfo;
   int vf;
 
   if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
@@ -4780,7 +4771,7 @@  vectorizable_operation (gimple *stmt, gi
     }
 
   op0 = gimple_assign_rhs1 (stmt);
-  if (!vect_is_simple_use_1 (op0, stmt, loop_vinfo, bb_vinfo,
+  if (!vect_is_simple_use_1 (op0, stmt, vinfo,
 			     &def_stmt, &def, &dt[0], &vectype))
     {
       if (dump_enabled_p ())
@@ -4816,7 +4807,7 @@  vectorizable_operation (gimple *stmt, gi
   if (op_type == binary_op || op_type == ternary_op)
     {
       op1 = gimple_assign_rhs2 (stmt);
-      if (!vect_is_simple_use (op1, stmt, loop_vinfo, bb_vinfo, &def_stmt,
+      if (!vect_is_simple_use (op1, stmt, vinfo, &def_stmt,
 			       &def, &dt[1]))
 	{
 	  if (dump_enabled_p ())
@@ -4828,7 +4819,7 @@  vectorizable_operation (gimple *stmt, gi
   if (op_type == ternary_op)
     {
       op2 = gimple_assign_rhs3 (stmt);
-      if (!vect_is_simple_use (op2, stmt, loop_vinfo, bb_vinfo, &def_stmt,
+      if (!vect_is_simple_use (op2, stmt, vinfo, &def_stmt,
 			       &def, &dt[2]))
 	{
 	  if (dump_enabled_p ())
@@ -5131,6 +5122,7 @@  vectorizable_store (gimple *stmt, gimple
   bool slp = (slp_node != NULL);
   unsigned int vec_num;
   bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+  vec_info *vinfo = stmt_info->vinfo;
   tree aggr_type;
   tree scatter_base = NULL_TREE, scatter_off = NULL_TREE;
   tree scatter_off_vectype = NULL_TREE, scatter_decl = NULL_TREE;
@@ -5191,7 +5183,7 @@  vectorizable_store (gimple *stmt, gimple
     }
 
   op = gimple_assign_rhs1 (stmt);
-  if (!vect_is_simple_use (op, stmt, loop_vinfo, bb_vinfo, &def_stmt,
+  if (!vect_is_simple_use (op, stmt, vinfo, &def_stmt,
 			   &def, &dt))
     {
       if (dump_enabled_p ())
@@ -5282,7 +5274,7 @@  vectorizable_store (gimple *stmt, gimple
             {
 	      gcc_assert (gimple_assign_single_p (next_stmt));
 	      op = gimple_assign_rhs1 (next_stmt);
-              if (!vect_is_simple_use (op, next_stmt, loop_vinfo, bb_vinfo,
+              if (!vect_is_simple_use (op, next_stmt, vinfo,
 				       &def_stmt, &def, &dt))
                 {
                   if (dump_enabled_p ())
@@ -5302,7 +5294,7 @@  vectorizable_store (gimple *stmt, gimple
       scatter_decl = vect_check_gather_scatter (stmt, loop_vinfo, &scatter_base,
 						&scatter_off, &scatter_scale);
       gcc_assert (scatter_decl);
-      if (!vect_is_simple_use_1 (scatter_off, NULL, loop_vinfo, bb_vinfo,
+      if (!vect_is_simple_use_1 (scatter_off, NULL, vinfo,
 				 &def_stmt, &def, &scatter_idx_dt,
 				 &scatter_off_vectype))
 	{
@@ -5579,7 +5571,7 @@  vectorizable_store (gimple *stmt, gimple
 		 loop, &incr_gsi, insert_after,
 		 &offvar, NULL);
       incr = gsi_stmt (incr_gsi);
-      set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo, NULL));
+      set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo));
 
       stride_step = force_gimple_operand (stride_step, &stmts, true, NULL_TREE);
       if (stmts)
@@ -5628,8 +5620,8 @@  vectorizable_store (gimple *stmt, gimple
 		    vec_oprnd = vec_oprnds[j];
 		  else
 		    {
-		      vect_is_simple_use (vec_oprnd, NULL, loop_vinfo,
-					  bb_vinfo, &def_stmt, &def, &dt);
+		      vect_is_simple_use (vec_oprnd, NULL, vinfo,
+					  &def_stmt, &def, &dt);
 		      vec_oprnd = vect_get_vec_def_for_stmt_copy (dt, vec_oprnd);
 		    }
 		}
@@ -5818,7 +5810,7 @@  vectorizable_store (gimple *stmt, gimple
 	  for (i = 0; i < group_size; i++)
 	    {
 	      op = oprnds[i];
-	      vect_is_simple_use (op, NULL, loop_vinfo, bb_vinfo, &def_stmt,
+	      vect_is_simple_use (op, NULL, vinfo, &def_stmt,
 				  &def, &dt);
 	      vec_oprnd = vect_get_vec_def_for_stmt_copy (dt, op);
 	      dr_chain[i] = vec_oprnd;
@@ -6134,6 +6126,7 @@  vectorizable_load (gimple *stmt, gimple_
   tree gather_off_vectype = NULL_TREE, gather_decl = NULL_TREE;
   int gather_scale = 1;
   enum vect_def_type gather_dt = vect_unknown_def_type;
+  vec_info *vinfo = stmt_info->vinfo;
 
   if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
     return false;
@@ -6296,7 +6289,7 @@  vectorizable_load (gimple *stmt, gimple_
       gather_decl = vect_check_gather_scatter (stmt, loop_vinfo, &gather_base,
 					       &gather_off, &gather_scale);
       gcc_assert (gather_decl);
-      if (!vect_is_simple_use_1 (gather_off, NULL, loop_vinfo, bb_vinfo,
+      if (!vect_is_simple_use_1 (gather_off, NULL, vinfo,
 				 &def_stmt, &def, &gather_dt,
 				 &gather_off_vectype))
 	{
@@ -6602,7 +6595,7 @@  vectorizable_load (gimple *stmt, gimple_
 		 loop, &incr_gsi, insert_after,
 		 &offvar, NULL);
       incr = gsi_stmt (incr_gsi);
-      set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo, NULL));
+      set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo));
 
       stride_step = force_gimple_operand (unshare_expr (stride_step),
 					  &stmts, true, NULL_TREE);
@@ -7146,8 +7139,7 @@  vectorizable_load (gimple *stmt, gimple_
 		    }
 		  new_stmt = SSA_NAME_DEF_STMT (new_temp);
 		  set_vinfo_for_stmt (new_stmt,
-				      new_stmt_vec_info (new_stmt, loop_vinfo,
-							 bb_vinfo));
+				      new_stmt_vec_info (new_stmt, vinfo));
 		}
 
 	      if (negative)
@@ -7229,8 +7221,8 @@  vectorizable_load (gimple *stmt, gimple_
    condition operands are supportable using vec_is_simple_use.  */
 
 static bool
-vect_is_simple_cond (tree cond, gimple *stmt, loop_vec_info loop_vinfo,
-		     bb_vec_info bb_vinfo, tree *comp_vectype)
+vect_is_simple_cond (tree cond, gimple *stmt, vec_info *vinfo,
+		     tree *comp_vectype)
 {
   tree lhs, rhs;
   tree def;
@@ -7246,7 +7238,7 @@  vect_is_simple_cond (tree cond, gimple *
   if (TREE_CODE (lhs) == SSA_NAME)
     {
       gimple *lhs_def_stmt = SSA_NAME_DEF_STMT (lhs);
-      if (!vect_is_simple_use_1 (lhs, stmt, loop_vinfo, bb_vinfo,
+      if (!vect_is_simple_use_1 (lhs, stmt, vinfo,
 				 &lhs_def_stmt, &def, &dt, &vectype1))
 	return false;
     }
@@ -7257,7 +7249,7 @@  vect_is_simple_cond (tree cond, gimple *
   if (TREE_CODE (rhs) == SSA_NAME)
     {
       gimple *rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
-      if (!vect_is_simple_use_1 (rhs, stmt, loop_vinfo, bb_vinfo,
+      if (!vect_is_simple_use_1 (rhs, stmt, vinfo,
 				 &rhs_def_stmt, &def, &dt, &vectype2))
 	return false;
     }
@@ -7355,15 +7347,14 @@  vectorizable_condition (gimple *stmt, gi
   then_clause = gimple_assign_rhs2 (stmt);
   else_clause = gimple_assign_rhs3 (stmt);
 
-  if (!vect_is_simple_cond (cond_expr, stmt, loop_vinfo, bb_vinfo,
-			    &comp_vectype)
+  if (!vect_is_simple_cond (cond_expr, stmt, stmt_info->vinfo, &comp_vectype)
       || !comp_vectype)
     return false;
 
   if (TREE_CODE (then_clause) == SSA_NAME)
     {
       gimple *then_def_stmt = SSA_NAME_DEF_STMT (then_clause);
-      if (!vect_is_simple_use (then_clause, stmt, loop_vinfo, bb_vinfo,
+      if (!vect_is_simple_use (then_clause, stmt, stmt_info->vinfo,
 			       &then_def_stmt, &def, &dt))
 	return false;
     }
@@ -7375,7 +7366,7 @@  vectorizable_condition (gimple *stmt, gi
   if (TREE_CODE (else_clause) == SSA_NAME)
     {
       gimple *else_def_stmt = SSA_NAME_DEF_STMT (else_clause);
-      if (!vect_is_simple_use (else_clause, stmt, loop_vinfo, bb_vinfo,
+      if (!vect_is_simple_use (else_clause, stmt, stmt_info->vinfo,
 			       &else_def_stmt, &def, &dt))
 	return false;
     }
@@ -7442,13 +7433,13 @@  vectorizable_condition (gimple *stmt, gi
 	      vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 0),
 					    stmt, NULL);
 	      vect_is_simple_use (TREE_OPERAND (cond_expr, 0), stmt,
-				  loop_vinfo, NULL, &gtemp, &def, &dts[0]);
+				  loop_vinfo, &gtemp, &def, &dts[0]);
 
 	      vec_cond_rhs =
 		vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 1),
 						stmt, NULL);
 	      vect_is_simple_use (TREE_OPERAND (cond_expr, 1), stmt,
-				  loop_vinfo, NULL, &gtemp, &def, &dts[1]);
+				  loop_vinfo, &gtemp, &def, &dts[1]);
 	      if (reduc_index == 1)
 		vec_then_clause = reduc_def;
 	      else
@@ -7456,7 +7447,7 @@  vectorizable_condition (gimple *stmt, gi
 		  vec_then_clause = vect_get_vec_def_for_operand (then_clause,
 		 		  			      stmt, NULL);
 	          vect_is_simple_use (then_clause, stmt, loop_vinfo,
-					  NULL, &gtemp, &def, &dts[2]);
+				      &gtemp, &def, &dts[2]);
 		}
 	      if (reduc_index == 2)
 		vec_else_clause = reduc_def;
@@ -7465,7 +7456,7 @@  vectorizable_condition (gimple *stmt, gi
 		  vec_else_clause = vect_get_vec_def_for_operand (else_clause,
 							      stmt, NULL);
 		  vect_is_simple_use (else_clause, stmt, loop_vinfo,
-				  NULL, &gtemp, &def, &dts[3]);
+				      &gtemp, &def, &dts[3]);
 		}
 	    }
 	}
@@ -7983,16 +7974,14 @@  vect_remove_stores (gimple *first_stmt)
    Create and initialize a new stmt_vec_info struct for STMT.  */
 
 stmt_vec_info
-new_stmt_vec_info (gimple *stmt, loop_vec_info loop_vinfo,
-                   bb_vec_info bb_vinfo)
+new_stmt_vec_info (gimple *stmt, vec_info *vinfo)
 {
   stmt_vec_info res;
   res = (stmt_vec_info) xcalloc (1, sizeof (struct _stmt_vec_info));
 
   STMT_VINFO_TYPE (res) = undef_vec_info_type;
   STMT_VINFO_STMT (res) = stmt;
-  STMT_VINFO_LOOP_VINFO (res) = loop_vinfo;
-  STMT_VINFO_BB_VINFO (res) = bb_vinfo;
+  res->vinfo = vinfo;
   STMT_VINFO_RELEVANT (res) = vect_unused_in_scope;
   STMT_VINFO_LIVE_P (res) = false;
   STMT_VINFO_VECTYPE (res) = NULL;
@@ -8222,9 +8211,8 @@  get_same_sized_vectype (tree scalar_type
    For now, operands defined outside the basic block are not supported.  */
 
 bool
-vect_is_simple_use (tree operand, gimple *stmt, loop_vec_info loop_vinfo,
-                    bb_vec_info bb_vinfo, gimple **def_stmt,
-		    tree *def, enum vect_def_type *dt)
+vect_is_simple_use (tree operand, gimple *stmt, vec_info *vinfo,
+                    gimple **def_stmt, tree *def, enum vect_def_type *dt)
 {
   *def_stmt = NULL;
   *def = NULL_TREE;
@@ -8274,15 +8262,16 @@  vect_is_simple_use (tree operand, gimple
     }
 
   basic_block bb = gimple_bb (*def_stmt);
-  if ((loop_vinfo && !flow_bb_inside_loop_p (LOOP_VINFO_LOOP (loop_vinfo), bb))
-      || (bb_vinfo
-	  && (bb != BB_VINFO_BB (bb_vinfo)
+  if ((is_a <loop_vec_info> (vinfo)
+       && !flow_bb_inside_loop_p (as_a <loop_vec_info> (vinfo)->loop, bb))
+      || (is_a <bb_vec_info> (vinfo)
+	  && (bb != as_a <bb_vec_info> (vinfo)->bb
 	      || gimple_code (*def_stmt) == GIMPLE_PHI)))
     *dt = vect_external_def;
   else
     {
       stmt_vec_info stmt_vinfo = vinfo_for_stmt (*def_stmt);
-      if (bb_vinfo && !STMT_VINFO_VECTORIZABLE (stmt_vinfo))
+      if (is_a <bb_vec_info> (vinfo) && !STMT_VINFO_VECTORIZABLE (stmt_vinfo))
 	*dt = vect_external_def;
       else
 	*dt = STMT_VINFO_DEF_TYPE (stmt_vinfo);
@@ -8369,12 +8358,11 @@  vect_is_simple_use (tree operand, gimple
    scalar operand.  */
 
 bool
-vect_is_simple_use_1 (tree operand, gimple *stmt, loop_vec_info loop_vinfo,
-		      bb_vec_info bb_vinfo, gimple **def_stmt,
+vect_is_simple_use_1 (tree operand, gimple *stmt, vec_info *vinfo,
+		      gimple **def_stmt,
 		      tree *def, enum vect_def_type *dt, tree *vectype)
 {
-  if (!vect_is_simple_use (operand, stmt, loop_vinfo, bb_vinfo, def_stmt,
-			   def, dt))
+  if (!vect_is_simple_use (operand, stmt, vinfo, def_stmt, def, dt))
     return false;
 
   /* Now get a vector type if the def is internal, otherwise supply
Index: gcc/tree-vectorizer.c
===================================================================
--- gcc/tree-vectorizer.c	(revision 228482)
+++ gcc/tree-vectorizer.c	(working copy)
@@ -329,25 +329,19 @@  shrink_simd_arrays
 /* A helper function to free data refs.  */
 
 void
-vect_destroy_datarefs (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
+vect_destroy_datarefs (vec_info *vinfo)
 {
-  vec<data_reference_p> datarefs;
   struct data_reference *dr;
   unsigned int i;
 
- if (loop_vinfo)
-    datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
-  else
-    datarefs = BB_VINFO_DATAREFS (bb_vinfo);
-
-  FOR_EACH_VEC_ELT (datarefs, i, dr)
+  FOR_EACH_VEC_ELT (vinfo->datarefs, i, dr)
     if (dr->aux)
       {
         free (dr->aux);
         dr->aux = NULL;
       }
 
-  free_data_refs (datarefs);
+  free_data_refs (vinfo->datarefs);
 }
 
 
Index: gcc/tree-vectorizer.h
===================================================================
--- gcc/tree-vectorizer.h	(revision 228482)
+++ gcc/tree-vectorizer.h	(working copy)
@@ -230,11 +230,51 @@  peel_info_hasher::equal (const _vect_pee
   return (a->npeel == b->npeel);
 }
 
+/* Vectorizer state common between loop and basic-block vectorization.  */
+struct vec_info {
+  enum { bb, loop } kind;
+
+  /* All SLP instances.  */
+  vec<slp_instance> slp_instances;
+
+  /* All data references.  */
+  vec<data_reference_p> datarefs;
+
+  /* All data dependences.  */
+  vec<ddr_p> ddrs;
+
+  /* All interleaving chains of stores, represented by the first
+     stmt in the chain.  */
+  vec<gimple *> grouped_stores;
+
+  /* Cost data used by the target cost model.  */
+  void *target_cost_data;
+};
+
+struct _loop_vec_info;
+struct _bb_vec_info;
+
+template<>
+template<>
+inline bool
+is_a_helper <_loop_vec_info *>::test (vec_info *i)
+{
+  return i->kind == vec_info::loop;
+}
+
+template<>
+template<>
+inline bool
+is_a_helper <_bb_vec_info *>::test (vec_info *i)
+{
+  return i->kind == vec_info::bb;
+}
+
 
 /*-----------------------------------------------------------------*/
 /* Info on vectorized loops.                                       */
 /*-----------------------------------------------------------------*/
-typedef struct _loop_vec_info {
+typedef struct _loop_vec_info : public vec_info {
 
   /* The loop to which this info struct refers to.  */
   struct loop *loop;
@@ -286,12 +326,6 @@  typedef struct _loop_vec_info {
   /* The loop nest in which the data dependences are computed.  */
   vec<loop_p> loop_nest;
 
-  /* All data references in the loop.  */
-  vec<data_reference_p> datarefs;
-
-  /* All data dependences in the loop.  */
-  vec<ddr_p> ddrs;
-
   /* Data Dependence Relations defining address ranges that are candidates
      for a run-time aliasing check.  */
   vec<ddr_p> may_alias_ddrs;
@@ -304,14 +338,6 @@  typedef struct _loop_vec_info {
      runtime (loop versioning) misalignment check.  */
   vec<gimple *> may_misalign_stmts;
 
-  /* All interleaving chains of stores in the loop, represented by the first
-     stmt in the chain.  */
-  vec<gimple *> grouped_stores;
-
-  /* All SLP instances in the loop. This is a subset of the set of GROUP_STORES
-     of the loop.  */
-  vec<slp_instance> slp_instances;
-
   /* The unrolling factor needed to SLP the loop. In case of that pure SLP is
      applied to the loop, i.e., no unrolling is needed, this is 1.  */
   unsigned slp_unrolling_factor;
@@ -332,9 +358,6 @@  typedef struct _loop_vec_info {
   /* Cost of a single scalar iteration.  */
   int single_scalar_iteration_cost;
 
-  /* Cost data used by the target cost model.  */
-  void *target_cost_data;
-
   /* When we have grouped data accesses with gaps, we may introduce invalid
      memory accesses.  We peel the last iteration of the loop to prevent
      this.  */
@@ -434,26 +457,9 @@  nested_in_vect_loop_p (struct loop *loop
           && (loop->inner == (gimple_bb (stmt))->loop_father));
 }
 
-typedef struct _bb_vec_info {
-
+typedef struct _bb_vec_info : public vec_info
+{
   basic_block bb;
-  /* All interleaving chains of stores in the basic block, represented by the
-     first stmt in the chain.  */
-  vec<gimple *> grouped_stores;
-
-  /* All SLP instances in the basic block. This is a subset of the set of
-     GROUP_STORES of the basic block.  */
-  vec<slp_instance> slp_instances;
-
-  /* All data references in the basic block.  */
-  vec<data_reference_p> datarefs;
-
-  /* All data dependences in the basic block.  */
-  vec<ddr_p> ddrs;
-
-  /* Cost data used by the target cost model.  */
-  void *target_cost_data;
-
 } *bb_vec_info;
 
 #define BB_VINFO_BB(B)               (B)->bb
@@ -551,8 +557,8 @@  typedef struct _stmt_vec_info {
   /* The stmt to which this info struct refers to.  */
   gimple *stmt;
 
-  /* The loop_vec_info with respect to which STMT is vectorized.  */
-  loop_vec_info loop_vinfo;
+  /* The vec_info with respect to which STMT is vectorized.  */
+  vec_info *vinfo;
 
   /* The vector type to be used for the LHS of this statement.  */
   tree vectype;
@@ -639,9 +645,6 @@  typedef struct _stmt_vec_info {
      indicates whether the stmt needs to be vectorized.  */
   enum vect_relevant relevant;
 
-  /* The bb_vec_info with respect to which STMT is vectorized.  */
-  bb_vec_info bb_vinfo;
-
   /* Is this statement vectorizable or should it be skipped in (partial)
      vectorization.  */
   bool vectorizable;
@@ -659,8 +662,20 @@  typedef struct _stmt_vec_info {
 /* Access Functions.  */
 #define STMT_VINFO_TYPE(S)                 (S)->type
 #define STMT_VINFO_STMT(S)                 (S)->stmt
-#define STMT_VINFO_LOOP_VINFO(S)           (S)->loop_vinfo
-#define STMT_VINFO_BB_VINFO(S)             (S)->bb_vinfo
+inline loop_vec_info
+STMT_VINFO_LOOP_VINFO (stmt_vec_info stmt_vinfo)
+{
+  if (loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (stmt_vinfo->vinfo))
+    return loop_vinfo;
+  return NULL;
+}
+inline bb_vec_info
+STMT_VINFO_BB_VINFO (stmt_vec_info stmt_vinfo)
+{
+  if (bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (stmt_vinfo->vinfo))
+    return bb_vinfo;
+  return NULL;
+}
 #define STMT_VINFO_RELEVANT(S)             (S)->relevant
 #define STMT_VINFO_LIVE_P(S)               (S)->live
 #define STMT_VINFO_VECTYPE(S)              (S)->vectype
@@ -996,11 +1011,9 @@  extern bool vect_can_advance_ivs_p (loop
 extern unsigned int current_vector_size;
 extern tree get_vectype_for_scalar_type (tree);
 extern tree get_same_sized_vectype (tree, tree);
-extern bool vect_is_simple_use (tree, gimple *, loop_vec_info,
-				bb_vec_info, gimple **,
+extern bool vect_is_simple_use (tree, gimple *, vec_info *, gimple **,
                                 tree *,  enum vect_def_type *);
-extern bool vect_is_simple_use_1 (tree, gimple *, loop_vec_info,
-				  bb_vec_info, gimple **,
+extern bool vect_is_simple_use_1 (tree, gimple *, vec_info *, gimple **,
 				  tree *,  enum vect_def_type *, tree *);
 extern bool supportable_widening_operation (enum tree_code, gimple *, tree,
 					    tree, enum tree_code *,
@@ -1009,8 +1022,7 @@  extern bool supportable_widening_operati
 extern bool supportable_narrowing_operation (enum tree_code, tree, tree,
 					     enum tree_code *,
 					     int *, vec<tree> *);
-extern stmt_vec_info new_stmt_vec_info (gimple *stmt, loop_vec_info,
-                                        bb_vec_info);
+extern stmt_vec_info new_stmt_vec_info (gimple *stmt, vec_info *);
 extern void free_stmt_vec_info (gimple *stmt);
 extern tree vectorizable_function (gcall *, tree, tree);
 extern void vect_model_simple_cost (stmt_vec_info, int, enum vect_def_type *,
@@ -1060,14 +1072,13 @@  extern tree vect_get_smallest_scalar_typ
 extern bool vect_analyze_data_ref_dependences (loop_vec_info, int *);
 extern bool vect_slp_analyze_data_ref_dependences (bb_vec_info);
 extern bool vect_enhance_data_refs_alignment (loop_vec_info);
-extern bool vect_analyze_data_refs_alignment (loop_vec_info, bb_vec_info);
-extern bool vect_verify_datarefs_alignment (loop_vec_info, bb_vec_info);
-extern bool vect_analyze_data_ref_accesses (loop_vec_info, bb_vec_info);
+extern bool vect_analyze_data_refs_alignment (vec_info *);
+extern bool vect_verify_datarefs_alignment (vec_info *);
+extern bool vect_analyze_data_ref_accesses (vec_info *);
 extern bool vect_prune_runtime_alias_test_list (loop_vec_info);
 extern tree vect_check_gather_scatter (gimple *, loop_vec_info, tree *, tree *,
 				       int *);
-extern bool vect_analyze_data_refs (loop_vec_info, bb_vec_info, int *,
-				    unsigned *);
+extern bool vect_analyze_data_refs (vec_info *, int *, unsigned *);
 extern tree vect_create_data_ref_ptr (gimple *, tree, struct loop *, tree,
 				      tree *, gimple_stmt_iterator *,
 				      gimple **, bool, bool *,
@@ -1121,8 +1132,8 @@  extern bool vect_transform_slp_perm_load
                                           slp_instance, bool);
 extern bool vect_slp_analyze_operations (vec<slp_instance> slp_instances,
 					 void *);
-extern bool vect_schedule_slp (loop_vec_info, bb_vec_info);
-extern bool vect_analyze_slp (loop_vec_info, bb_vec_info, unsigned);
+extern bool vect_schedule_slp (vec_info *);
+extern bool vect_analyze_slp (vec_info *, unsigned);
 extern bool vect_make_slp_decision (loop_vec_info);
 extern void vect_detect_hybrid_slp (loop_vec_info);
 extern void vect_get_slp_defs (vec<tree> , slp_tree,
@@ -1138,10 +1149,10 @@  extern void vect_slp_transform_bb (basic
    in the future.  */
 typedef gimple *(* vect_recog_func_ptr) (vec<gimple *> *, tree *, tree *);
 #define NUM_PATTERNS 13
-void vect_pattern_recog (loop_vec_info, bb_vec_info);
+void vect_pattern_recog (vec_info *);
 
 /* In tree-vectorizer.c.  */
 unsigned vectorize_loops (void);
-void vect_destroy_datarefs (loop_vec_info, bb_vec_info);
+void vect_destroy_datarefs (vec_info *);
 
 #endif  /* GCC_TREE_VECTORIZER_H  */