
[wide-int,5/8] Use LOG2_BITS_PER_UNIT

Message ID 87r44pdtfn.fsf@talisman.default
State New

Commit Message

Richard Sandiford April 22, 2014, 8:02 p.m. UTC
Looks like a few uses of the old idiom:

  BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT)

have crept in.  This patch replaces them with LOG2_BITS_PER_UNIT.
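
The two spellings compute the same shift amount whenever BITS_PER_UNIT
is a power of two.  As a rough standalone sketch of that equivalence
(the macro values and the exact_log2 stand-in below are illustrative
placeholders, not GCC's actual target headers):

  /* Standalone illustration only; BITS_PER_UNIT, LOG2_BITS_PER_UNIT and
     exact_log2 here are stand-ins, not the real GCC definitions.  */
  #include <assert.h>

  #define BITS_PER_UNIT 8        /* hypothetical target value */
  #define LOG2_BITS_PER_UNIT 3   /* log2 of BITS_PER_UNIT */

  /* Simplified stand-in for GCC's exact_log2: returns log2 (X) if X is
     a power of two, -1 otherwise.  */
  static int
  exact_log2 (unsigned int x)
  {
    int log = 0;
    if (x == 0 || (x & (x - 1)) != 0)
      return -1;
    while (x >>= 1)
      log++;
    return log;
  }

  int
  main (void)
  {
    /* The old idiom from the patched files...  */
    int old_shift = (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
    /* ...and the replacement; both give the byte-to-bit shift count.  */
    int new_shift = LOG2_BITS_PER_UNIT;
    assert (old_shift == new_shift);
    return 0;
  }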

Tested on x86_64-linux-gnu.  OK to install?

Thanks,
Richard

Comments

Kenneth Zadeck April 22, 2014, 8:31 p.m. UTC | #1
On 04/22/2014 04:02 PM, Richard Sandiford wrote:
> Looks like a few uses of the old idiom:
>
>    BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT)
I do not think that these crept in as much as they were never squished out.

> have crept in.  This patch replaces them with LOG2_BITS_PER_UNIT.
>
> Tested on x86_64-linux-gnu.  OK to install?
>
> Thanks,
> Richard
>
>
> Index: gcc/expr.c
> ===================================================================
> --- gcc/expr.c	2014-04-22 20:58:26.969683484 +0100
> +++ gcc/expr.c	2014-04-22 21:00:26.377614881 +0100
> @@ -6801,8 +6801,7 @@ get_inner_reference (tree exp, HOST_WIDE
>   	      if (!integer_zerop (off))
>   		{
>   		  offset_int boff, coff = mem_ref_offset (exp);
> -		  boff = wi::lshift (coff, (BITS_PER_UNIT == 8
> -					    ? 3 : exact_log2 (BITS_PER_UNIT)));
> +		  boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
>   		  bit_offset += boff;
>   		}
>   	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
> @@ -6828,8 +6827,7 @@ get_inner_reference (tree exp, HOST_WIDE
>       {
>         offset_int tem = wi::sext (wi::to_offset (offset),
>   				 TYPE_PRECISION (sizetype));
> -      tem = wi::lshift (tem, (BITS_PER_UNIT == 8
> -			      ? 3 : exact_log2 (BITS_PER_UNIT)));
> +      tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
>         tem += bit_offset;
>         if (wi::fits_shwi_p (tem))
>   	{
> @@ -6844,16 +6842,12 @@ get_inner_reference (tree exp, HOST_WIDE
>         /* Avoid returning a negative bitpos as this may wreak havoc later.  */
>         if (wi::neg_p (bit_offset))
>           {
> -	  offset_int mask
> -	    = wi::mask <offset_int> (BITS_PER_UNIT == 8
> -				     ? 3 : exact_log2 (BITS_PER_UNIT),
> -				     false);
> +	  offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
>   	  offset_int tem = bit_offset.and_not (mask);
>   	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
>   	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
>   	  bit_offset -= tem;
> -	  tem = wi::arshift (tem, (BITS_PER_UNIT == 8
> -				   ? 3 : exact_log2 (BITS_PER_UNIT)));
> +	  tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
>   	  offset = size_binop (PLUS_EXPR, offset,
>   			       wide_int_to_tree (sizetype, tem));
>   	}
> Index: gcc/tree-dfa.c
> ===================================================================
> --- gcc/tree-dfa.c	2014-04-22 20:58:27.020683881 +0100
> +++ gcc/tree-dfa.c	2014-04-22 21:00:26.378614888 +0100
> @@ -463,10 +463,7 @@ get_ref_base_and_extent (tree exp, HOST_
>   			  {
>   			    offset_int tem = (wi::to_offset (ssize)
>   					      - wi::to_offset (fsize));
> -			    if (BITS_PER_UNIT == 8)
> -			      tem = wi::lshift (tem, 3);
> -			    else
> -			      tem *= BITS_PER_UNIT;
> +			    tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
>   			    tem -= woffset;
>   			    maxsize += tem;
>   			  }
> @@ -583,8 +580,7 @@ get_ref_base_and_extent (tree exp, HOST_
>   	      else
>   		{
>   		  offset_int off = mem_ref_offset (exp);
> -		  off = wi::lshift (off, (BITS_PER_UNIT == 8
> -					  ? 3 : exact_log2 (BITS_PER_UNIT)));
> +		  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
>   		  off += bit_offset;
>   		  if (wi::fits_shwi_p (off))
>   		    {
> Index: gcc/tree-ssa-alias.c
> ===================================================================
> --- gcc/tree-ssa-alias.c	2014-04-22 20:58:26.969683484 +0100
> +++ gcc/tree-ssa-alias.c	2014-04-22 21:00:26.378614888 +0100
> @@ -1041,8 +1041,7 @@ indirect_ref_may_alias_decl_p (tree ref1
>     /* The offset embedded in MEM_REFs can be negative.  Bias them
>        so that the resulting offset adjustment is positive.  */
>     offset_int moff = mem_ref_offset (base1);
> -  moff = wi::lshift (moff, (BITS_PER_UNIT == 8
> -			    ? 3 : exact_log2 (BITS_PER_UNIT)));
> +  moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
>     if (wi::neg_p (moff))
>       offset2p += (-moff).to_short_addr ();
>     else
> @@ -1118,8 +1117,7 @@ indirect_ref_may_alias_decl_p (tree ref1
>         || TREE_CODE (dbase2) == TARGET_MEM_REF)
>       {
>         offset_int moff = mem_ref_offset (dbase2);
> -      moff = wi::lshift (moff, (BITS_PER_UNIT == 8
> -				? 3 : exact_log2 (BITS_PER_UNIT)));
> +      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
>         if (wi::neg_p (moff))
>   	doffset1 -= (-moff).to_short_addr ();
>         else
> @@ -1217,15 +1215,13 @@ indirect_refs_may_alias_p (tree ref1 ATT
>         /* The offset embedded in MEM_REFs can be negative.  Bias them
>   	 so that the resulting offset adjustment is positive.  */
>         moff = mem_ref_offset (base1);
> -      moff = wi::lshift (moff, (BITS_PER_UNIT == 8
> -				? 3 : exact_log2 (BITS_PER_UNIT)));
> +      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
>         if (wi::neg_p (moff))
>   	offset2 += (-moff).to_short_addr ();
>         else
>   	offset1 += moff.to_shwi ();
>         moff = mem_ref_offset (base2);
> -      moff = wi::lshift (moff, (BITS_PER_UNIT == 8
> -				? 3 : exact_log2 (BITS_PER_UNIT)));
> +      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
>         if (wi::neg_p (moff))
>   	offset1 += (-moff).to_short_addr ();
>         else
> @@ -2200,12 +2196,10 @@ stmt_kills_ref_p_1 (gimple stmt, ao_ref
>   				       TREE_OPERAND (ref->base, 1)))
>   		{
>   		  offset_int off1 = mem_ref_offset (base);
> -		  off1 = wi::lshift (off1, (BITS_PER_UNIT == 8
> -					    ? 3 : exact_log2 (BITS_PER_UNIT)));
> +		  off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
>   		  off1 += offset;
>   		  offset_int off2 = mem_ref_offset (ref->base);
> -		  off2 = wi::lshift (off2, (BITS_PER_UNIT == 8
> -					    ? 3 : exact_log2 (BITS_PER_UNIT)));
> +		  off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
>   		  off2 += ref_offset;
>   		  if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
>   		    {
> Index: gcc/tree-ssa-sccvn.c
> ===================================================================
> --- gcc/tree-ssa-sccvn.c	2014-04-22 20:58:26.969683484 +0100
> +++ gcc/tree-ssa-sccvn.c	2014-04-22 21:00:26.378614888 +0100
> @@ -817,8 +817,7 @@ copy_reference_ops_from_ref (tree ref, v
>   		    offset_int off
>   		      = (wi::to_offset (this_offset)
>   			 + wi::lrshift (wi::to_offset (bit_offset),
> -					BITS_PER_UNIT == 8
> -					? 3 : exact_log2 (BITS_PER_UNIT)));
> +					LOG2_BITS_PER_UNIT));
>   		    if (wi::fits_shwi_p (off)
>   			/* Probibit value-numbering zero offset components
>   			   of addresses the same before the pass folding

Patch

Index: gcc/expr.c
===================================================================
--- gcc/expr.c	2014-04-22 20:58:26.969683484 +0100
+++ gcc/expr.c	2014-04-22 21:00:26.377614881 +0100
@@ -6801,8 +6801,7 @@  get_inner_reference (tree exp, HOST_WIDE
 	      if (!integer_zerop (off))
 		{
 		  offset_int boff, coff = mem_ref_offset (exp);
-		  boff = wi::lshift (coff, (BITS_PER_UNIT == 8
-					    ? 3 : exact_log2 (BITS_PER_UNIT)));
+		  boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
 		  bit_offset += boff;
 		}
 	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
@@ -6828,8 +6827,7 @@  get_inner_reference (tree exp, HOST_WIDE
     {
       offset_int tem = wi::sext (wi::to_offset (offset),
 				 TYPE_PRECISION (sizetype));
-      tem = wi::lshift (tem, (BITS_PER_UNIT == 8
-			      ? 3 : exact_log2 (BITS_PER_UNIT)));
+      tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
       tem += bit_offset;
       if (wi::fits_shwi_p (tem))
 	{
@@ -6844,16 +6842,12 @@  get_inner_reference (tree exp, HOST_WIDE
       /* Avoid returning a negative bitpos as this may wreak havoc later.  */
       if (wi::neg_p (bit_offset))
         {
-	  offset_int mask
-	    = wi::mask <offset_int> (BITS_PER_UNIT == 8
-				     ? 3 : exact_log2 (BITS_PER_UNIT),
-				     false);
+	  offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
 	  offset_int tem = bit_offset.and_not (mask);
 	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
 	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
 	  bit_offset -= tem;
-	  tem = wi::arshift (tem, (BITS_PER_UNIT == 8
-				   ? 3 : exact_log2 (BITS_PER_UNIT)));
+	  tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
 	  offset = size_binop (PLUS_EXPR, offset,
 			       wide_int_to_tree (sizetype, tem));
 	}
Index: gcc/tree-dfa.c
===================================================================
--- gcc/tree-dfa.c	2014-04-22 20:58:27.020683881 +0100
+++ gcc/tree-dfa.c	2014-04-22 21:00:26.378614888 +0100
@@ -463,10 +463,7 @@  get_ref_base_and_extent (tree exp, HOST_
 			  {
 			    offset_int tem = (wi::to_offset (ssize)
 					      - wi::to_offset (fsize));
-			    if (BITS_PER_UNIT == 8)
-			      tem = wi::lshift (tem, 3);
-			    else
-			      tem *= BITS_PER_UNIT;
+			    tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
 			    tem -= woffset;
 			    maxsize += tem;
 			  }
@@ -583,8 +580,7 @@  get_ref_base_and_extent (tree exp, HOST_
 	      else
 		{
 		  offset_int off = mem_ref_offset (exp);
-		  off = wi::lshift (off, (BITS_PER_UNIT == 8
-					  ? 3 : exact_log2 (BITS_PER_UNIT)));
+		  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
 		  off += bit_offset;
 		  if (wi::fits_shwi_p (off))
 		    {
Index: gcc/tree-ssa-alias.c
===================================================================
--- gcc/tree-ssa-alias.c	2014-04-22 20:58:26.969683484 +0100
+++ gcc/tree-ssa-alias.c	2014-04-22 21:00:26.378614888 +0100
@@ -1041,8 +1041,7 @@  indirect_ref_may_alias_decl_p (tree ref1
   /* The offset embedded in MEM_REFs can be negative.  Bias them
      so that the resulting offset adjustment is positive.  */
   offset_int moff = mem_ref_offset (base1);
-  moff = wi::lshift (moff, (BITS_PER_UNIT == 8
-			    ? 3 : exact_log2 (BITS_PER_UNIT)));
+  moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
   if (wi::neg_p (moff))
     offset2p += (-moff).to_short_addr ();
   else
@@ -1118,8 +1117,7 @@  indirect_ref_may_alias_decl_p (tree ref1
       || TREE_CODE (dbase2) == TARGET_MEM_REF)
     {
       offset_int moff = mem_ref_offset (dbase2);
-      moff = wi::lshift (moff, (BITS_PER_UNIT == 8
-				? 3 : exact_log2 (BITS_PER_UNIT)));
+      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
       if (wi::neg_p (moff))
 	doffset1 -= (-moff).to_short_addr ();
       else
@@ -1217,15 +1215,13 @@  indirect_refs_may_alias_p (tree ref1 ATT
       /* The offset embedded in MEM_REFs can be negative.  Bias them
 	 so that the resulting offset adjustment is positive.  */
       moff = mem_ref_offset (base1);
-      moff = wi::lshift (moff, (BITS_PER_UNIT == 8
-				? 3 : exact_log2 (BITS_PER_UNIT)));
+      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
       if (wi::neg_p (moff))
 	offset2 += (-moff).to_short_addr ();
       else
 	offset1 += moff.to_shwi ();
       moff = mem_ref_offset (base2);
-      moff = wi::lshift (moff, (BITS_PER_UNIT == 8
-				? 3 : exact_log2 (BITS_PER_UNIT)));
+      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
       if (wi::neg_p (moff))
 	offset1 += (-moff).to_short_addr ();
       else
@@ -2200,12 +2196,10 @@  stmt_kills_ref_p_1 (gimple stmt, ao_ref
 				       TREE_OPERAND (ref->base, 1)))
 		{
 		  offset_int off1 = mem_ref_offset (base);
-		  off1 = wi::lshift (off1, (BITS_PER_UNIT == 8
-					    ? 3 : exact_log2 (BITS_PER_UNIT)));
+		  off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
 		  off1 += offset;
 		  offset_int off2 = mem_ref_offset (ref->base);
-		  off2 = wi::lshift (off2, (BITS_PER_UNIT == 8
-					    ? 3 : exact_log2 (BITS_PER_UNIT)));
+		  off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
 		  off2 += ref_offset;
 		  if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
 		    {
Index: gcc/tree-ssa-sccvn.c
===================================================================
--- gcc/tree-ssa-sccvn.c	2014-04-22 20:58:26.969683484 +0100
+++ gcc/tree-ssa-sccvn.c	2014-04-22 21:00:26.378614888 +0100
@@ -817,8 +817,7 @@  copy_reference_ops_from_ref (tree ref, v
 		    offset_int off
 		      = (wi::to_offset (this_offset)
 			 + wi::lrshift (wi::to_offset (bit_offset),
-					BITS_PER_UNIT == 8
-					? 3 : exact_log2 (BITS_PER_UNIT)));
+					LOG2_BITS_PER_UNIT));
 		    if (wi::fits_shwi_p (off)
 			/* Probibit value-numbering zero offset components
 			   of addresses the same before the pass folding