
Make more use of combined_fn

Message ID 87mvuqq8qo.fsf@e105548-lin.cambridge.arm.com
State New

Commit Message

Richard Sandiford Nov. 7, 2015, 12:44 p.m. UTC
This patch generalises fold-const.[hc] routines to use combined_fn
instead of built_in_function.  It also updates gimple-ssa-backprop.c
since the update is simple and it avoids churn on the call to
negate_mathfn_p.
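
For readers coming from built_in_function: combined_fn is a single enum that
merges the BUILT_IN_* codes with the internal-function IFN_* codes, so a
switch written with the CASE_CFN_* macros from case-cfn-macros.h matches the
float/double/long double built-ins as well as the corresponding internal
function.  A minimal sketch of the idiom (hypothetical helper, not part of
the patch):

  /* Hypothetical example only: return true if the CALL_EXPR CALL computes
     a sine.  CASE_CFN_SIN covers every case that CASE_FLT_FN (BUILT_IN_SIN)
     used to (sin, sinf, sinl), plus the internal function IFN_SIN.  */
  static bool
  sin_call_p (tree call)
  {
    switch (get_call_combined_fn (call))
      {
      CASE_CFN_SIN:
        return true;
      default:
        return false;
      }
  }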

Tested on x86_64-linux-gnu, aarch64-linux-gnu and arm-linux-gnueabi.
OK to install?

Thanks,
Richard

[I've attached a -b form of the patch too since it's easier to read.]


gcc/
	* fold-const.h (negate_mathfn_p): Take a combined_fn rather
	than a built_in_function.
	(tree_call_nonnegative_warnv_p): Take a combined_fn rather than
	a function decl.
	(integer_valued_real_call_p): Likewise.
	* fold-const.c: Include case-cfn-macros.h.
	(negate_mathfn_p): Take a combined_fn rather than a built_in_function.
	(negate_expr_p): Update accordingly.
	(tree_call_nonnegative_warnv_p): Take a combined_fn rather than
	a function decl.
	(integer_valued_real_call_p): Likewise.
	(tree_invalid_nonnegative_warnv_p): Update accordingly.
	(integer_valued_real_p): Likewise.
	* gimple-fold.c (gimple_call_nonnegative_warnv_p): Update call
	to tree_call_nonnegative_warnv_p.
	(gimple_call_integer_valued_real_p): Likewise for
	integer_valued_real_call_p.
	* gimple-ssa-backprop.c: Include case-cfn-macros.h.
	(backprop::process_builtin_call_use): Extend to combined_fn.
	(strip_sign_op_1): Likewise.
	(backprop::process_use): Don't check for built-in calls here.
	(backprop::execute): Likewise.
	(backprop::optimize_builtin_call): Update call to negate_mathfn_p.

Comments

Jeff Law Nov. 9, 2015, 10:42 p.m. UTC | #1
On 11/07/2015 05:44 AM, Richard Sandiford wrote:
> This patch generalises fold-const.[hc] routines to use combined_fn
> instead of built_in_function.  It also updates gimple-ssa-backprop,c
> since the update is simple and it avoids churn on the call to
> negate_mathfn_p.
>
> Tested on x86_64-linux-gnu, aarch64-linux-gnu and arm-linux-gnueabi.
> OK to install?
>
> Thanks,
> Richard
>
> [I've attached a -b form of the patch too since it's easier to read.]
Thanks for that.  I was thinking that would have made one of the later 
ones easier to read, but it was small enough not to really matter.


>
>
> gcc/
> 	* fold-const.h (negate_mathfn_p): Take a combined_fn rather
> 	than a built_in_function.
> 	(tree_call_nonnegative_warnv_p): Take a combined_fn rather than
> 	a function decl.
> 	(integer_valued_real_call_p): Likewise.
> 	* fold-const.c: Include case-cfn-macros.h
> 	(negate_mathfn_p): Take a combined_fn rather than a built_in_function.
> 	(negate_expr_p): Update accordingly.
> 	(tree_call_nonnegative_warnv_p): Take a combined_fn rather than
> 	a function decl.
> 	(integer_valued_real_call_p): Likewise.
> 	(tree_invalid_nonnegative_warnv_p): Update accordingly.
> 	(integer_valued_real_p): Likewise.
> 	* gimple-fold.c (gimple_call_nonnegative_warnv_p): Update call
> 	to tree_call_nonnegative_warnv_p.
> 	(gimple_call_integer_valued_real_p): Likewise
> 	integer_valued_real_call_p.
> 	* gimple-ssa-backprop.c: Include case-cfn-macros.h.
> 	(backprop::process_builtin_call_use): Extend to combined_fn.
> 	(strip_sign_op_1): Likewise.
> 	(backprop::process_use): Don't check for built-in calls here.
> 	(backprop::execute): Likewise.
> 	(backprop::optimize_builtin_call): Update call to negate_mathfn_p.
>
OK
jeff

Patch

diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index ae28445..a7085ef 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -73,6 +73,7 @@  along with GCC; see the file COPYING3.  If not see
 #include "params.h"
 #include "tree-into-ssa.h"
 #include "md5.h"
+#include "case-cfn-macros.h"
 
 #ifndef LOAD_EXTEND_OP
 #define LOAD_EXTEND_OP(M) UNKNOWN
@@ -313,39 +314,39 @@  fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
    is odd, i.e. -f(x) == f(-x).  */
 
 bool
-negate_mathfn_p (enum built_in_function code)
-{
-  switch (code)
-    {
-    CASE_FLT_FN (BUILT_IN_ASIN):
-    CASE_FLT_FN (BUILT_IN_ASINH):
-    CASE_FLT_FN (BUILT_IN_ATAN):
-    CASE_FLT_FN (BUILT_IN_ATANH):
-    CASE_FLT_FN (BUILT_IN_CASIN):
-    CASE_FLT_FN (BUILT_IN_CASINH):
-    CASE_FLT_FN (BUILT_IN_CATAN):
-    CASE_FLT_FN (BUILT_IN_CATANH):
-    CASE_FLT_FN (BUILT_IN_CBRT):
-    CASE_FLT_FN (BUILT_IN_CPROJ):
-    CASE_FLT_FN (BUILT_IN_CSIN):
-    CASE_FLT_FN (BUILT_IN_CSINH):
-    CASE_FLT_FN (BUILT_IN_CTAN):
-    CASE_FLT_FN (BUILT_IN_CTANH):
-    CASE_FLT_FN (BUILT_IN_ERF):
-    CASE_FLT_FN (BUILT_IN_LLROUND):
-    CASE_FLT_FN (BUILT_IN_LROUND):
-    CASE_FLT_FN (BUILT_IN_ROUND):
-    CASE_FLT_FN (BUILT_IN_SIN):
-    CASE_FLT_FN (BUILT_IN_SINH):
-    CASE_FLT_FN (BUILT_IN_TAN):
-    CASE_FLT_FN (BUILT_IN_TANH):
-    CASE_FLT_FN (BUILT_IN_TRUNC):
+negate_mathfn_p (combined_fn fn)
+{
+  switch (fn)
+    {
+    CASE_CFN_ASIN:
+    CASE_CFN_ASINH:
+    CASE_CFN_ATAN:
+    CASE_CFN_ATANH:
+    CASE_CFN_CASIN:
+    CASE_CFN_CASINH:
+    CASE_CFN_CATAN:
+    CASE_CFN_CATANH:
+    CASE_CFN_CBRT:
+    CASE_CFN_CPROJ:
+    CASE_CFN_CSIN:
+    CASE_CFN_CSINH:
+    CASE_CFN_CTAN:
+    CASE_CFN_CTANH:
+    CASE_CFN_ERF:
+    CASE_CFN_LLROUND:
+    CASE_CFN_LROUND:
+    CASE_CFN_ROUND:
+    CASE_CFN_SIN:
+    CASE_CFN_SINH:
+    CASE_CFN_TAN:
+    CASE_CFN_TANH:
+    CASE_CFN_TRUNC:
       return true;
 
-    CASE_FLT_FN (BUILT_IN_LLRINT):
-    CASE_FLT_FN (BUILT_IN_LRINT):
-    CASE_FLT_FN (BUILT_IN_NEARBYINT):
-    CASE_FLT_FN (BUILT_IN_RINT):
+    CASE_CFN_LLRINT:
+    CASE_CFN_LRINT:
+    CASE_CFN_NEARBYINT:
+    CASE_CFN_RINT:
       return !flag_rounding_math;
 
     default:
@@ -506,7 +507,7 @@  negate_expr_p (tree t)
 
     case CALL_EXPR:
       /* Negate -f(x) as f(-x).  */
-      if (negate_mathfn_p (builtin_mathfn_code (t)))
+      if (negate_mathfn_p (get_call_combined_fn (t)))
 	return negate_expr_p (CALL_EXPR_ARG (t, 0));
       break;
 
@@ -693,7 +694,7 @@  fold_negate_expr (location_t loc, tree t)
 
     case CALL_EXPR:
       /* Negate -f(x) as f(-x).  */
-      if (negate_mathfn_p (builtin_mathfn_code (t))
+      if (negate_mathfn_p (get_call_combined_fn (t))
 	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
 	{
 	  tree fndecl, arg;
@@ -12905,91 +12906,90 @@  tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
 
 bool
-tree_call_nonnegative_warnv_p (tree type, tree fndecl, tree arg0, tree arg1,
+tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
 			       bool *strict_overflow_p, int depth)
 {
-  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
-    switch (DECL_FUNCTION_CODE (fndecl))
-      {
-	CASE_FLT_FN (BUILT_IN_ACOS):
-	CASE_FLT_FN (BUILT_IN_ACOSH):
-	CASE_FLT_FN (BUILT_IN_CABS):
-	CASE_FLT_FN (BUILT_IN_COSH):
-	CASE_FLT_FN (BUILT_IN_ERFC):
-	CASE_FLT_FN (BUILT_IN_EXP):
-	CASE_FLT_FN (BUILT_IN_EXP10):
-	CASE_FLT_FN (BUILT_IN_EXP2):
-	CASE_FLT_FN (BUILT_IN_FABS):
-	CASE_FLT_FN (BUILT_IN_FDIM):
-	CASE_FLT_FN (BUILT_IN_HYPOT):
-	CASE_FLT_FN (BUILT_IN_POW10):
-	CASE_INT_FN (BUILT_IN_FFS):
-	CASE_INT_FN (BUILT_IN_PARITY):
-	CASE_INT_FN (BUILT_IN_POPCOUNT):
-	CASE_INT_FN (BUILT_IN_CLZ):
-	CASE_INT_FN (BUILT_IN_CLRSB):
-      case BUILT_IN_BSWAP32:
-      case BUILT_IN_BSWAP64:
+  switch (fn)
+    {
+    CASE_CFN_ACOS:
+    CASE_CFN_ACOSH:
+    CASE_CFN_CABS:
+    CASE_CFN_COSH:
+    CASE_CFN_ERFC:
+    CASE_CFN_EXP:
+    CASE_CFN_EXP10:
+    CASE_CFN_EXP2:
+    CASE_CFN_FABS:
+    CASE_CFN_FDIM:
+    CASE_CFN_HYPOT:
+    CASE_CFN_POW10:
+    CASE_CFN_FFS:
+    CASE_CFN_PARITY:
+    CASE_CFN_POPCOUNT:
+    CASE_CFN_CLZ:
+    CASE_CFN_CLRSB:
+    case CFN_BUILT_IN_BSWAP32:
+    case CFN_BUILT_IN_BSWAP64:
       /* Always true.  */
       return true;
 
-	CASE_FLT_FN (BUILT_IN_SQRT):
+    CASE_CFN_SQRT:
       /* sqrt(-0.0) is -0.0.  */
       if (!HONOR_SIGNED_ZEROS (element_mode (type)))
 	return true;
       return RECURSE (arg0);
 
-	CASE_FLT_FN (BUILT_IN_ASINH):
-	CASE_FLT_FN (BUILT_IN_ATAN):
-	CASE_FLT_FN (BUILT_IN_ATANH):
-	CASE_FLT_FN (BUILT_IN_CBRT):
-	CASE_FLT_FN (BUILT_IN_CEIL):
-	CASE_FLT_FN (BUILT_IN_ERF):
-	CASE_FLT_FN (BUILT_IN_EXPM1):
-	CASE_FLT_FN (BUILT_IN_FLOOR):
-	CASE_FLT_FN (BUILT_IN_FMOD):
-	CASE_FLT_FN (BUILT_IN_FREXP):
-	CASE_FLT_FN (BUILT_IN_ICEIL):
-	CASE_FLT_FN (BUILT_IN_IFLOOR):
-	CASE_FLT_FN (BUILT_IN_IRINT):
-	CASE_FLT_FN (BUILT_IN_IROUND):
-	CASE_FLT_FN (BUILT_IN_LCEIL):
-	CASE_FLT_FN (BUILT_IN_LDEXP):
-	CASE_FLT_FN (BUILT_IN_LFLOOR):
-	CASE_FLT_FN (BUILT_IN_LLCEIL):
-	CASE_FLT_FN (BUILT_IN_LLFLOOR):
-	CASE_FLT_FN (BUILT_IN_LLRINT):
-	CASE_FLT_FN (BUILT_IN_LLROUND):
-	CASE_FLT_FN (BUILT_IN_LRINT):
-	CASE_FLT_FN (BUILT_IN_LROUND):
-	CASE_FLT_FN (BUILT_IN_MODF):
-	CASE_FLT_FN (BUILT_IN_NEARBYINT):
-	CASE_FLT_FN (BUILT_IN_RINT):
-	CASE_FLT_FN (BUILT_IN_ROUND):
-	CASE_FLT_FN (BUILT_IN_SCALB):
-	CASE_FLT_FN (BUILT_IN_SCALBLN):
-	CASE_FLT_FN (BUILT_IN_SCALBN):
-	CASE_FLT_FN (BUILT_IN_SIGNBIT):
-	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
-	CASE_FLT_FN (BUILT_IN_SINH):
-	CASE_FLT_FN (BUILT_IN_TANH):
-	CASE_FLT_FN (BUILT_IN_TRUNC):
+    CASE_CFN_ASINH:
+    CASE_CFN_ATAN:
+    CASE_CFN_ATANH:
+    CASE_CFN_CBRT:
+    CASE_CFN_CEIL:
+    CASE_CFN_ERF:
+    CASE_CFN_EXPM1:
+    CASE_CFN_FLOOR:
+    CASE_CFN_FMOD:
+    CASE_CFN_FREXP:
+    CASE_CFN_ICEIL:
+    CASE_CFN_IFLOOR:
+    CASE_CFN_IRINT:
+    CASE_CFN_IROUND:
+    CASE_CFN_LCEIL:
+    CASE_CFN_LDEXP:
+    CASE_CFN_LFLOOR:
+    CASE_CFN_LLCEIL:
+    CASE_CFN_LLFLOOR:
+    CASE_CFN_LLRINT:
+    CASE_CFN_LLROUND:
+    CASE_CFN_LRINT:
+    CASE_CFN_LROUND:
+    CASE_CFN_MODF:
+    CASE_CFN_NEARBYINT:
+    CASE_CFN_RINT:
+    CASE_CFN_ROUND:
+    CASE_CFN_SCALB:
+    CASE_CFN_SCALBLN:
+    CASE_CFN_SCALBN:
+    CASE_CFN_SIGNBIT:
+    CASE_CFN_SIGNIFICAND:
+    CASE_CFN_SINH:
+    CASE_CFN_TANH:
+    CASE_CFN_TRUNC:
       /* True if the 1st argument is nonnegative.  */
       return RECURSE (arg0);
 
-	CASE_FLT_FN (BUILT_IN_FMAX):
+    CASE_CFN_FMAX:
       /* True if the 1st OR 2nd arguments are nonnegative.  */
       return RECURSE (arg0) || RECURSE (arg1);
 
-	CASE_FLT_FN (BUILT_IN_FMIN):
+    CASE_CFN_FMIN:
       /* True if the 1st AND 2nd arguments are nonnegative.  */
       return RECURSE (arg0) && RECURSE (arg1);
 
-	CASE_FLT_FN (BUILT_IN_COPYSIGN):
+    CASE_CFN_COPYSIGN:
       /* True if the 2nd argument is nonnegative.  */
       return RECURSE (arg1);
 
-	CASE_FLT_FN (BUILT_IN_POWI):
+    CASE_CFN_POWI:
       /* True if the 1st argument is nonnegative or the second
 	 argument is an even integer.  */
       if (TREE_CODE (arg1) == INTEGER_CST
@@ -12997,7 +12997,7 @@  tree_call_nonnegative_warnv_p (tree type, tree fndecl, tree arg0, tree arg1,
 	return true;
       return RECURSE (arg0);
 
-	CASE_FLT_FN (BUILT_IN_POW):
+    CASE_CFN_POW:
       /* True if the 1st argument is nonnegative or the second
 	 argument is an even integer valued real.  */
       if (TREE_CODE (arg1) == REAL_CST)
@@ -13074,7 +13074,7 @@  tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
 	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
 
 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
-					      get_callee_fndecl (t),
+					      get_call_combined_fn (t),
 					      arg0,
 					      arg1,
 					      strict_overflow_p, depth);
@@ -13477,21 +13477,20 @@  integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
    DEPTH is the current nesting depth of the query.  */
 
 bool
-integer_valued_real_call_p (tree fndecl, tree arg0, tree arg1, int depth)
+integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
 {
-  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
-    switch (DECL_FUNCTION_CODE (fndecl))
+  switch (fn)
     {
-      CASE_FLT_FN (BUILT_IN_CEIL):
-      CASE_FLT_FN (BUILT_IN_FLOOR):
-      CASE_FLT_FN (BUILT_IN_NEARBYINT):
-      CASE_FLT_FN (BUILT_IN_RINT):
-      CASE_FLT_FN (BUILT_IN_ROUND):
-      CASE_FLT_FN (BUILT_IN_TRUNC):
+    CASE_CFN_CEIL:
+    CASE_CFN_FLOOR:
+    CASE_CFN_NEARBYINT:
+    CASE_CFN_RINT:
+    CASE_CFN_ROUND:
+    CASE_CFN_TRUNC:
       return true;
 
-      CASE_FLT_FN (BUILT_IN_FMIN):
-      CASE_FLT_FN (BUILT_IN_FMAX):
+    CASE_CFN_FMIN:
+    CASE_CFN_FMAX:
       return RECURSE (arg0) && RECURSE (arg1);
 
     default:
@@ -13607,7 +13606,7 @@  integer_valued_real_p (tree t, int depth)
 	tree arg1 = (call_expr_nargs (t) > 1
 		     ? CALL_EXPR_ARG (t, 1)
 		     : NULL_TREE);
-	return integer_valued_real_call_p (get_callee_fndecl (t),
+	return integer_valued_real_call_p (get_call_combined_fn (t),
 					   arg0, arg1, depth);
       }
 
diff --git a/gcc/fold-const.h b/gcc/fold-const.h
index 94a21b7..7741802 100644
--- a/gcc/fold-const.h
+++ b/gcc/fold-const.h
@@ -137,12 +137,12 @@  extern bool tree_unary_nonnegative_warnv_p (enum tree_code, tree, tree,
 extern bool tree_binary_nonnegative_warnv_p (enum tree_code, tree, tree, tree,
 					     bool *, int);
 extern bool tree_single_nonnegative_warnv_p (tree, bool *, int);
-extern bool tree_call_nonnegative_warnv_p (tree, tree, tree, tree, bool *,
-					   int);
+extern bool tree_call_nonnegative_warnv_p (tree, combined_fn, tree, tree,
+					   bool *, int);
 
 extern bool integer_valued_real_unary_p (tree_code, tree, int);
 extern bool integer_valued_real_binary_p (tree_code, tree, tree, int);
-extern bool integer_valued_real_call_p (tree, tree, tree, int);
+extern bool integer_valued_real_call_p (combined_fn, tree, tree, int);
 extern bool integer_valued_real_single_p (tree, int);
 extern bool integer_valued_real_p (tree, int = 0);
 
@@ -179,7 +179,7 @@  extern tree sign_bit_p (tree, const_tree);
 extern tree exact_inverse (tree, tree);
 extern tree const_unop (enum tree_code, tree, tree);
 extern tree const_binop (enum tree_code, tree, tree, tree);
-extern bool negate_mathfn_p (enum built_in_function);
+extern bool negate_mathfn_p (combined_fn);
 extern const char *c_getstr (tree);
 
 /* Return OFF converted to a pointer offset type suitable as offset for
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c
index 45840af..b72ea00 100644
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -6197,7 +6197,7 @@  gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
     gimple_call_arg (stmt, 1) : NULL_TREE;
 
   return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
-					gimple_call_fndecl (stmt),
+					gimple_call_combined_fn (stmt),
 					arg0,
 					arg1,
 					strict_overflow_p, depth);
@@ -6290,7 +6290,7 @@  gimple_call_integer_valued_real_p (gimple *stmt, int depth)
   tree arg1 = (gimple_call_num_args (stmt) > 1
 	       ? gimple_call_arg (stmt, 1)
 	       : NULL_TREE);
-  return integer_valued_real_call_p (gimple_call_fndecl (stmt),
+  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
 				     arg0, arg1, depth);
 }
 
diff --git a/gcc/gimple-ssa-backprop.c b/gcc/gimple-ssa-backprop.c
index 91a3c79..d573651 100644
--- a/gcc/gimple-ssa-backprop.c
+++ b/gcc/gimple-ssa-backprop.c
@@ -102,6 +102,7 @@  along with GCC; see the file COPYING3.  If not see
 #include "gimple-fold.h"
 #include "alloc-pool.h"
 #include "tree-hash-traits.h"
+#include "case-cfn-macros.h"
 
 namespace {
 
@@ -337,26 +338,29 @@  backprop::pop_from_worklist ()
 void
 backprop::process_builtin_call_use (gcall *call, tree rhs, usage_info *info)
 {
-  enum built_in_function fn = DECL_FUNCTION_CODE (gimple_call_fndecl (call));
+  combined_fn fn = gimple_call_combined_fn (call);
   tree lhs = gimple_call_lhs (call);
   switch (fn)
     {
-    CASE_FLT_FN (BUILT_IN_COS):
-    CASE_FLT_FN (BUILT_IN_COSH):
-    CASE_FLT_FN (BUILT_IN_CCOS):
-    CASE_FLT_FN (BUILT_IN_CCOSH):
-    CASE_FLT_FN (BUILT_IN_HYPOT):
+    case CFN_LAST:
+      break;
+
+    CASE_CFN_COS:
+    CASE_CFN_COSH:
+    CASE_CFN_CCOS:
+    CASE_CFN_CCOSH:
+    CASE_CFN_HYPOT:
       /* The signs of all inputs are ignored.  */
       info->flags.ignore_sign = true;
       break;
 
-    CASE_FLT_FN (BUILT_IN_COPYSIGN):
+    CASE_CFN_COPYSIGN:
       /* The sign of the first input is ignored.  */
       if (rhs != gimple_call_arg (call, 1))
 	info->flags.ignore_sign = true;
       break;
 
-    CASE_FLT_FN (BUILT_IN_POW):
+    CASE_CFN_POW:
       {
 	/* The sign of the first input is ignored as long as the second
 	   input is an even real.  */
@@ -369,7 +373,7 @@  backprop::process_builtin_call_use (gcall *call, tree rhs, usage_info *info)
 	break;
       }
 
-    CASE_FLT_FN (BUILT_IN_FMA):
+    CASE_CFN_FMA:
       /* In X * X + Y, where Y is distinct from X, the sign of X doesn't
 	 matter.  */
       if (gimple_call_arg (call, 0) == rhs
@@ -472,10 +476,7 @@  backprop::process_use (gimple *stmt, tree rhs, usage_info *info)
     }
 
   if (gcall *call = dyn_cast <gcall *> (stmt))
-    {
-      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
     process_builtin_call_use (call, rhs, info);
-    }
   else if (gassign *assign = dyn_cast <gassign *> (stmt))
     process_assign_use (assign, rhs, info);
   else if (gphi *phi = dyn_cast <gphi *> (stmt))
@@ -686,17 +687,14 @@  strip_sign_op_1 (tree rhs)
 	break;
       }
   else if (gcall *call = dyn_cast <gcall *> (def_stmt))
+    switch (gimple_call_combined_fn (call))
       {
-      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
-	switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
-	  {
-	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
+      CASE_CFN_COPYSIGN:
 	return gimple_call_arg (call, 0);
 
       default:
 	break;
       }
-    }
 
   return NULL_TREE;
 }
@@ -758,11 +756,10 @@  backprop::complete_change (gimple *stmt)
 void
 backprop::optimize_builtin_call (gcall *call, tree lhs, const usage_info *info)
 {
-  tree fndecl = gimple_call_fndecl (call);
-  enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
   /* If we have an f such that -f(x) = f(-x), and if the sign of the result
      doesn't matter, strip any sign operations from the input.  */
-  if (info->flags.ignore_sign && negate_mathfn_p (fn))
+  if (info->flags.ignore_sign
+      && negate_mathfn_p (gimple_call_combined_fn (call)))
     {
       tree new_arg = strip_sign_op (gimple_call_arg (call, 0));
       if (new_arg)
@@ -889,10 +886,7 @@  backprop::execute ()
 	  tree var = m_vars[i].first;
 	  gimple *stmt = SSA_NAME_DEF_STMT (var);
 	  if (gcall *call = dyn_cast <gcall *> (stmt))
-	    {
-	      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
 	    optimize_builtin_call (call, var, info);
-	    }
 	  else if (gassign *assign = dyn_cast <gassign *> (stmt))
 	    optimize_assign (assign, var, info);
 	  else if (gphi *phi = dyn_cast <gphi *> (stmt))