[committed] arm: Make use of RTL predicates

Message ID gkro8k1pas9.fsf@arm.com
State New
Series: [committed] arm: Make use of RTL predicates

Commit Message

Andrea Corallo Nov. 13, 2020, 2:54 p.m. UTC
Hi all,

this patch fixes a number of missing uses of RTL predicates in the arm backend.
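
All the changes replace open-coded GET_CODE comparisons with the
equivalent predicate macros from rtl.h.  As an illustrative sketch
(paraphrased; the real definitions live in gcc/rtl.h), these
predicates are thin wrappers of the form:

  /* Illustrative sketch of the rtl.h predicate macros used below;
     each one simply tests the RTX code of its argument.  */
  #define SUBREG_P(RTX)     (GET_CODE (RTX) == SUBREG)
  #define SYMBOL_REF_P(RTX) (GET_CODE (RTX) == SYMBOL_REF)
  #define LABEL_REF_P(RTX)  (GET_CODE (RTX) == LABEL_REF)

so, for example, "GET_CODE (x) == SYMBOL_REF" becomes
"SYMBOL_REF_P (x)" with no change in behavior.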

Regtested and bootstrapped on arm-linux-gnueabihf.

Committed to master as 156edf21fab, as pre-approved [1].

   Andrea

[1] <https://gcc.gnu.org/pipermail/gcc-patches/2020-November/558959.html>
From 156edf21fab7dd5891c72db7ec58b38ef7d52bfa Mon Sep 17 00:00:00 2001
From: Andrea Corallo <andrea.corallo@arm.com>
Date: Fri, 13 Nov 2020 11:42:04 +0000
Subject: [PATCH] arm: Make use of RTL predicates

2020-11-13  Andrea Corallo  <andrea.corallo@arm.com>

	* config/arm/aarch-common.c (aarch_accumulator_forwarding): Use
	RTL predicates where possible.
	* config/arm/arm.c (legitimate_pic_operand_p)
	(legitimize_pic_address, arm_is_segment_info_known)
	(can_avoid_literal_pool_for_label_p)
	(thumb1_legitimate_address_p, arm_legitimize_address)
	(arm_tls_referenced_p, thumb_legitimate_constant_p)
	(REG_OR_SUBREG_REG, thumb1_rtx_costs, thumb1_size_rtx_costs)
	(arm_adjust_cost, arm_coproc_mem_operand_wb)
	(neon_vector_mem_operand, neon_struct_mem_operand)
	(symbol_mentioned_p, label_mentioned_p)
	(load_multiple_sequence, store_multiple_sequence)
	(arm_select_cc_mode, arm_reload_in_hi, arm_reload_out_hi)
	(mem_ok_for_ldrd_strd, arm_emit_call_insn, output_move_neon)
	(arm_attr_length_move_neon, arm_assemble_integer)
	(arm_emit_coreregs_64bit_shift, arm_valid_symbolic_address_p)
	(extract_base_offset_in_addr, fusion_load_store): Likewise.
---
 gcc/config/arm/aarch-common.c |  2 +-
 gcc/config/arm/arm.c          | 90 +++++++++++++++++------------------
 2 files changed, 46 insertions(+), 46 deletions(-)

Patch

diff --git a/gcc/config/arm/aarch-common.c b/gcc/config/arm/aarch-common.c
index 6bc6ccf9411..e7b13f00fb4 100644
--- a/gcc/config/arm/aarch-common.c
+++ b/gcc/config/arm/aarch-common.c
@@ -485,7 +485,7 @@  aarch_accumulator_forwarding (rtx_insn *producer, rtx_insn *consumer)
 	return 0;
     }
 
-  if (GET_CODE (accumulator) == SUBREG)
+  if (SUBREG_P (accumulator))
     accumulator = SUBREG_REG (accumulator);
 
   if (!REG_P (accumulator))
diff --git a/gcc/config/arm/arm.c b/gcc/config/arm/arm.c
index 5612d1e7e18..04190b1880a 100644
--- a/gcc/config/arm/arm.c
+++ b/gcc/config/arm/arm.c
@@ -7775,7 +7775,7 @@  arm_function_ok_for_sibcall (tree decl, tree exp)
 int
 legitimate_pic_operand_p (rtx x)
 {
-  if (GET_CODE (x) == SYMBOL_REF
+  if (SYMBOL_REF_P (x)
       || (GET_CODE (x) == CONST
 	  && GET_CODE (XEXP (x, 0)) == PLUS
 	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF))
@@ -7904,8 +7904,8 @@  legitimize_pic_address (rtx orig, machine_mode mode, rtx reg, rtx pic_reg,
 {
   gcc_assert (compute_now == (pic_reg != NULL_RTX));
 
-  if (GET_CODE (orig) == SYMBOL_REF
-      || GET_CODE (orig) == LABEL_REF)
+  if (SYMBOL_REF_P (orig)
+      || LABEL_REF_P (orig))
     {
       if (reg == 0)
 	{
@@ -7922,8 +7922,8 @@  legitimize_pic_address (rtx orig, machine_mode mode, rtx reg, rtx pic_reg,
       /* References to weak symbols cannot be resolved locally: they
 	 may be overridden by a non-weak definition at link time.  */
       rtx_insn *insn;
-      if ((GET_CODE (orig) == LABEL_REF
-	   || (GET_CODE (orig) == SYMBOL_REF
+      if ((LABEL_REF_P (orig)
+	   || (SYMBOL_REF_P (orig)
 	       && SYMBOL_REF_LOCAL_P (orig)
 	       && (SYMBOL_REF_DECL (orig)
 		   ? !DECL_WEAK (SYMBOL_REF_DECL (orig)) : 1)
@@ -8177,7 +8177,7 @@  arm_is_segment_info_known (rtx orig, bool *is_readonly)
 {
   *is_readonly = false;
 
-  if (GET_CODE (orig) == LABEL_REF)
+  if (LABEL_REF_P (orig))
     {
       *is_readonly = true;
       return true;
@@ -8437,7 +8437,7 @@  can_avoid_literal_pool_for_label_p (rtx x)
      (set (reg r0) (mem (reg r0))).
      No extra register is required, and (mem (reg r0)) won't cause the use
      of literal pools.  */
-  if (arm_disable_literal_pool && GET_CODE (x) == SYMBOL_REF
+  if (arm_disable_literal_pool && SYMBOL_REF_P (x)
       && CONSTANT_POOL_ADDRESS_P (x))
     return 1;
   return 0;
@@ -8816,7 +8816,7 @@  thumb1_legitimate_address_p (machine_mode mode, rtx x, int strict_p)
 
   /* This is PC relative data before arm_reorg runs.  */
   else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
-	   && GET_CODE (x) == SYMBOL_REF
+	   && SYMBOL_REF_P (x)
 	   && CONSTANT_POOL_ADDRESS_P (x) && !flag_pic
 	   && !arm_disable_literal_pool)
     return 1;
@@ -8824,7 +8824,7 @@  thumb1_legitimate_address_p (machine_mode mode, rtx x, int strict_p)
   /* This is PC relative data after arm_reorg runs.  */
   else if ((GET_MODE_SIZE (mode) >= 4 || mode == HFmode)
 	   && reload_completed
-	   && (GET_CODE (x) == LABEL_REF
+	   && (LABEL_REF_P (x)
 	       || (GET_CODE (x) == CONST
 		   && GET_CODE (XEXP (x, 0)) == PLUS
 		   && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
@@ -8884,7 +8884,7 @@  thumb1_legitimate_address_p (machine_mode mode, rtx x, int strict_p)
 
   else if (GET_MODE_CLASS (mode) != MODE_FLOAT
 	   && GET_MODE_SIZE (mode) == 4
-	   && GET_CODE (x) == SYMBOL_REF
+	   && SYMBOL_REF_P (x)
 	   && CONSTANT_POOL_ADDRESS_P (x)
 	   && !arm_disable_literal_pool
 	   && ! (flag_pic
@@ -9212,7 +9212,7 @@  arm_legitimize_address (rtx x, rtx orig_x, machine_mode mode)
 	  x = XEXP (XEXP (x, 0), 0);
 	}
 
-      if (GET_CODE (x) != SYMBOL_REF)
+      if (!SYMBOL_REF_P (x))
 	return x;
 
       gcc_assert (SYMBOL_REF_TLS_MODEL (x) != 0);
@@ -9421,7 +9421,7 @@  arm_tls_referenced_p (rtx x)
   FOR_EACH_SUBRTX (iter, array, x, ALL)
     {
       const_rtx x = *iter;
-      if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0)
+      if (SYMBOL_REF_P (x) && SYMBOL_REF_TLS_MODEL (x) != 0)
 	{
 	  /* ARM currently does not provide relocations to encode TLS variables
 	     into AArch32 instructions, only data, so there is no way to
@@ -9467,7 +9467,7 @@  thumb_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
   return (CONST_INT_P (x)
 	  || CONST_DOUBLE_P (x)
 	  || CONSTANT_ADDRESS_P (x)
-	  || (TARGET_HAVE_MOVT && GET_CODE (x) == SYMBOL_REF)
+	  || (TARGET_HAVE_MOVT && SYMBOL_REF_P (x))
 	  /* On Thumb-1 without MOVT/MOVW and literal pool disabled,
 	     we build the symbol address with upper/lower
 	     relocations.  */
@@ -9511,7 +9511,7 @@  arm_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
 
 #define REG_OR_SUBREG_REG(X)						\
   (REG_P (X)							\
-   || (GET_CODE (X) == SUBREG && REG_P (SUBREG_REG (X))))
+   || (SUBREG_P (X) && REG_P (SUBREG_REG (X))))
 
 #define REG_OR_SUBREG_RTX(X)			\
    (REG_P (X) ? (X) : SUBREG_REG (X))
@@ -9622,7 +9622,7 @@  thumb1_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer)
       /* Memory costs quite a lot for the first word, but subsequent words
 	 load at the equivalent of a single insn each.  */
       return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
-	      + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
+	      + ((SYMBOL_REF_P (x) && CONSTANT_POOL_ADDRESS_P (x))
 		 ? 4 : 0));
 
     case IF_THEN_ELSE:
@@ -9779,7 +9779,7 @@  thumb1_size_rtx_costs (rtx x, enum rtx_code code, enum rtx_code outer)
       return (COSTS_N_INSNS (1)
 	      + COSTS_N_INSNS (1)
 		* ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
-              + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
+              + ((SYMBOL_REF_P (x) && CONSTANT_POOL_ADDRESS_P (x))
                  ? COSTS_N_INSNS (1) : 0));
 
     case IF_THEN_ELSE:
@@ -12399,7 +12399,7 @@  arm_adjust_cost (rtx_insn *insn, int dep_type, rtx_insn *dep, int cost,
 	 constant pool are cached, and that others will miss.  This is a
 	 hack.  */
 
-      if ((GET_CODE (src_mem) == SYMBOL_REF
+      if ((SYMBOL_REF_P (src_mem)
 	   && CONSTANT_POOL_ADDRESS_P (src_mem))
 	  || reg_mentioned_p (stack_pointer_rtx, src_mem)
 	  || reg_mentioned_p (frame_pointer_rtx, src_mem)
@@ -13234,7 +13234,7 @@  arm_coproc_mem_operand_wb (rtx op, int wb_level)
   ind = XEXP (op, 0);
 
   if (reload_completed
-      && (GET_CODE (ind) == LABEL_REF
+      && (LABEL_REF_P (ind)
 	  || (GET_CODE (ind) == CONST
 	      && GET_CODE (XEXP (ind, 0)) == PLUS
 	      && GET_CODE (XEXP (XEXP (ind, 0), 0)) == LABEL_REF
@@ -13410,7 +13410,7 @@  neon_vector_mem_operand (rtx op, int type, bool strict)
   ind = XEXP (op, 0);
 
   if (reload_completed
-      && (GET_CODE (ind) == LABEL_REF
+      && (LABEL_REF_P (ind)
 	  || (GET_CODE (ind) == CONST
 	      && GET_CODE (XEXP (ind, 0)) == PLUS
 	      && GET_CODE (XEXP (XEXP (ind, 0), 0)) == LABEL_REF
@@ -13476,7 +13476,7 @@  neon_struct_mem_operand (rtx op)
   ind = XEXP (op, 0);
 
   if (reload_completed
-      && (GET_CODE (ind) == LABEL_REF
+      && (LABEL_REF_P (ind)
 	  || (GET_CODE (ind) == CONST
 	      && GET_CODE (XEXP (ind, 0)) == PLUS
 	      && GET_CODE (XEXP (XEXP (ind, 0), 0)) == LABEL_REF
@@ -13592,7 +13592,7 @@  symbol_mentioned_p (rtx x)
   const char * fmt;
   int i;
 
-  if (GET_CODE (x) == SYMBOL_REF)
+  if (SYMBOL_REF_P (x))
     return 1;
 
   /* UNSPEC_TLS entries for a symbol include the SYMBOL_REF, but they
@@ -13626,7 +13626,7 @@  label_mentioned_p (rtx x)
   const char * fmt;
   int i;
 
-  if (GET_CODE (x) == LABEL_REF)
+  if (LABEL_REF_P (x))
     return 1;
 
   /* UNSPEC_TLS entries for a symbol include a LABEL_REF for the referencing
@@ -14264,11 +14264,11 @@  load_multiple_sequence (rtx *operands, int nops, int *regs, int *saved_order,
       offset = const0_rtx;
 
       if ((REG_P (reg = XEXP (operands[nops + i], 0))
-	   || (GET_CODE (reg) == SUBREG
+	   || (SUBREG_P (reg)
 	       && REG_P (reg = SUBREG_REG (reg))))
 	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
 	      && ((REG_P (reg = XEXP (XEXP (operands[nops + i], 0), 0)))
-		  || (GET_CODE (reg) == SUBREG
+		  || (SUBREG_P (reg)
 		      && REG_P (reg = SUBREG_REG (reg))))
 	      && (CONST_INT_P (offset
 		  = XEXP (XEXP (operands[nops + i], 0), 1)))))
@@ -14418,11 +14418,11 @@  store_multiple_sequence (rtx *operands, int nops, int nops_total,
       offset = const0_rtx;
 
       if ((REG_P (reg = XEXP (operands[nops + i], 0))
-	   || (GET_CODE (reg) == SUBREG
+	   || (SUBREG_P (reg)
 	       && REG_P (reg = SUBREG_REG (reg))))
 	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
 	      && ((REG_P (reg = XEXP (XEXP (operands[nops + i], 0), 0)))
-		  || (GET_CODE (reg) == SUBREG
+		  || (SUBREG_P (reg)
 		      && REG_P (reg = SUBREG_REG (reg))))
 	      && (CONST_INT_P (offset
 		  = XEXP (XEXP (operands[nops + i], 0), 1)))))
@@ -15752,7 +15752,7 @@  arm_select_cc_mode (enum rtx_code op, rtx x, rtx y)
   /* A compare with a shifted operand.  Because of canonicalization, the
      comparison will have to be swapped when we emit the assembler.  */
   if (GET_MODE (y) == SImode
-      && (REG_P (y) || (GET_CODE (y) == SUBREG))
+      && (REG_P (y) || (SUBREG_P (y)))
       && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
 	  || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
 	  || GET_CODE (x) == ROTATERT))
@@ -15776,14 +15776,14 @@  arm_select_cc_mode (enum rtx_code op, rtx x, rtx y)
      non-canonical, but arm_gen_compare_reg uses this to generate the
      correct canonical form.  */
   if (GET_MODE (y) == SImode
-      && (REG_P (y) || GET_CODE (y) == SUBREG)
+      && (REG_P (y) || SUBREG_P (y))
       && CONST_INT_P (x))
     return CC_RSBmode;
 
   /* This operation is performed swapped, but since we only rely on the Z
      flag we don't need an additional mode.  */
   if (GET_MODE (y) == SImode
-      && (REG_P (y) || (GET_CODE (y) == SUBREG))
+      && (REG_P (y) || (SUBREG_P (y)))
       && GET_CODE (x) == NEG
       && (op ==	EQ || op == NE))
     return CC_Zmode;
@@ -16175,7 +16175,7 @@  arm_reload_in_hi (rtx *operands)
   rtx base, scratch;
   HOST_WIDE_INT offset = 0;
 
-  if (GET_CODE (ref) == SUBREG)
+  if (SUBREG_P (ref))
     {
       offset = SUBREG_BYTE (ref);
       ref = SUBREG_REG (ref);
@@ -16303,7 +16303,7 @@  arm_reload_out_hi (rtx *operands)
   rtx base, scratch;
   HOST_WIDE_INT offset = 0;
 
-  if (GET_CODE (ref) == SUBREG)
+  if (SUBREG_P (ref))
     {
       offset = SUBREG_BYTE (ref);
       ref = SUBREG_REG (ref);
@@ -16634,7 +16634,7 @@  mem_ok_for_ldrd_strd (rtx mem, rtx *base, rtx *offset, HOST_WIDE_INT *align)
     return false;
 
   /* Can't deal with subregs.  */
-  if (GET_CODE (mem) == SUBREG)
+  if (SUBREG_P (mem))
     return false;
 
   gcc_assert (MEM_P (mem));
@@ -19534,7 +19534,7 @@  arm_emit_call_insn (rtx pat, rtx addr, bool sibcall)
   if (TARGET_VXWORKS_RTP
       && flag_pic
       && !sibcall
-      && GET_CODE (addr) == SYMBOL_REF
+      && SYMBOL_REF_P (addr)
       && (SYMBOL_REF_DECL (addr)
 	  ? !targetm.binds_local_p (SYMBOL_REF_DECL (addr))
 	  : !SYMBOL_REF_LOCAL_P (addr)))
@@ -20381,7 +20381,7 @@  output_move_neon (rtx *operands)
 	      }
 	    else
 	      {
-		if (TARGET_HAVE_MVE && GET_CODE (addr) == LABEL_REF)
+		if (TARGET_HAVE_MVE && LABEL_REF_P (addr))
 		  sprintf (buff, "v%sr.64\t%%P0, %%1", load ? "ld" : "st");
 		else
 		  sprintf (buff, "v%sr%%?\t%%P0, %%1", load ? "ld" : "st");
@@ -20392,7 +20392,7 @@  output_move_neon (rtx *operands)
 	  {
 	    ops[0] = gen_rtx_REG (DImode, REGNO (reg) + 2 * overlap);
 	    ops[1] = adjust_address (mem, SImode, 8 * overlap);
-	    if (TARGET_HAVE_MVE && GET_CODE (addr) == LABEL_REF)
+	    if (TARGET_HAVE_MVE && LABEL_REF_P (addr))
 	      sprintf (buff, "v%sr.32\t%%P0, %%1", load ? "ld" : "st");
 	    else
 	      sprintf (buff, "v%sr%%?\t%%P0, %%1", load ? "ld" : "st");
@@ -20452,7 +20452,7 @@  arm_attr_length_move_neon (rtx_insn *insn)
   if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS)
     addr = XEXP (addr, 0);
 
-  if (GET_CODE (addr) == LABEL_REF || GET_CODE (addr) == PLUS)
+  if (LABEL_REF_P (addr) || GET_CODE (addr) == PLUS)
     {
       int insns = REG_NREGS (reg) / 2;
       return insns * 4;
@@ -24446,7 +24446,7 @@  arm_assemble_integer (rtx x, unsigned int size, int aligned_p)
       /* Mark symbols as position independent.  We only do this in the
 	 .text segment, not in the .data segment.  */
       if (NEED_GOT_RELOC && flag_pic && making_const_table &&
-	  (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
+	  (SYMBOL_REF_P (x) || LABEL_REF_P (x)))
 	{
 	  /* See legitimize_pic_address for an explanation of the
 	     TARGET_VXWORKS_RTP check.  */
@@ -24454,7 +24454,7 @@  arm_assemble_integer (rtx x, unsigned int size, int aligned_p)
 	     they may be overridden by a non-weak definition at link
 	     time.  */
 	  if (!arm_pic_data_is_text_relative
-	      || (GET_CODE (x) == SYMBOL_REF
+	      || (SYMBOL_REF_P (x)
 		  && (!SYMBOL_REF_LOCAL_P (x)
 		      || (SYMBOL_REF_DECL (x)
 			  ? DECL_WEAK (SYMBOL_REF_DECL (x)) : 0)
@@ -31620,13 +31620,13 @@  arm_emit_coreregs_64bit_shift (enum rtx_code code, rtx out, rtx in,
 
   gcc_assert (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
   gcc_assert (out
-	      && (REG_P (out) || GET_CODE (out) == SUBREG)
+	      && (REG_P (out) || SUBREG_P (out))
 	      && GET_MODE (out) == DImode);
   gcc_assert (in
-	      && (REG_P (in) || GET_CODE (in) == SUBREG)
+	      && (REG_P (in) || SUBREG_P (in))
 	      && GET_MODE (in) == DImode);
   gcc_assert (amount
-	      && (((REG_P (amount) || GET_CODE (amount) == SUBREG)
+	      && (((REG_P (amount) || SUBREG_P (amount))
 		   && GET_MODE (amount) == SImode)
 		  || CONST_INT_P (amount)));
   gcc_assert (scratch1 == NULL
@@ -31860,7 +31860,7 @@  arm_valid_symbolic_address_p (rtx addr)
   if (target_word_relocations)
     return false;
 
-  if (GET_CODE (tmp) == SYMBOL_REF || GET_CODE (tmp) == LABEL_REF)
+  if (SYMBOL_REF_P (tmp) || LABEL_REF_P (tmp))
     return true;
 
   /* (const (plus: symbol_ref const_int))  */
@@ -33143,7 +33143,7 @@  extract_base_offset_in_addr (rtx mem, rtx *base, rtx *offset)
   if (GET_CODE (addr) == CONST)
     addr = XEXP (addr, 0);
 
-  if (GET_CODE (addr) == REG)
+  if (REG_P (addr))
     {
       *base = addr;
       *offset = const0_rtx;
@@ -33182,12 +33182,12 @@  fusion_load_store (rtx_insn *insn, rtx *base, rtx *offset, bool *is_load)
 
   src = SET_SRC (x);
   dest = SET_DEST (x);
-  if (GET_CODE (src) == REG && GET_CODE (dest) == MEM)
+  if (REG_P (src) && MEM_P (dest))
     {
       *is_load = false;
       extract_base_offset_in_addr (dest, base, offset);
     }
-  else if (GET_CODE (src) == MEM && GET_CODE (dest) == REG)
+  else if (MEM_P (src) && REG_P (dest))
     {
       *is_load = true;
       extract_base_offset_in_addr (src, base, offset);