[v2,6/6] Do constant folding for unary operations.

Message ID 1307616344-27161-7-git-send-email-batuzovk@ispras.ru
State New

Commit Message

Kirill Batuzov June 9, 2011, 10:45 a.m. UTC
Perform constant folding for NOT and EXT{8,16,32}{S,U} operations.

Signed-off-by: Kirill Batuzov <batuzovk@ispras.ru>
---
 tcg/optimize.c |   83 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 files changed, 83 insertions(+), 0 deletions(-)
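
For concreteness, the folding itself is ordinary C arithmetic: when the
operand of one of these unary ops is known to be a constant, the op is
rewritten into a movi of the computed value.  The standalone sketch below is
not part of the patch; it merely reproduces, for an illustrative input of
0x80, the casts used by three of the new cases in do_constant_folding_2:

#include <inttypes.h>
#include <stdio.h>

int main(void)
{
    /* Constant operand, e.g. produced by a preceding movi_i32. */
    uint32_t x = 0x80;

    /* Same casts as the corresponding cases in the patch below. */
    uint32_t ext8s = (int32_t)(int8_t)x;    /* sign-extend low byte: 0xffffff80 */
    uint32_t ext8u = (uint32_t)(uint8_t)x;  /* zero-extend low byte: 0x00000080 */
    uint32_t not32 = ~x;                    /* bitwise not:          0xffffff7f */

    printf("ext8s_i32: %08" PRIx32 "\n", ext8s);
    printf("ext8u_i32: %08" PRIx32 "\n", ext8u);
    printf("not_i32:   %08" PRIx32 "\n", not32);
    return 0;
}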

Comments

Richard Henderson June 10, 2011, 6:04 p.m. UTC | #1
On 06/09/2011 03:45 AM, Kirill Batuzov wrote:
> +    case INDEX_op_ext8s_i32:
> +        return (int32_t)(int8_t)x;
> +
> +    case INDEX_op_ext16s_i32:
> +        return (int32_t)(int16_t)x;

No need to cast back to a 32-bit type.  They'll be
extended properly for the return type which is TCGArg.
And if you drop these intermediate casts, you can
merge the 32 and 64-bit copies.


r~
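
For illustration only, the merged form Richard suggests might look roughly
like the sketch below (this is not the follow-up revision that was actually
applied; it assumes the cast result is widened as needed on conversion to the
TCGArg return type, and that for the 32-bit ops only the low 32 bits of the
folded constant matter):

    case INDEX_op_ext8s_i32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_ext8s_i64:
#endif
        return (int8_t)x;

    case INDEX_op_ext8u_i32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_ext8u_i64:
#endif
        return (uint8_t)x;

    /* ...and likewise for the ext16 and ext32 variants. */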

Patch

diff --git a/tcg/optimize.c b/tcg/optimize.c
index 653f399..2cdcc29 100644
--- a/tcg/optimize.c
+++ b/tcg/optimize.c
@@ -100,6 +100,11 @@  static int op_bits(int op)
     case INDEX_op_sar_i32:
     case INDEX_op_rotl_i32:
     case INDEX_op_rotr_i32:
+    case INDEX_op_not_i32:
+    case INDEX_op_ext8s_i32:
+    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext16u_i32:
         return 32;
 #if TCG_TARGET_REG_BITS == 64
     case INDEX_op_mov_i64:
@@ -114,6 +119,13 @@  static int op_bits(int op)
     case INDEX_op_sar_i64:
     case INDEX_op_rotl_i64:
     case INDEX_op_rotr_i64:
+    case INDEX_op_not_i64:
+    case INDEX_op_ext8s_i64:
+    case INDEX_op_ext16s_i64:
+    case INDEX_op_ext32s_i64:
+    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext16u_i64:
+    case INDEX_op_ext32u_i64:
         return 64;
 #endif
     default:
@@ -243,6 +255,44 @@  static TCGArg do_constant_folding_2(int op, TCGArg x, TCGArg y)
         return x;
 #endif
 
+    case INDEX_op_not_i32:
+#if TCG_TARGET_REG_BITS == 64
+    case INDEX_op_not_i64:
+#endif
+        return ~x;
+
+    case INDEX_op_ext8s_i32:
+        return (int32_t)(int8_t)x;
+
+    case INDEX_op_ext16s_i32:
+        return (int32_t)(int16_t)x;
+
+    case INDEX_op_ext8u_i32:
+        return (uint32_t)(uint8_t)x;
+
+    case INDEX_op_ext16u_i32:
+        return (uint32_t)(uint16_t)x;
+
+#if TCG_TARGET_REG_BITS == 64
+    case INDEX_op_ext8s_i64:
+        return (int64_t)(int8_t)x;
+
+    case INDEX_op_ext16s_i64:
+        return (int64_t)(int16_t)x;
+
+    case INDEX_op_ext32s_i64:
+        return (int64_t)(int32_t)x;
+
+    case INDEX_op_ext8u_i64:
+        return (uint64_t)(uint8_t)x;
+
+    case INDEX_op_ext16u_i64:
+        return (uint64_t)(uint16_t)x;
+
+    case INDEX_op_ext32u_i64:
+        return (uint64_t)(uint32_t)x;
+#endif
+
     default:
         fprintf(stderr,
                 "Unrecognized operation %d in do_constant_folding.\n", op);
@@ -447,6 +497,39 @@  static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr,
             gen_args += 2;
             args += 2;
             break;
+        case INDEX_op_not_i32:
+        case INDEX_op_ext8s_i32:
+        case INDEX_op_ext16s_i32:
+        case INDEX_op_ext8u_i32:
+        case INDEX_op_ext16u_i32:
+#if TCG_TARGET_REG_BITS == 64
+        case INDEX_op_not_i64:
+        case INDEX_op_ext8s_i64:
+        case INDEX_op_ext16s_i64:
+        case INDEX_op_ext32s_i64:
+        case INDEX_op_ext8u_i64:
+        case INDEX_op_ext16u_i64:
+        case INDEX_op_ext32u_i64:
+#endif
+            if (temps[args[1]].state == TCG_TEMP_CONST) {
+                gen_opc_buf[op_index] = op_to_movi(op);
+                gen_args[0] = args[0];
+                gen_args[1] = do_constant_folding(op, temps[args[1]].val, 0);
+                reset_temp(temps, gen_args[0], nb_temps, nb_globals);
+                temps[gen_args[0]].state = TCG_TEMP_CONST;
+                temps[gen_args[0]].val = gen_args[1];
+                assert(temps[gen_args[0]].num_copies == 0);
+                gen_args += 2;
+                args += 2;
+                break;
+            } else {
+                reset_temp(temps, args[0], nb_temps, nb_globals);
+                gen_args[0] = args[0];
+                gen_args[1] = args[1];
+                gen_args += 2;
+                args += 2;
+                break;
+            }
         case INDEX_op_or_i32:
         case INDEX_op_and_i32:
         case INDEX_op_xor_i32: