
[AArch64,5/6] Implement support for Crypto -- SHA256.

Message ID 52B1BF30.4040706@arm.com
State New

Commit Message

Tejas Belagod Dec. 18, 2013, 3:28 p.m. UTC
Marcus Shawcroft wrote:
> On 6 December 2013 17:36, Tejas Belagod <tbelagod@arm.com> wrote:
>> Hi,
>>
>> The attached patch implements support for crypto sha256.
> 
> Same comments as previous crypto patch.

Thanks for the review. Here is an improved patch.

Tested on aarch64-none-elf. OK for trunk?

Thanks
Tejas.

2013-12-18  Tejas Belagod  <tejas.belagod@arm.com>

gcc/
	* config/aarch64/aarch64-simd-builtins.def: Update builtins table.
	* config/aarch64/aarch64-simd.md (aarch64_crypto_sha256h<sha256_op>v4si,
	aarch64_crypto_sha256su0v4si, aarch64_crypto_sha256su1v4si): New.
	* config/aarch64/arm_neon.h (vsha256hq_u32, vsha256h2q_u32,
	vsha256su0q_u32, vsha256su1q_u32): New.
	* config/aarch64/iterators.md (UNSPEC_SHA256H<2>, UNSPEC_SHA256SU<01>):
	New.
	(CRYPTO_SHA256): New int iterator.
	(sha256_op): New int attribute.

testsuite/
	* gcc.target/aarch64/sha256_1.c: New.
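
For illustration only (not part of the patch): a minimal sketch of how the two
hash-update intrinsics compose in one four-round group of the SHA-256
compression function. The helper name and variables are hypothetical; wk is
assumed to hold the four message-schedule words for this group already added
to the round constants.

#include <arm_neon.h>

/* One four-round SHA-256 step: *abcd and *efgh hold the working state,
   wk holds W[i..i+3] + K[i..i+3] for this group.  */
static inline void
sha256_step (uint32x4_t *abcd, uint32x4_t *efgh, uint32x4_t wk)
{
  uint32x4_t abcd_save = *abcd;                   /* sha256h2 needs the old abcd.  */
  *abcd = vsha256hq_u32 (*abcd, *efgh, wk);       /* maps to sha256h  q, q, v.4s   */
  *efgh = vsha256h2q_u32 (*efgh, abcd_save, wk);  /* maps to sha256h2 q, q, v.4s   */
}

Code like this would be built with the same -march=armv8-a+crypto option used
by the testcase below.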

Comments

Marcus Shawcroft Dec. 18, 2013, 6:28 p.m. UTC | #1
On 18 December 2013 15:28, Tejas Belagod <tbelagod@arm.com> wrote:

> 2013-12-18  Tejas Belagod  <tejas.belagod@arm.com>
>
>
> gcc/
>         * config/aarch64/aarch64-simd-builtins.def: Update builtins table.
>         * config/aarch64/aarch64-simd.md (aarch64_crypto_sha256h<sha256_op>v4si,
>         aarch64_crypto_sha256su0v4si, aarch64_crypto_sha256su1v4si): New.
>         * config/aarch64/arm_neon.h (vsha256hq_u32, vsha256h2q_u32,
>         vsha256su0q_u32, vsha256su1q_u32): New.
>         * config/aarch64/iterators.md (UNSPEC_SHA256H<2>, UNSPEC_SHA256SU<01>):
>         New.
>         (CRYPTO_SHA256): New int iterator.
>         (sha256_op): New int attribute.
>
> testsuite/
>         * gcc.target/aarch64/sha256_1.c: New.

OK /Marcus

Patch

diff --git a/gcc/config/aarch64/aarch64-simd-builtins.def b/gcc/config/aarch64/aarch64-simd-builtins.def
index 2d3ccb0..adda948 100644
--- a/gcc/config/aarch64/aarch64-simd-builtins.def
+++ b/gcc/config/aarch64/aarch64-simd-builtins.def
@@ -380,3 +380,9 @@ 
   VAR1 (TERNOPU, crypto_sha1m, 0, v4si)
   VAR1 (TERNOPU, crypto_sha1p, 0, v4si)
   VAR1 (TERNOPU, crypto_sha1su0, 0, v4si)
+
+  /* Implemented by aarch64_crypto_sha256<op><mode>.  */
+  VAR1 (TERNOPU, crypto_sha256h, 0, v4si)
+  VAR1 (TERNOPU, crypto_sha256h2, 0, v4si)
+  VAR1 (BINOPU, crypto_sha256su0, 0, v4si)
+  VAR1 (TERNOPU, crypto_sha256su1, 0, v4si)
diff --git a/gcc/config/aarch64/aarch64-simd.md b/gcc/config/aarch64/aarch64-simd.md
index 5b454ca..874d532 100644
--- a/gcc/config/aarch64/aarch64-simd.md
+++ b/gcc/config/aarch64/aarch64-simd.md
@@ -4139,3 +4139,37 @@ 
   "sha1su0\\t%0.4s, %2.4s, %3.4s"
   [(set_attr "type" "crypto_sha1_xor")]
 )
+
+;; sha256
+
+(define_insn "aarch64_crypto_sha256h<sha256_op>v4si"
+  [(set (match_operand:V4SI 0 "register_operand" "=w")
+        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "0")
+                      (match_operand:V4SI 2 "register_operand" "w")
+                      (match_operand:V4SI 3 "register_operand" "w")]
+         CRYPTO_SHA256))]
+  "TARGET_SIMD && TARGET_CRYPTO"
+  "sha256h<sha256_op>\\t%q0, %q2, %3.4s"
+  [(set_attr "type" "crypto_sha256_slow")]
+)
+
+(define_insn "aarch64_crypto_sha256su0v4si"
+  [(set (match_operand:V4SI 0 "register_operand" "=w")
+        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "0")
+                      (match_operand:V4SI 2 "register_operand" "w")]
+         UNSPEC_SHA256SU0))]
+  "TARGET_SIMD &&TARGET_CRYPTO"
+  "sha256su0\\t%0.4s, %2.4s"
+  [(set_attr "type" "crypto_sha256_fast")]
+)
+
+(define_insn "aarch64_crypto_sha256su1v4si"
+  [(set (match_operand:V4SI 0 "register_operand" "=w")
+        (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "0")
+                      (match_operand:V4SI 2 "register_operand" "w")
+                      (match_operand:V4SI 3 "register_operand" "w")]
+         UNSPEC_SHA256SU1))]
+  "TARGET_SIMD &&TARGET_CRYPTO"
+  "sha256su1\\t%0.4s, %2.4s, %3.4s"
+  [(set_attr "type" "crypto_sha256_slow")]
+)
diff --git a/gcc/config/aarch64/arm_neon.h b/gcc/config/aarch64/arm_neon.h
index 5a5691d..709c6a1 100644
--- a/gcc/config/aarch64/arm_neon.h
+++ b/gcc/config/aarch64/arm_neon.h
@@ -22990,6 +22990,30 @@  vsha1su1q_u32 (uint32x4_t tw0_3, uint32x4_t w12_15)
   return __builtin_aarch64_crypto_sha1su1v4si_uuu (tw0_3, w12_15);
 }
 
+static __inline uint32x4_t
+vsha256hq_u32 (uint32x4_t hash_abcd, uint32x4_t hash_efgh, uint32x4_t wk)
+{
+  return __builtin_aarch64_crypto_sha256hv4si_uuuu (hash_abcd, hash_efgh, wk);
+}
+
+static __inline uint32x4_t
+vsha256h2q_u32 (uint32x4_t hash_efgh, uint32x4_t hash_abcd, uint32x4_t wk)
+{
+  return __builtin_aarch64_crypto_sha256h2v4si_uuuu (hash_efgh, hash_abcd, wk);
+}
+
+static __inline uint32x4_t
+vsha256su0q_u32 (uint32x4_t w0_3, uint32x4_t w4_7)
+{
+  return __builtin_aarch64_crypto_sha256su0v4si_uuu (w0_3, w4_7);
+}
+
+static __inline uint32x4_t
+vsha256su1q_u32 (uint32x4_t tw0_3, uint32x4_t w8_11, uint32x4_t w12_15)
+{
+  return __builtin_aarch64_crypto_sha256su1v4si_uuuu (tw0_3, w8_11, w12_15);
+}
+
 #endif
 
 /* vshl */
diff --git a/gcc/config/aarch64/iterators.md b/gcc/config/aarch64/iterators.md
index 12de4ac..88edddd 100644
--- a/gcc/config/aarch64/iterators.md
+++ b/gcc/config/aarch64/iterators.md
@@ -277,6 +277,10 @@ 
     UNSPEC_SHA1H        ; Used in aarch64-simd.md.
     UNSPEC_SHA1SU0      ; Used in aarch64-simd.md.
     UNSPEC_SHA1SU1      ; Used in aarch64-simd.md.
+    UNSPEC_SHA256H      ; Used in aarch64-simd.md.
+    UNSPEC_SHA256H2     ; Used in aarch64-simd.md.
+    UNSPEC_SHA256SU0    ; Used in aarch64-simd.md.
+    UNSPEC_SHA256SU1    ; Used in aarch64-simd.md.
 ])
 
 ;; -------------------------------------------------------------------
@@ -863,6 +867,8 @@ 
 
 (define_int_iterator CRYPTO_SHA1 [UNSPEC_SHA1C UNSPEC_SHA1M UNSPEC_SHA1P])
 
+(define_int_iterator CRYPTO_SHA256 [UNSPEC_SHA256H UNSPEC_SHA256H2])
+
 ;; -------------------------------------------------------------------
 ;; Int Iterators Attributes.
 ;; -------------------------------------------------------------------
@@ -985,3 +991,5 @@ 
 
 (define_int_attr sha1_op [(UNSPEC_SHA1C "c") (UNSPEC_SHA1P "p")
 			  (UNSPEC_SHA1M "m")])
+
+(define_int_attr sha256_op [(UNSPEC_SHA256H "") (UNSPEC_SHA256H2 "2")])
diff --git a/gcc/testsuite/gcc.target/aarch64/sha256_1.c b/gcc/testsuite/gcc.target/aarch64/sha256_1.c
new file mode 100644
index 0000000..569817e
--- /dev/null
+++ b/gcc/testsuite/gcc.target/aarch64/sha256_1.c
@@ -0,0 +1,40 @@ 
+
+/* { dg-do compile } */
+/* { dg-options "-march=armv8-a+crypto" } */
+
+#include "arm_neon.h"
+
+uint32x4_t
+test_vsha256hq_u32 (uint32x4_t hash_abcd, uint32x4_t hash_efgh, uint32x4_t wk)
+{
+  return vsha256hq_u32 (hash_abcd, hash_efgh, wk);
+}
+
+/* { dg-final { scan-assembler-times "sha256h\\tq" 1 } } */
+
+uint32x4_t
+test_vsha256h2q_u32 (uint32x4_t hash_efgh, uint32x4_t hash_abcd, uint32x4_t wk)
+{
+  return vsha256h2q_u32 (hash_efgh, hash_abcd, wk);
+}
+
+/* { dg-final { scan-assembler-times "sha256h2\\tq" 1 } } */
+
+uint32x4_t
+test_vsha256su0q_u32 (uint32x4_t w0_3, uint32x4_t w4_7)
+{
+  return vsha256su0q_u32 (w0_3, w4_7);
+}
+
+/* { dg-final { scan-assembler-times "sha256su0\\tv" 1 } } */
+
+uint32x4_t
+test_vsha256su1q_u32 (uint32x4_t tw0_3, uint32x4_t w8_11, uint32x4_t w12_15)
+{
+  return vsha256su1q_u32 (tw0_3, w8_11, w12_15);
+}
+
+/* { dg-final { scan-assembler-times "sha256su1\\tv" 1 } } */
+
+
+/* { dg-final { cleanup-saved-temps } } */
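
For context (again, not part of the patch): a hedged sketch of how the two
schedule-update intrinsics tested above are typically chained to extend the
message schedule. The function name is hypothetical, and w0_3 .. w12_15 are
assumed to hold schedule words W[i..i+3], W[i+4..i+7], W[i+8..i+11] and
W[i+12..i+15] respectively.

#include <arm_neon.h>

/* Produce the next four message-schedule words, W[i+16..i+19].  */
static inline uint32x4_t
sha256_schedule_update (uint32x4_t w0_3, uint32x4_t w4_7,
                        uint32x4_t w8_11, uint32x4_t w12_15)
{
  uint32x4_t t = vsha256su0q_u32 (w0_3, w4_7);   /* sha256su0 v.4s, v.4s        */
  return vsha256su1q_u32 (t, w8_11, w12_15);     /* sha256su1 v.4s, v.4s, v.4s  */
}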