From e7150c85313fff08153197493db568ca8fe2778a Mon Sep 17 00:00:00 2001
From: mwahab <mwahab@138bc75d-0d04-0410-961f-82ee72b054a4>
Date: Mon, 29 Jun 2015 16:03:34 +0000
Subject: [PATCH 1/4] 2015-07-01 Matthew Wahab <matthew.wahab@arm.com>
Backport
PR target/65697
* config/arm/arm.c (arm_split_atomic_op): For ARMv8, replace an
initial acquire barrier with final barrier.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@225132 138bc75d-0d04-0410-961f-82ee72b054a4
Conflicts:
gcc/ChangeLog
Change-Id: I2074541794ecad8847ada04690cd9132a51b6404
---
gcc/config/arm/arm.c | 10 +++++++++-
1 file changed, 9 insertions(+), 1 deletion(-)
@@ -27807,6 +27807,8 @@ arm_split_atomic_op (enum rtx_code code, rtx old_out, rtx new_out, rtx mem,
rtx_code_label *label;
rtx x;
+ bool is_armv8_sync = arm_arch8 && is_mm_sync (model);
+
bool use_acquire = TARGET_HAVE_LDACQ
&& !(is_mm_relaxed (model) || is_mm_consume (model)
|| is_mm_release (model));
@@ -27815,6 +27817,11 @@ arm_split_atomic_op (enum rtx_code code, rtx old_out, rtx new_out, rtx mem,
&& !(is_mm_relaxed (model) || is_mm_consume (model)
|| is_mm_acquire (model));
+ /* For ARMv8, a load-acquire is too weak for __sync memory orders. Instead,
+ a full barrier is emitted after the store-release. */
+ if (is_armv8_sync)
+ use_acquire = false;
+
/* Checks whether a barrier is needed and emits one accordingly. */
if (!(use_acquire || use_release))
arm_pre_atomic_barrier (model);
@@ -27885,7 +27892,8 @@ arm_split_atomic_op (enum rtx_code code, rtx old_out, rtx new_out, rtx mem,
emit_unlikely_jump (gen_cbranchsi4 (x, cond, const0_rtx, label));
/* Checks whether a barrier is needed and emits one accordingly. */
- if (!(use_acquire || use_release))
+ if (is_armv8_sync
+ || !(use_acquire || use_release))
arm_post_atomic_barrier (model);
}
--
1.9.1