@@ -1860,10 +1860,17 @@
*,*,yes")
])
+; Splitters for loading/storing TLS pointers from/to %a0:DI.
+; Do this only during split2, which runs after reload. At the point when split1
+; runs, some %a0:DI occurrences might be nested inside other rtxes and thus
+; not matched. As a result, only some occurrences will be split, which will
+; prevent CSE. At the point when split2 runs, reload will have ensured that no
+; nested references exist.
+
(define_split
[(set (match_operand:DI 0 "register_operand" "")
(match_operand:DI 1 "register_operand" ""))]
- "TARGET_ZARCH && ACCESS_REG_P (operands[1])"
+ "TARGET_ZARCH && ACCESS_REG_P (operands[1]) && reload_completed"
[(set (match_dup 2) (match_dup 3))
(set (match_dup 0) (ashift:DI (match_dup 0) (const_int 32)))
(set (strict_low_part (match_dup 2)) (match_dup 4))]
@@ -1873,7 +1880,7 @@
(define_split
[(set (match_operand:DI 0 "register_operand" "")
(match_operand:DI 1 "register_operand" ""))]
- "TARGET_ZARCH && ACCESS_REG_P (operands[0])
+ "TARGET_ZARCH && ACCESS_REG_P (operands[0]) && reload_completed
&& dead_or_set_p (insn, operands[1])"
[(set (match_dup 3) (match_dup 2))
(set (match_dup 1) (lshiftrt:DI (match_dup 1) (const_int 32)))
@@ -1884,7 +1891,7 @@
(define_split
[(set (match_operand:DI 0 "register_operand" "")
(match_operand:DI 1 "register_operand" ""))]
- "TARGET_ZARCH && ACCESS_REG_P (operands[0])
+ "TARGET_ZARCH && ACCESS_REG_P (operands[0]) && reload_completed
&& !dead_or_set_p (insn, operands[1])"
[(set (match_dup 3) (match_dup 2))
(set (match_dup 1) (rotate:DI (match_dup 1) (const_int 32)))
new file mode 100644
@@ -0,0 +1,12 @@
+/* { dg-do compile } */
+/* { dg-options "-O3" } */
+
+__thread void *foo;
+
+void *bar()
+{
+ return (foo = __builtin_thread_pointer());
+}
+
+/* { dg-final { scan-assembler-times {\n\tear\t} 2 { target { lp64 } } } } */
+/* { dg-final { scan-assembler-times {\n\tear\t} 1 { target { ! lp64 } } } } */