From patchwork Thu Aug 18 22:48:41 2011
X-Patchwork-Submitter: Richard Henderson
X-Patchwork-Id: 110552
Message-ID: <4E4D96C9.9080103@redhat.com>
Date: Thu, 18 Aug 2011 15:48:41 -0700
From: Richard Henderson
To: GCC Patches
CC: bernds@codesourcery.com, hp@axis.com, ebotcazou@libertysurf.fr,
    rdsandiford@googlemail.com, kkojima@gcc.gnu.org, nickc@redhat.com
Subject: [rtl, delay-slot] Fix overload of "unchanging" bit

As found by a c6x build failure, INSN_ANNULLED_BRANCH_P and
RTL_CONST_CALL_P both resolve to the same bit for CALL_INSNs.  I want to
fix this by restricting INSN_ANNULLED_BRANCH_P to JUMP_INSNs, since
annulling the slots of a call or a plain insn doesn't really make sense.

The following has passed stage2-gcc on a sparc64-linux host (full build
still in progress), configured with --enable-checking=yes,rtl.  It surely
needs more testing than that, so I'm asking the relevant maintainers for
help in giving this a try.
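To make the clash concrete, here is roughly what the two accessors look
like in rtl.h before this patch (paraphrased rather than quoted verbatim
from the tree); both resolve to the same "unchanging" storage bit:

  /* Annulled-branch flag; before this patch it is accepted on
     JUMP_INSN, CALL_INSN and INSN.  */
  #define INSN_ANNULLED_BRANCH_P(RTX) \
    (RTL_FLAG_CHECK3 ("INSN_ANNULLED_BRANCH_P", (RTX), \
                      JUMP_INSN, CALL_INSN, INSN)->unchanging)

  /* Const-call flag; only valid on CALL_INSN.  */
  #define RTL_CONST_CALL_P(RTX) \
    (RTL_FLAG_CHECK1 ("RTL_CONST_CALL_P", (RTX), CALL_INSN)->unchanging)

On a CALL_INSN the two flags are therefore indistinguishable, so e.g. the
unconditional "INSN_ANNULLED_BRANCH_P (insn) = 0" that dbr_schedule used
to do would also clear RTL_CONST_CALL_P on a const call.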
r~

diff --git a/gcc/dwarf2cfi.c b/gcc/dwarf2cfi.c
index 80cce32..30e9501 100644
--- a/gcc/dwarf2cfi.c
+++ b/gcc/dwarf2cfi.c
@@ -2427,7 +2427,7 @@ scan_trace (dw_trace_info *trace)
               notice_eh_throw (control);
               dwarf2out_flush_queued_reg_saves ();
 
-              if (INSN_ANNULLED_BRANCH_P (control))
+              if (JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control))
                 {
                   /* ??? Hopefully multiple delay slots are not annulled.  */
                   gcc_assert (n == 2);
diff --git a/gcc/reorg.c b/gcc/reorg.c
index 97d02e9..cf5e7e1 100644
--- a/gcc/reorg.c
+++ b/gcc/reorg.c
@@ -667,7 +667,7 @@ delete_from_delay_slot (rtx insn)
      annul flag.  */
   if (delay_list)
     trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
-  else if (INSN_P (trial))
+  else if (JUMP_P (trial))
     INSN_ANNULLED_BRANCH_P (trial) = 0;
 
   INSN_FROM_TARGET_P (insn) = 0;
@@ -1060,13 +1060,15 @@ get_branch_condition (rtx insn, rtx target)
     return const_true_rtx;
 
   else if (GET_CODE (src) == IF_THEN_ELSE
-           && XEXP (XEXP (src, 1), 0) == target
-           && XEXP (src, 2) == pc_rtx)
+           && XEXP (src, 2) == pc_rtx
+           && GET_CODE (XEXP (src, 1)) == LABEL_REF
+           && XEXP (XEXP (src, 1), 0) == target)
     return XEXP (src, 0);
 
   else if (GET_CODE (src) == IF_THEN_ELSE
-           && XEXP (XEXP (src, 2), 0) == target
-           && XEXP (src, 1) == pc_rtx)
+           && XEXP (src, 1) == pc_rtx
+           && GET_CODE (XEXP (src, 2)) == LABEL_REF
+           && XEXP (XEXP (src, 2), 0) == target)
     {
       enum rtx_code rev;
       rev = reversed_comparison_code (XEXP (src, 0), insn);
@@ -1433,7 +1435,7 @@ try_merge_delay_insns (rtx insn, rtx thread)
 {
   rtx trial, next_trial;
   rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
-  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
+  int annul_p = JUMP_P (delay_insn) && INSN_ANNULLED_BRANCH_P (delay_insn);
   int slot_number = 1;
   int num_slots = XVECLEN (PATTERN (insn), 0);
   rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
@@ -1517,7 +1519,8 @@ try_merge_delay_insns (rtx insn, rtx thread)
 
   if (slot_number != num_slots && trial && NONJUMP_INSN_P (trial)
       && GET_CODE (PATTERN (trial)) == SEQUENCE
-      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
+      && !(JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
+           && INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0))))
     {
       rtx pat = PATTERN (trial);
       rtx filled_insn = XVECEXP (pat, 0, 0);
@@ -1756,24 +1759,30 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
 
       if (GET_CODE (pat) == SEQUENCE)
         {
+          bool annul_p = false;
+          rtx control = XVECEXP (pat, 0, 0);
+
           /* If this is a CALL_INSN and its delay slots, it is hard to track
              the resource needs properly, so give up.  */
-          if (CALL_P (XVECEXP (pat, 0, 0)))
+          if (CALL_P (control))
             return 0;
 
           /* If this is an INSN or JUMP_INSN with delayed effects, it
              is hard to track the resource needs properly, so give up.  */
 
 #ifdef INSN_SETS_ARE_DELAYED
-          if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
+          if (INSN_SETS_ARE_DELAYED (control))
             return 0;
 #endif
 
 #ifdef INSN_REFERENCES_ARE_DELAYED
-          if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
+          if (INSN_REFERENCES_ARE_DELAYED (control))
             return 0;
 #endif
 
+          if (JUMP_P (control))
+            annul_p = INSN_ANNULLED_BRANCH_P (control);
+
           /* See if any of the insns in the delay slot match, updating
              resource requirements as we go.  */
           for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
@@ -1783,8 +1792,7 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
               /* If an insn will be annulled if the branch is false, it isn't
                  considered as a possible duplicate insn.  */
               if (rtx_equal_p (PATTERN (candidate), ipat)
-                  && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
-                        && INSN_FROM_TARGET_P (candidate)))
+                  && ! (annul_p && INSN_FROM_TARGET_P (candidate)))
                 {
                   /* Show that this insn will be used in the sequel.  */
                   INSN_FROM_TARGET_P (candidate) = 0;
@@ -1793,15 +1801,14 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
 
               /* Unless this is an annulled insn from the target of a branch,
                  we must stop if it sets anything needed or set by INSN.  */
-              if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
-                   || ! INSN_FROM_TARGET_P (candidate))
+              if ((!annul_p || !INSN_FROM_TARGET_P (candidate))
                   && insn_sets_resource_p (candidate, &needed, true))
                 return 0;
             }
 
           /* If the insn requiring the delay slot conflicts with INSN, we
              must stop.  */
-          if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, true))
+          if (insn_sets_resource_p (control, &needed, true))
             return 0;
         }
       else
@@ -3867,7 +3874,8 @@ dbr_schedule (rtx first)
     {
       rtx target;
 
-      INSN_ANNULLED_BRANCH_P (insn) = 0;
+      if (JUMP_P (insn))
+        INSN_ANNULLED_BRANCH_P (insn) = 0;
       INSN_FROM_TARGET_P (insn) = 0;
 
       /* Skip vector tables.  We can't get attributes for them.  */
@@ -3977,10 +3985,12 @@ dbr_schedule (rtx first)
     {
       if (GET_CODE (PATTERN (insn)) == SEQUENCE)
         {
+          rtx control;
           j = XVECLEN (PATTERN (insn), 0) - 1;
           if (j > MAX_DELAY_HISTOGRAM)
             j = MAX_DELAY_HISTOGRAM;
-          if (INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (insn), 0, 0)))
+          control = XVECEXP (PATTERN (insn), 0, 0);
+          if (JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control))
             total_annul_slots[j]++;
           else
             total_delay_slots[j]++;
diff --git a/gcc/resource.c b/gcc/resource.c
index 8380111..a4b0bbd 100644
--- a/gcc/resource.c
+++ b/gcc/resource.c
@@ -171,7 +171,7 @@ next_insn_no_annul (rtx insn)
     {
       /* If INSN is an annulled branch, skip any insns from the target
          of the branch.  */
-      if (INSN_P (insn)
+      if (JUMP_P (insn)
           && INSN_ANNULLED_BRANCH_P (insn)
           && NEXT_INSN (PREV_INSN (insn)) != insn)
         {
@@ -710,10 +710,18 @@ mark_set_resources (rtx x, struct resources *res, int in_dest,
       return;
 
     case SEQUENCE:
-      for (i = 0; i < XVECLEN (x, 0); i++)
-        if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
-               && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
-          mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
+      {
+        rtx control = XVECEXP (x, 0, 0);
+        bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);
+
+        mark_set_resources (control, res, 0, mark_type);
+        for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
+          {
+            rtx elt = XVECEXP (x, 0, i);
+            if (!annul_p && INSN_FROM_TARGET_P (elt))
+              mark_set_resources (elt, res, 0, mark_type);
+          }
+      }
       return;
 
     case POST_INC:
diff --git a/gcc/rtl.h b/gcc/rtl.h
index 5323dcc..e8aa7ab 100644
--- a/gcc/rtl.h
+++ b/gcc/rtl.h
@@ -278,7 +278,7 @@ struct GTY((chain_next ("RTX_NEXT (&%h)"),
      constants pool.
      1 in a CALL_INSN logically equivalent to ECF_CONST and TREE_READONLY.
      1 in a NOTE, or EXPR_LIST for a const call.
-     1 in a JUMP_INSN, CALL_INSN, or INSN of an annulling branch.  */
+     1 in a JUMP_INSN of an annulling branch.  */
   unsigned int unchanging : 1;
   /* 1 in a MEM or ASM_OPERANDS expression if the memory reference is volatile.
      1 in an INSN, CALL_INSN, JUMP_INSN, CODE_LABEL, BARRIER, or NOTE
@@ -834,7 +834,7 @@ extern void rtl_check_failed_flag (const char *, const_rtx, const char *,
 /* 1 if RTX is a jump_insn, call_insn, or insn that is an annulling branch.  */
 #define INSN_ANNULLED_BRANCH_P(RTX) \
-  (RTL_FLAG_CHECK3("INSN_ANNULLED_BRANCH_P", (RTX), JUMP_INSN, CALL_INSN, INSN)->unchanging)
+  (RTL_FLAG_CHECK1("INSN_ANNULLED_BRANCH_P", (RTX), JUMP_INSN)->unchanging)
 
 /* 1 if RTX is an insn in a delay slot and is from the target of the branch.
    If the branch insn has INSN_ANNULLED_BRANCH_P set, this insn should only be