From patchwork Tue Jul 27 15:19:07 2010
X-Patchwork-Submitter: Bernd Schmidt
X-Patchwork-Id: 60009
Message-ID: <4C4EF8EB.2030608@codesourcery.com>
Date: Tue, 27 Jul 2010 17:19:07 +0200
From: Bernd Schmidt
To: GCC Patches
Subject: Small improvement for new code in postreload

Now that my new optimization in postreload is hopefully stable, I've
committed the following tweak for it to make it handle more cases.  We
were being overly conservative about not substituting into an insn that
sets the register, e.g.

  r2 = r3 + 4;
  r3 = [r2];    // r2 dead

can be transformed into

  r3 = [r3 + 4];

Likewise for

-        adds    r0, r0, #24
-        adds    r0, r0, #12
+        adds    r0, r0, #36

Bootstrapped and regression tested on i686-linux.


Bernd

        * postreload.c (try_replace_in_use): New static function.
        (reload_combine_recognize_const_pattern): Use it here.  Allow
        substituting into a final add insn, and substituting into a
        memory reference in an insn that sets the reg.

Index: postreload.c
===================================================================
--- postreload.c        (revision 162569)
+++ postreload.c        (working copy)
@@ -871,6 +871,61 @@ fixup_debug_insns (rtx reg, rtx replacem
     }
 }
 
+/* Subroutine of reload_combine_recognize_const_pattern.  Try to replace REG
+   with SRC in the insn described by USE, taking costs into account.  Return
+   true if we made the replacement.  */
+
+static bool
+try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
+{
+  rtx use_insn = use->insn;
+  rtx mem = use->containing_mem;
+  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));
+
+  if (mem != NULL_RTX)
+    {
+      addr_space_t as = MEM_ADDR_SPACE (mem);
+      rtx oldaddr = XEXP (mem, 0);
+      rtx newaddr = NULL_RTX;
+      int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
+      int new_cost;
+
+      newaddr = simplify_replace_rtx (oldaddr, reg, src);
+      if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
+        {
+          XEXP (mem, 0) = newaddr;
+          new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
+          XEXP (mem, 0) = oldaddr;
+          if (new_cost <= old_cost
+              && validate_change (use_insn,
+                                  &XEXP (mem, 0), newaddr, 0))
+            return true;
+        }
+    }
+  else
+    {
+      rtx new_set = single_set (use_insn);
+      if (new_set
+          && REG_P (SET_DEST (new_set))
+          && GET_CODE (SET_SRC (new_set)) == PLUS
+          && REG_P (XEXP (SET_SRC (new_set), 0))
+          && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
+        {
+          rtx new_src;
+          int old_cost = rtx_cost (SET_SRC (new_set), SET, speed);
+
+          gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
+          new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);
+
+          if (rtx_cost (new_src, SET, speed) <= old_cost
+              && validate_change (use_insn, &SET_SRC (new_set),
+                                  new_src, 0))
+            return true;
+        }
+    }
+  return false;
+}
+
 /* Called by reload_combine when scanning INSN.  This function tries to
    detect patterns where a constant is added to a register, and the result
    is used in an address.
@@ -940,10 +995,9 @@ reload_combine_recognize_const_pattern (
 
       if (use && GET_MODE (*use->usep) == Pmode)
         {
+          bool delete_add = false;
           rtx use_insn = use->insn;
           int use_ruid = use->ruid;
-          rtx mem = use->containing_mem;
-          bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));
 
           /* Avoid moving the add insn past a jump.  */
           if (must_move_add && use_ruid <= last_jump_ruid)
@@ -957,81 +1011,37 @@ reload_combine_recognize_const_pattern (
 
           gcc_assert (reg_state[regno].store_ruid <= use_ruid);
           /* Avoid moving a use of ADDREG past a point where it is stored.  */
-          if (reg_state[REGNO (addreg)].store_ruid >= use_ruid)
+          if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
             break;
 
-          if (mem != NULL_RTX)
+          /* We also must not move the addition past an insn that sets
+             the same register, unless we can combine two add insns.  */
+          if (must_move_add && reg_state[regno].store_ruid == use_ruid)
             {
-              addr_space_t as = MEM_ADDR_SPACE (mem);
-              rtx oldaddr = XEXP (mem, 0);
-              rtx newaddr = NULL_RTX;
-              int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
-              int new_cost;
+              if (use->containing_mem == NULL_RTX)
+                delete_add = true;
+              else
+                break;
+            }
 
-              newaddr = simplify_replace_rtx (oldaddr, reg, src);
-              if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
-                {
-                  XEXP (mem, 0) = newaddr;
-                  new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
-                  XEXP (mem, 0) = oldaddr;
-                  if (new_cost <= old_cost
-                      && validate_change (use_insn,
-                                          &XEXP (mem, 0), newaddr, 0))
-                    {
-                      reload_combine_purge_insn_uses (use_insn);
-                      reload_combine_note_use (&PATTERN (use_insn), use_insn,
-                                               use_ruid, NULL_RTX);
+          if (try_replace_in_use (use, reg, src))
+            {
+              reload_combine_purge_insn_uses (use_insn);
+              reload_combine_note_use (&PATTERN (use_insn), use_insn,
+                                       use_ruid, NULL_RTX);
 
-                      if (must_move_add)
-                        {
-                          add_moved_after_insn = use_insn;
-                          add_moved_after_ruid = use_ruid;
-                        }
-                      continue;
-                    }
+              if (delete_add)
+                {
+                  fixup_debug_insns (reg, src, insn, use_insn);
+                  delete_insn (insn);
+                  return true;
                 }
-            }
-          else
-            {
-              rtx new_set = single_set (use_insn);
-              if (new_set
-                  && REG_P (SET_DEST (new_set))
-                  && GET_CODE (SET_SRC (new_set)) == PLUS
-                  && REG_P (XEXP (SET_SRC (new_set), 0))
-                  && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
+              if (must_move_add)
                 {
-                  rtx new_src;
-                  int old_cost = rtx_cost (SET_SRC (new_set), SET, speed);
-
-                  gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
-                  new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);
-
-                  if (rtx_cost (new_src, SET, speed) <= old_cost
-                      && validate_change (use_insn, &SET_SRC (new_set),
-                                          new_src, 0))
-                    {
-                      reload_combine_purge_insn_uses (use_insn);
-                      reload_combine_note_use (&SET_SRC (new_set), use_insn,
-                                               use_ruid, NULL_RTX);
-
-                      if (must_move_add)
-                        {
-                          /* See if that took care of the add insn.  */
-                          if (rtx_equal_p (SET_DEST (new_set), reg))
-                            {
-                              fixup_debug_insns (reg, src, insn, use_insn);
-                              delete_insn (insn);
-                              return true;
-                            }
-                          else
-                            {
-                              add_moved_after_insn = use_insn;
-                              add_moved_after_ruid = use_ruid;
-                            }
-                        }
-                      continue;
-                    }
+                  add_moved_after_insn = use_insn;
+                  add_moved_after_ruid = use_ruid;
                 }
+              continue;
             }
         }
       /* If we get here, we couldn't handle this use.  */
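
For illustration only (this is not part of the patch, and the type and
function names below are made up), here is a C-level sketch of the kind
of source that can leave postreload with the RTL shown at the top of
this mail.  Whether the add really gets folded into the memory
reference, or two immediate adds really get merged, depends on the
target's addressing modes and rtx costs.

/* A field access through a pointer: on a typical 32-bit target
   "value" lives at offset 4, so reload can end up computing
   r2 = r3 + 4 into a scratch register and loading through r2.
   With this change the pass may instead rewrite the load as
   r3 = [r3 + 4] when r2 is dead.  */
struct node
{
  int key;
  int value;
};

int
get_value (struct node *p)
{
  return p->value;
}

/* Two constant pointer adjustments in a row can show up as
   back-to-back immediate adds (the "adds #24 / adds #12" case
   above, with 4-byte ints), which the pass may now combine into
   a single add of #36.  */
int
load_after_two_steps (int *p)
{
  int *q = p + 6;   /* 24 bytes on a 32-bit target */
  int *r = q + 3;   /* another 12 bytes */
  return *r;
}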