From patchwork Tue Oct 26 22:35:06 2010
X-Patchwork-Submitter: Jason Merrill
X-Patchwork-Id: 69301
Message-ID: <4CC7579A.6020408@redhat.com>
Date: Tue, 26 Oct 2010 18:35:06 -0400
From: Jason Merrill
To: gcc-patches List
Subject: RFA: PATCH to make fold_indirect_ref_1 fold more things

For constexpr I need to be able to fold some tree forms that
fold_indirect_ref_1 didn't handle; this patch extends it to handle
folding POINTER_PLUS_EXPR to an ARRAY_REF, and also folding to a
COMPONENT_REF.

Tested x86_64-pc-linux-gnu.  OK for trunk?

commit c99ff581c364c5ea79d5031bcada6a09732c9129
Author: Jason Merrill
Date:   Tue Oct 26 14:49:37 2010 -0400

	* fold-const.c (fold_indirect_ref_1): Handle folding to
	COMPONENT_REF, folding POINTER_PLUS_EXPR to ARRAY_REF.
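To illustrate, here is a minimal sketch (hypothetical example code, not part
of the patch or the testsuite) of the kinds of source-level accesses whose
GENERIC trees the new cases are meant to simplify.  The struct, array, and
function names are made up, the byte offsets assume a 4-byte int, and whether
a given compilation actually routes these through fold_indirect_ref_1 depends
on how the front end builds the trees; the point is just the tree shapes:

/* Hypothetical illustration of the shapes fold_indirect_ref_1 can now fold:
     *(int *) &s        => s.i     COMPONENT_REF (field at offset 0)
     ((int *) &s)[1]    => s.j     COMPONENT_REF (field at the matching byte offset)
     ((int *) &arr)[2]  => arr[2]  POINTER_PLUS_EXPR folded to an ARRAY_REF  */

struct S { int i; int j; };
struct S s;
int arr[4];

int first_field (void)     { return *(int *) &s; }      /* folds to s.i    */
int field_at_offset (void) { return ((int *) &s)[1]; }  /* folds to s.j    */
int array_element (void)   { return ((int *) &arr)[2]; }/* folds to arr[2] */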
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index decb0fb..accb35c 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -15647,55 +15647,85 @@ fold_indirect_ref_1 (location_t loc, tree type, tree op0)
 	  tree index = bitsize_int (0);
 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
 	}
-    }
-
-  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
-  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
-      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
-    {
-      tree op00 = TREE_OPERAND (sub, 0);
-      tree op01 = TREE_OPERAND (sub, 1);
-      tree op00type;
-
-      STRIP_NOPS (op00);
-      op00type = TREE_TYPE (op00);
-      if (TREE_CODE (op00) == ADDR_EXPR
-	  && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
-	  && type == TREE_TYPE (TREE_TYPE (op00type)))
+      /* *(foo *)&struct_with_foo_field => COMPONENT_REF */
+      else if (RECORD_OR_UNION_TYPE_P (optype))
 	{
-	  HOST_WIDE_INT offset = tree_low_cst (op01, 0);
-	  tree part_width = TYPE_SIZE (type);
-	  unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
-	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
-	  tree index = bitsize_int (indexi);
-
-	  if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
-	    return fold_build3_loc (loc,
-				    BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
-				    part_width, index);
-
+	  tree field = TYPE_FIELDS (optype);
+	  for (; field; field = DECL_CHAIN (field))
+	    if (TREE_CODE (field) == FIELD_DECL
+		&& integer_zerop (DECL_FIELD_OFFSET (field))
+		&& (TYPE_MAIN_VARIANT (TREE_TYPE (field))
+		    == TYPE_MAIN_VARIANT (type)))
+	      return fold_build3_loc (loc, COMPONENT_REF, type, op, field,
+				      NULL_TREE);
 	}
     }
-
-  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
     {
       tree op00 = TREE_OPERAND (sub, 0);
       tree op01 = TREE_OPERAND (sub, 1);
-      tree op00type;
 
       STRIP_NOPS (op00);
-      op00type = TREE_TYPE (op00);
-      if (TREE_CODE (op00) == ADDR_EXPR
-	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
-	  && type == TREE_TYPE (TREE_TYPE (op00type)))
+      if (TREE_CODE (op00) == ADDR_EXPR)
 	{
-	  tree size = TYPE_SIZE_UNIT (type);
-	  if (tree_int_cst_equal (size, op01))
-	    return fold_build1_loc (loc, IMAGPART_EXPR, type,
-				    TREE_OPERAND (op00, 0));
+	  tree op00type;
+	  op00 = TREE_OPERAND (op00, 0);
+	  op00type = TREE_TYPE (op00);
+
+	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
+	  if (TREE_CODE (op00type) == VECTOR_TYPE
+	      && type == TREE_TYPE (op00type))
+	    {
+	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
+	      tree part_width = TYPE_SIZE (type);
+	      unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
+	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
+	      tree index = bitsize_int (indexi);
+
+	      if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
+		return fold_build3_loc (loc,
+					BIT_FIELD_REF, type, op00,
+					part_width, index);
+
+	    }
+	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
+	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
+		   && type == TREE_TYPE (op00type))
+	    {
+	      tree size = TYPE_SIZE_UNIT (type);
+	      if (tree_int_cst_equal (size, op01))
+		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
+	    }
+	  /* ((foo *)&fooarray)[1] => fooarray[1] */
+	  else if (TREE_CODE (op00type) == ARRAY_TYPE
+		   && type == TREE_TYPE (op00type))
+	    {
+	      tree type_domain = TYPE_DOMAIN (op00type);
+	      tree min_val = size_zero_node;
+	      if (type_domain && TYPE_MIN_VALUE (type_domain))
+		min_val = TYPE_MIN_VALUE (type_domain);
+	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
+				     TYPE_SIZE_UNIT (type));
+	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
+	      op0 = build4 (ARRAY_REF, type, op00, op01,
+			    NULL_TREE, NULL_TREE);
+	      SET_EXPR_LOCATION (op0, loc);
+	      return op0;
+	    }
+	  /* ((foo *)&struct_with_foo_field)[1] => COMPONENT_REF */
+	  else if (RECORD_OR_UNION_TYPE_P (op00type))
+	    {
+	      tree field = TYPE_FIELDS (op00type);
+	      for (; field; field = DECL_CHAIN (field))
+		if (TREE_CODE (field) == FIELD_DECL
+		    && tree_int_cst_equal (byte_position (field), op01)
+		    && (TYPE_MAIN_VARIANT (TREE_TYPE (field))
+			== TYPE_MAIN_VARIANT (type)))
+		  return fold_build3_loc (loc, COMPONENT_REF, type, op00,
+					  field, NULL_TREE);
+	    }
 	}
     }