Message-ID: <52559D07.1000901@redhat.com>
Date: Wed, 09 Oct 2013 14:14:31 -0400
From: Andrew MacLeod
To: gcc-patches, Richard Biener
Subject: [patch] shuffle a couple of functions.

This patch simply moves std_gimplify_va_arg_expr() and the related
build_va_arg_indirect_ref() to gimplify.c, where I think they belong.
It also moves gimple_fold_indirect_ref() out of gimplify.c and into
gimple-fold.c.

Bootstraps on x86_64-unknown-linux-gnu; regression tests are still
running.  OK?

Andrew

	* builtins.c (std_gimplify_va_arg_expr, build_va_arg_indirect_ref):
	Move to gimplify.c.
	* gimplify.c (gimple_fold_indirect_ref): Move to gimple-fold.c.
	(build_va_arg_indirect_ref): Relocate and make static.
	(std_gimplify_va_arg_expr): Relocate here.
	* gimple-fold.c (gimple_fold_indirect_ref): Relocate here.
	* gimple-fold.h (gimple_fold_indirect_ref): Add prototype.
	* tree-flow.h (gimple_fold_indirect_ref): Remove prototype.
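For reviewers who want a refresher on the code being moved: the
"standard" va_arg expansion that std_gimplify_va_arg_expr emits
amounts to roughly the pointer arithmetic below.  This is a sketch of
mine for illustration, not code from the patch; the real routine also
handles pass-by-reference arguments and rebuilds TYPE with a reduced
alignment when the slot is less aligned than the type.

  #include <stddef.h>
  #include <stdint.h>

  /* One va_arg (ap, type) step on an args-grow-up target, in plain C.
     ALIGN is PARM_BOUNDARY in bytes; BOUNDARY is the argument's slot
     boundary in bytes (both powers of two).  */
  char *
  std_va_arg_step (char **ap, size_t type_size, size_t align,
                   size_t boundary, int pad_varargs_down)
  {
    char *p = *ap;

    /* Dynamic alignment: if the argument needs more alignment than
       PARM_BOUNDARY provides, round the pointer up to BOUNDARY.  */
    if (boundary > align)
      p = (char *) (((uintptr_t) p + boundary - 1)
                    & ~((uintptr_t) boundary - 1));

    /* round_up (type_size, align).  */
    size_t rounded_size = (type_size + align - 1) & ~(align - 1);

    /* PAD_VARARGS_DOWN (big-endian): a small argument sits at the top
       of its padded slot, so step over the padding before reading.  */
    char *addr = p;
    if (pad_varargs_down && rounded_size <= align)
      addr += rounded_size - type_size;

    *ap = p + rounded_size;   /* Compute the new value for AP.  */
    return addr;              /* The value is read from here.  */
  }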
*** R/builtins.c	2013-10-09 13:46:21.687171187 -0400
--- builtins.c	2013-10-09 14:03:42.717421331 -0400
*************** along with GCC; see the file COPYING3.
*** 43,49 ****
  #include "target.h"
  #include "langhooks.h"
  #include "basic-block.h"
- #include "tree-mudflap.h"
  #include "tree-ssa.h"
  #include "value-prof.h"
  #include "diagnostic-core.h"
--- 43,48 ----
*************** along with GCC; see the file COPYING3.
*** 51,59 ****
  #include "ubsan.h"
  
- #ifndef PAD_VARARGS_DOWN
- #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
- #endif
  static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
  
  struct target_builtins default_target_builtins;
--- 50,55 ----
*************** expand_builtin_va_start (tree exp)
*** 4237,4351 ****
    return const0_rtx;
  }
  
- /* The "standard" implementation of va_arg: read the value from the
-    current (padded) address and increment by the (padded) size.  */
- 
- tree
- std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
- 			  gimple_seq *post_p)
- {
-   tree addr, t, type_size, rounded_size, valist_tmp;
-   unsigned HOST_WIDE_INT align, boundary;
-   bool indirect;
- 
- #ifdef ARGS_GROW_DOWNWARD
-   /* All of the alignment and movement below is for args-grow-up machines.
-      As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
-      implement their own specialized gimplify_va_arg_expr routines.  */
-   gcc_unreachable ();
- #endif
- 
-   indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
-   if (indirect)
-     type = build_pointer_type (type);
- 
-   align = PARM_BOUNDARY / BITS_PER_UNIT;
-   boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
- 
-   /* When we align parameter on stack for caller, if the parameter
-      alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
-      aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
-      here with caller.  */
-   if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
-     boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
- 
-   boundary /= BITS_PER_UNIT;
- 
-   /* Hoist the valist value into a temporary for the moment.  */
-   valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
- 
-   /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
-      requires greater alignment, we must perform dynamic alignment.  */
-   if (boundary > align
-       && !integer_zerop (TYPE_SIZE (type)))
-     {
-       t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
- 		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
-       gimplify_and_add (t, pre_p);
- 
-       t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
- 		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
- 			       valist_tmp,
- 			       build_int_cst (TREE_TYPE (valist), -boundary)));
-       gimplify_and_add (t, pre_p);
-     }
-   else
-     boundary = align;
- 
-   /* If the actual alignment is less than the alignment of the type,
-      adjust the type accordingly so that we don't assume strict alignment
-      when dereferencing the pointer.  */
-   boundary *= BITS_PER_UNIT;
-   if (boundary < TYPE_ALIGN (type))
-     {
-       type = build_variant_type_copy (type);
-       TYPE_ALIGN (type) = boundary;
-     }
- 
-   /* Compute the rounded size of the type.  */
-   type_size = size_in_bytes (type);
-   rounded_size = round_up (type_size, align);
- 
-   /* Reduce rounded_size so it's sharable with the postqueue.  */
-   gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
- 
-   /* Get AP.  */
-   addr = valist_tmp;
-   if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
-     {
-       /* Small args are padded downward.  */
-       t = fold_build2_loc (input_location, GT_EXPR, sizetype,
- 			   rounded_size, size_int (align));
-       t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
- 		       size_binop (MINUS_EXPR, rounded_size, type_size));
-       addr = fold_build_pointer_plus (addr, t);
-     }
- 
-   /* Compute new value for AP.  */
-   t = fold_build_pointer_plus (valist_tmp, rounded_size);
-   t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
-   gimplify_and_add (t, pre_p);
- 
-   addr = fold_convert (build_pointer_type (type), addr);
- 
-   if (indirect)
-     addr = build_va_arg_indirect_ref (addr);
- 
-   return build_va_arg_indirect_ref (addr);
- }
- 
- /* Build an indirect-ref expression over the given TREE, which represents a
-    piece of a va_arg() expansion.  */
- tree
- build_va_arg_indirect_ref (tree addr)
- {
-   addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
- 
-   if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF.  */
-     mf_mark (addr);
- 
-   return addr;
- }
  
  /* Return a dummy expression of type TYPE in order to keep going after an
     error.  */
--- 4233,4238 ----
*** R/gimplify.c	2013-10-09 13:46:21.906421833 -0400
--- gimplify.c	2013-10-09 14:03:59.816421470 -0400
*************** along with GCC; see the file COPYING3.
*** 45,50 ****
--- 45,51 ----
  #include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
  #include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
+ #include "tree-mudflap.h"
  
  enum gimplify_omp_var_data
  {
*************** gimplify_init_constructor (tree *expr_p,
*** 4076,4203 ****
  
  /* Given a pointer value OP0, return a simplified version of an
     indirection through OP0, or NULL_TREE if no simplification is
-    possible.  Note that the resulting type may be different from
-    the type pointed to in the sense that it is still compatible
-    from the langhooks point of view.  */
- 
- tree
- gimple_fold_indirect_ref (tree t)
- {
-   tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
-   tree sub = t;
-   tree subtype;
- 
-   STRIP_NOPS (sub);
-   subtype = TREE_TYPE (sub);
-   if (!POINTER_TYPE_P (subtype))
-     return NULL_TREE;
- 
-   if (TREE_CODE (sub) == ADDR_EXPR)
-     {
-       tree op = TREE_OPERAND (sub, 0);
-       tree optype = TREE_TYPE (op);
-       /* *&p => p */
-       if (useless_type_conversion_p (type, optype))
-         return op;
- 
-       /* *(foo *)&fooarray => fooarray[0] */
-       if (TREE_CODE (optype) == ARRAY_TYPE
- 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
- 	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
- 	{
- 	  tree type_domain = TYPE_DOMAIN (optype);
- 	  tree min_val = size_zero_node;
- 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
- 	    min_val = TYPE_MIN_VALUE (type_domain);
- 	  if (TREE_CODE (min_val) == INTEGER_CST)
- 	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
- 	}
-       /* *(foo *)&complexfoo => __real__ complexfoo */
-       else if (TREE_CODE (optype) == COMPLEX_TYPE
- 	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
- 	return fold_build1 (REALPART_EXPR, type, op);
-       /* *(foo *)&vectorfoo => BIT_FIELD_REF */
-       else if (TREE_CODE (optype) == VECTOR_TYPE
- 	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
- 	{
- 	  tree part_width = TYPE_SIZE (type);
- 	  tree index = bitsize_int (0);
- 	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
- 	}
-     }
- 
-   /* *(p + CST) -> ...  */
-   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
-       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
-     {
-       tree addr = TREE_OPERAND (sub, 0);
-       tree off = TREE_OPERAND (sub, 1);
-       tree addrtype;
- 
-       STRIP_NOPS (addr);
-       addrtype = TREE_TYPE (addr);
- 
-       /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF */
-       if (TREE_CODE (addr) == ADDR_EXPR
- 	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
- 	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
- 	  && host_integerp (off, 1))
- 	{
- 	  unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
- 	  tree part_width = TYPE_SIZE (type);
- 	  unsigned HOST_WIDE_INT part_widthi
- 	    = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
- 	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
- 	  tree index = bitsize_int (indexi);
- 	  if (offset / part_widthi
- 	      <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
- 	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
- 				part_width, index);
- 	}
- 
-       /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
-       if (TREE_CODE (addr) == ADDR_EXPR
- 	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
- 	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
- 	{
- 	  tree size = TYPE_SIZE_UNIT (type);
- 	  if (tree_int_cst_equal (size, off))
- 	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
- 	}
- 
-       /* *(p + CST) -> MEM_REF <p, CST>.  */
-       if (TREE_CODE (addr) != ADDR_EXPR
- 	  || DECL_P (TREE_OPERAND (addr, 0)))
- 	return fold_build2 (MEM_REF, type,
- 			    addr,
- 			    build_int_cst_wide (ptype,
- 						TREE_INT_CST_LOW (off),
- 						TREE_INT_CST_HIGH (off)));
-     }
- 
-   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
-   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
-       && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
-       && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
-     {
-       tree type_domain;
-       tree min_val = size_zero_node;
-       tree osub = sub;
-       sub = gimple_fold_indirect_ref (sub);
-       if (! sub)
- 	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
-       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
-       if (type_domain && TYPE_MIN_VALUE (type_domain))
- 	min_val = TYPE_MIN_VALUE (type_domain);
-       if (TREE_CODE (min_val) == INTEGER_CST)
- 	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
-     }
- 
-   return NULL_TREE;
- }
- 
- /* Given a pointer value OP0, return a simplified version of an
-    indirection through OP0, or NULL_TREE if no simplification is
     possible.  This may only be applied to a rhs of an expression.
     Note that the resulting type may be different from the type pointed
     to in the sense that it is still compatible from the langhooks
--- 4077,4082 ----
*************** force_gimple_operand_gsi (gimple_stmt_it
*** 8656,8660 ****
--- 8535,8652 ----
  				      var, before, m);
  }
  
+ #ifndef PAD_VARARGS_DOWN
+ #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
+ #endif
+ 
+ /* Build an indirect-ref expression over the given TREE, which represents a
+    piece of a va_arg() expansion.  */
+ tree
+ build_va_arg_indirect_ref (tree addr)
+ {
+   addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
+ 
+   if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF.  */
+     mf_mark (addr);
+ 
+   return addr;
+ }
+ 
+ /* The "standard" implementation of va_arg: read the value from the
+    current (padded) address and increment by the (padded) size.  */
+ 
+ tree
+ std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
+ 			  gimple_seq *post_p)
+ {
+   tree addr, t, type_size, rounded_size, valist_tmp;
+   unsigned HOST_WIDE_INT align, boundary;
+   bool indirect;
+ 
+ #ifdef ARGS_GROW_DOWNWARD
+   /* All of the alignment and movement below is for args-grow-up machines.
+      As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
+      implement their own specialized gimplify_va_arg_expr routines.  */
+   gcc_unreachable ();
+ #endif
+ 
+   indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
+   if (indirect)
+     type = build_pointer_type (type);
+ 
+   align = PARM_BOUNDARY / BITS_PER_UNIT;
+   boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
+ 
+   /* When we align parameter on stack for caller, if the parameter
+      alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
+      aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
+      here with caller.  */
+   if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
+     boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
+ 
+   boundary /= BITS_PER_UNIT;
+ 
+   /* Hoist the valist value into a temporary for the moment.  */
+   valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
+ 
+   /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
+      requires greater alignment, we must perform dynamic alignment.  */
+   if (boundary > align
+       && !integer_zerop (TYPE_SIZE (type)))
+     {
+       t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
+ 		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
+       gimplify_and_add (t, pre_p);
+ 
+       t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
+ 		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
+ 			       valist_tmp,
+ 			       build_int_cst (TREE_TYPE (valist), -boundary)));
+       gimplify_and_add (t, pre_p);
+     }
+   else
+     boundary = align;
+ 
+   /* If the actual alignment is less than the alignment of the type,
+      adjust the type accordingly so that we don't assume strict alignment
+      when dereferencing the pointer.  */
+   boundary *= BITS_PER_UNIT;
+   if (boundary < TYPE_ALIGN (type))
+     {
+       type = build_variant_type_copy (type);
+       TYPE_ALIGN (type) = boundary;
+     }
+ 
+   /* Compute the rounded size of the type.  */
+   type_size = size_in_bytes (type);
+   rounded_size = round_up (type_size, align);
+ 
+   /* Reduce rounded_size so it's sharable with the postqueue.  */
+   gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
+ 
+   /* Get AP.  */
+   addr = valist_tmp;
+   if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
+     {
+       /* Small args are padded downward.  */
+       t = fold_build2_loc (input_location, GT_EXPR, sizetype,
+ 			   rounded_size, size_int (align));
+       t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
+ 		       size_binop (MINUS_EXPR, rounded_size, type_size));
+       addr = fold_build_pointer_plus (addr, t);
+     }
+ 
+   /* Compute new value for AP.  */
+   t = fold_build_pointer_plus (valist_tmp, rounded_size);
+   t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
+   gimplify_and_add (t, pre_p);
+ 
+   addr = fold_convert (build_pointer_type (type), addr);
+ 
+   if (indirect)
+     addr = build_va_arg_indirect_ref (addr);
+ 
+   return build_va_arg_indirect_ref (addr);
+ }
  
  #include "gt-gimplify.h"
*** R/gimple-fold.c	2013-10-09 13:46:21.903421571 -0400
--- gimple-fold.c	2013-10-09 13:46:26.516310014 -0400
*************** gimple_val_nonnegative_real_p (tree val)
*** 3321,3323 ****
--- 3321,3445 ----
  
    return false;
  }
+ 
+ /* Given a pointer value OP0, return a simplified version of an
+    indirection through OP0, or NULL_TREE if no simplification is
+    possible.  Note that the resulting type may be different from
+    the type pointed to in the sense that it is still compatible
+    from the langhooks point of view.  */
+ 
+ tree
+ gimple_fold_indirect_ref (tree t)
+ {
+   tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
+   tree sub = t;
+   tree subtype;
+ 
+   STRIP_NOPS (sub);
+   subtype = TREE_TYPE (sub);
+   if (!POINTER_TYPE_P (subtype))
+     return NULL_TREE;
+ 
+   if (TREE_CODE (sub) == ADDR_EXPR)
+     {
+       tree op = TREE_OPERAND (sub, 0);
+       tree optype = TREE_TYPE (op);
+       /* *&p => p */
+       if (useless_type_conversion_p (type, optype))
+         return op;
+ 
+       /* *(foo *)&fooarray => fooarray[0] */
+       if (TREE_CODE (optype) == ARRAY_TYPE
+ 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
+ 	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
+ 	{
+ 	  tree type_domain = TYPE_DOMAIN (optype);
+ 	  tree min_val = size_zero_node;
+ 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
+ 	    min_val = TYPE_MIN_VALUE (type_domain);
+ 	  if (TREE_CODE (min_val) == INTEGER_CST)
+ 	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
+ 	}
+       /* *(foo *)&complexfoo => __real__ complexfoo */
+       else if (TREE_CODE (optype) == COMPLEX_TYPE
+ 	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
+ 	return fold_build1 (REALPART_EXPR, type, op);
+       /* *(foo *)&vectorfoo => BIT_FIELD_REF */
+       else if (TREE_CODE (optype) == VECTOR_TYPE
+ 	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
+ 	{
+ 	  tree part_width = TYPE_SIZE (type);
+ 	  tree index = bitsize_int (0);
+ 	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
+ 	}
+     }
+ 
+   /* *(p + CST) -> ...  */
+   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
+       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
+     {
+       tree addr = TREE_OPERAND (sub, 0);
+       tree off = TREE_OPERAND (sub, 1);
+       tree addrtype;
+ 
+       STRIP_NOPS (addr);
+       addrtype = TREE_TYPE (addr);
+ 
+       /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF */
+       if (TREE_CODE (addr) == ADDR_EXPR
+ 	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
+ 	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
+ 	  && host_integerp (off, 1))
+ 	{
+ 	  unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
+ 	  tree part_width = TYPE_SIZE (type);
+ 	  unsigned HOST_WIDE_INT part_widthi
+ 	    = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
+ 	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
+ 	  tree index = bitsize_int (indexi);
+ 	  if (offset / part_widthi
+ 	      <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
+ 	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
+ 				part_width, index);
+ 	}
+ 
+       /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
+       if (TREE_CODE (addr) == ADDR_EXPR
+ 	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
+ 	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
+ 	{
+ 	  tree size = TYPE_SIZE_UNIT (type);
+ 	  if (tree_int_cst_equal (size, off))
+ 	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
+ 	}
+ 
+       /* *(p + CST) -> MEM_REF <p, CST>.  */
+       if (TREE_CODE (addr) != ADDR_EXPR
+ 	  || DECL_P (TREE_OPERAND (addr, 0)))
+ 	return fold_build2 (MEM_REF, type,
+ 			    addr,
+ 			    build_int_cst_wide (ptype,
+ 						TREE_INT_CST_LOW (off),
+ 						TREE_INT_CST_HIGH (off)));
+     }
+ 
+   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
+   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
+       && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
+       && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
+     {
+       tree type_domain;
+       tree min_val = size_zero_node;
+       tree osub = sub;
+       sub = gimple_fold_indirect_ref (sub);
+       if (! sub)
+ 	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
+       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
+       if (type_domain && TYPE_MIN_VALUE (type_domain))
+ 	min_val = TYPE_MIN_VALUE (type_domain);
+       if (TREE_CODE (min_val) == INTEGER_CST)
+ 	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
+     }
+ 
+   return NULL_TREE;
+ }
*** R/gimple-fold.h	2013-10-09 13:46:21.903421571 -0400
--- gimple-fold.h	2013-10-09 13:46:26.516310014 -0400
*************** extern tree fold_const_aggregate_ref_1 (
*** 39,43 ****
--- 39,44 ----
  extern tree fold_const_aggregate_ref (tree);
  extern tree gimple_get_virt_method_for_binfo (HOST_WIDE_INT, tree);
  extern bool gimple_val_nonnegative_real_p (tree);
+ extern tree gimple_fold_indirect_ref (tree);
  
  #endif /* GCC_GIMPLE_FOLD_H */
*** R/tree-flow.h	2013-10-09 13:46:21.956422298 -0400
--- tree-flow.h	2013-10-09 13:47:42.703173508 -0400
*************** tree force_gimple_operand_gsi_1 (gimple_
*** 167,173 ****
  				 bool, enum gsi_iterator_update);
  tree force_gimple_operand_gsi (gimple_stmt_iterator *, tree, bool, tree,
  			       bool, enum gsi_iterator_update);
  
- tree gimple_fold_indirect_ref (tree);
- 
  #endif /* _TREE_FLOW_H */
--- 167,171 ----
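
And for anyone who doesn't have gimple_fold_indirect_ref paged in:
the folds it performs correspond to source-level dereference shapes
like the ones below.  This is an illustrative C sketch of mine, not
part of the patch; the function itself matches these shapes on
GENERIC/GIMPLE trees, not on C source.

  #include <complex.h>

  double
  fold_examples (void)
  {
    int v = 1;
    int arr[4] = { 1, 2, 3, 4 };
    double _Complex c = 3.0 + 4.0 * I;

    int a = *&v;                       /* *&p => p */
    int b = *(int *) &arr;             /* *(foo *)&fooarray => fooarray[0] */
    double re = *(double *) &c;        /* *(foo *)&complexfoo
                                          => __real__ complexfoo */
    double im = *((double *) &c + 1);  /* ((foo *)&complexfoo)[1]
                                          => __imag__ complexfoo; the constant
                                          offset equals sizeof (double).  */

    return a + b + re + im;
  }

When none of the special shapes apply to *(p + CST) and the base is not
the address of an aggregate component, the function falls back to
building a MEM_REF with the constant folded into the offset operand.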