I am testing the following patch to add a location argument to generic_simplify and properly build GENERIC trees with a location (oops). The patch also implements special handling for NON_LVALUE_EXPR by dropping it for GIMPLE and using non_lvalue_loc to build it for GENERIC. This simplifies implementing patterns from fold-const.c.
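To illustrate what this buys us (this sketch is not part of the patch and not
literal genmatch output -- names and control flow are simplified): for a
pattern like (simplify (plus @0 integer_zerop) (non_lvalue @0)) the generated
GENERIC entry point now looks roughly like

  /* Hand-written approximation of the code genmatch emits for
     (simplify (plus @0 integer_zerop) (non_lvalue @0)) after this patch.  */
  tree
  generic_simplify (location_t loc, enum tree_code code,
                    tree type ATTRIBUTE_UNUSED, tree op0, tree op1)
  {
    switch (code)
      {
      case PLUS_EXPR:
        if (integer_zerop (op1))
          /* The outermost non_lvalue in the result is built with
             non_lvalue_loc so the location is preserved; the GIMPLE
             path drops the NON_LVALUE_EXPR instead.  */
          return non_lvalue_loc (loc, op0);
        break;
      default:
        break;
      }
    return NULL_TREE;
  }

while all inner expressions are built with fold_buildN_loc / build_call_expr_loc
using the same loc.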
The patch also folds match-constant-folding.pd (part of the next merge)
into match.pd and removes the patterns it implements from fold_binary.

Bootstrap and regtest running on x86_64-unknown-linux-gnu.

Richard.

2014-10-22  Richard Biener  <rguent...@suse.de>

	* fold-const.c (fold_unary_loc): Adjust for generic_simplify
	prototype change.
	(fold_binary_loc): Likewise.
	(fold_ternary_loc): Likewise.
	(fold_binary_loc): Remove patterns implemented by
	match-constant-folding.pd.
	* match.pd: Fold in match-constant-folding.pd and adjust it
	for the non_lvalue changes and make it match fold-const.c
	more closely.
	* match-constant-folding.pd: Remove.
	* genmatch.c (expr::gen_transform): Use fold_buildN_loc and
	build_call_expr_loc.
	(dt_simplify::gen): Drop non_lvalue for GIMPLE, use non_lvalue_loc
	to build it for GENERIC.
	(decision_tree::gen_generic): Add location argument to
	generic_simplify prototype.

Index: gcc/fold-const.c
===================================================================
--- gcc/fold-const.c	(revision 216542)
+++ gcc/fold-const.c	(working copy)
@@ -7564,8 +7564,8 @@ fold_unary_loc (location_t loc, enum tre
   gcc_assert (IS_EXPR_CODE_CLASS (kind)
 	      && TREE_CODE_LENGTH (code) == 1);
 
-  extern tree generic_simplify (enum tree_code, tree, tree);
-  tem = generic_simplify (code, type, op0);
+  extern tree generic_simplify (location_t, enum tree_code, tree, tree);
+  tem = generic_simplify (loc, code, type, op0);
   if (tem)
     return tem;
 
@@ -9918,8 +9918,8 @@ fold_binary_loc (location_t loc,
       && tree_swap_operands_p (arg0, arg1, true))
     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
 
-  extern tree generic_simplify (enum tree_code, tree, tree, tree);
-  tem = generic_simplify (code, type, op0, op1);
+  extern tree generic_simplify (location_t, enum tree_code, tree, tree, tree);
+  tem = generic_simplify (loc, code, type, op0, op1);
   if (tem)
     return tem;
 
@@ -10045,10 +10045,6 @@ fold_binary_loc (location_t loc,
       if (integer_zerop (arg0))
 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
 
-      /* PTR +p 0 -> PTR */
-      if (integer_zerop (arg1))
-	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
 	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
@@ -10169,9 +10165,6 @@ fold_binary_loc (location_t loc,
 
       if (! FLOAT_TYPE_P (type))
 	{
-	  if (integer_zerop (arg1))
-	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
 	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
 	     with a constant, and the two constants have no bits in common,
 	     we should treat this as a BIT_IOR_EXPR since this may produce more
@@ -10657,8 +10650,6 @@ fold_binary_loc (location_t loc,
 	{
 	  if (integer_zerop (arg0))
 	    return negate_expr (fold_convert_loc (loc, type, arg1));
-	  if (integer_zerop (arg1))
-	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
 
 	  /* Fold A - (A & B) into ~B & A.  */
 	  if (!TREE_SIDE_EFFECTS (arg0)
@@ -10751,16 +10742,6 @@ fold_binary_loc (location_t loc,
 	    }
 	}
 
-      /* Fold &x - &x.  This can happen from &x.foo - &x.
-	 This is unsafe for certain floats even in non-IEEE formats.
-	 In IEEE, it is unsafe because it does wrong for NaNs.
-	 Also note that operand_equal_p is always false if an operand
-	 is volatile.  */
-
-      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
-	  && operand_equal_p (arg0, arg1, 0))
-	return build_zero_cst (type);
-
       /* A - B -> A + (-B) if B is easily negatable.  */
       if (negate_expr_p (arg1)
 	  && ((FLOAT_TYPE_P (type)
@@ -10838,10 +10819,6 @@ fold_binary_loc (location_t loc,
 
       if (! FLOAT_TYPE_P (type))
 	{
-	  if (integer_zerop (arg1))
-	    return omit_one_operand_loc (loc, type, arg1, arg0);
-	  if (integer_onep (arg1))
-	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
 	  /* Transform x * -1 into -x.  Make sure to do the negation
 	     on the original operand with conversions not stripped
 	     because we can only strip non-sign-changing conversions.  */
@@ -11138,10 +11115,6 @@ fold_binary_loc (location_t loc,
 
     case BIT_IOR_EXPR:
     bit_ior:
-      if (integer_all_onesp (arg1))
-	return omit_one_operand_loc (loc, type, arg1, arg0);
-      if (integer_zerop (arg1))
-	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
       if (operand_equal_p (arg0, arg1, 0))
 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
 
@@ -11280,12 +11253,8 @@ fold_binary_loc (location_t loc,
       goto bit_rotate;
 
     case BIT_XOR_EXPR:
-      if (integer_zerop (arg1))
-	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
       if (integer_all_onesp (arg1))
 	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
-      if (operand_equal_p (arg0, arg1, 0))
-	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
 
       /* ~X ^ X is -1.  */
       if (TREE_CODE (arg0) == BIT_NOT_EXPR
@@ -11443,8 +11412,6 @@ fold_binary_loc (location_t loc,
     case BIT_AND_EXPR:
       if (integer_all_onesp (arg1))
 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-      if (integer_zerop (arg1))
-	return omit_one_operand_loc (loc, type, arg1, arg0);
       if (operand_equal_p (arg0, arg1, 0))
 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
 
@@ -12195,8 +12162,6 @@ fold_binary_loc (location_t loc,
     case ROUND_DIV_EXPR:
     case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
-      if (integer_onep (arg1))
-	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
       if (integer_zerop (arg1))
 	return NULL_TREE;
       /* X / -1 is -X.  */
@@ -12266,21 +12231,6 @@ fold_binary_loc (location_t loc,
     case FLOOR_MOD_EXPR:
     case ROUND_MOD_EXPR:
     case TRUNC_MOD_EXPR:
-      /* X % 1 is always zero, but be sure to preserve any side
-	 effects in X.  */
-      if (integer_onep (arg1))
-	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
-
-      /* X % 0, return X % 0 unchanged so that we can get the
-	 proper warnings and errors.  */
-      if (integer_zerop (arg1))
-	return NULL_TREE;
-
-      /* 0 % X is always zero, but be sure to preserve any side
-	 effects in X.  Place this after checking for X == 0.  */
-      if (integer_zerop (arg0))
-	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
-
       /* X % -1 is zero.  */
       if (!TYPE_UNSIGNED (type)
 	  && TREE_CODE (arg1) == INTEGER_CST
@@ -13813,8 +13763,9 @@ fold_ternary_loc (location_t loc, enum t
       && tree_swap_operands_p (op0, op1, true))
     return fold_build3_loc (loc, code, type, op1, op0, op2);
 
-  extern tree generic_simplify (enum tree_code, tree, tree, tree, tree);
-  tem = generic_simplify (code, type, op0, op1, op2);
+  extern tree generic_simplify (location_t, enum tree_code,
+				tree, tree, tree, tree);
+  tem = generic_simplify (loc, code, type, op0, op1, op2);
   if (tem)
     return tem;
 
Index: gcc/genmatch.c
===================================================================
--- gcc/genmatch.c	(revision 216546)
+++ gcc/genmatch.c	(working copy)
@@ -1374,10 +1374,11 @@ expr::gen_transform (FILE *f, const char
   else
     {
      if (operation->kind == id_base::CODE)
-	fprintf (f, "  res = fold_build%d (%s, %s",
+	fprintf (f, "  res = fold_build%d_loc (loc, %s, %s",
		 ops.length(), operation->id, type);
       else
-	fprintf (f, "  res = build_call_expr (builtin_decl_implicit (%s), %d",
+	fprintf (f, "  res = build_call_expr_loc (loc, "
+		 "builtin_decl_implicit (%s), %d",
		 operation->id, ops.length());
       for (unsigned i = 0; i < ops.length (); ++i)
	fprintf (f, ", ops%d[%u]", depth, i);
@@ -1933,16 +1934,22 @@ dt_simplify::gen (FILE *f, bool gimple)
       output_line_directive (f, s->result_location, true);
       fprintf (f, ", %%s:%%d\\n\", __FILE__, __LINE__);\n");
 
-  if (!s->result)
+  operand *result = s->result;
+  if (!result)
     {
       /* If there is no result then this is a predicate implementation.  */
       fprintf (f, "return true;\n");
     }
   else if (gimple)
     {
-      if (s->result->type == operand::OP_EXPR)
+      /* For GIMPLE simply drop NON_LVALUE_EXPR (which only appears
+	 in outermost position).  */
+      if (result->type == operand::OP_EXPR
+	  && *as_a <expr *> (result)->operation == NON_LVALUE_EXPR)
+	result = as_a <expr *> (result)->ops[0];
+      if (result->type == operand::OP_EXPR)
	{
-	  expr *e = as_a <expr *> (s->result);
+	  expr *e = as_a <expr *> (result);
	  bool is_predicate = is_a <predicate_id *> (e->operation);
	  if (!is_predicate)
	    fprintf (f, "*res_code = %s;\n", e->operation->id);
@@ -1964,10 +1971,10 @@ dt_simplify::gen (FILE *f, bool gimple)
	    fprintf (f, "gimple_resimplify%d (seq, res_code, type, "
		     "res_ops, valueize);\n", e->ops.length ());
	}
-      else if (s->result->type == operand::OP_CAPTURE
-	       || s->result->type == operand::OP_C_EXPR)
+      else if (result->type == operand::OP_CAPTURE
+	       || result->type == operand::OP_C_EXPR)
	{
-	  s->result->gen_transform (f, "res_ops[0]", true, 1, "type", indexes);
+	  result->gen_transform (f, "res_ops[0]", true, 1, "type", indexes);
	  fprintf (f, "*res_code = TREE_CODE (res_ops[0]);\n");
	}
       else
@@ -1976,9 +1983,9 @@ dt_simplify::gen (FILE *f, bool gimple)
     }
   else /* GENERIC */
     {
-      if (s->result->type == operand::OP_EXPR)
+      if (result->type == operand::OP_EXPR)
	{
-	  expr *e = as_a <expr *> (s->result);
+	  expr *e = as_a <expr *> (result);
	  bool is_predicate = is_a <predicate_id *> (e->operation);
	  for (unsigned j = 0; j < e->ops.length (); ++j)
	    {
@@ -2001,21 +2008,28 @@ dt_simplify::gen (FILE *f, bool gimple)
	    fprintf (f, "return true;\n");
	  else
	    {
-	      /* Re-fold the toplevel result.  */
-	      if (e->operation->kind == id_base::CODE)
-		fprintf (f, "  return fold_build%d (%s, type",
-			 e->ops.length (), e->operation->id);
+	      /* Re-fold the toplevel result.  Use non_lvalue to
+		 build NON_LVALUE_EXPRs so they get properly
+		 ignored when in GIMPLE form.  */
+	      if (*e->operation == NON_LVALUE_EXPR)
+		fprintf (f, "  return non_lvalue_loc (loc, res_op0);\n");
	      else
-		fprintf (f, "  return build_call_expr "
-			 "(builtin_decl_implicit (%s), %d",
-			 e->operation->id, e->ops.length());
-	      for (unsigned j = 0; j < e->ops.length (); ++j)
-		fprintf (f, ", res_op%d", j);
-	      fprintf (f, ");\n");
+		{
+		  if (e->operation->kind == id_base::CODE)
+		    fprintf (f, "  return fold_build%d_loc (loc, %s, type",
+			     e->ops.length (), e->operation->id);
+		  else
+		    fprintf (f, "  return build_call_expr_loc "
+			     "(loc, builtin_decl_implicit (%s), %d",
+			     e->operation->id, e->ops.length());
+		  for (unsigned j = 0; j < e->ops.length (); ++j)
+		    fprintf (f, ", res_op%d", j);
+		  fprintf (f, ");\n");
+		}
	    }
	}
-      else if (s->result->type == operand::OP_CAPTURE
-	       || s->result->type == operand::OP_C_EXPR)
+      else if (result->type == operand::OP_CAPTURE
+	       || result->type == operand::OP_C_EXPR)
	{
	  fprintf (f, "  tree res;\n");
	  s->result->gen_transform (f, "  res", false, 1, "type", indexes);
@@ -2086,7 +2100,7 @@ decision_tree::gen_generic (FILE *f)
   for (unsigned n = 1; n <= 3; ++n)
     {
       fprintf (f, "\ntree\n"
-	       "generic_simplify (enum tree_code code, "
+	       "generic_simplify (location_t loc, enum tree_code code, "
	       "tree type ATTRIBUTE_UNUSED");
       for (unsigned i = 0; i < n; ++i)
	fprintf (f, ", tree op%d", i);
Index: gcc/match-constant-folding.pd
===================================================================
--- gcc/match-constant-folding.pd	(revision 216546)
+++ gcc/match-constant-folding.pd	(working copy)
@@ -1,73 +0,0 @@
-/* match-and-simplify patterns for simple constant foldings to
-   substitute gimple_fold_stmt_to_constant_2
-   Copyright (C) 2014 Free Software Foundation, Inc.
-
-This file is part of GCC.
-
-GCC is free software; you can redistribute it and/or modify it under
-the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 3, or (at your option) any later
-version.
-
-GCC is distributed in the hope that it will be useful, but WITHOUT ANY
-WARRANTY; without even the implied warranty of MERCHANTABILITY or
-FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
-for more details.
-
-You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING3.  If not see
-<http://www.gnu.org/licenses/>.  */
-
-(for op (plus pointer_plus minus bit_ior bit_xor)
-  (simplify
-    (op @0 integer_zerop)
-    (if (GENERIC && !in_gimple_form)
-     /* ??? fold_binary adds non_lvalue here and "fixes" the C++
-        run of Wsizeof-pointer-memaccess1.c, preserving enough of
-        sizeof (&a) + 0 because sizeof (&a) is maybe_lvalue_p ()
-        for no good reason.  The C frontend is fine as it doesn't
-        fold too early.  */
-     (non_lvalue @0))
-    @0))
-
-(simplify
-  (minus @0 @0)
-  (if (!HONOR_NANS (TYPE_MODE (type)))
-   { build_zero_cst (type); }))
-
-(simplify
-  (mult @0 integer_zerop@1)
-  @1)
-
-/* Make sure to preserve divisions by zero.  This is the reason why
-   we don't simplify x / x to 1 or 0 / x to 0.  */
-(for op (mult trunc_div ceil_div floor_div round_div)
-  (simplify
-    (op @0 integer_onep)
-    @0))
-
-(simplify
-  (trunc_mod @0 integer_onep)
-  { build_zero_cst (type); })
-/* Same applies to modulo operations, but fold is inconsistent here
-   and simplifies 0 % x to 0.  */
-(simplify
-  (trunc_mod integer_zerop@0 @1)
-  (if (!integer_zerop (@1))
-   @0))
-
-/* x | ~0 -> ~0 */
-(simplify
-  (bit_ior @0 integer_all_onesp@1)
-  @1)
-
-/* x & 0 -> 0 */
-(simplify
-  (bit_and @0 integer_zerop@1)
-  @1)
-
-/* x ^ x -> 0 */
-(simplify
-  (bit_xor @0 @0)
-  { build_zero_cst (type); })
-
-
Index: gcc/match.pd
===================================================================
--- gcc/match.pd	(revision 216546)
+++ gcc/match.pd	(working copy)
@@ -30,6 +30,68 @@ (define_predicates
    CONSTANT_CLASS_P)
 
+/* Simplifications of operations with one constant operand and
+   simplifications to constants.  */
+
+(for op (plus pointer_plus minus bit_ior bit_xor)
+  (simplify
+    (op @0 integer_zerop)
+    (non_lvalue @0)))
+
+/* Simplify x - x.
+   This is unsafe for certain floats even in non-IEEE formats.
+   In IEEE, it is unsafe because it does wrong for NaNs.
+   Also note that operand_equal_p is always false if an operand
+   is volatile.  */
+(simplify
+  (minus @0 @0)
+  (if (!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
+   { build_zero_cst (type); }))
+
+(simplify
+  (mult @0 integer_zerop@1)
+  @1)
+
+/* Make sure to preserve divisions by zero.  This is the reason why
+   we don't simplify x / x to 1 or 0 / x to 0.  */
+(for op (mult trunc_div ceil_div floor_div round_div exact_div)
+  (simplify
+    (op @0 integer_onep)
+    (non_lvalue @0)))
+
+/* Same applies to modulo operations, but fold is inconsistent here
+   and simplifies 0 % x to 0, only preserving literal 0 % 0.  */
+(for op (ceil_mod floor_mod round_mod trunc_mod)
+ /* 0 % X is always zero.  */
+ (simplify
+  (trunc_mod integer_zerop@0 @1)
+  /* But not for 0 % 0 so that we can get the proper warnings
+     and errors.  */
+  (if (!integer_zerop (@1))
+   @0))
+ /* X % 1 is always zero.  */
+ (simplify
+  (trunc_mod @0 integer_onep)
+  { build_zero_cst (type); }))
+
+/* x | ~0 -> ~0 */
+(simplify
+  (bit_ior @0 integer_all_onesp@1)
+  @1)
+
+/* x & 0 -> 0 */
+(simplify
+  (bit_and @0 integer_zerop@1)
+  @1)
+
+/* x ^ x -> 0 */
+(simplify
+  (bit_xor @0 @0)
+  { build_zero_cst (type); })
+
+
+
+
 /* Simple example for a user-defined predicate - modeled after
    fold-const.c:negate_expr_p.  */
 (match negate_expr_p
@@ -161,7 +223,6 @@ (define_predicates
 #include "match-bitwise.pd"
 #include "match-rotate.pd"
 #include "match-builtin.pd"
-#include "match-constant-folding.pd"
 #include "match-comparison.pd"
 #include "match-conversions.pd"