The following fixes the PR by making the reference ops for ARRAY_REFs store their original operand 3 instead of an expression for the element size.  This requires keeping track of the element alignment, and it avoids any issues with not folding * /[ex] chains.  It also enables "real" VN of those ARRAY_REFs, where previously things like valueization wouldn't have worked on expression ops.
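To make the encoding concrete, here is a small standalone sketch in plain C (not GCC code): toy_ref_op and toy_align_unit are hypothetical stand-ins for vn_reference_op_s and vn_ref_op_align_unit, and the align field is assumed to hold log2 of the element alignment in bits plus one, which is simply what the patch's vn_ref_op_align_unit decodes.  op2 records the element size in units of the element alignment, so the constant offset of an element is (index - low bound) * op2 * align-unit.

/* Standalone sketch (not GCC code).  toy_ref_op and toy_align_unit are
   hypothetical stand-ins for vn_reference_op_s and vn_ref_op_align_unit.  */
#include <stdio.h>

struct toy_ref_op
{
  long op0;        /* index (ARRAY_REF operand 1) */
  long op1;        /* lower bound */
  long op2;        /* element size in units of the element alignment */
  unsigned align;  /* log2 (element alignment in bits) + 1 */
};

/* Mirrors vn_ref_op_align_unit: element alignment in bytes, 0 if unknown.  */
static unsigned
toy_align_unit (const struct toy_ref_op *op)
{
  return op->align ? (1u << (op->align - 1)) / 8 : 0;
}

int
main (void)
{
  /* double a[10]; a[7]: element size 8 bytes, element alignment 64 bits,
     so align = log2 (64) + 1 = 7 and op2 = 8 / 8 = 1.  */
  struct toy_ref_op op = { 7, 0, 1, 7 };
  long off = (op.op0 - op.op1) * op.op2 * toy_align_unit (&op);
  printf ("constant offset of a[7]: %ld bytes\n", off);  /* prints 56 */
  return 0;
}

Since op2 stays the unchanged operand 3 (or an INTEGER_CST), the offset is a plain product again and valueization can treat op2 like any other operand.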
Bootstrapped on x86_64-unknown-linux-gnu, testing in progress.

Richard.

2016-07-18  Richard Biener  <rguent...@suse.de>

        PR tree-optimization/71901
        * tree-ssa-sccvn.h (struct vn_reference_op_struct): Add align member,
        group stuff with the bitfield.
        (vn_ref_op_align_unit): New inline.
        * tree-ssa-sccvn.c (copy_reference_ops_from_ref): For ARRAY_REFs
        record element alignment and operand 3 unchanged.
        (ao_ref_init_from_vn_reference): Adjust.
        (valueize_refs_1): Likewise.
        * tree-ssa-pre.c (create_component_ref_by_pieces_1): Likewise.

        * gcc.dg/torture/pr71901.c: New testcase.

Index: gcc/tree-ssa-sccvn.h
===================================================================
*** gcc/tree-ssa-sccvn.h        (revision 238426)
--- gcc/tree-ssa-sccvn.h        (working copy)
*************** typedef const struct vn_phi_s *const_vn_
*** 81,102 ****
  typedef struct vn_reference_op_struct
  {
    ENUM_BITFIELD(tree_code) opcode : 16;
-   /* 1 for instrumented calls.  */
-   unsigned with_bounds : 1;
    /* Dependence info, used for [TARGET_]MEM_REF only.  */
    unsigned short clique;
    unsigned short base;
    /* Constant offset this op adds or -1 if it is variable.  */
    HOST_WIDE_INT off;
    tree type;
    tree op0;
    tree op1;
    tree op2;
-   bool reverse;
  } vn_reference_op_s;
  typedef vn_reference_op_s *vn_reference_op_t;
  typedef const vn_reference_op_s *const_vn_reference_op_t;
  
  /* A reference operation in the hashtable is representation as
     the vuse, representing the memory state at the time of
--- 81,109 ----
  typedef struct vn_reference_op_struct
  {
    ENUM_BITFIELD(tree_code) opcode : 16;
    /* Dependence info, used for [TARGET_]MEM_REF only.  */
    unsigned short clique;
    unsigned short base;
+   /* 1 for instrumented calls.  */
+   unsigned with_bounds : 1;
+   unsigned reverse : 1;
+   /* For storing TYPE_ALIGN for array ref element size computation.  */
+   unsigned align : 6;
    /* Constant offset this op adds or -1 if it is variable.  */
    HOST_WIDE_INT off;
    tree type;
    tree op0;
    tree op1;
    tree op2;
  } vn_reference_op_s;
  typedef vn_reference_op_s *vn_reference_op_t;
  typedef const vn_reference_op_s *const_vn_reference_op_t;
  
+ inline unsigned
+ vn_ref_op_align_unit (vn_reference_op_t op)
+ {
+   return op->align ? ((unsigned)1 << (op->align - 1)) / BITS_PER_UNIT : 0;
+ }
  
  /* A reference operation in the hashtable is representation as
     the vuse, representing the memory state at the time of
Index: gcc/tree-ssa-pre.c
===================================================================
*** gcc/tree-ssa-pre.c  (revision 238426)
--- gcc/tree-ssa-pre.c  (working copy)
*************** create_component_ref_by_pieces_1 (basic_
*** 2570,2584 ****
               here as the element alignment may be not visible.  See
               PR43783.  Simply drop the element size for constant
               sizes.  */
!           if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
              genop3 = NULL_TREE;
            else
              {
-               genop3 = size_binop (EXACT_DIV_EXPR, genop3,
-                                    size_int (TYPE_ALIGN_UNIT (elmt_type)));
-               /* We may have a useless conversion added by
-                  array_ref_element_size via copy_reference_opts_from_ref.  */
-               STRIP_USELESS_TYPE_CONVERSION (genop3);
                genop3 = find_or_generate_expression (block, genop3, stmts);
                if (!genop3)
                  return NULL_TREE;
--- 2581,2593 ----
               here as the element alignment may be not visible.  See
               PR43783.  Simply drop the element size for constant
               sizes.  */
!           if (TREE_CODE (genop3) == INTEGER_CST
!               && wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)),
!                            (wi::to_offset (genop3)
!                             * vn_ref_op_align_unit (currop))))
              genop3 = NULL_TREE;
            else
              {
                genop3 = find_or_generate_expression (block, genop3, stmts);
                if (!genop3)
                  return NULL_TREE;
Index: gcc/tree-ssa-sccvn.c
===================================================================
*** gcc/tree-ssa-sccvn.c        (revision 238426)
--- gcc/tree-ssa-sccvn.c        (working copy)
*************** copy_reference_ops_from_ref (tree ref, v
*** 805,828 ****
        break;
      case ARRAY_RANGE_REF:
      case ARRAY_REF:
!       /* Record index as operand.  */
!       temp.op0 = TREE_OPERAND (ref, 1);
!       /* Always record lower bounds and element size.  */
!       temp.op1 = array_ref_low_bound (ref);
!       temp.op2 = array_ref_element_size (ref);
!       /* array_ref_element_size forces the result to sizetype
!          even if that is the same as bitsizetype.  */
!       STRIP_USELESS_TYPE_CONVERSION (temp.op2);
!       if (TREE_CODE (temp.op0) == INTEGER_CST
!           && TREE_CODE (temp.op1) == INTEGER_CST
!           && TREE_CODE (temp.op2) == INTEGER_CST)
!         {
!           offset_int off = ((wi::to_offset (temp.op0)
!                              - wi::to_offset (temp.op1))
!                             * wi::to_offset (temp.op2));
!           if (wi::fits_shwi_p (off))
!             temp.off = off.to_shwi();
!         }
        break;
      case VAR_DECL:
        if (DECL_HARD_REGISTER (ref))
--- 805,834 ----
        break;
      case ARRAY_RANGE_REF:
      case ARRAY_REF:
!       {
!         tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
!         /* Record index as operand.  */
!         temp.op0 = TREE_OPERAND (ref, 1);
!         /* Always record lower bounds and element size.  */
!         temp.op1 = array_ref_low_bound (ref);
!         /* But record element size in units of the type alignment.  */
!         temp.op2 = TREE_OPERAND (ref, 3);
!         temp.align = eltype->type_common.align;
!         if (! temp.op2)
!           temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
!                                  size_int (TYPE_ALIGN_UNIT (eltype)));
!         if (TREE_CODE (temp.op0) == INTEGER_CST
!             && TREE_CODE (temp.op1) == INTEGER_CST
!             && TREE_CODE (temp.op2) == INTEGER_CST)
!           {
!             offset_int off = ((wi::to_offset (temp.op0)
!                                - wi::to_offset (temp.op1))
!                               * wi::to_offset (temp.op2)
!                               * vn_ref_op_align_unit (&temp));
!             if (wi::fits_shwi_p (off))
!               temp.off = off.to_shwi();
!           }
!       }
        break;
      case VAR_DECL:
        if (DECL_HARD_REGISTER (ref))
*************** ao_ref_init_from_vn_reference (ao_ref *r
*** 1021,1027 ****
            offset_int woffset
              = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
                          TYPE_PRECISION (TREE_TYPE (op->op0)));
!           woffset *= wi::to_offset (op->op2);
            woffset <<= LOG2_BITS_PER_UNIT;
            offset += woffset;
          }
--- 1027,1033 ----
            offset_int woffset
              = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
                          TYPE_PRECISION (TREE_TYPE (op->op0)));
!           woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
            woffset <<= LOG2_BITS_PER_UNIT;
            offset += woffset;
          }
*************** valueize_refs_1 (vec<vn_reference_op_s> 
*** 1471,1477 ****
        {
          offset_int off = ((wi::to_offset (vro->op0)
                             - wi::to_offset (vro->op1))
!                           * wi::to_offset (vro->op2));
          if (wi::fits_shwi_p (off))
            vro->off = off.to_shwi ();
        }
--- 1477,1484 ----
        {
          offset_int off = ((wi::to_offset (vro->op0)
                             - wi::to_offset (vro->op1))
!                           * wi::to_offset (vro->op2)
!                           * vn_ref_op_align_unit (vro));
          if (wi::fits_shwi_p (off))
            vro->off = off.to_shwi ();
        }
Index: gcc/testsuite/gcc.dg/torture/pr71901.c
===================================================================
*** gcc/testsuite/gcc.dg/torture/pr71901.c      (revision 0)
--- gcc/testsuite/gcc.dg/torture/pr71901.c      (working copy)
***************
*** 0 ****
--- 1,14 ----
+ /* { dg-do compile } */
+ 
+ typedef struct { int _mp_size; } mpz_t[1];
+ int a, b;
+ void fn1()
+ {
+   mpz_t c[1][b];
+   for (;;) {
+       int d = 0 >= 0 ? 0 == 0 ? c[0][0]->_mp_size ? -1 : 0 : 0 : 0,
+           e = 0 >= 0 ? 0 == 0 ? c[1][1]->_mp_size ? -1 : 0 : 0 : 0;
+       if (d != e)
+         a++;
+     }
+ }