mirror of
https://github.com/gcc-mirror/gcc.git
synced 2026-05-06 23:25:24 +02:00
tree-optimization/119997 - &ptr->field no longer subject to PRE
The following makes PRE handle &ptr->field the same as VN by treating it as a POINTER_PLUS_EXPR when possible and thus as 'nary'. To facilitate this the patch splits out vn_pp_nary_for_addr and adds const overloads for vec::last. The patch also avoids handling an effective zero offset as POINTER_PLUS_EXPR. PR tree-optimization/119997 * vec.h (vec<T, A, vl_embed>::last): Provide const overload. (vec<T, va_heap, vl_ptr>::last): Likewise. * tree-ssa-sccvn.h (vn_pp_nary_for_addr): Declare. * tree-ssa-sccvn.cc (vn_pp_nary_for_addr): Split out from ... (vn_reference_lookup): ... here. (vn_reference_insert): ... and duplicate here. Do not handle zero offset as POINTER_PLUS_EXPR. * tree-ssa-pre.cc (compute_avail): Implement ADDR_EXPR-as-POINTER_PLUS_EXPR special casing. * gcc.dg/tree-ssa/ssa-pre-35.c: New testcase.
This commit is contained in:
committed by
Richard Biener
parent
77687bc0e1
commit
fc62834533
15
gcc/testsuite/gcc.dg/tree-ssa/ssa-pre-35.c
Normal file
15
gcc/testsuite/gcc.dg/tree-ssa/ssa-pre-35.c
Normal file
@@ -0,0 +1,15 @@
|
||||
/* { dg-do compile } */
|
||||
/* { dg-options "-O2 -fdump-tree-pre-stats" } */
|
||||
|
||||
void bar (int *);
|
||||
|
||||
struct X { int a[2]; };
|
||||
/* When B is set, call bar with (int *)P + 1; the unconditional call
   computes &P->a[1], which is the same address expressed as a
   POINTER_PLUS_EXPR, so PRE should hoist the shared address
   computation (checked by the scan-tree-dump directive).  */
void foo (struct X *p, int b)
{
  if (b)
    bar ((int *)p + 1);
  bar (&p->a[1]);
}
|
||||
|
||||
/* We should PRE and hoist &p->a[1] as (int *)p + 1. */
|
||||
/* { dg-final { scan-tree-dump "HOIST inserted: 1" "pre" } } */
|
||||
@@ -4133,6 +4133,33 @@ compute_avail (function *fun)
|
||||
vec<vn_reference_op_s> operands
|
||||
= vn_reference_operands_for_lookup (rhs1);
|
||||
vn_reference_t ref;
|
||||
|
||||
/* We handle &MEM[ptr + 5].b[1].c as
|
||||
POINTER_PLUS_EXPR. */
|
||||
if (operands[0].opcode == ADDR_EXPR
|
||||
&& operands.last ().opcode == SSA_NAME)
|
||||
{
|
||||
tree ops[2];
|
||||
if (vn_pp_nary_for_addr (operands, ops))
|
||||
{
|
||||
vn_nary_op_t nary;
|
||||
vn_nary_op_lookup_pieces (2, POINTER_PLUS_EXPR,
|
||||
TREE_TYPE (rhs1), ops,
|
||||
&nary);
|
||||
operands.release ();
|
||||
if (nary && !nary->predicated_values)
|
||||
{
|
||||
unsigned value_id = nary->value_id;
|
||||
if (value_id_constant_p (value_id))
|
||||
continue;
|
||||
result = get_or_alloc_expr_for_nary
|
||||
(nary, value_id, gimple_location (stmt));
|
||||
break;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
vn_reference_lookup_pieces (gimple_vuse (stmt), set,
|
||||
base_set, TREE_TYPE (rhs1),
|
||||
operands, &ref, VN_WALK);
|
||||
|
||||
@@ -3998,6 +3998,41 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set,
|
||||
return NULL_TREE;
|
||||
}
|
||||
|
||||
/* When OPERANDS is an ADDR_EXPR that can possibly be expressed as a
   POINTER_PLUS_EXPR return true and fill in its operands in OPS.
   OPERANDS must start with an ADDR_EXPR and end with an SSA_NAME;
   on success OPS[0] is the base SSA name and OPS[1] the accumulated
   constant byte offset as a sizetype tree.  */

bool
vn_pp_nary_for_addr (const vec<vn_reference_op_s>& operands, tree ops[2])
{
  gcc_assert (operands[0].opcode == ADDR_EXPR
	      && operands.last ().opcode == SSA_NAME);
  /* Accumulate the constant offsets of all component operands between
     the outer ADDR_EXPR and the final SSA_NAME.  */
  poly_int64 off = 0;
  vn_reference_op_t vro;
  unsigned i;
  for (i = 1; operands.iterate (i, &vro); ++i)
    {
      if (vro->opcode == SSA_NAME)
	break;
      /* An off of -1 marks an operand without a known constant
	 offset; in that case the address is not expressible as
	 pointer plus constant.  */
      else if (known_eq (vro->off, -1))
	break;
      off += vro->off;
    }
  /* Succeed only when every operand up to the trailing SSA_NAME had a
     known offset, the offset is non-zero (a zero offset is not
     handled as POINTER_PLUS_EXPR), and the value is representable.  */
  if (i == operands.length () - 1
      && maybe_ne (off, 0)
      /* Make sure the offset we accumulated in a 64bit int
	 fits the address computation carried out in target
	 offset precision.  */
      && (off.coeffs[0]
	  == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
    {
      gcc_assert (operands[i-1].opcode == MEM_REF);
      ops[0] = operands[i].op0;
      ops[1] = wide_int_to_tree (sizetype, off);
      return true;
    }
  return false;
}
|
||||
|
||||
/* Lookup OP in the current hash table, and return the resulting value
|
||||
number if it exists in the hash table. Return NULL_TREE if it does
|
||||
not exist in the hash table or if the result field of the structure
|
||||
@@ -4034,28 +4069,9 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
|
||||
&& operands[0].opcode == ADDR_EXPR
|
||||
&& operands.last ().opcode == SSA_NAME)
|
||||
{
|
||||
poly_int64 off = 0;
|
||||
vn_reference_op_t vro;
|
||||
unsigned i;
|
||||
for (i = 1; operands.iterate (i, &vro); ++i)
|
||||
tree ops[2];
|
||||
if (vn_pp_nary_for_addr (operands, ops))
|
||||
{
|
||||
if (vro->opcode == SSA_NAME)
|
||||
break;
|
||||
else if (known_eq (vro->off, -1))
|
||||
break;
|
||||
off += vro->off;
|
||||
}
|
||||
if (i == operands.length () - 1
|
||||
/* Make sure the offset we accumulated in a 64bit int
|
||||
fits the address computation carried out in target
|
||||
offset precision. */
|
||||
&& (off.coeffs[0]
|
||||
== sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
|
||||
{
|
||||
gcc_assert (operands[i-1].opcode == MEM_REF);
|
||||
tree ops[2];
|
||||
ops[0] = operands[i].op0;
|
||||
ops[1] = wide_int_to_tree (sizetype, off);
|
||||
tree res = vn_nary_op_lookup_pieces (2, POINTER_PLUS_EXPR,
|
||||
TREE_TYPE (op), ops, NULL);
|
||||
if (res)
|
||||
@@ -4178,28 +4194,9 @@ vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
|
||||
&& operands[0].opcode == ADDR_EXPR
|
||||
&& operands.last ().opcode == SSA_NAME)
|
||||
{
|
||||
poly_int64 off = 0;
|
||||
vn_reference_op_t vro;
|
||||
unsigned i;
|
||||
for (i = 1; operands.iterate (i, &vro); ++i)
|
||||
tree ops[2];
|
||||
if (vn_pp_nary_for_addr (operands, ops))
|
||||
{
|
||||
if (vro->opcode == SSA_NAME)
|
||||
break;
|
||||
else if (known_eq (vro->off, -1))
|
||||
break;
|
||||
off += vro->off;
|
||||
}
|
||||
if (i == operands.length () - 1
|
||||
/* Make sure the offset we accumulated in a 64bit int
|
||||
fits the address computation carried out in target
|
||||
offset precision. */
|
||||
&& (off.coeffs[0]
|
||||
== sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
|
||||
{
|
||||
gcc_assert (operands[i-1].opcode == MEM_REF);
|
||||
tree ops[2];
|
||||
ops[0] = operands[i].op0;
|
||||
ops[1] = wide_int_to_tree (sizetype, off);
|
||||
vn_nary_op_insert_pieces (2, POINTER_PLUS_EXPR,
|
||||
TREE_TYPE (op), ops, result,
|
||||
VN_INFO (result)->value_id);
|
||||
|
||||
@@ -255,6 +255,7 @@ vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, struct obstack *);
|
||||
unsigned int vn_nary_length_from_stmt (gimple *);
|
||||
void init_vn_nary_op_from_stmt (vn_nary_op_t, gassign *);
|
||||
hashval_t vn_nary_op_compute_hash (const vn_nary_op_t);
|
||||
bool vn_pp_nary_for_addr (const vec<vn_reference_op_s>&, tree[2]);
|
||||
tree vn_nary_op_lookup_stmt (gimple *, vn_nary_op_t *);
|
||||
tree vn_nary_op_lookup_pieces (unsigned int, enum tree_code,
|
||||
tree, tree *, vn_nary_op_t *);
|
||||
|
||||
11
gcc/vec.h
11
gcc/vec.h
@@ -611,6 +611,7 @@ public:
|
||||
const T *end () const { return address () + length (); }
|
||||
const T &operator[] (unsigned) const;
|
||||
T &operator[] (unsigned);
|
||||
const T &last (void) const;
|
||||
T &last (void);
|
||||
bool space (unsigned) const;
|
||||
bool iterate (unsigned, T *) const;
|
||||
@@ -914,6 +915,14 @@ vec<T, A, vl_embed>::operator[] (unsigned ix)
|
||||
|
||||
/* Get the final element of the vector, which must not be empty.
   Const overload; reading the last element of an empty vector is
   caught by the checking assert in checking builds.  */

template<typename T, typename A>
inline const T &
vec<T, A, vl_embed>::last (void) const
{
  gcc_checking_assert (m_vecpfx.m_num > 0);
  return (*this)[m_vecpfx.m_num - 1];
}
|
||||
|
||||
template<typename T, typename A>
|
||||
inline T &
|
||||
vec<T, A, vl_embed>::last (void)
|
||||
@@ -1588,6 +1597,8 @@ public:
|
||||
const T *end () const { return begin () + length (); }
|
||||
const T &operator[] (unsigned ix) const
|
||||
{ return (*m_vec)[ix]; }
|
||||
/* Get the final element of the vector, which must not be empty;
   const overload delegating to the embedded vector's last ().  */
const T &last (void) const
{ return m_vec->last (); }
|
||||
|
||||
bool operator!=(const vec &other) const
|
||||
{ return !(*this == other); }
|
||||
|
||||
Reference in New Issue
Block a user