tree-optimization/100923 - re-do VN with contextual PTA info fix

The following implements the gist of the PR100923 fix in a leaner
(and more complete) way by realizing that all uses of
ao_ref_init_from_vn_reference need the SSA name in the base to be
valueized with availability in mind.  Instead of re-valueizing the
whole chain of operands we can simply only and always valueize the
SSA name we put in the base.

This also handles two omitted places in vn_reference_lookup_3.

	PR tree-optimization/100923
	* tree-ssa-sccvn.cc (ao_ref_init_from_vn_reference): Valueize
	base SSA_NAME.
	(vn_reference_lookup_3): Adjust vn_context_bb around calls
	to ao_ref_init_from_vn_reference.
	(vn_reference_lookup_pieces): Revert original PR100923 fix.
	(vn_reference_lookup): Likewise.
pull/81/merge
Richard Biener 2024-02-28 10:08:31 +01:00
parent 8bdfb0c2d5
commit 7c469a9fc7
1 changed file with 25 additions and 33 deletions

View File

@@ -1201,11 +1201,17 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
case STRING_CST:
/* This can show up in ARRAY_REF bases. */
case INTEGER_CST:
case SSA_NAME:
*op0_p = op->op0;
op0_p = NULL;
break;
case SSA_NAME:
/* SSA names we have to get at one available since it contains
flow-sensitive info. */
*op0_p = vn_valueize (op->op0);
op0_p = NULL;
break;
/* And now the usual component-reference style ops. */
case BIT_FIELD_REF:
offset += wi::to_poly_offset (op->op1);
@@ -2725,7 +2731,6 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
copy_reference_ops_from_ref (lhs, &lhs_ops);
valueize_refs_1 (&lhs_ops, &valueized_anything, true);
}
vn_context_bb = saved_rpo_bb;
ao_ref_init (&lhs_ref, lhs);
lhs_ref_ok = true;
if (valueized_anything
@@ -2734,9 +2739,11 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
&& !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
{
vn_context_bb = saved_rpo_bb;
*disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
return NULL;
}
vn_context_bb = saved_rpo_bb;
/* When the def is a CLOBBER we can optimistically disambiguate
against it since any overlap it would be undefined behavior.
@@ -3634,13 +3641,19 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
/* Adjust *ref from the new operands. */
ao_ref rhs1_ref;
ao_ref_init (&rhs1_ref, rhs1);
basic_block saved_rpo_bb = vn_context_bb;
vn_context_bb = gimple_bb (def_stmt);
if (!ao_ref_init_from_vn_reference (&r,
force_no_tbaa ? 0
: ao_ref_alias_set (&rhs1_ref),
force_no_tbaa ? 0
: ao_ref_base_alias_set (&rhs1_ref),
vr->type, vr->operands))
return (void *)-1;
{
vn_context_bb = saved_rpo_bb;
return (void *)-1;
}
vn_context_bb = saved_rpo_bb;
/* This can happen with bitfields. */
if (maybe_ne (ref->size, r.size))
{
@@ -3839,8 +3852,14 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
return data->finish (0, 0, val);
/* Adjust *ref from the new operands. */
basic_block saved_rpo_bb = vn_context_bb;
vn_context_bb = gimple_bb (def_stmt);
if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
return (void *)-1;
{
vn_context_bb = saved_rpo_bb;
return (void *)-1;
}
vn_context_bb = saved_rpo_bb;
/* This can happen with bitfields. */
if (maybe_ne (ref->size, r.size))
return (void *)-1;
@@ -3928,31 +3947,13 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set,
unsigned limit = param_sccvn_max_alias_queries_per_access;
vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE,
false);
vec<vn_reference_op_s> ops_for_ref;
if (!valueized_p)
ops_for_ref = vr1.operands;
else
{
/* For ao_ref_from_mem we have to ensure only available SSA names
end up in base and the only convenient way to make this work
for PRE is to re-valueize with that in mind. */
ops_for_ref.create (operands.length ());
ops_for_ref.quick_grow (operands.length ());
memcpy (ops_for_ref.address (),
operands.address (),
sizeof (vn_reference_op_s)
* operands.length ());
valueize_refs_1 (&ops_for_ref, &valueized_p, true);
}
if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
ops_for_ref))
vr1.operands))
*vnresult
= ((vn_reference_t)
walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
vn_reference_lookup_3, vuse_valueize,
limit, &data));
if (ops_for_ref != shared_lookup_references)
ops_for_ref.release ();
gcc_checking_assert (vr1.operands == shared_lookup_references);
if (*vnresult
&& data.same_val
@@ -4053,18 +4054,9 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
vn_reference_t wvnresult;
ao_ref r;
unsigned limit = param_sccvn_max_alias_queries_per_access;
auto_vec<vn_reference_op_s> ops_for_ref;
if (valueized_anything)
{
copy_reference_ops_from_ref (op, &ops_for_ref);
bool tem;
valueize_refs_1 (&ops_for_ref, &tem, true);
}
/* Make sure to use a valueized reference if we valueized anything.
Otherwise preserve the full reference for advanced TBAA. */
if (!valueized_anything
|| !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
vr1.type, ops_for_ref))
vr1.type, vr1.operands))
{
ao_ref_init (&r, op);
/* Record the extra info we're getting from the full ref. */