[26/46] Make more use of dyn_cast in tree-vect*
If we use stmt_vec_infos to represent statements in the vectoriser,
it's then more natural to use dyn_cast when processing the statement
as an assignment, call, etc.  This patch does that in a few more places.

2018-07-31  Richard Sandiford  <richard.sandiford@arm.com>

gcc/
	* tree-vect-data-refs.c (vect_check_gather_scatter): Pass the
	gcall rather than the generic gimple stmt to gimple_call_internal_fn.
	(vect_get_smallest_scalar_type, can_group_stmts_p): Use dyn_cast
	to get gassigns and gcalls, rather than operating on generic gimple
	stmts.
	* tree-vect-stmts.c (exist_non_indexing_operands_for_use_p)
	(vect_mark_stmts_to_be_vectorized, vectorizable_store)
	(vectorizable_load, vect_analyze_stmt): Likewise.
	* tree-vect-loop.c (vectorizable_reduction): Likewise gphi.

From-SVN: r263141
commit beb456c375
parent 95c68311b6
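The pattern is the same throughout the patch: a predicate test such as
is_gimple_assign or is_gimple_call followed by accessors on the generic
gimple * becomes a dyn_cast to the matching gimple subclass (gassign,
gcall, gphi), and the accessors then take the statically typed pointer.
A minimal sketch of the idiom (not code from the patch;
handle_float_conversion is a hypothetical helper):

  /* Before: predicate check, then accessors on the generic gimple *.  */
  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
    handle_float_conversion (stmt);

  /* After: dyn_cast returns a gassign * if STMT is an assignment and
     null otherwise, so the null check subsumes is_gimple_assign and
     the later accessors operate on the typed pointer.  */
  if (gassign *assign = dyn_cast <gassign *> (stmt))
    if (gimple_assign_rhs_code (assign) == FLOAT_EXPR)
      handle_float_conversion (assign);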
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,15 @@
+2018-07-31  Richard Sandiford  <richard.sandiford@arm.com>
+
+	* tree-vect-data-refs.c (vect_check_gather_scatter): Pass the
+	gcall rather than the generic gimple stmt to gimple_call_internal_fn.
+	(vect_get_smallest_scalar_type, can_group_stmts_p): Use dyn_cast
+	to get gassigns and gcalls, rather than operating on generic gimple
+	stmts.
+	* tree-vect-stmts.c (exist_non_indexing_operands_for_use_p)
+	(vect_mark_stmts_to_be_vectorized, vectorizable_store)
+	(vectorizable_load, vect_analyze_stmt): Likewise.
+	* tree-vect-loop.c (vectorizable_reduction): Likewise gphi.
+
 2018-07-31  Richard Sandiford  <richard.sandiford@arm.com>
 
 	* tree-vectorizer.h (get_earlier_stmt, get_later_stmt): Take and
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -130,15 +130,16 @@ vect_get_smallest_scalar_type (gimple *stmt, HOST_WIDE_INT *lhs_size_unit,
 
   lhs = rhs = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (scalar_type));
 
-  if (is_gimple_assign (stmt)
-      && (gimple_assign_cast_p (stmt)
-          || gimple_assign_rhs_code (stmt) == DOT_PROD_EXPR
-          || gimple_assign_rhs_code (stmt) == WIDEN_SUM_EXPR
-          || gimple_assign_rhs_code (stmt) == WIDEN_MULT_EXPR
-          || gimple_assign_rhs_code (stmt) == WIDEN_LSHIFT_EXPR
-          || gimple_assign_rhs_code (stmt) == FLOAT_EXPR))
+  gassign *assign = dyn_cast <gassign *> (stmt);
+  if (assign
+      && (gimple_assign_cast_p (assign)
+          || gimple_assign_rhs_code (assign) == DOT_PROD_EXPR
+          || gimple_assign_rhs_code (assign) == WIDEN_SUM_EXPR
+          || gimple_assign_rhs_code (assign) == WIDEN_MULT_EXPR
+          || gimple_assign_rhs_code (assign) == WIDEN_LSHIFT_EXPR
+          || gimple_assign_rhs_code (assign) == FLOAT_EXPR))
     {
-      tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
+      tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (assign));
 
       rhs = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (rhs_type));
       if (rhs < lhs)
@@ -2850,21 +2851,23 @@ can_group_stmts_p (gimple *stmt1, gimple *stmt2)
   if (gimple_assign_single_p (stmt1))
     return gimple_assign_single_p (stmt2);
 
-  if (is_gimple_call (stmt1) && gimple_call_internal_p (stmt1))
+  gcall *call1 = dyn_cast <gcall *> (stmt1);
+  if (call1 && gimple_call_internal_p (call1))
     {
       /* Check for two masked loads or two masked stores.  */
-      if (!is_gimple_call (stmt2) || !gimple_call_internal_p (stmt2))
+      gcall *call2 = dyn_cast <gcall *> (stmt2);
+      if (!call2 || !gimple_call_internal_p (call2))
 	return false;
-      internal_fn ifn = gimple_call_internal_fn (stmt1);
+      internal_fn ifn = gimple_call_internal_fn (call1);
       if (ifn != IFN_MASK_LOAD && ifn != IFN_MASK_STORE)
 	return false;
-      if (ifn != gimple_call_internal_fn (stmt2))
+      if (ifn != gimple_call_internal_fn (call2))
 	return false;
 
       /* Check that the masks are the same.  Cope with casts of masks,
	 like those created by build_mask_conversion.  */
-      tree mask1 = gimple_call_arg (stmt1, 2);
-      tree mask2 = gimple_call_arg (stmt2, 2);
+      tree mask1 = gimple_call_arg (call1, 2);
+      tree mask2 = gimple_call_arg (call2, 2);
       if (!operand_equal_p (mask1, mask2, 0))
 	{
 	  mask1 = strip_conversion (mask1);
@@ -3665,7 +3668,7 @@ vect_check_gather_scatter (gimple *stmt, loop_vec_info loop_vinfo,
   gcall *call = dyn_cast <gcall *> (stmt);
   if (call && gimple_call_internal_p (call))
     {
-      ifn = gimple_call_internal_fn (stmt);
+      ifn = gimple_call_internal_fn (call);
       if (internal_gather_scatter_fn_p (ifn))
 	{
 	  vect_describe_gather_scatter_call (call, info);
diff --git a/gcc/tree-vect-loop.c b/gcc/tree-vect-loop.c
--- a/gcc/tree-vect-loop.c
+++ b/gcc/tree-vect-loop.c
@@ -6109,9 +6109,9 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
     gcc_assert (slp_node
		&& REDUC_GROUP_FIRST_ELEMENT (stmt_info) == stmt_info);
 
-  if (gimple_code (stmt) == GIMPLE_PHI)
+  if (gphi *phi = dyn_cast <gphi *> (stmt))
     {
-      tree phi_result = gimple_phi_result (stmt);
+      tree phi_result = gimple_phi_result (phi);
       /* Analysis is fully done on the reduction stmt invocation.  */
       if (! vec_stmt)
 	{
@@ -6141,7 +6141,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
	  for (unsigned k = 1; k < gimple_num_ops (reduc_stmt); ++k)
	    {
	      tree op = gimple_op (reduc_stmt, k);
-	      if (op == gimple_phi_result (stmt))
+	      if (op == phi_result)
		continue;
	      if (k == 1
		  && gimple_assign_rhs_code (reduc_stmt) == COND_EXPR)
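A note on the two vectorizable_reduction hunks above: the C++
declaration-in-condition form scopes phi to the if statement, so inside
the body phi is guaranteed non-null, and phi_result (computed once from
phi in the first hunk) replaces the repeated gimple_phi_result (stmt)
call in the second.  Schematically (illustrative only, not patch code):

  if (gphi *phi = dyn_cast <gphi *> (stmt))
    {
      tree phi_result = gimple_phi_result (phi);
      /* ... phi and phi_result stay in scope and non-null here,
	 including at the point touched by the second hunk ...  */
    }
  /* phi is no longer in scope past this point.  */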
diff --git a/gcc/tree-vect-stmts.c b/gcc/tree-vect-stmts.c
--- a/gcc/tree-vect-stmts.c
+++ b/gcc/tree-vect-stmts.c
@@ -389,30 +389,31 @@ exist_non_indexing_operands_for_use_p (tree use, gimple *stmt)
      Therefore, all we need to check is if STMT falls into the
      first case, and whether var corresponds to USE.  */
 
-  if (!gimple_assign_copy_p (stmt))
+  gassign *assign = dyn_cast <gassign *> (stmt);
+  if (!assign || !gimple_assign_copy_p (assign))
     {
-      if (is_gimple_call (stmt)
-	  && gimple_call_internal_p (stmt))
+      gcall *call = dyn_cast <gcall *> (stmt);
+      if (call && gimple_call_internal_p (call))
 	{
-	  internal_fn ifn = gimple_call_internal_fn (stmt);
+	  internal_fn ifn = gimple_call_internal_fn (call);
	  int mask_index = internal_fn_mask_index (ifn);
	  if (mask_index >= 0
-	      && use == gimple_call_arg (stmt, mask_index))
+	      && use == gimple_call_arg (call, mask_index))
	    return true;
	  int stored_value_index = internal_fn_stored_value_index (ifn);
	  if (stored_value_index >= 0
-	      && use == gimple_call_arg (stmt, stored_value_index))
+	      && use == gimple_call_arg (call, stored_value_index))
	    return true;
	  if (internal_gather_scatter_fn_p (ifn)
-	      && use == gimple_call_arg (stmt, 1))
+	      && use == gimple_call_arg (call, 1))
	    return true;
 	}
       return false;
     }
 
-  if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
+  if (TREE_CODE (gimple_assign_lhs (assign)) == SSA_NAME)
     return false;
-  operand = gimple_assign_rhs1 (stmt);
+  operand = gimple_assign_rhs1 (assign);
   if (TREE_CODE (operand) != SSA_NAME)
     return false;
 
@@ -739,10 +740,10 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
	  /* Pattern statements are not inserted into the code, so
	     FOR_EACH_PHI_OR_STMT_USE optimizes their operands out, and we
	     have to scan the RHS or function arguments instead.  */
-	  if (is_gimple_assign (stmt))
+	  if (gassign *assign = dyn_cast <gassign *> (stmt))
	    {
-	      enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
-	      tree op = gimple_assign_rhs1 (stmt);
+	      enum tree_code rhs_code = gimple_assign_rhs_code (assign);
+	      tree op = gimple_assign_rhs1 (assign);
 
	      i = 1;
	      if (rhs_code == COND_EXPR && COMPARISON_CLASS_P (op))
@@ -754,25 +755,25 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
		    return false;
		  i = 2;
		}
-	      for (; i < gimple_num_ops (stmt); i++)
+	      for (; i < gimple_num_ops (assign); i++)
		{
-		  op = gimple_op (stmt, i);
+		  op = gimple_op (assign, i);
		  if (TREE_CODE (op) == SSA_NAME
		      && !process_use (stmt, op, loop_vinfo, relevant,
				       &worklist, false))
		    return false;
		}
	    }
-	  else if (is_gimple_call (stmt))
+	  else if (gcall *call = dyn_cast <gcall *> (stmt))
	    {
-	      for (i = 0; i < gimple_call_num_args (stmt); i++)
+	      for (i = 0; i < gimple_call_num_args (call); i++)
		{
-		  tree arg = gimple_call_arg (stmt, i);
+		  tree arg = gimple_call_arg (call, i);
		  if (!process_use (stmt, arg, loop_vinfo, relevant,
				    &worklist, false))
		    return false;
		}
	    }
	}
      else
	FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
@@ -6274,9 +6275,9 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi,
   /* Is vectorizable store? */
 
   tree mask = NULL_TREE, mask_vectype = NULL_TREE;
-  if (is_gimple_assign (stmt))
+  if (gassign *assign = dyn_cast <gassign *> (stmt))
     {
-      tree scalar_dest = gimple_assign_lhs (stmt);
+      tree scalar_dest = gimple_assign_lhs (assign);
       if (TREE_CODE (scalar_dest) == VIEW_CONVERT_EXPR
	  && is_pattern_stmt_p (stmt_info))
	scalar_dest = TREE_OPERAND (scalar_dest, 0);
@@ -7445,13 +7446,13 @@ vectorizable_load (gimple *stmt, gimple_stmt_iterator *gsi,
     return false;
 
   tree mask = NULL_TREE, mask_vectype = NULL_TREE;
-  if (is_gimple_assign (stmt))
+  if (gassign *assign = dyn_cast <gassign *> (stmt))
     {
-      scalar_dest = gimple_assign_lhs (stmt);
+      scalar_dest = gimple_assign_lhs (assign);
       if (TREE_CODE (scalar_dest) != SSA_NAME)
	return false;
 
-      tree_code code = gimple_assign_rhs_code (stmt);
+      tree_code code = gimple_assign_rhs_code (assign);
       if (code != ARRAY_REF
	  && code != BIT_FIELD_REF
	  && code != INDIRECT_REF
@@ -9557,9 +9558,9 @@ vect_analyze_stmt (gimple *stmt, bool *need_to_vectorize, slp_tree node,
   if (STMT_VINFO_RELEVANT_P (stmt_info))
     {
       gcc_assert (!VECTOR_MODE_P (TYPE_MODE (gimple_expr_type (stmt))));
+      gcall *call = dyn_cast <gcall *> (stmt);
       gcc_assert (STMT_VINFO_VECTYPE (stmt_info)
-		  || (is_gimple_call (stmt)
-		      && gimple_call_lhs (stmt) == NULL_TREE));
+		  || (call && gimple_call_lhs (call) == NULL_TREE));
       *need_to_vectorize = true;
     }
 