mirror of git://gcc.gnu.org/git/gcc.git
cfgexpand.c (expand_gimple_tailcall): Initialize profile of new edge.
* cfgexpand.c (expand_gimple_tailcall): Initialize profile of new edge.
* ipa-inline.c (want_inline_self_recursive_call_p): Watch for missing
profile in callgraph edge.
* profile-count.h (apply_probability): If THIS is 0, then result is 0
(apply_scale): Likewise.
* tree-inline.c (copy_bb, copy_edges_for_bb, copy_cfg_body): Also scale
profile when inlining function with zero profile.
(initialize_cfun): Update exit block profile even when it is zero.
* tree-ssa-threadupdate.c (clear_counts_path): Handle correctly case
when profile is read.

From-SVN: r248885
This commit is contained in:
parent 2f20e7db2c
commit aea5e79a63
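For context before the diff: the profile-count.h hunks below hinge on profile_count distinguishing a count that is known to be zero from one that is uninitialized (unknown), and on scaling preserving that distinction. Below is a minimal standalone sketch of that semantics; the names (profile_count_model, m_known) are illustrative stand-ins, not GCC's actual class in gcc/profile-count.h.

#include <cassert>
#include <cstdint>

struct profile_count_model
{
  uint64_t m_val;
  bool m_known;

  static profile_count_model zero () { return {0, true}; }
  static profile_count_model uninitialized () { return {0, false}; }
  bool initialized_p () const { return m_known; }
  bool operator== (const profile_count_model &o) const
  { return m_known == o.m_known && m_val == o.m_val; }

  /* Mirrors the patched apply_scale: test for a known zero *before* the
     initialization check, so a zero profile survives scaling as zero
     instead of decaying to "unknown".  */
  profile_count_model apply_scale (int64_t num, int64_t den) const
  {
    if (*this == zero ())
      return *this;
    if (!initialized_p ())
      return uninitialized ();
    return {m_val * (uint64_t) num / (uint64_t) den, true};
  }
};

int main ()
{
  /* Scaling a read-in zero profile keeps it a known zero...  */
  assert (profile_count_model::zero ().apply_scale (3, 4)
	  == profile_count_model::zero ());
  /* ...while an unknown count stays unknown.  */
  assert (!profile_count_model::uninitialized ()
	   .apply_scale (3, 4).initialized_p ());
  return 0;
}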
@@ -1,3 +1,17 @@
+2017-06-05  Jan Hubicka  <hubicka@ucw.cz>
+
+	* cfgexpand.c (expand_gimple_tailcall): Initialize profile of
+	new edge.
+	* ipa-inline.c (want_inline_self_recursive_call_p): Watch for missing
+	profile in callgraph edge.
+	* profile-count.h (apply_probability): If THIS is 0, then result is 0
+	(apply_scale): Likewise.
+	* tree-inline.c (copy_bb, copy_edges_for_bb, copy_cfg_body):
+	Also scale profile when inlining function with zero profile.
+	(initialize_cfun): Update exit block profile even when it is zero.
+	* tree-ssa-threadupdate.c (clear_counts_path): Handle correctly case
+	when profile is read.
+
 2017-06-05  Michael Meissner  <meissner@linux.vnet.ibm.com>
 
 	* config/rs6000/rs6000.c (toplevel): Include attribs.h.
@@ -3850,8 +3850,8 @@ expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
 
   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
 		 | EDGE_SIBCALL);
-  e->probability += probability;
-  e->count += count;
+  e->probability = probability;
+  e->count = count;
   BB_END (bb) = last;
   update_bb_for_insn (bb);
 
@@ -912,7 +912,7 @@ want_inline_self_recursive_call_p (struct cgraph_edge *edge,
      methods.  */
   else
     {
-      if (max_count > profile_count::zero ()
+      if (max_count > profile_count::zero () && edge->count.initialized_p ()
 	  && (edge->count.to_gcov_type () * 100
 	      / outer_node->count.to_gcov_type ()
 	      <= PARAM_VALUE (PARAM_MIN_INLINE_RECURSIVE_PROBABILITY)))
@@ -920,7 +920,8 @@ want_inline_self_recursive_call_p (struct cgraph_edge *edge,
 	  reason = "profile of recursive call is too small";
 	  want_inline = false;
 	}
-      else if (max_count == profile_count::zero ()
+      else if ((max_count == profile_count::zero ()
+		|| !edge->count.initialized_p ())
 	       && (edge->frequency * 100 / caller_freq
 		   <= PARAM_VALUE (PARAM_MIN_INLINE_RECURSIVE_PROBABILITY)))
 	{
@@ -221,6 +221,8 @@ public:
   profile_count apply_probability (int prob) const
     {
       gcc_checking_assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
+      if (*this == profile_count::zero ())
+	return *this;
       if (!initialized_p ())
 	return profile_count::uninitialized ();
       profile_count ret;
@@ -230,6 +232,8 @@ public:
   /* Return *THIS * NUM / DEN.  */
   profile_count apply_scale (int64_t num, int64_t den) const
     {
+      if (*this == profile_count::zero ())
+	return *this;
       if (!initialized_p ())
 	return profile_count::uninitialized ();
       profile_count ret;
@@ -243,7 +247,7 @@ public:
     }
   profile_count apply_scale (profile_count num, profile_count den) const
     {
-      if (*this == profile_count::zero ())
+      if (*this == profile_count::zero () || num == profile_count::zero ())
 	return profile_count::zero ();
       if (!initialized_p () || !num.initialized_p () || !den.initialized_p ())
 	return profile_count::uninitialized ();
@@ -561,9 +561,11 @@ handle_simple_exit (edge e)
       BB_END (old_bb) = end;
 
       redirect_edge_succ (e, new_bb);
+      new_bb->count = e->count;
+      new_bb->frequency = e->frequency;
       e->flags |= EDGE_FALLTHRU;
 
-      e = make_edge (new_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
+      e = make_single_succ_edge (new_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
     }
 
   e->flags &= ~EDGE_FALLTHRU;
@@ -1763,7 +1763,8 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
   tree decl;
   gcov_type freq;
   basic_block prev;
-  bool scale = num.initialized_p () && den.initialized_p () && den > 0;
+  bool scale = num.initialized_p ()
+	       && (den > 0 || num == profile_count::zero ());
 
   /* Search for previous copied basic block.  */
   prev = bb->prev_bb;
@@ -2211,7 +2212,8 @@ copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
   gimple_stmt_iterator si;
   int flags;
   bool need_debug_cleanup = false;
-  bool scale = num.initialized_p () && den.initialized_p () && den > 0;
+  bool scale = num.initialized_p ()
+	       && (den > 0 || num == profile_count::zero ());
 
   /* Use the indices from the original blocks to create edges for the
      new ones.  */
@@ -2472,7 +2474,7 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
    */
   if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.initialized_p ()
       && count.initialized_p ()
-      && ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count > 0)
+      && ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.initialized_p ())
     {
       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
 	ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
@@ -2683,7 +2685,8 @@ copy_cfg_body (copy_body_data * id, profile_count count, int frequency_scale,
   profile_count incoming_count = profile_count::zero ();
   profile_count num = count;
   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
-  bool scale = num.initialized_p () && den.initialized_p () && den > 0;
+  bool scale = num.initialized_p ()
+	       && (den > 0 || num == profile_count::zero ());
 
   /* This can happen for COMDAT routines that end up with 0 counts
      despite being called (see the comments for handle_missing_profiles()
@@ -1084,16 +1084,20 @@ clear_counts_path (struct redirection_data *rd)
   vec<jump_thread_edge *> *path = THREAD_PATH (e);
   edge ein, esucc;
   edge_iterator ei;
+  profile_count val = profile_count::uninitialized ();
+  if (profile_status_for_fn (cfun) == PROFILE_READ)
+    val = profile_count::zero ();
+
   FOR_EACH_EDGE (ein, ei, e->dest->preds)
-    ein->count = profile_count::uninitialized ();
+    ein->count = val;
 
   /* First clear counts along original path.  */
   for (unsigned int i = 1; i < path->length (); i++)
     {
       edge epath = (*path)[i]->e;
       FOR_EACH_EDGE (esucc, ei, epath->src->succs)
-	esucc->count = profile_count::uninitialized ();
-      epath->src->count = profile_count::uninitialized ();
+	esucc->count = val;
+      epath->src->count = val;
     }
   /* Also need to clear the counts along duplicated path.  */
   for (unsigned int i = 0; i < 2; i++)
@@ -1102,8 +1106,8 @@ clear_counts_path (struct redirection_data *rd)
       if (!dup)
 	continue;
       FOR_EACH_EDGE (esucc, ei, dup->succs)
-	esucc->count = profile_count::uninitialized ();
-      dup->count = profile_count::uninitialized ();
+	esucc->count = val;
+      dup->count = val;
     }
 }
 
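A closing note on the scale predicate changed in the three tree-inline.c hunks above: the old condition required a positive, initialized denominator (the callee's entry count), so inlining a function whose profile was read as zero left the copied body's counts unscaled. The new condition also scales when the incoming count is a known zero, forcing the inlined body's counts down to zero. A hedged sketch of old versus new, with illustrative names (count_model, scale_old, scale_new) rather than GCC's API:

#include <cstdio>

struct count_model
{
  long val;    /* meaningful only when known */
  bool known;  /* analogue of profile_count::initialized_p () */
  bool is_zero () const { return known && val == 0; }
};

/* Old: bool scale = num.initialized_p () && den.initialized_p () && den > 0;  */
static bool
scale_old (count_model num, count_model den)
{
  return num.known && den.known && den.val > 0;
}

/* New: bool scale = num.initialized_p ()
		     && (den > 0 || num == profile_count::zero ());
   Simplified here with an explicit known-check on den.  */
static bool
scale_new (count_model num, count_model den)
{
  return num.known && ((den.known && den.val > 0) || num.is_zero ());
}

int main ()
{
  count_model num = {0, true};  /* inlining with a known-zero incoming count */
  count_model den = {0, true};  /* callee body was never executed either */
  /* old=0: counts left stale; new=1: counts scaled down to zero.  */
  printf ("old=%d new=%d\n", scale_old (num, den), scale_new (num, den));
  return 0;
}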