Bug Summary

File: build/gcc/predict.cc
Warning: line 1432, column 8
Called C++ object pointer is null
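
The warning points into combine_predictions_for_bb (lines 1420-1432 below): the local pointer 'missing' starts out null and is only assigned inside a FOR_EACH_EDGE loop over bb->succs; on the path the analyzer constructs, that loop body never runs, so the store through 'missing' on line 1432 dereferences a null pointer. A minimal self-contained sketch of the flagged pattern (the names edge_info and distribute_missing are invented for illustration; they are not part of predict.cc):

  #include <vector>

  struct edge_info { bool initialized; double probability; };

  void
  distribute_missing (std::vector<edge_info *> &succs)
  {
    double prob = 1.0;
    edge_info *missing = nullptr;  // 'missing' initialized to a null pointer value

    for (edge_info *e : succs)     // the analyzer assumes zero iterations here
      if (e->initialized)
        prob -= e->probability;
      else if (missing == nullptr)
        missing = e;

    missing->probability = prob;   // null dereference when the loop never assigned it
  }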

Annotated Source Code

clang -cc1 -cc1 -triple x86_64-suse-linux -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name predict.cc -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/15.0.7 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/backward -internal-isystem /usr/lib64/clang/15.0.7/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2023-03-27-141847-20772-1/report-HVgI6x.plist -x c++ /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/predict.cc
1/* Branch prediction routines for the GNU compiler.
2 Copyright (C) 2000-2023 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* References:
21
22 [1] "Branch Prediction for Free"
23 Ball and Larus; PLDI '93.
24 [2] "Static Branch Frequency and Program Profile Analysis"
25 Wu and Larus; MICRO-27.
26 [3] "Corpus-based Static Branch Prediction"
27 Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95. */
28
29
30#include "config.h"
31#include "system.h"
32#include "coretypes.h"
33#include "backend.h"
34#include "rtl.h"
35#include "tree.h"
36#include "gimple.h"
37#include "cfghooks.h"
38#include "tree-pass.h"
39#include "ssa.h"
40#include "memmodel.h"
41#include "emit-rtl.h"
42#include "cgraph.h"
43#include "coverage.h"
44#include "diagnostic-core.h"
45#include "gimple-predict.h"
46#include "fold-const.h"
47#include "calls.h"
48#include "cfganal.h"
49#include "profile.h"
50#include "sreal.h"
51#include "cfgloop.h"
52#include "gimple-iterator.h"
53#include "tree-cfg.h"
54#include "tree-ssa-loop-niter.h"
55#include "tree-ssa-loop.h"
56#include "tree-scalar-evolution.h"
57#include "ipa-utils.h"
58#include "gimple-pretty-print.h"
59#include "selftest.h"
60#include "cfgrtl.h"
61#include "stringpool.h"
62#include "attribs.h"
63
64/* Enum with reasons why a predictor is ignored. */
65
66enum predictor_reason
67{
68 REASON_NONE,
69 REASON_IGNORED,
70 REASON_SINGLE_EDGE_DUPLICATE,
71 REASON_EDGE_PAIR_DUPLICATE
72};
73
74/* String messages for the aforementioned enum. */
75
76static const char *reason_messages[] = {"", " (ignored)",
77 " (single edge duplicate)", " (edge pair duplicate)"};
78
79
80static void combine_predictions_for_insn (rtx_insn *, basic_block);
81static void dump_prediction (FILE *, enum br_predictor, int, basic_block,
82 enum predictor_reason, edge);
83static void predict_paths_leading_to (basic_block, enum br_predictor,
84 enum prediction,
85 class loop *in_loop = NULL);
86static void predict_paths_leading_to_edge (edge, enum br_predictor,
87 enum prediction,
88 class loop *in_loop = NULL);
89static bool can_predict_insn_p (const rtx_insn *);
90static HOST_WIDE_INT get_predictor_value (br_predictor, HOST_WIDE_INT);
91static void determine_unlikely_bbs ();
92
93/* Information we hold about each branch predictor.
94 Filled using information from predict.def. */
95
96struct predictor_info
97{
98 const char *const name; /* Name used in the debugging dumps. */
99 const int hitrate; /* Expected hitrate used by
100 predict_insn_def call. */
101 const int flags;
102};
103
104/* Use given predictor without Dempster-Shafer theory if it matches
105 using first_match heuristics. */
106#define PRED_FLAG_FIRST_MATCH 1
107
108/* Recompute hitrate in percent to our representation. */
109
110#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
111
112#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
113static const struct predictor_info predictor_info[]= {
114#include "predict.def"
115
116 /* Upper bound on predictors. */
117 {NULL, 0, 0}
118};
119#undef DEF_PREDICTOR
120
121static gcov_type min_count = -1;
122
123/* Determine the threshold for hot BB counts. */
124
125gcov_type
126get_hot_bb_threshold ()
127{
128 if (min_count == -1)
129 {
130 const int hot_frac = param_hot_bb_count_fraction;
131 const gcov_type min_hot_count
132 = hot_frac
133 ? profile_info->sum_max / hot_frac
134 : (gcov_type)profile_count::max_count;
135 set_hot_bb_threshold (min_hot_count);
136 if (dump_file)
137 fprintf (dump_file, "Setting hotness threshold to %" PRId64 ".\n",
138 min_hot_count);
139 }
140 return min_count;
141}
142
143/* Set the threshold for hot BB counts. */
144
145void
146set_hot_bb_threshold (gcov_type min)
147{
148 min_count = min;
149}
150
151/* Return TRUE if COUNT is considered to be hot in function FUN. */
152
153bool
154maybe_hot_count_p (struct function *fun, profile_count count)
155{
156 if (!count.initialized_p ())
157 return true;
158 if (count.ipa () == profile_count::zero ())
159 return false;
160 if (!count.ipa_p ())
161 {
162 struct cgraph_node *node = cgraph_node::get (fun->decl);
163 if (!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
164 {
165 if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
166 return false;
167 if (node->frequency == NODE_FREQUENCY_HOT)
168 return true;
169 }
170 if (profile_status_for_fn (fun) == PROFILE_ABSENT)
171 return true;
172 if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
173 && count < (ENTRY_BLOCK_PTR_FOR_FN (fun)->count.apply_scale (2, 3)))
174 return false;
175 if (count * param_hot_bb_frequency_fraction
176 < ENTRY_BLOCK_PTR_FOR_FN (fun)->count)
177 return false;
178 return true;
179 }
180 /* Code executed at most once is not hot. */
181 if (count <= MAX (profile_info ? profile_info->runs : 1, 1))
182 return false;
183 return (count >= get_hot_bb_threshold ());
184}
185
186/* Return true if basic block BB of function FUN can be CPU intensive
187 and should thus be optimized for maximum performance. */
188
189bool
190maybe_hot_bb_p (struct function *fun, const_basic_block bb)
191{
192 gcc_checking_assert (fun);
193 return maybe_hot_count_p (fun, bb->count);
194}
195
196/* Return true if edge E can be CPU intensive and should thus be optimized
197 for maximum performance. */
198
199bool
200maybe_hot_edge_p (edge e)
201{
202 return maybe_hot_count_p (cfun, e->count ());
203}
204
205/* Return true if COUNT is considered to be never executed in function FUN
206 or if function FUN is considered so in the static profile. */
207
208static bool
209probably_never_executed (struct function *fun, profile_count count)
210{
211 gcc_checking_assert (fun);
212 if (count.ipa () == profile_count::zero ())
213 return true;
214 /* Do not trust adjusted counts. Trusting them could make us drop code
215 with a low execution count into the cold section as a result of inlining.
216 These low counts are not safe even with a read profile and may lead us to
217 dropping code which actually gets executed into the cold section of the
218 binary, which is not desirable. */
219 if (count.precise_p () && profile_status_for_fn (fun) == PROFILE_READ)
220 {
221 const int unlikely_frac = param_unlikely_bb_count_fraction;
222 if (count * unlikely_frac >= profile_info->runs)
223 return false;
224 return true;
225 }
226 if ((!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
227 && (cgraph_node::get (fun->decl)->frequency
228 == NODE_FREQUENCY_UNLIKELY_EXECUTED))
229 return true;
230 return false;
231}
232
233/* Return true if basic block BB of function FUN is probably never executed. */
234
235bool
236probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
237{
238 return probably_never_executed (fun, bb->count);
239}
240
241/* Return true if edge E is unlikely executed for obvious reasons. */
242
243static bool
244unlikely_executed_edge_p (edge e)
245{
246 return (e->src->count == profile_count::zero ()
247 || e->probability == profile_probability::never ())
248 || (e->flags & (EDGE_EH | EDGE_FAKE));
249}
250
251/* Return true if edge E of function FUN is probably never executed. */
252
253bool
254probably_never_executed_edge_p (struct function *fun, edge e)
255{
256 if (unlikely_executed_edge_p (e))
257 return true;
258 return probably_never_executed (fun, e->count ());
259}
260
261/* Return true if function FUN should always be optimized for size. */
262
263optimize_size_level
264optimize_function_for_size_p (struct function *fun)
265{
266 if (!fun || !fun->decl)
267 return optimize_size ? OPTIMIZE_SIZE_MAX : OPTIMIZE_SIZE_NO;
268 cgraph_node *n = cgraph_node::get (fun->decl);
269 if (n)
270 return n->optimize_for_size_p ();
271 return OPTIMIZE_SIZE_NO;
272}
273
274/* Return true if function FUN should always be optimized for speed. */
275
276bool
277optimize_function_for_speed_p (struct function *fun)
278{
279 return !optimize_function_for_size_p (fun);
280}
281
282/* Return the optimization type that should be used for function FUN. */
283
284optimization_type
285function_optimization_type (struct function *fun)
286{
287 return (optimize_function_for_speed_p (fun)
288 ? OPTIMIZE_FOR_SPEED
289 : OPTIMIZE_FOR_SIZE);
290}
291
292/* Return TRUE if basic block BB should be optimized for size. */
293
294optimize_size_level
295optimize_bb_for_size_p (const_basic_block bb)
296{
297 enum optimize_size_level ret = optimize_function_for_size_p (cfun);
298
299 if (bb && ret < OPTIMIZE_SIZE_MAX && bb->count == profile_count::zero ())
300 ret = OPTIMIZE_SIZE_MAX;
301 if (bb && ret < OPTIMIZE_SIZE_BALANCED && !maybe_hot_bb_p (cfun, bb))
302 ret = OPTIMIZE_SIZE_BALANCED;
303 return ret;
304}
305
306/* Return TRUE if basic block BB should be optimized for speed. */
307
308bool
309optimize_bb_for_speed_p (const_basic_block bb)
310{
311 return !optimize_bb_for_size_p (bb);
312}
313
314/* Return the optimization type that should be used for basic block BB. */
315
316optimization_type
317bb_optimization_type (const_basic_block bb)
318{
319 return (optimize_bb_for_speed_p (bb)
320 ? OPTIMIZE_FOR_SPEED
321 : OPTIMIZE_FOR_SIZE);
322}
323
324/* Return TRUE if edge E should be optimized for size. */
325
326optimize_size_level
327optimize_edge_for_size_p (edge e)
328{
329 enum optimize_size_level ret = optimize_function_for_size_p (cfun);
330
331 if (ret < OPTIMIZE_SIZE_MAX && unlikely_executed_edge_p (e))
332 ret = OPTIMIZE_SIZE_MAX;
333 if (ret < OPTIMIZE_SIZE_BALANCED && !maybe_hot_edge_p (e))
334 ret = OPTIMIZE_SIZE_BALANCED;
335 return ret;
336}
337
338/* Return TRUE if edge E should be optimized for speed. */
339
340bool
341optimize_edge_for_speed_p (edge e)
342{
343 return !optimize_edge_for_size_p (e);
344}
345
346/* Return TRUE if the current function is optimized for size. */
347
348optimize_size_level
349optimize_insn_for_size_p (void)
350{
351 enum optimize_size_level ret = optimize_function_for_size_p (cfun);
352 if (ret < OPTIMIZE_SIZE_BALANCED && !crtl->maybe_hot_insn_p)
353 ret = OPTIMIZE_SIZE_BALANCED;
354 return ret;
355}
356
357/* Return TRUE if the current function is optimized for speed. */
358
359bool
360optimize_insn_for_speed_p (void)
361{
362 return !optimize_insn_for_size_p ();
363}
364
365/* Return the optimization type that should be used for the current
366 instruction. */
367
368optimization_type
369insn_optimization_type ()
370{
371 return (optimize_insn_for_speed_p ()
372 ? OPTIMIZE_FOR_SPEED
373 : OPTIMIZE_FOR_SIZE);
374}
375
376/* Return TRUE if LOOP should be optimized for size. */
377
378optimize_size_level
379optimize_loop_for_size_p (class loop *loop)
380{
381 return optimize_bb_for_size_p (loop->header);
382}
383
384/* Return TRUE if LOOP should be optimized for speed. */
385
386bool
387optimize_loop_for_speed_p (class loop *loop)
388{
389 return optimize_bb_for_speed_p (loop->header);
390}
391
392/* Return TRUE if nest rooted at LOOP should be optimized for speed. */
393
394bool
395optimize_loop_nest_for_speed_p (class loop *loop)
396{
397 class loop *l = loop;
398 if (optimize_loop_for_speed_p (loop))
399 return true;
400 l = loop->inner;
401 while (l && l != loop)
402 {
403 if (optimize_loop_for_speed_p (l))
404 return true;
405 if (l->inner)
406 l = l->inner;
407 else if (l->next)
408 l = l->next;
409 else
410 {
411 while (l != loop && !l->next)
412 l = loop_outer (l);
413 if (l != loop)
414 l = l->next;
415 }
416 }
417 return false;
418}
419
420/* Return TRUE if nest rooted at LOOP should be optimized for size. */
421
422optimize_size_level
423optimize_loop_nest_for_size_p (class loop *loop)
424{
425 enum optimize_size_level ret = optimize_loop_for_size_p (loop);
426 class loop *l = loop;
427
428 l = loop->inner;
429 while (l && l != loop)
430 {
431 if (ret == OPTIMIZE_SIZE_NO)
432 break;
433 ret = MIN (optimize_loop_for_size_p (l), ret);
434 if (l->inner)
435 l = l->inner;
436 else if (l->next)
437 l = l->next;
438 else
439 {
440 while (l != loop && !l->next)
441 l = loop_outer (l);
442 if (l != loop)
443 l = l->next;
444 }
445 }
446 return ret;
447}
448
449/* Return true if edge E is likely to be well predictable by branch
450 predictor. */
451
452bool
453predictable_edge_p (edge e)
454{
455 if (!e->probability.initialized_p ())
456 return false;
457 if ((e->probability.to_reg_br_prob_base ()
458 <= param_predictable_branch_outcome * REG_BR_PROB_BASE / 100)
459 || (REG_BR_PROB_BASE - e->probability.to_reg_br_prob_base ()
460 <= param_predictable_branch_outcome * REG_BR_PROB_BASE / 100))
461 return true;
462 return false;
463}
464
465
466/* Set RTL expansion for BB profile. */
467
468void
469rtl_profile_for_bb (basic_block bb)
470{
471 crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb);
472}
473
474/* Set RTL expansion for edge profile. */
475
476void
477rtl_profile_for_edge (edge e)
478{
479 crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
480}
481
482/* Set RTL expansion to default mode (i.e. when profile info is not known). */
483void
484default_rtl_profile (void)
485{
486 crtl->maybe_hot_insn_p = true;
487}
488
489/* Return true if one of the outgoing edges is already predicted by
490 PREDICTOR. */
491
492bool
493rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
494{
495 rtx note;
496 if (!INSN_P (BB_END (bb)))
497 return false;
498 for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
499 if (REG_NOTE_KIND (note) == REG_BR_PRED
500 && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
501 return true;
502 return false;
503}
504
505/* Structure representing predictions in tree level. */
506
507struct edge_prediction {
508 struct edge_prediction *ep_next;
509 edge ep_edge;
510 enum br_predictor ep_predictor;
511 int ep_probability;
512};
513
514/* This map contains for a basic block the list of predictions for the
515 outgoing edges. */
516
517static hash_map<const_basic_block, edge_prediction *> *bb_predictions;
518
519/* Return true if one of the outgoing edges is already predicted by
520 PREDICTOR. */
521
522bool
523gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
524{
525 struct edge_prediction *i;
526 edge_prediction **preds = bb_predictions->get (bb);
527
528 if (!preds)
529 return false;
530
531 for (i = *preds; i; i = i->ep_next)
532 if (i->ep_predictor == predictor)
533 return true;
534 return false;
535}
536
537/* Return true if one of the outgoing edges is already predicted by
538 PREDICTOR for edge E predicted as TAKEN. */
539
540bool
541edge_predicted_by_p (edge e, enum br_predictor predictor, bool taken)
542{
543 struct edge_prediction *i;
544 basic_block bb = e->src;
545 edge_prediction **preds = bb_predictions->get (bb);
546 if (!preds)
547 return false;
548
549 int probability = predictor_info[(int) predictor].hitrate;
550
551 if (taken != TAKEN)
552 probability = REG_BR_PROB_BASE - probability;
553
554 for (i = *preds; i; i = i->ep_next)
555 if (i->ep_predictor == predictor
556 && i->ep_edge == e
557 && i->ep_probability == probability)
558 return true;
559 return false;
560}
561
562/* Same predicate as above, working on edges. */
563bool
564edge_probability_reliable_p (const_edge e)
565{
566 return e->probability.probably_reliable_p ();
567}
568
569/* Same predicate as edge_probability_reliable_p, working on notes. */
570bool
571br_prob_note_reliable_p (const_rtx note)
572{
573 gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
574 return profile_probability::from_reg_br_prob_note
575 (XINT (note, 0)).probably_reliable_p ();
576}
577
578static void
579predict_insn (rtx_insn *insn, enum br_predictor predictor, int probability)
580{
581 gcc_assert (any_condjump_p (insn));
582 if (!flag_guess_branch_prob)
583 return;
584
585 add_reg_note (insn, REG_BR_PRED,
586 gen_rtx_CONCAT (VOIDmode,
587 GEN_INT ((int) predictor),
588 GEN_INT ((int) probability)));
589}
590
591/* Predict insn by given predictor. */
592
593void
594predict_insn_def (rtx_insn *insn, enum br_predictor predictor,
595 enum prediction taken)
596{
597 int probability = predictor_info[(int) predictor].hitrate;
598 gcc_assert (probability != PROB_UNINITIALIZED);
599
600 if (taken != TAKEN)
601 probability = REG_BR_PROB_BASE - probability;
602
603 predict_insn (insn, predictor, probability);
604}
605
606/* Predict edge E with given probability if possible. */
607
608void
609rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
610{
611 rtx_insn *last_insn;
612 last_insn = BB_END (e->src);
613
614 /* We can store the branch prediction information only about
615 conditional jumps. */
616 if (!any_condjump_p (last_insn))
617 return;
618
619 /* We always store probability of branching. */
620 if (e->flags & EDGE_FALLTHRU)
621 probability = REG_BR_PROB_BASE - probability;
622
623 predict_insn (last_insn, predictor, probability);
624}
625
626/* Predict edge E with the given PROBABILITY. */
627void
628gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
629{
630 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
631 && EDGE_COUNT (e->src->succs) > 1
632 && flag_guess_branch_prob
633 && optimize)
634 {
635 struct edge_prediction *i = XNEW (struct edge_prediction);
636 edge_prediction *&preds = bb_predictions->get_or_insert (e->src);
637
638 i->ep_next = preds;
639 preds = i;
640 i->ep_probability = probability;
641 i->ep_predictor = predictor;
642 i->ep_edge = e;
643 }
644}
645
646/* Filter edge predictions PREDS by a function FILTER: if FILTER returns false
647 the prediction is removed.
648 DATA is passed to the filter function. */
649
650static void
651filter_predictions (edge_prediction **preds,
652 bool (*filter) (edge_prediction *, void *), void *data)
653{
654 if (!bb_predictions)
655 return;
656
657 if (preds)
658 {
659 struct edge_prediction **prediction = preds;
660 struct edge_prediction *next;
661
662 while (*prediction)
663 {
664 if ((*filter) (*prediction, data))
665 prediction = &((*prediction)->ep_next);
666 else
667 {
668 next = (*prediction)->ep_next;
669 free (*prediction);
670 *prediction = next;
671 }
672 }
673 }
674}
675
676/* Filter function predicate that returns true for an edge prediction P
677 if its edge is equal to DATA. */
678
679static bool
680not_equal_edge_p (edge_prediction *p, void *data)
681{
682 return p->ep_edge != (edge)data;
683}
684
685/* Remove all predictions on given basic block that are attached
686 to edge E. */
687void
688remove_predictions_associated_with_edge (edge e)
689{
690 if (!bb_predictions)
691 return;
692
693 edge_prediction **preds = bb_predictions->get (e->src);
694 filter_predictions (preds, not_equal_edge_p, e);
695}
696
697/* Clears the list of predictions stored for BB. */
698
699static void
700clear_bb_predictions (basic_block bb)
701{
702 edge_prediction **preds = bb_predictions->get (bb);
703 struct edge_prediction *pred, *next;
704
705 if (!preds)
706 return;
707
708 for (pred = *preds; pred; pred = next)
709 {
710 next = pred->ep_next;
711 free (pred);
712 }
713 *preds = NULL;
714}
715
716/* Return true when we can store prediction on insn INSN.
717 At the moment we represent predictions only on conditional
718 jumps, not at computed jumps or in other complicated cases. */
719static bool
720can_predict_insn_p (const rtx_insn *insn)
721{
722 return (JUMP_P (insn)
723 && any_condjump_p (insn)
724 && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
725}
726
727/* Predict edge E by given predictor if possible. */
728
729void
730predict_edge_def (edge e, enum br_predictor predictor,
731 enum prediction taken)
732{
733 int probability = predictor_info[(int) predictor].hitrate;
734
735 if (taken != TAKEN)
736 probability = REG_BR_PROB_BASE - probability;
737
738 predict_edge (e, predictor, probability);
739}
740
741/* Invert all branch predictions or probability notes in the INSN. This needs
742 to be done each time we invert the condition used by the jump. */
743
744void
745invert_br_probabilities (rtx insn)
746{
747 rtx note;
748
749 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
750 if (REG_NOTE_KIND (note) == REG_BR_PROB)
751 XINT (note, 0) = profile_probability::from_reg_br_prob_note
752 (XINT (note, 0)).invert ().to_reg_br_prob_note ();
753 else if (REG_NOTE_KIND (note) == REG_BR_PRED)
754 XEXP (XEXP (note, 0), 1)
755 = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
756}
757
758/* Dump information about the branch prediction to the output file. */
759
760static void
761dump_prediction (FILE *file, enum br_predictor predictor, int probability,
762 basic_block bb, enum predictor_reason reason = REASON_NONE,
763 edge ep_edge = NULL)
764{
765 edge e = ep_edge;
766 edge_iterator ei;
767
768 if (!file)
769 return;
770
771 if (e == NULL)
772 FOR_EACH_EDGE (e, ei, bb->succs)
773 if (! (e->flags & EDGE_FALLTHRU))
774 break;
775
776 char edge_info_str[128];
777 if (ep_edge)
778 sprintf (edge_info_str, " of edge %d->%d", ep_edge->src->index,
779 ep_edge->dest->index);
780 else
781 edge_info_str[0] = '\0';
782
783 fprintf (file, " %s heuristics%s%s: %.2f%%",
784 predictor_info[predictor].name,
785 edge_info_str, reason_messages[reason],
786 probability * 100.0 / REG_BR_PROB_BASE);
787
788 if (bb->count.initialized_p ())
789 {
790 fprintf (file, " exec ");
791 bb->count.dump (file);
792 if (e)
793 {
794 fprintf (file, " hit ");
795 e->count ().dump (file);
796 fprintf (file, " (%.1f%%)", e->count ().to_gcov_type() * 100.0
797 / bb->count.to_gcov_type ());
798 }
799 }
800
801 fprintf (file, "\n");
802
803 /* Print output that can be easily read by the analyze_brprob.py script. We are
804 interested only in counts that are read from GCDA files. */
805 if (dump_file && (dump_flags & TDF_DETAILS)
806 && bb->count.precise_p ()
807 && reason == REASON_NONE)
808 {
809 fprintf (file, ";;heuristics;%s;%" PRId64 ";%" PRId64 ";%.1f;\n",
810 predictor_info[predictor].name,
811 bb->count.to_gcov_type (), e->count ().to_gcov_type (),
812 probability * 100.0 / REG_BR_PROB_BASE);
813 }
814}
815
816/* Return true if STMT is known to be unlikely executed. */
817
818static bool
819unlikely_executed_stmt_p (gimple *stmt)
820{
821 if (!is_gimple_call (stmt))
822 return false;
823 /* NORETURN attribute alone is not strong enough: exit() may be quite
824 likely executed once during program run. */
825 if (gimple_call_fntype (stmt)
826 && lookup_attribute ("cold",
827 TYPE_ATTRIBUTES (gimple_call_fntype (stmt)))
828 && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl)))
829 return true;
830 tree decl = gimple_call_fndecl (stmt);
831 if (!decl)
832 return false;
833 if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl))
834 && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl)))
835 return true;
836
837 cgraph_node *n = cgraph_node::get (decl);
838 if (!n)
839 return false;
840
841 availability avail;
842 n = n->ultimate_alias_target (&avail);
843 if (avail < AVAIL_AVAILABLE)
844 return false;
845 if (!n->analyzed
846 || n->decl == current_function_decl)
847 return false;
848 return n->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED;
849}
850
851/* Return true if BB is unlikely executed. */
852
853static bool
854unlikely_executed_bb_p (basic_block bb)
855{
856 if (bb->count == profile_count::zero ())
857 return true;
858 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun) || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
859 return false;
860 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
861 !gsi_end_p (gsi); gsi_next (&gsi))
862 {
863 if (unlikely_executed_stmt_p (gsi_stmt (gsi)))
864 return true;
865 if (stmt_can_terminate_bb_p (gsi_stmt (gsi)))
866 return false;
867 }
868 return false;
869}
870
871/* We cannot predict the probabilities of outgoing edges of bb. Set them
872 evenly and hope for the best. If UNLIKELY_EDGES is not null, distribute
873 even probability for all edges not mentioned in the set. These edges
874 are given PROB_VERY_UNLIKELY probability. Similarly for LIKELY_EDGES,
875 if we have exactly one likely edge, make the other edges predicted
876 as not probable. */
877
878static void
879set_even_probabilities (basic_block bb,
880 hash_set<edge> *unlikely_edges = NULL,
881 hash_set<edge_prediction *> *likely_edges = NULL)
882{
883 unsigned nedges = 0, unlikely_count = 0;
884 edge e = NULL;
885 edge_iterator ei;
886 profile_probability all = profile_probability::always ();
887
888 FOR_EACH_EDGE (e, ei, bb->succs)
889 if (e->probability.initialized_p ())
890 all -= e->probability;
891 else if (!unlikely_executed_edge_p (e))
892 {
893 nedges++;
894 if (unlikely_edges != NULL && unlikely_edges->contains (e))
895 {
896 all -= profile_probability::very_unlikely ();
897 unlikely_count++;
898 }
899 }
900
901 /* Make the distribution even if all edges are unlikely. */
902 unsigned likely_count = likely_edges ? likely_edges->elements () : 0;
903 if (unlikely_count == nedges)
904 {
905 unlikely_edges = NULL;
906 unlikely_count = 0;
907 }
908
909 /* If we have one likely edge, then use its probability and distribute
910 remaining probabilities as even. */
911 if (likely_count == 1)
912 {
913 FOR_EACH_EDGE (e, ei, bb->succs)
914 if (e->probability.initialized_p ())
915 ;
916 else if (!unlikely_executed_edge_p (e))
917 {
918 edge_prediction *prediction = *likely_edges->begin ();
919 int p = prediction->ep_probability;
920 profile_probability prob
921 = profile_probability::from_reg_br_prob_base (p);
922
923 if (prediction->ep_edge == e)
924 e->probability = prob;
925 else if (unlikely_edges != NULL && unlikely_edges->contains (e))
926 e->probability = profile_probability::very_unlikely ();
927 else
928 {
929 profile_probability remainder = prob.invert ();
930 remainder -= (profile_probability::very_unlikely ()
931 * unlikely_count);
932 int count = nedges - unlikely_count - 1;
933 gcc_assert (count >= 0);
934
935 e->probability = remainder / count;
936 }
937 }
938 else
939 e->probability = profile_probability::never ();
940 }
941 else
942 {
943 /* Make all unlikely edges unlikely and the rest will have even
944 probability. */
945 unsigned scale = nedges - unlikely_count;
946 FOR_EACH_EDGE (e, ei, bb->succs)
947 if (e->probability.initialized_p ())
948 ;
949 else if (!unlikely_executed_edge_p (e))
950 {
951 if (unlikely_edges != NULL && unlikely_edges->contains (e))
952 e->probability = profile_probability::very_unlikely ();
953 else
954 e->probability = all / scale;
955 }
956 else
957 e->probability = profile_probability::never ();
958 }
959}
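
A worked example of the even distribution above: for a block with four uninitialized, possibly-executed successor edges of which exactly one sits in UNLIKELY_EDGES and none is likely, the first loop subtracts profile_probability::very_unlikely () for the unlikely edge, scale is 4 - 1 = 3, and each remaining edge receives all / 3; had all four been marked unlikely, the set is discarded first and every edge gets all / 4.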
960
961/* Add REG_BR_PROB note to JUMP with PROB. */
962
963void
964add_reg_br_prob_note (rtx_insn *jump, profile_probability prob)
965{
966 gcc_checking_assert (JUMP_P (jump) && !find_reg_note (jump, REG_BR_PROB, 0));
967 add_int_reg_note (jump, REG_BR_PROB, prob.to_reg_br_prob_note ());
968}
969
970/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
971 note if not already present. Remove now useless REG_BR_PRED notes. */
972
973static void
974combine_predictions_for_insn (rtx_insn *insn, basic_block bb)
975{
976 rtx prob_note;
977 rtx *pnote;
978 rtx note;
979 int best_probability = PROB_EVEN;
980 enum br_predictor best_predictor = END_PREDICTORS;
981 int combined_probability = REG_BR_PROB_BASE / 2;
982 int d;
983 bool first_match = false;
984 bool found = false;
985
986 if (!can_predict_insn_p (insn))
987 {
988 set_even_probabilities (bb);
989 return;
990 }
991
992 prob_note = find_reg_note (insn, REG_BR_PROB, 0);
993 pnote = &REG_NOTES (insn);
994 if (dump_file)
995 fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
996 bb->index);
997
998 /* We implement "first match" heuristics and use probability guessed
999 by predictor with smallest index. */
1000 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1001 if (REG_NOTE_KIND (note) == REG_BR_PRED)
1002 {
1003 enum br_predictor predictor = ((enum br_predictor)
1004 INTVAL (XEXP (XEXP (note, 0), 0)));
1005 int probability = INTVAL (XEXP (XEXP (note, 0), 1));
1006
1007 found = true;
1008 if (best_predictor > predictor
1009 && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH)
1010 best_probability = probability, best_predictor = predictor;
1011
1012 d = (combined_probability * probability
1013 + (REG_BR_PROB_BASE - combined_probability)
1014 * (REG_BR_PROB_BASE - probability));
1015
1016 /* Use FP math to avoid overflows of 32bit integers. */
1017 if (d == 0)
1018 /* If one probability is 0% and one 100%, avoid division by zero. */
1019 combined_probability = REG_BR_PROB_BASE / 2;
1020 else
1021 combined_probability = (((double) combined_probability) * probability
1022 * REG_BR_PROB_BASE / d + 0.5);
1023 }
1024
1025 /* Decide which heuristic to use. In case we didn't match anything,
1026 use no_prediction heuristic, in case we did match, use either
1027 first match or Dempster-Shafer theory depending on the flags. */
1028
1029 if (best_predictor != END_PREDICTORS)
1030 first_match = true;
1031
1032 if (!found)
1033 dump_prediction (dump_file, PRED_NO_PREDICTION,
1034 combined_probability, bb);
1035 else
1036 {
1037 if (!first_match)
1038 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
1039 bb, !first_match ? REASON_NONE : REASON_IGNORED);
1040 else
1041 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
1042 bb, first_match ? REASON_NONE : REASON_IGNORED);
1043 }
1044
1045 if (first_match)
1046 combined_probability = best_probability;
1047 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb);
1048
1049 while (*pnote)
1050 {
1051 if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
1052 {
1053 enum br_predictor predictor = ((enum br_predictor)
1054 INTVAL (XEXP (XEXP (*pnote, 0), 0)));
1055 int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));
1056
1057 dump_prediction (dump_file, predictor, probability, bb,
1058 (!first_match || best_predictor == predictor)
1059 ? REASON_NONE : REASON_IGNORED);
1060 *pnote = XEXP (*pnote, 1);
1061 }
1062 else
1063 pnote = &XEXP (*pnote, 1);
1064 }
1065
1066 if (!prob_note)
1067 {
1068 profile_probability p
1069 = profile_probability::from_reg_br_prob_base (combined_probability);
1070 add_reg_br_prob_note (insn, p);
1071
1072 /* Save the prediction into CFG in case we are seeing non-degenerated
1073 conditional jump. */
1074 if (!single_succ_p (bb))
1075 {
1076 BRANCH_EDGE (bb)->probability = p;
1077 FALLTHRU_EDGE (bb)->probability
1078 = BRANCH_EDGE (bb)->probability.invert ();
1079 }
1080 }
1081 else if (!single_succ_p (bb))
1082 {
1083 profile_probability prob = profile_probability::from_reg_br_prob_note
1084 (XINT (prob_note, 0));
1085
1086 BRANCH_EDGE (bb)->probability = prob;
1087 FALLTHRU_EDGE (bb)->probability = prob.invert ();
1088 }
1089 else
1090 single_succ_edge (bb)->probability = profile_probability::always ();
1091}
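
To make the Dempster-Shafer update in the loop above concrete, here is a small standalone sketch (REG_BR_PROB_BASE is hard-coded to 10000 as in this build; combine_ds is an invented name, not a predict.cc function). Two predictors that agree reinforce each other instead of averaging:

  #include <cstdio>

  /* Fold probability P (scaled by 10000) into the running COMBINED value,
     mirroring the update in combine_predictions_for_insn.  */
  static int
  combine_ds (int combined, int p)
  {
    double d = (double) combined * p + (double) (10000 - combined) * (10000 - p);
    if (d == 0)
      return 10000 / 2;  /* one 0% and one 100% prediction: avoid dividing by zero */
    return (int) ((double) combined * p * 10000 / d + 0.5);
  }

  int
  main ()
  {
    int c = 10000 / 2;         /* start from the even prior */
    c = combine_ds (c, 7200);  /* a lone 72% predictor leaves c at 7200 */
    c = combine_ds (c, 6000);  /* an agreeing 60% predictor lifts c to 7941 */
    std::printf ("%d\n", c);   /* prints 7941, i.e. about 79.4% */
  }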
1092
1093/* Edge prediction hash traits. */
1094
1095struct predictor_hash: pointer_hash <edge_prediction>
1096{
1097
1098 static inline hashval_t hash (const edge_prediction *);
1099 static inline bool equal (const edge_prediction *, const edge_prediction *);
1100};
1101
1102/* Calculate hash value of an edge prediction P based on predictor and
1103 normalized probability. */
1104
1105inline hashval_t
1106predictor_hash::hash (const edge_prediction *p)
1107{
1108 inchash::hash hstate;
1109 hstate.add_int (p->ep_predictor);
1110
1111 int prob = p->ep_probability;
1112 if (prob > REG_BR_PROB_BASE / 2)
1113 prob = REG_BR_PROB_BASE - prob;
1114
1115 hstate.add_int (prob);
1116
1117 return hstate.end ();
1118}
1119
1120/* Return true if edge predictions P1 and P2 use the same predictor and
1121 have equal (or opposed) probabilities. */
1122
1123inline bool
1124predictor_hash::equal (const edge_prediction *p1, const edge_prediction *p2)
1125{
1126 return (p1->ep_predictor == p2->ep_predictor
1127 && (p1->ep_probability == p2->ep_probability
1128 || p1->ep_probability == REG_BR_PROB_BASE - p2->ep_probability));
1129}
1130
1131struct predictor_hash_traits: predictor_hash,
1132 typed_noop_remove <edge_prediction *> {};
1133
1134/* Return true if edge prediction P is not in DATA hash set. */
1135
1136static bool
1137not_removed_prediction_p (edge_prediction *p, void *data)
1138{
1139 hash_set<edge_prediction *> *remove = (hash_set<edge_prediction *> *) data;
1140 return !remove->contains (p);
1141}
1142
1143/* Prune predictions for a basic block BB. Currently we do the following
1144 clean-up steps:
1145
1146 1) remove a duplicate prediction that guesses the same probability
1147 (different from 1/2) for both edges
1148 2) remove duplicates for a prediction that belongs with the same
1149 probability to a single edge
1150
1151 */
1152
1153static void
1154prune_predictions_for_bb (basic_block bb)
1155{
1156 edge_prediction **preds = bb_predictions->get (bb);
1157
1158 if (preds)
1159 {
1160 hash_table <predictor_hash_traits> s (13);
1161 hash_set <edge_prediction *> remove;
1162
1163 /* Step 1: identify predictors that should be removed. */
1164 for (edge_prediction *pred = *preds; pred; pred = pred->ep_next)
1165 {
1166 edge_prediction *existing = s.find (pred);
1167 if (existing)
1168 {
1169 if (pred->ep_edge == existing->ep_edge
1170 && pred->ep_probability == existing->ep_probability)
1171 {
1172 /* Remove a duplicate predictor. */
1173 dump_prediction (dump_file, pred->ep_predictor,
1174 pred->ep_probability, bb,
1175 REASON_SINGLE_EDGE_DUPLICATE, pred->ep_edge);
1176
1177 remove.add (pred);
1178 }
1179 else if (pred->ep_edge != existing->ep_edge
1180 && pred->ep_probability == existing->ep_probability
1181 && pred->ep_probability != REG_BR_PROB_BASE / 2)
1182 {
1183 /* Remove both predictors as they predict the same
1184 for both edges. */
1185 dump_prediction (dump_file, existing->ep_predictor,
1186 pred->ep_probability, bb,
1187 REASON_EDGE_PAIR_DUPLICATE,
1188 existing->ep_edge);
1189 dump_prediction (dump_file, pred->ep_predictor,
1190 pred->ep_probability, bb,
1191 REASON_EDGE_PAIR_DUPLICATE,
1192 pred->ep_edge);
1193
1194 remove.add (existing);
1195 remove.add (pred);
1196 }
1197 }
1198
1199 edge_prediction **slot2 = s.find_slot (pred, INSERT);
1200 *slot2 = pred;
1201 }
1202
1203 /* Step 2: Remove predictors. */
1204 filter_predictions (preds, not_removed_prediction_p, &remove);
1205 }
1206}
1207
1208/* Combine predictions into single probability and store them into CFG.
1209 Remove now useless prediction entries.
1210 If DRY_RUN is set, only produce dumps and do not modify profile. */
1211
1212static void
1213combine_predictions_for_bb (basic_block bb, bool dry_run)
1214{
1215 int best_probability = PROB_EVEN;
1216 enum br_predictor best_predictor = END_PREDICTORS;
1217 int combined_probability = REG_BR_PROB_BASE / 2;
1218 int d;
1219 bool first_match = false;
1220 bool found = false;
1221 struct edge_prediction *pred;
1222 int nedges = 0;
1223 edge e, first = NULL, second = NULL;
1224 edge_iterator ei;
1225 int nzero = 0;
1226 int nunknown = 0;
1227
1228 FOR_EACH_EDGE (e, ei, bb->succs)
      1: Loop condition is true. Entering loop body
      6: Loop condition is true. Entering loop body
      13: Loop condition is false. Execution continues on line 1259
1229 {
1230 if (!unlikely_executed_edge_p (e))
      2: Assuming the condition is true
      3: Taking true branch
      7: Assuming the condition is true
      8: Taking true branch
1231 {
1232 nedges ++;
1233 if (first && !second)
      3.1: 'first' is null
      8.1: 'first' is non-null
      8.2: 'second' is null
      9: Taking true branch
1234 second = e;
1235 if (!first)
      3.2: 'first' is null
      4: Taking true branch
      9.1: 'first' is non-null
      10: Taking false branch
1236 first = e;
1237 }
1238 else if (!e->probability.initialized_p ())
1239 e->probability = profile_probability::never ();
1240 if (!e->probability.initialized_p ())
      5: Taking true branch
      11: Taking false branch
1241 nunknown++;
1242 else if (e->probability == profile_probability::never ())
      12: Taking false branch
1243 nzero++;
1244 }
1245
1246 /* When there is no successor or only one choice, prediction is easy.
1247
1248 When we have a basic block with more than 2 successors, the situation
1249 is more complicated as DS theory cannot be used literally.
1250 More precisely, let's assume we predicted edge e1 with probability p1,
1251 thus: m1({b1}) = p1. As we're going to combine more than 2 edges, we
1252 need to find probability of e.g. m1({b2}), which we don't know.
1253 The only approximation is to equally distribute 1-p1 to all edges
1254 different from b1.
1255
1256 According to numbers we've got from the SPEC2006 benchmark, there's only
1257 one interesting reliable predictor (noreturn call), which can be
1258 handled with a bit easier approach. */
1259 if (nedges != 2)
      13.1: 'nedges' is equal to 2
      14: Taking false branch
1260 {
1261 hash_set<edge> unlikely_edges (4);
1262 hash_set<edge_prediction *> likely_edges (4);
1263
1264 /* Identify all edges that have a probability close to very unlikely.
1265 Restricting this to exactly very unlikely is not worth doing, as there's
1266 no such probability in the SPEC2006 benchmark. */
1267 edge_prediction **preds = bb_predictions->get (bb);
1268 if (preds)
1269 for (pred = *preds; pred; pred = pred->ep_next)
1270 {
1271 if (pred->ep_probability <= PROB_VERY_UNLIKELY
1272 || pred->ep_predictor == PRED_COLD_LABEL)
1273 unlikely_edges.add (pred->ep_edge);
1274 else if (pred->ep_probability >= PROB_VERY_LIKELY
1275 || pred->ep_predictor == PRED_BUILTIN_EXPECT
1276 || pred->ep_predictor == PRED_HOT_LABEL)
1277 likely_edges.add (pred);
1278 }
1279
1280 /* It can happen that an edge is both in likely_edges and unlikely_edges.
1281 Clear both sets in that situation. */
1282 for (hash_set<edge_prediction *>::iterator it = likely_edges.begin ();
1283 it != likely_edges.end (); ++it)
1284 if (unlikely_edges.contains ((*it)->ep_edge))
1285 {
1286 likely_edges.empty ();
1287 unlikely_edges.empty ();
1288 break;
1289 }
1290
1291 if (!dry_run)
1292 set_even_probabilities (bb, &unlikely_edges, &likely_edges);
1293 clear_bb_predictions (bb);
1294 if (dump_file)
1295 {
1296 fprintf (dump_file, "Predictions for bb %i\n", bb->index);
1297 if (unlikely_edges.is_empty ())
1298 fprintf (dump_file,
1299 "%i edges in bb %i predicted to even probabilities\n",
1300 nedges, bb->index);
1301 else
1302 {
1303 fprintf (dump_file,
1304 "%i edges in bb %i predicted with some unlikely edges\n",
1305 nedges, bb->index);
1306 FOR_EACH_EDGE (e, ei, bb->succs)
1307 if (!unlikely_executed_edge_p (e))
1308 dump_prediction (dump_file, PRED_COMBINED,
1309 e->probability.to_reg_br_prob_base (), bb, REASON_NONE, e);
1310 }
1311 }
1312 return;
1313 }
1314
1315 if (dump_file)
      15: Assuming 'dump_file' is null
      16: Taking false branch
1316 fprintf (dump_file, "Predictions for bb %i\n", bb->index);
1317
1318 prune_predictions_for_bb (bb);
1319
1320 edge_prediction **preds = bb_predictions->get (bb);
1321
1322 if (preds)
      17: Assuming 'preds' is null
      18: Taking false branch
1323 {
1324 /* We implement "first match" heuristics and use probability guessed
1325 by predictor with smallest index. */
1326 for (pred = *preds; pred; pred = pred->ep_next)
1327 {
1328 enum br_predictor predictor = pred->ep_predictor;
1329 int probability = pred->ep_probability;
1330
1331 if (pred->ep_edge != first)
1332 probability = REG_BR_PROB_BASE - probability;
1333
1334 found = true;
1335 /* First match heuristics would be wildly confused if we predicted
1336 both directions. */
1337 if (best_predictor > predictor
1338 && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH)
1339 {
1340 struct edge_prediction *pred2;
1341 int prob = probability;
1342
1343 for (pred2 = (struct edge_prediction *) *preds;
1344 pred2; pred2 = pred2->ep_next)
1345 if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
1346 {
1347 int probability2 = pred2->ep_probability;
1348
1349 if (pred2->ep_edge != first)
1350 probability2 = REG_BR_PROB_BASE - probability2;
1351
1352 if ((probability < REG_BR_PROB_BASE / 2) !=
1353 (probability2 < REG_BR_PROB_BASE / 2))
1354 break;
1355
1356 /* If the same predictor later gave better result, go for it! */
1357 if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability))
1358 || (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability)))
1359 prob = probability2;
1360 }
1361 if (!pred2)
1362 best_probability = prob, best_predictor = predictor;
1363 }
1364
1365 d = (combined_probability * probability
1366 + (REG_BR_PROB_BASE - combined_probability)
1367 * (REG_BR_PROB_BASE - probability));
1368
1369 /* Use FP math to avoid overflows of 32bit integers. */
1370 if (d == 0)
1371 /* If one probability is 0% and one 100%, avoid division by zero. */
1372 combined_probability = REG_BR_PROB_BASE / 2;
1373 else
1374 combined_probability = (((double) combined_probability)
1375 * probability
1376 * REG_BR_PROB_BASE / d + 0.5);
1377 }
1378 }
1379
1380 /* Decide which heuristic to use. In case we didn't match anything,
1381 use no_prediction heuristic, in case we did match, use either
1382 first match or Dempster-Shafer theory depending on the flags. */
1383
1384 if (best_predictor != END_PREDICTORS)
      18.1: 'best_predictor' is equal to END_PREDICTORS
      19: Taking false branch
1385 first_match = true;
1386
1387 if (!found)
      19.1: 'found' is false
      20: Taking true branch
1388 dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb);
1389 else
1390 {
1391 if (!first_match)
1392 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
1393 !first_match ? REASON_NONE : REASON_IGNORED);
1394 else
1395 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
1396 first_match ? REASON_NONE : REASON_IGNORED);
1397 }
1398
1399 if (first_match)
      20.1: 'first_match' is false
      21: Taking false branch
1400 combined_probability = best_probability;
1401 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb);
1402
1403 if (preds)
      21.1: 'preds' is null
      22: Taking false branch
1404 {
1405 for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
1406 {
1407 enum br_predictor predictor = pred->ep_predictor;
1408 int probability = pred->ep_probability;
1409
1410 dump_prediction (dump_file, predictor, probability, bb,
1411 (!first_match || best_predictor == predictor)
1412 ? REASON_NONE : REASON_IGNORED, pred->ep_edge);
1413 }
1414 }
1415 clear_bb_predictions (bb);
1416
1417
1418 /* If we have only one successor which is unknown, we can compute missing
1419 probability. */
1420 if (nunknown == 1)
      22.1: 'nunknown' is equal to 1
      23: Taking true branch
1421 {
1422 profile_probability prob = profile_probability::always ();
1423 edge missing = NULL;
      24: 'missing' initialized to a null pointer value
1424
1425 FOR_EACH_EDGE (e, ei, bb->succs)
      25: Loop condition is false. Execution continues on line 1432
1426 if (e->probability.initialized_p ())
1427 prob -= e->probability;
1428 else if (missing == NULL)
1429 missing = e;
1430 else
1431 gcc_unreachable ();
1432 missing->probability = prob;
      26: Called C++ object pointer is null
1433 }
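
Note on the path: steps 1 through 13 walked bb->succs twice and found exactly one edge with an uninitialized probability (so nunknown == 1), yet step 25 assumes the second FOR_EACH_EDGE scan of the very same bb->succs executes zero iterations, which is the only way 'missing' can still be null on line 1432; the analyzer does not connect the two traversals of the same edge vector. A minimal hardening sketch (an illustration, not a GCC patch) that makes the invariant explicit and removes the dereference the checker complains about:

  if (missing)
    missing->probability = prob;
  else
    gcc_unreachable ();  /* nunknown == 1 guarantees one uninitialized edge */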
1434 /* If nothing is unknown, we have nothing to update. */
1435 else if (!nunknown && nzero != (int)EDGE_COUNT (bb->succs))
1436 ;
1437 else if (!dry_run)
1438 {
1439 first->probability
1440 = profile_probability::from_reg_br_prob_base (combined_probability);
1441 second->probability = first->probability.invert ();
1442 }
1443}
1444
1445/* Check if T1 and T2 satisfy the IV_COMPARE condition.
1446 Return the SSA_NAME if the condition is satisfied, NULL otherwise.
1447
1448 T1 and T2 should be one of the following cases:
1449 1. T1 is SSA_NAME, T2 is NULL
1450 2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
1451 3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4] */
1452
1453static tree
1454strips_small_constant (tree t1, tree t2)
1455{
1456 tree ret = NULL;
1457 int value = 0;
1458
1459 if (!t1)
1460 return NULL;
1461 else if (TREE_CODE (t1) == SSA_NAME)
1462 ret = t1;
1463 else if (tree_fits_shwi_p (t1))
1464 value = tree_to_shwi (t1);
1465 else
1466 return NULL;
1467
1468 if (!t2)
1469 return ret;
1470 else if (tree_fits_shwi_p (t2))
1471 value = tree_to_shwi (t2);
1472 else if (TREE_CODE (t2) == SSA_NAME)
1473 {
1474 if (ret)
1475 return NULL;
1476 else
1477 ret = t2;
1478 }
1479
1480 if (value <= 4 && value >= -4)
1481 return ret;
1482 else
1483 return NULL;
1484}
1485
1486/* Return the SSA_NAME in T or T's operands.
1487 Return NULL if SSA_NAME cannot be found. */
1488
1489static tree
1490get_base_value (tree t)
1491{
1492 if (TREE_CODE (t) == SSA_NAME)
1493 return t;
1494
1495 if (!BINARY_CLASS_P (t))
1496 return NULL;
1497
1498 switch (TREE_OPERAND_LENGTH (t))
1499 {
1500 case 1:
1501 return strips_small_constant (TREE_OPERAND (t, 0), NULL);
1502 case 2:
1503 return strips_small_constant (TREE_OPERAND (t, 0),
1504 TREE_OPERAND (t, 1));
1505 default:
1506 return NULL;
1507 }
1508}
1509
1510/* Check the compare STMT in LOOP. If it compares an induction
1511 variable to a loop invariant, return true, and save
1512 LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP.
1513 Otherwise return false and set LOOP_INVARIANT to NULL. */
1514
1515static bool
1516is_comparison_with_loop_invariant_p (gcond *stmt, class loop *loop,
1517 tree *loop_invariant,
1518 enum tree_code *compare_code,
1519 tree *loop_step,
1520 tree *loop_iv_base)
1521{
1522 tree op0, op1, bound, base;
1523 affine_iv iv0, iv1;
1524 enum tree_code code;
1525 tree step;
1526
1527 code = gimple_cond_code (stmt);
1528 *loop_invariant = NULL;
1529
1530 switch (code)
1531 {
1532 case GT_EXPR:
1533 case GE_EXPR:
1534 case NE_EXPR:
1535 case LT_EXPR:
1536 case LE_EXPR:
1537 case EQ_EXPR:
1538 break;
1539
1540 default:
1541 return false;
1542 }
1543
1544 op0 = gimple_cond_lhs (stmt);
1545 op1 = gimple_cond_rhs (stmt);
1546
1547 if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST)
1548 || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST))
1549 return false;
1550 if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true))
1551 return false;
1552 if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true))
1553 return false;
1554 if (TREE_CODE (iv0.step) != INTEGER_CST
1555 || TREE_CODE (iv1.step) != INTEGER_CST)
1556 return false;
1557 if ((integer_zerop (iv0.step) && integer_zerop (iv1.step))
1558 || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step)))
1559 return false;
1560
1561 if (integer_zerop (iv0.step))
1562 {
1563 if (code != NE_EXPR && code != EQ_EXPR)
1564 code = invert_tree_comparison (code, false);
1565 bound = iv0.base;
1566 base = iv1.base;
1567 if (tree_fits_shwi_p (iv1.step))
1568 step = iv1.step;
1569 else
1570 return false;
1571 }
1572 else
1573 {
1574 bound = iv1.base;
1575 base = iv0.base;
1576 if (tree_fits_shwi_p (iv0.step))
1577 step = iv0.step;
1578 else
1579 return false;
1580 }
1581
1582  if (TREE_CODE (bound) != INTEGER_CST)
1583 bound = get_base_value (bound);
1584 if (!bound)
1585 return false;
1586  if (TREE_CODE (base) != INTEGER_CST)
1587 base = get_base_value (base);
1588 if (!base)
1589 return false;
1590
1591 *loop_invariant = bound;
1592 *compare_code = code;
1593 *loop_step = step;
1594 *loop_iv_base = base;
1595 return true;
1596}
1597
1598/* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent. */
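/* Editorial note (illustrative annotation, not part of the original
   predict.cc): two names are considered coherent when one is defined by
   a single-operand assignment from the other, e.g. with
   "t2_7 = (long) t1_5;" the pair (t1_5, t2_7) is coherent; two unrelated
   SSA_NAMEs, or an SSA_NAME paired with an INTEGER_CST, are not.  */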
1599
1600static bool
1601expr_coherent_p (tree t1, tree t2)
1602{
1603 gimple *stmt;
1604  tree ssa_name_1 = NULL;
1605  tree ssa_name_2 = NULL;
1606
1607  gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST);
1608  gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST);
1609
1610  if (t1 == t2)
1611    return true;
1612
1613  if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST)
1614    return true;
1615  if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST)
1616    return false;
1617
1618  /* Check to see if t1 is expressed/defined with t2.  */
1619  stmt = SSA_NAME_DEF_STMT (t1);
1620  gcc_assert (stmt != NULL);
1621  if (is_gimple_assign (stmt))
1622    {
1623      ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
1624      if (ssa_name_1 && ssa_name_1 == t2)
1625        return true;
1626    }
1627
1628  /* Check to see if t2 is expressed/defined with t1.  */
1629  stmt = SSA_NAME_DEF_STMT (t2);
1630  gcc_assert (stmt != NULL);
1631  if (is_gimple_assign (stmt))
1632    {
1633      ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
1634      if (ssa_name_2 && ssa_name_2 == t1)
1635        return true;
1636    }
1637
1638  /* Compare if t1 and t2's def_stmts are identical.  */
1639  if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2)
1640 return true;
1641 else
1642 return false;
1643}
1644
1645/* Return true if BB is predicted by one of loop heuristics.  */
1646
1647static bool
1648predicted_by_loop_heuristics_p (basic_block bb)
1649{
1650 struct edge_prediction *i;
1651 edge_prediction **preds = bb_predictions->get (bb);
1652
1653 if (!preds)
1654 return false;
1655
1656 for (i = *preds; i; i = i->ep_next)
1657 if (i->ep_predictor == PRED_LOOP_ITERATIONS_GUESSED
1658 || i->ep_predictor == PRED_LOOP_ITERATIONS_MAX
1659 || i->ep_predictor == PRED_LOOP_ITERATIONS
1660 || i->ep_predictor == PRED_LOOP_EXIT
1661 || i->ep_predictor == PRED_LOOP_EXIT_WITH_RECURSION
1662 || i->ep_predictor == PRED_LOOP_EXTRA_EXIT)
1663 return true;
1664 return false;
1665}
1666
1667/* Predict branch probability of BB when BB contains a branch that compares
1668 an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR. The
1669 loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP.
1670
1671 E.g.
1672 for (int i = 0; i < bound; i++) {
1673 if (i < bound - 2)
1674 computation_1();
1675 else
1676 computation_2();
1677 }
1678
1679 In this loop, we will predict the branch inside the loop to be taken. */
1680
1681static void
1682predict_iv_comparison (class loop *loop, basic_block bb,
1683 tree loop_bound_var,
1684 tree loop_iv_base_var,
1685 enum tree_code loop_bound_code,
1686 int loop_bound_step)
1687{
1688 gimple *stmt;
1689 tree compare_var, compare_base;
1690 enum tree_code compare_code;
1691 tree compare_step_var;
1692 edge then_edge;
1693 edge_iterator ei;
1694
1695 if (predicted_by_loop_heuristics_p (bb))
1696 return;
1697
1698 stmt = last_stmt (bb);
1699 if (!stmt || gimple_code (stmt) != GIMPLE_COND)
1700 return;
1701 if (!is_comparison_with_loop_invariant_p (as_a <gcond *> (stmt),
1702 loop, &compare_var,
1703 &compare_code,
1704 &compare_step_var,
1705 &compare_base))
1706 return;
1707
1708 /* Find the taken edge. */
1709  FOR_EACH_EDGE (then_edge, ei, bb->succs)
1710 if (then_edge->flags & EDGE_TRUE_VALUE)
1711 break;
1712
1713 /* When comparing an IV to a loop invariant, NE is more likely to be
1714 taken while EQ is more likely to be not-taken. */
1715 if (compare_code == NE_EXPR)
1716 {
1717 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1718 return;
1719 }
1720 else if (compare_code == EQ_EXPR)
1721 {
1722 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1723 return;
1724 }
1725
1726 if (!expr_coherent_p (loop_iv_base_var, compare_base))
1727 return;
1728
1729 /* If loop bound, base and compare bound are all constants, we can
1730 calculate the probability directly. */
1731 if (tree_fits_shwi_p (loop_bound_var)
1732 && tree_fits_shwi_p (compare_var)
1733 && tree_fits_shwi_p (compare_base))
1734 {
1735 int probability;
1736 wi::overflow_type overflow;
1737 bool overall_overflow = false;
1738 widest_int compare_count, tem;
1739
1740 /* (loop_bound - base) / compare_step */
1741 tem = wi::sub (wi::to_widest (loop_bound_var),
1742 wi::to_widest (compare_base), SIGNED, &overflow);
1743 overall_overflow |= overflow;
1744 widest_int loop_count = wi::div_trunc (tem,
1745 wi::to_widest (compare_step_var),
1746 SIGNED, &overflow);
1747 overall_overflow |= overflow;
1748
1749 if (!wi::neg_p (wi::to_widest (compare_step_var))
1750 ^ (compare_code == LT_EXPR || compare_code == LE_EXPR))
1751 {
1752 /* (loop_bound - compare_bound) / compare_step */
1753 tem = wi::sub (wi::to_widest (loop_bound_var),
1754 wi::to_widest (compare_var), SIGNED, &overflow);
1755 overall_overflow |= overflow;
1756 compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
1757 SIGNED, &overflow);
1758 overall_overflow |= overflow;
1759 }
1760 else
1761 {
1762 /* (compare_bound - base) / compare_step */
1763 tem = wi::sub (wi::to_widest (compare_var),
1764 wi::to_widest (compare_base), SIGNED, &overflow);
1765 overall_overflow |= overflow;
1766 compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
1767 SIGNED, &overflow);
1768 overall_overflow |= overflow;
1769 }
1770 if (compare_code == LE_EXPR || compare_code == GE_EXPR)
1771 ++compare_count;
1772 if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR)
1773 ++loop_count;
1774 if (wi::neg_p (compare_count))
1775 compare_count = 0;
1776 if (wi::neg_p (loop_count))
1777 loop_count = 0;
1778 if (loop_count == 0)
1779 probability = 0;
1780 else if (wi::cmps (compare_count, loop_count) == 1)
1781        probability = REG_BR_PROB_BASE;
1782 else
1783 {
1784          tem = compare_count * REG_BR_PROB_BASE;
1785 tem = wi::udiv_trunc (tem, loop_count);
1786 probability = tem.to_uhwi ();
1787 }
1788
1789 /* FIXME: The branch prediction seems broken. It has only 20% hitrate. */
1790 if (!overall_overflow)
1791 predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability);
1792
1793 return;
1794 }
1795
1796 if (expr_coherent_p (loop_bound_var, compare_var))
1797 {
1798 if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR)
1799 && (compare_code == LT_EXPR || compare_code == LE_EXPR))
1800 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1801 else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR)
1802 && (compare_code == GT_EXPR || compare_code == GE_EXPR))
1803 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1804 else if (loop_bound_code == NE_EXPR)
1805 {
1806 /* If the loop backedge condition is "(i != bound)", we do
1807 the comparison based on the step of IV:
1808 * step < 0 : backedge condition is like (i > bound)
1809 * step > 0 : backedge condition is like (i < bound) */
1810          gcc_assert (loop_bound_step != 0);
1811 if (loop_bound_step > 0
1812 && (compare_code == LT_EXPR
1813 || compare_code == LE_EXPR))
1814 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1815 else if (loop_bound_step < 0
1816 && (compare_code == GT_EXPR
1817 || compare_code == GE_EXPR))
1818 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1819 else
1820 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1821 }
1822 else
1823 /* The branch is predicted not-taken if loop_bound_code is
1824 opposite with compare_code. */
1825 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1826 }
1827 else if (expr_coherent_p (loop_iv_base_var, compare_var))
1828 {
1829 /* For cases like:
1830 for (i = s; i < h; i++)
1831 if (i > s + 2) ....
1832 The branch should be predicted taken. */
1833 if (loop_bound_step > 0
1834 && (compare_code == GT_EXPR || compare_code == GE_EXPR))
1835 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1836 else if (loop_bound_step < 0
1837 && (compare_code == LT_EXPR || compare_code == LE_EXPR))
1838 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1839 else
1840 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1841 }
1842}
1843
1844/* Predict for extra loop exits that will lead to EXIT_EDGE.  The extra loop
1845   exits result from short-circuit conditions that will generate an
1846   if_tmp.  E.g.:
1847
1848 if (foo() || global > 10)
1849 break;
1850
1851 This will be translated into:
1852
1853 BB3:
1854 loop header...
1855 BB4:
1856 if foo() goto BB6 else goto BB5
1857 BB5:
1858 if global > 10 goto BB6 else goto BB7
1859 BB6:
1860 goto BB7
1861 BB7:
1862 iftmp = (PHI 0(BB5), 1(BB6))
1863 if iftmp == 1 goto BB8 else goto BB3
1864 BB8:
1865 outside of the loop...
1866
1867   The edge BB7->BB8 is a loop exit because BB8 is outside of the loop.
1868 From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop
1869 exits. This function takes BB7->BB8 as input, and finds out the extra loop
1870 exits to predict them using PRED_LOOP_EXTRA_EXIT. */
1871
1872static void
1873predict_extra_loop_exits (class loop *loop, edge exit_edge)
1874{
1875 unsigned i;
1876 bool check_value_one;
1877 gimple *lhs_def_stmt;
1878 gphi *phi_stmt;
1879 tree cmp_rhs, cmp_lhs;
1880 gimple *last;
1881 gcond *cmp_stmt;
1882
1883 last = last_stmt (exit_edge->src);
1884 if (!last)
1885 return;
1886 cmp_stmt = dyn_cast <gcond *> (last);
1887 if (!cmp_stmt)
1888 return;
1889
1890 cmp_rhs = gimple_cond_rhs (cmp_stmt);
1891 cmp_lhs = gimple_cond_lhs (cmp_stmt);
1892  if (!TREE_CONSTANT (cmp_rhs)
1893      || !(integer_zerop (cmp_rhs) || integer_onep (cmp_rhs)))
1894    return;
1895  if (TREE_CODE (cmp_lhs) != SSA_NAME)
1896 return;
1897
1898 /* If check_value_one is true, only the phi_args with value '1' will lead
1899 to loop exit. Otherwise, only the phi_args with value '0' will lead to
1900 loop exit. */
1901 check_value_one = (((integer_onep (cmp_rhs))
1902 ^ (gimple_cond_code (cmp_stmt) == EQ_EXPR))
1903 ^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0));
1904
1905  lhs_def_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
1906 if (!lhs_def_stmt)
1907 return;
1908
1909 phi_stmt = dyn_cast <gphi *> (lhs_def_stmt);
1910 if (!phi_stmt)
1911 return;
1912
1913 for (i = 0; i < gimple_phi_num_args (phi_stmt); i++)
1914 {
1915 edge e1;
1916 edge_iterator ei;
1917 tree val = gimple_phi_arg_def (phi_stmt, i);
1918 edge e = gimple_phi_arg_edge (phi_stmt, i);
1919
1920      if (!TREE_CONSTANT (val) || !(integer_zerop (val) || integer_onep (val)))
1921 continue;
1922 if ((check_value_one ^ integer_onep (val)) == 1)
1923 continue;
1924      if (EDGE_COUNT (e->src->succs) != 1)
1925 {
1926 predict_paths_leading_to_edge (e, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN,
1927 loop);
1928 continue;
1929 }
1930
1931      FOR_EACH_EDGE (e1, ei, e->src->preds)
1932 predict_paths_leading_to_edge (e1, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN,
1933 loop);
1934 }
1935}
1936
1937
1938/* Predict edge probabilities by exploiting loop structure. */
1939
1940static void
1941predict_loops (void)
1942{
1943 basic_block bb;
1944 hash_set <class loop *> with_recursion(10);
1945
1946  FOR_EACH_BB_FN (bb, cfun)
1947 {
1948 gimple_stmt_iterator gsi;
1949 tree decl;
1950
1951 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1952 if (is_gimple_call (gsi_stmt (gsi))
1953          && (decl = gimple_call_fndecl (gsi_stmt (gsi))) != NULL
1954 && recursive_call_p (current_function_decl, decl))
1955 {
1956 class loop *loop = bb->loop_father;
1957 while (loop && !with_recursion.add (loop))
1958 loop = loop_outer (loop);
1959 }
1960 }
1961
1962 /* Try to predict out blocks in a loop that are not part of a
1963 natural loop. */
1964  for (auto loop : loops_list (cfun, LI_FROM_INNERMOST))
1965 {
1966 basic_block bb, *bbs;
1967 unsigned j, n_exits = 0;
1968 class tree_niter_desc niter_desc;
1969 edge ex;
1970 class nb_iter_bound *nb_iter;
1971 enum tree_code loop_bound_code = ERROR_MARK;
1972      tree loop_bound_step = NULL;
1973      tree loop_bound_var = NULL;
1974      tree loop_iv_base = NULL;
1975      gcond *stmt = NULL;
1976 bool recursion = with_recursion.contains (loop);
1977
1978 auto_vec<edge> exits = get_loop_exit_edges (loop);
1979      FOR_EACH_VEC_ELT (exits, j, ex)
1980 if (!unlikely_executed_edge_p (ex) && !(ex->flags & EDGE_ABNORMAL_CALL))
1981 n_exits ++;
1982 if (!n_exits)
1983 continue;
1984
1985 if (dump_file && (dump_flags & TDF_DETAILS))
1986 fprintf (dump_file, "Predicting loop %i%s with %i exits.\n",
1987 loop->num, recursion ? " (with recursion)":"", n_exits);
1988 if (dump_file && (dump_flags & TDF_DETAILS)
1989 && max_loop_iterations_int (loop) >= 0)
1990 {
1991 fprintf (dump_file,
1992 "Loop %d iterates at most %i times.\n", loop->num,
1993 (int)max_loop_iterations_int (loop));
1994 }
1995 if (dump_file && (dump_flags & TDF_DETAILS)
1996 && likely_max_loop_iterations_int (loop) >= 0)
1997 {
1998 fprintf (dump_file, "Loop %d likely iterates at most %i times.\n",
1999 loop->num, (int)likely_max_loop_iterations_int (loop));
2000 }
2001
2002      FOR_EACH_VEC_ELT (exits, j, ex)
2003 {
2004          tree niter = NULL;
2005          HOST_WIDE_INT nitercst;
2006          int max = param_max_predicted_iterations;
2007 int probability;
2008 enum br_predictor predictor;
2009 widest_int nit;
2010
2011 if (unlikely_executed_edge_p (ex)
2012 || (ex->flags & EDGE_ABNORMAL_CALL))
2013 continue;
2014          /* Loop heuristics do not expect the exit conditional to be inside
2015             an inner loop.  We predict from innermost to outermost loop.  */
2016 if (predicted_by_loop_heuristics_p (ex->src))
2017 {
2018 if (dump_file && (dump_flags & TDF_DETAILS))
2019 fprintf (dump_file, "Skipping exit %i->%i because "
2020 "it is already predicted.\n",
2021 ex->src->index, ex->dest->index);
2022 continue;
2023 }
2024 predict_extra_loop_exits (loop, ex);
2025
2026 if (number_of_iterations_exit (loop, ex, &niter_desc, false, false))
2027 niter = niter_desc.niter;
2028          if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
2029 niter = loop_niter_by_eval (loop, ex);
2030 if (dump_file && (dump_flags & TDF_DETAILS)
2031              && TREE_CODE (niter) == INTEGER_CST)
2032 {
2033 fprintf (dump_file, "Exit %i->%i %d iterates ",
2034 ex->src->index, ex->dest->index,
2035 loop->num);
2036 print_generic_expr (dump_file, niter, TDF_SLIM);
2037 fprintf (dump_file, " times.\n");
2038 }
2039
2040          if (TREE_CODE (niter) == INTEGER_CST)
2041 {
2042 if (tree_fits_uhwi_p (niter)
2043 && max
2044 && compare_tree_int (niter, max - 1) == -1)
2045 nitercst = tree_to_uhwi (niter) + 1;
2046 else
2047 nitercst = max;
2048 predictor = PRED_LOOP_ITERATIONS;
2049 }
2050 /* If we have just one exit and we can derive some information about
2051 the number of iterations of the loop from the statements inside
2052 the loop, use it to predict this exit. */
2053 else if (n_exits == 1
2054 && estimated_stmt_executions (loop, &nit))
2055 {
2056 if (wi::gtu_p (nit, max))
2057 nitercst = max;
2058 else
2059 nitercst = nit.to_shwi ();
2060 predictor = PRED_LOOP_ITERATIONS_GUESSED;
2061 }
2062          /* If we have a likely upper bound, trust it for very small iteration
2063 counts. Such loops would otherwise get mispredicted by standard
2064 LOOP_EXIT heuristics. */
2065 else if (n_exits == 1
2066 && likely_max_stmt_executions (loop, &nit)
2067 && wi::ltu_p (nit,
2068                                 RDIV (REG_BR_PROB_BASE,
2069                                       REG_BR_PROB_BASE
2070                                         - predictor_info
2071                                                 [recursion
2072                                                  ? PRED_LOOP_EXIT_WITH_RECURSION
2073                                                  : PRED_LOOP_EXIT].hitrate)))
2074 {
2075 nitercst = nit.to_shwi ();
2076 predictor = PRED_LOOP_ITERATIONS_MAX;
2077 }
2078 else
2079 {
2080 if (dump_file && (dump_flags & TDF_DETAILS))
2081 fprintf (dump_file, "Nothing known about exit %i->%i.\n",
2082 ex->src->index, ex->dest->index);
2083 continue;
2084 }
2085
2086 if (dump_file && (dump_flags & TDF_DETAILS))
2087 fprintf (dump_file, "Recording prediction to %i iterations by %s.\n",
2088 (int)nitercst, predictor_info[predictor].name);
2089 /* If the prediction for number of iterations is zero, do not
2090 predict the exit edges. */
2091 if (nitercst == 0)
2092 continue;
2093
2094          probability = RDIV (REG_BR_PROB_BASE, nitercst);
2095 predict_edge (ex, predictor, probability);
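          /* Editorial note (illustrative annotation, not part of the
             original predict.cc): with REG_BR_PROB_BASE == 10000, an exit
             of a loop predicted to iterate nitercst == 10 times gets
             probability RDIV (10000, 10) == 1000, i.e. the exit branch is
             taken on roughly 10% of executions of its condition.  */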
2096 }
2097
2098 /* Find information about loop bound variables. */
2099 for (nb_iter = loop->bounds; nb_iter;
2100 nb_iter = nb_iter->next)
2101 if (nb_iter->stmt
2102 && gimple_code (nb_iter->stmt) == GIMPLE_COND)
2103 {
2104 stmt = as_a <gcond *> (nb_iter->stmt);
2105 break;
2106 }
2107 if (!stmt && last_stmt (loop->header)
2108 && gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
2109 stmt = as_a <gcond *> (last_stmt (loop->header));
2110 if (stmt)
2111 is_comparison_with_loop_invariant_p (stmt, loop,
2112 &loop_bound_var,
2113 &loop_bound_code,
2114 &loop_bound_step,
2115 &loop_iv_base);
2116
2117 bbs = get_loop_body (loop);
2118
2119 for (j = 0; j < loop->num_nodes; j++)
2120 {
2121 edge e;
2122 edge_iterator ei;
2123
2124 bb = bbs[j];
2125
2126          /* Bypass loop heuristics on continue statement.  These
2127             statements construct loops via "non-loop" constructs
2128             in the source language and are better handled
2129             separately.  */
2130 if (predicted_by_p (bb, PRED_CONTINUE))
2131 {
2132 if (dump_file && (dump_flags & TDF_DETAILS))
2133 fprintf (dump_file, "BB %i predicted by continue.\n",
2134 bb->index);
2135 continue;
2136 }
2137
2138 /* If we already used more reliable loop exit predictors, do not
2139 bother with PRED_LOOP_EXIT. */
2140 if (!predicted_by_loop_heuristics_p (bb))
2141 {
2142              /* For a loop with many exits we don't want to predict all exits
2143                 with a pretty large probability, because if all exits are
2144                 considered in a row, the loop would be predicted to iterate
2145                 almost never.  The code to divide probability by the number of
2146                 exits is very rough.  It should compute the number of exits
2147                 taken in each path through the function (not the overall number
2148                 of exits that might be a lot higher for loops with wide switch
2149                 statements in them) and compute the n-th square root.
2150
2151                 We limit the minimal probability by 2% to avoid
2152                 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction
2153                 as this was causing a regression in the perl benchmark containing
2154                 such a wide loop.  */
2155
2156              int probability = ((REG_BR_PROB_BASE
2157                                  - predictor_info
2158                                     [recursion
2159                                      ? PRED_LOOP_EXIT_WITH_RECURSION
2160                                      : PRED_LOOP_EXIT].hitrate)
2161                                 / n_exits);
2162              if (probability < HITRATE (2))
2163                probability = HITRATE (2);
2164              FOR_EACH_EDGE (e, ei, bb->succs)
2165                if (e->dest->index < NUM_FIXED_BLOCKS
2166 || !flow_bb_inside_loop_p (loop, e->dest))
2167 {
2168 if (dump_file && (dump_flags & TDF_DETAILS))
2169 fprintf (dump_file,
2170 "Predicting exit %i->%i with prob %i.\n",
2171 e->src->index, e->dest->index, probability);
2172 predict_edge (e,
2173 recursion ? PRED_LOOP_EXIT_WITH_RECURSION
2174 : PRED_LOOP_EXIT, probability);
2175 }
2176 }
2177 if (loop_bound_var)
2178 predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base,
2179 loop_bound_code,
2180 tree_to_shwi (loop_bound_step));
2181 }
2182
2183 /* In the following code
2184 for (loop1)
2185 if (cond)
2186 for (loop2)
2187 body;
2188 guess that cond is unlikely. */
2189 if (loop_outer (loop)->num)
2190 {
2191          basic_block bb = NULL;
2192 edge preheader_edge = loop_preheader_edge (loop);
2193
2194 if (single_pred_p (preheader_edge->src)
2195 && single_succ_p (preheader_edge->src))
2196 preheader_edge = single_pred_edge (preheader_edge->src);
2197
2198 gimple *stmt = last_stmt (preheader_edge->src);
2199 /* Pattern match fortran loop preheader:
2200 _16 = BUILTIN_EXPECT (_15, 1, PRED_FORTRAN_LOOP_PREHEADER);
2201 _17 = (logical(kind=4)) _16;
2202 if (_17 != 0)
2203 goto <bb 11>;
2204 else
2205 goto <bb 13>;
2206
2207 Loop guard branch prediction says nothing about duplicated loop
2208 headers produced by fortran frontend and in this case we want
2209 to predict paths leading to this preheader. */
2210
2211 if (stmt
2212 && gimple_code (stmt) == GIMPLE_COND
2213 && gimple_cond_code (stmt) == NE_EXPR
2214          && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME
2215 && integer_zerop (gimple_cond_rhs (stmt)))
2216 {
2217          gimple *call_stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt));
2218          if (gimple_code (call_stmt) == GIMPLE_ASSIGN
2219              && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (call_stmt))
2220              && TREE_CODE (gimple_assign_rhs1 (call_stmt)) == SSA_NAME)
2221            call_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (call_stmt));
2222          if (gimple_call_internal_p (call_stmt, IFN_BUILTIN_EXPECT)
2223              && TREE_CODE (gimple_call_arg (call_stmt, 2)) == INTEGER_CST
2224              && tree_fits_uhwi_p (gimple_call_arg (call_stmt, 2))
2225              && tree_to_uhwi (gimple_call_arg (call_stmt, 2))
2226                 == PRED_FORTRAN_LOOP_PREHEADER)
2227 bb = preheader_edge->src;
2228 }
2229 if (!bb)
2230 {
2231 if (!dominated_by_p (CDI_DOMINATORS,
2232 loop_outer (loop)->latch, loop->header))
2233 predict_paths_leading_to_edge (loop_preheader_edge (loop),
2234 recursion
2235 ? PRED_LOOP_GUARD_WITH_RECURSION
2236 : PRED_LOOP_GUARD,
2237 NOT_TAKEN,
2238 loop_outer (loop));
2239 }
2240 else
2241 {
2242 if (!dominated_by_p (CDI_DOMINATORS,
2243 loop_outer (loop)->latch, bb))
2244 predict_paths_leading_to (bb,
2245 recursion
2246 ? PRED_LOOP_GUARD_WITH_RECURSION
2247 : PRED_LOOP_GUARD,
2248 NOT_TAKEN,
2249 loop_outer (loop));
2250 }
2251 }
2252
2253 /* Free basic blocks from get_loop_body. */
2254 free (bbs);
2255 }
2256}
2257
2258/* Attempt to predict probabilities of BB outgoing edges using local
2259 properties. */
2260static void
2261bb_estimate_probability_locally (basic_block bb)
2262{
2263  rtx_insn *last_insn = BB_END (bb);
2264 rtx cond;
2265
2266 if (! can_predict_insn_p (last_insn))
2267 return;
2268  cond = get_condition (last_insn, NULL, false, false);
2269 if (! cond)
2270 return;
2271
2272 /* Try "pointer heuristic."
2273 A comparison ptr == 0 is predicted as false.
2274 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
2275  if (COMPARISON_P (cond)
2276      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
2277          || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
2278 {
2279      if (GET_CODE (cond) == EQ)
2280        predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
2281      else if (GET_CODE (cond) == NE)
2282 predict_insn_def (last_insn, PRED_POINTER, TAKEN);
2283 }
2284 else
2285
2286 /* Try "opcode heuristic."
2287 EQ tests are usually false and NE tests are usually true. Also,
2288 most quantities are positive, so we can make the appropriate guesses
2289 about signed comparisons against zero. */
2290  switch (GET_CODE (cond))
2291 {
2292 case CONST_INT:
2293 /* Unconditional branch. */
2294 predict_insn_def (last_insn, PRED_UNCONDITIONAL,
2295                        cond == const0_rtx ? NOT_TAKEN : TAKEN);
2296 break;
2297
2298 case EQ:
2299 case UNEQ:
2300      /* Floating point comparisons appear to behave in a very
2301         unpredictable way because of the special role of = tests in
2302         FP code.  */
2303      if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
2304 ;
2305 /* Comparisons with 0 are often used for booleans and there is
2306 nothing useful to predict about them. */
2307      else if (XEXP (cond, 1) == const0_rtx
2308               || XEXP (cond, 0) == const0_rtx)
2309 ;
2310 else
2311 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
2312 break;
2313
2314 case NE:
2315 case LTGT:
2316      /* Floating point comparisons appear to behave in a very
2317         unpredictable way because of the special role of = tests in
2318         FP code.  */
2319      if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
2320 ;
2321 /* Comparisons with 0 are often used for booleans and there is
2322 nothing useful to predict about them. */
2323      else if (XEXP (cond, 1) == const0_rtx
2324               || XEXP (cond, 0) == const0_rtx)
2325 ;
2326 else
2327 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
2328 break;
2329
2330 case ORDERED:
2331 predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
2332 break;
2333
2334 case UNORDERED:
2335 predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
2336 break;
2337
2338 case LE:
2339 case LT:
2340      if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
2341          || XEXP (cond, 1) == constm1_rtx)
2342 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
2343 break;
2344
2345 case GE:
2346 case GT:
2347      if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
2348          || XEXP (cond, 1) == constm1_rtx)
2349 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
2350 break;
2351
2352 default:
2353 break;
2354 }
2355}
2356
2357/* Set edge->probability for each successor edge of BB. */
2358void
2359guess_outgoing_edge_probabilities (basic_block bb)
2360{
2361 bb_estimate_probability_locally (bb);
2362  combine_predictions_for_insn (BB_END (bb), bb);
2363}
2364
2365static tree expr_expected_value (tree, bitmap, enum br_predictor *predictor,
2366                                 HOST_WIDE_INT *probability);
2367
2368/* Helper function for expr_expected_value. */
2369
2370static tree
2371expr_expected_value_1 (tree type, tree op0, enum tree_code code,
2372 tree op1, bitmap visited, enum br_predictor *predictor,
2373                       HOST_WIDE_INT *probability)
2374{
2375 gimple *def;
2376
2377 /* Reset returned probability value. */
2378 *probability = -1;
2379 *predictor = PRED_UNCONDITIONAL;
2380
2381 if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
2382 {
2383      if (TREE_CONSTANT (op0))
2384        return op0;
2385
2386 if (code == IMAGPART_EXPR)
2387 {
2388          if (TREE_CODE (TREE_OPERAND (op0, 0)) == SSA_NAME)
2389            {
2390              def = SSA_NAME_DEF_STMT (TREE_OPERAND (op0, 0));
2391 if (is_gimple_call (def)
2392 && gimple_call_internal_p (def)
2393 && (gimple_call_internal_fn (def)
2394 == IFN_ATOMIC_COMPARE_EXCHANGE))
2395 {
2396 /* Assume that any given atomic operation has low contention,
2397 and thus the compare-and-swap operation succeeds. */
2398 *predictor = PRED_COMPARE_AND_SWAP;
2399                  return build_one_cst (TREE_TYPE (op0));
2400 }
2401 }
2402 }
2403
2404 if (code != SSA_NAME)
2405    return NULL_TREE;
2406
2407  def = SSA_NAME_DEF_STMT (op0);
2408
2409  /* If we were already here, break the infinite cycle.  */
2410  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
2411    return NULL;
2412
2413 if (gimple_code (def) == GIMPLE_PHI)
2414 {
2415 /* All the arguments of the PHI node must have the same constant
2416 length. */
2417 int i, n = gimple_phi_num_args (def);
2418      tree val = NULL, new_val;
2419
2420 for (i = 0; i < n; i++)
2421 {
2422          tree arg = PHI_ARG_DEF (def, i);
2423 enum br_predictor predictor2;
2424
2425 /* If this PHI has itself as an argument, we cannot
2426 determine the string length of this argument. However,
2427 if we can find an expected constant value for the other
2428 PHI args then we can still be sure that this is
2429 likely a constant. So be optimistic and just
2430 continue with the next argument. */
2431          if (arg == PHI_RESULT (def))
2432 continue;
2433
2434          HOST_WIDE_INT probability2;
2435 new_val = expr_expected_value (arg, visited, &predictor2,
2436 &probability2);
2437
2438 /* It is difficult to combine value predictors. Simply assume
2439 that later predictor is weaker and take its prediction. */
2440 if (*predictor < predictor2)
2441 {
2442 *predictor = predictor2;
2443 *probability = probability2;
2444 }
2445 if (!new_val)
2446            return NULL;
2447          if (!val)
2448            val = new_val;
2449          else if (!operand_equal_p (val, new_val, false))
2450            return NULL;
2451 }
2452 return val;
2453 }
2454 if (is_gimple_assign (def))
2455 {
2456 if (gimple_assign_lhs (def) != op0)
2457        return NULL;
2458
2459      return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
2460 gimple_assign_rhs1 (def),
2461 gimple_assign_rhs_code (def),
2462 gimple_assign_rhs2 (def),
2463 visited, predictor, probability);
2464 }
2465
2466 if (is_gimple_call (def))
2467 {
2468 tree decl = gimple_call_fndecl (def);
2469 if (!decl)
2470 {
2471 if (gimple_call_internal_p (def)
2472 && gimple_call_internal_fn (def) == IFN_BUILTIN_EXPECT)
2473 {
2474              gcc_assert (gimple_call_num_args (def) == 3);
2475 tree val = gimple_call_arg (def, 0);
2476              if (TREE_CONSTANT (val))
2477                return val;
2478 tree val2 = gimple_call_arg (def, 2);
2479              gcc_assert (TREE_CODE (val2) == INTEGER_CST
2480                          && tree_fits_uhwi_p (val2)
2481                          && tree_to_uhwi (val2) < END_PREDICTORS);
2482 *predictor = (enum br_predictor) tree_to_uhwi (val2);
2483 if (*predictor == PRED_BUILTIN_EXPECT)
2484 *probability
2485                  = HITRATE (param_builtin_expect_probability);
2486 return gimple_call_arg (def, 1);
2487 }
2488          return NULL;
2489 }
2490
2491      if (DECL_IS_MALLOC (decl) || DECL_IS_OPERATOR_NEW_P (decl))
2492        {
2493          if (predictor)
2494            *predictor = PRED_MALLOC_NONNULL;
2495          return boolean_true_node;
2496 }
2497
2498      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
2499 switch (DECL_FUNCTION_CODE (decl))
2500 {
2501 case BUILT_IN_EXPECT:
2502 {
2503 tree val;
2504 if (gimple_call_num_args (def) != 2)
2505              return NULL;
2506 val = gimple_call_arg (def, 0);
2507            if (TREE_CONSTANT (val))
2508              return val;
2509 *predictor = PRED_BUILTIN_EXPECT;
2510 *probability
2511              = HITRATE (param_builtin_expect_probability);
2512 return gimple_call_arg (def, 1);
2513 }
2514 case BUILT_IN_EXPECT_WITH_PROBABILITY:
2515 {
2516 tree val;
2517 if (gimple_call_num_args (def) != 3)
2518              return NULL;
2519 val = gimple_call_arg (def, 0);
2520            if (TREE_CONSTANT (val))
2521              return val;
2522 /* Compute final probability as:
2523 probability * REG_BR_PROB_BASE. */
2524 tree prob = gimple_call_arg (def, 2);
2525            tree t = TREE_TYPE (prob);
2526            tree base = build_int_cst (integer_type_node,
2527                                       REG_BR_PROB_BASE);
2528            base = build_real_from_int_cst (t, base);
2529            tree r = fold_build2_initializer_loc (UNKNOWN_LOCATION,
2530                                                  MULT_EXPR, t, prob, base);
2531            if (TREE_CODE (r) != REAL_CST)
2532 {
2533 error_at (gimple_location (def),
2534 "probability %qE must be "
2535 "constant floating-point expression", prob);
2536                return NULL;
2537 }
2538            HOST_WIDE_INT probi
2539              = real_to_integer (TREE_REAL_CST_PTR (r));
2540            if (probi >= 0 && probi <= REG_BR_PROB_BASE)
2541 {
2542 *predictor = PRED_BUILTIN_EXPECT_WITH_PROBABILITY;
2543 *probability = probi;
2544 }
2545 else
2546 error_at (gimple_location (def),
2547 "probability %qE is outside "
2548 "the range [0.0, 1.0]", prob);
2549
2550 return gimple_call_arg (def, 1);
2551 }
2552
2553 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
2554 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
2555 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
2556 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
2557 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
2558 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
2559 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
2560 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
2561 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
2562 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
2563 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
2564 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
2565 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2566 /* Assume that any given atomic operation has low contention,
2567 and thus the compare-and-swap operation succeeds. */
2568 *predictor = PRED_COMPARE_AND_SWAP;
2569            return boolean_true_node;
2570 case BUILT_IN_REALLOC:
2571 if (predictor)
2572 *predictor = PRED_MALLOC_NONNULL;
2573            return boolean_true_node;
2574 default:
2575 break;
2576 }
2577 }
2578
2579      return NULL;
2580 }
2581
2582 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
2583 {
2584 tree res;
2585 enum br_predictor predictor2;
2586      HOST_WIDE_INT probability2;
2587 op0 = expr_expected_value (op0, visited, predictor, probability);
2588 if (!op0)
2589        return NULL;
2590 op1 = expr_expected_value (op1, visited, &predictor2, &probability2);
2591 if (!op1)
2592        return NULL;
2593      res = fold_build2 (code, type, op0, op1);
2594      if (TREE_CODE (res) == INTEGER_CST
2595          && TREE_CODE (op0) == INTEGER_CST
2596          && TREE_CODE (op1) == INTEGER_CST)
2597 {
2598 /* Combine binary predictions. */
2599 if (*probability != -1 || probability2 != -1)
2600 {
2601              HOST_WIDE_INT p1 = get_predictor_value (*predictor, *probability);
2602              HOST_WIDE_INT p2 = get_predictor_value (predictor2, probability2);
2603              *probability = RDIV (p1 * p2, REG_BR_PROB_BASE);
2604 }
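          /* Editorial note (illustrative annotation, not part of the
             original predict.cc): the combination above multiplies the two
             predictions as if they were independent probabilities, e.g.
             p1 == 9000 and p2 == 9000 (90% each) yield
             RDIV (9000 * 9000, 10000) == 8100, i.e. 81%.  */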
2605
2606 if (*predictor < predictor2)
2607 *predictor = predictor2;
2608
2609 return res;
2610 }
2611      return NULL;
2612 }
2613 if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
2614 {
2615 tree res;
2616 op0 = expr_expected_value (op0, visited, predictor, probability);
2617 if (!op0)
2618        return NULL;
2619      res = fold_build1 (code, type, op0);
2620      if (TREE_CONSTANT (res))
2621        return res;
2622      return NULL;
2623    }
2624  return NULL;
2625}
2626
2627/* Return the constant EXPR is likely to have at execution time, or NULL
2628   if unknown.  The function is used by the builtin_expect branch predictor,
2629   so the evidence must come from this construct and possible constant folding.
2630
2631   We may want to implement a more involved value guess (such as value range
2632   propagation based prediction), but such tricks shall go into a new
2633   implementation.  */
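/* Editorial note (illustrative annotation, not part of the original
   predict.cc): for GIMPLE such as

     c_3 = __builtin_expect (x_2, 42);
     if (c_3 == 42) ...

   expr_expected_value (c_3, ...) returns the constant 42 and sets
   *PREDICTOR to PRED_BUILTIN_EXPECT, which lets the comparison above be
   predicted as likely true.  */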
2634
2635static tree
2636expr_expected_value (tree expr, bitmap visited,
2637 enum br_predictor *predictor,
2638                     HOST_WIDE_INT *probability)
2639{
2640 enum tree_code code;
2641 tree op0, op1;
2642
2643  if (TREE_CONSTANT (expr))
2644 {
2645 *predictor = PRED_UNCONDITIONAL;
2646 *probability = -1;
2647 return expr;
2648 }
2649
2650 extract_ops_from_tree (expr, &code, &op0, &op1);
2651  return expr_expected_value_1 (TREE_TYPE (expr),
2652 op0, code, op1, visited, predictor,
2653 probability);
2654}
2655
2656
2657/* Return the probability of a PREDICTOR.  If the predictor has a variable
2658   probability, return the passed PROBABILITY.  */
2659
2660static HOST_WIDE_INT
2661get_predictor_value (br_predictor predictor, HOST_WIDE_INT probability)
2662{
2663 switch (predictor)
2664 {
2665 case PRED_BUILTIN_EXPECT:
2666 case PRED_BUILTIN_EXPECT_WITH_PROBABILITY:
2667      gcc_assert (probability != -1);
2668 return probability;
2669 default:
2670      gcc_assert (probability == -1);
2671 return predictor_info[(int) predictor].hitrate;
2672 }
2673}
2674
2675/* Predict using the opcode of the last statement in the basic block.  */
2676static void
2677tree_predict_by_opcode (basic_block bb)
2678{
2679 gimple *stmt = last_stmt (bb);
2680 edge then_edge;
2681 tree op0, op1;
2682 tree type;
2683 tree val;
2684 enum tree_code cmp;
2685 edge_iterator ei;
2686 enum br_predictor predictor;
2687  HOST_WIDE_INT probability;
2688
2689 if (!stmt)
2690 return;
2691
2692 if (gswitch *sw = dyn_cast <gswitch *> (stmt))
2693 {
2694 tree index = gimple_switch_index (sw);
2695 tree val = expr_expected_value (index, auto_bitmap (),
2696 &predictor, &probability);
2697      if (val && TREE_CODE (val) == INTEGER_CST)
2698 {
2699 edge e = find_taken_edge_switch_expr (sw, val);
2700 if (predictor == PRED_BUILTIN_EXPECT)
2701 {
2702              int percent = param_builtin_expect_probability;
2703              gcc_assert (percent >= 0 && percent <= 100);
2704              predict_edge (e, PRED_BUILTIN_EXPECT,
2705                            HITRATE (percent));
2706 }
2707 else
2708 predict_edge_def (e, predictor, TAKEN);
2709 }
2710 }
2711
2712 if (gimple_code (stmt) != GIMPLE_COND)
2713 return;
2714  FOR_EACH_EDGE (then_edge, ei, bb->succs)
2715 if (then_edge->flags & EDGE_TRUE_VALUE)
2716 break;
2717 op0 = gimple_cond_lhs (stmt);
2718 op1 = gimple_cond_rhs (stmt);
2719 cmp = gimple_cond_code (stmt);
2720  type = TREE_TYPE (op0);
2721  val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, auto_bitmap (),
2722 &predictor, &probability);
2723  if (val && TREE_CODE (val) == INTEGER_CST)
2724 {
2725      HOST_WIDE_INT prob = get_predictor_value (predictor, probability);
2726 if (integer_zerop (val))
2727        prob = REG_BR_PROB_BASE - prob;
2728 predict_edge (then_edge, predictor, prob);
2729 }
2730 /* Try "pointer heuristic."
2731 A comparison ptr == 0 is predicted as false.
2732 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
2733  if (POINTER_TYPE_P (type))
2734 {
2735 if (cmp == EQ_EXPR)
2736 predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
2737 else if (cmp == NE_EXPR)
2738 predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
2739 }
2740 else
2741
2742 /* Try "opcode heuristic."
2743 EQ tests are usually false and NE tests are usually true. Also,
2744 most quantities are positive, so we can make the appropriate guesses
2745 about signed comparisons against zero. */
2746 switch (cmp)
2747 {
2748 case EQ_EXPR:
2749 case UNEQ_EXPR:
2750      /* Floating point comparisons appear to behave in a very
2751         unpredictable way because of the special role of = tests in
2752         FP code.  */
2753      if (FLOAT_TYPE_P (type))
2754 ;
2755 /* Comparisons with 0 are often used for booleans and there is
2756 nothing useful to predict about them. */
2757 else if (integer_zerop (op0) || integer_zerop (op1))
2758 ;
2759 else
2760 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
2761 break;
2762
2763 case NE_EXPR:
2764 case LTGT_EXPR:
2765      /* Floating point comparisons appear to behave in a very
2766         unpredictable way because of the special role of = tests in
2767         FP code.  */
2768      if (FLOAT_TYPE_P (type))
2769 ;
2770 /* Comparisons with 0 are often used for booleans and there is
2771 nothing useful to predict about them. */
2772 else if (integer_zerop (op0)
2773 || integer_zerop (op1))
2774 ;
2775 else
2776 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
2777 break;
2778
2779 case ORDERED_EXPR:
2780 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
2781 break;
2782
2783 case UNORDERED_EXPR:
2784 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
2785 break;
2786
2787 case LE_EXPR:
2788 case LT_EXPR:
2789 if (integer_zerop (op1)
2790 || integer_onep (op1)
2791 || integer_all_onesp (op1)
2792 || real_zerop (op1)
2793 || real_onep (op1)
2794 || real_minus_onep (op1))
2795 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
2796 break;
2797
2798 case GE_EXPR:
2799 case GT_EXPR:
2800 if (integer_zerop (op1)
2801 || integer_onep (op1)
2802 || integer_all_onesp (op1)
2803 || real_zerop (op1)
2804 || real_onep (op1)
2805 || real_minus_onep (op1))
2806 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
2807 break;
2808
2809 default:
2810 break;
2811 }
2812}
2813
2814/* Returns TRUE if STMT is an exit(0)-like statement.  */
2815
2816static bool
2817is_exit_with_zero_arg (const gimple *stmt)
2818{
2819 /* This is not exit, _exit or _Exit. */
2820 if (!gimple_call_builtin_p (stmt, BUILT_IN_EXIT)
2821 && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT)
2822 && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT2))
2823 return false;
2824
2825  /* The argument is an integer zero.  */
2826 return integer_zerop (gimple_call_arg (stmt, 0));
2827}
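/* Editorial note (illustrative annotation, not part of the original
   predict.cc): "exit (0)" and "_Exit (0)" match this predicate, while
   "exit (1)" does not, since the argument must be a literal integer
   zero.  */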
2828
2829/* Try to guess whether the return value VAL means an error code.  */
2830
2831static enum br_predictor
2832return_prediction (tree val, enum prediction *prediction)
2833{
2834 /* VOID. */
2835 if (!val)
2836 return PRED_NO_PREDICTION;
2837 /* Different heuristics for pointers and scalars. */
2838  if (POINTER_TYPE_P (TREE_TYPE (val)))
2839 {
2840 /* NULL is usually not returned. */
2841 if (integer_zerop (val))
2842 {
2843 *prediction = NOT_TAKEN;
2844 return PRED_NULL_RETURN;
2845 }
2846 }
2847  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
2848 {
2849 /* Negative return values are often used to indicate
2850 errors. */
2851      if (TREE_CODE (val) == INTEGER_CST
2852 && tree_int_cst_sgn (val) < 0)
2853 {
2854 *prediction = NOT_TAKEN;
2855 return PRED_NEGATIVE_RETURN;
2856 }
2857      /* Constant return values seem to be commonly taken.
2858         Zero/one often represent booleans so exclude them from the
2859         heuristics.  */
2860      if (TREE_CONSTANT (val)
2861          && (!integer_zerop (val) && !integer_onep (val)))
2862 {
2863 *prediction = NOT_TAKEN;
2864 return PRED_CONST_RETURN;
2865 }
2866 }
2867 return PRED_NO_PREDICTION;
2868}
2869
2870/* Return zero if phi result could have values other than -1, 0 or 1,
2871 otherwise return a bitmask, with bits 0, 1 and 2 set if -1, 0 and 1
2872 values are used or likely. */
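/* Editorial note (illustrative annotation, not part of the original
   predict.cc): for a PHI node such as x_4 = PHI <-1, 0, 1> the function
   returns 1 | 2 | 4 == 7; if any argument can be some other constant,
   it returns 0.  */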
2873
2874static int
2875zero_one_minusone (gphi *phi, int limit)
2876{
2877 int phi_num_args = gimple_phi_num_args (phi);
2878 int ret = 0;
2879 for (int i = 0; i < phi_num_args; i++)
2880 {
2881      tree t = PHI_ARG_DEF (phi, i);
2882      if (TREE_CODE (t) != INTEGER_CST)
2883 continue;
2884 wide_int w = wi::to_wide (t);
2885 if (w == -1)
2886 ret |= 1;
2887 else if (w == 0)
2888 ret |= 2;
2889 else if (w == 1)
2890 ret |= 4;
2891 else
2892 return 0;
2893 }
2894 for (int i = 0; i < phi_num_args; i++)
2895 {
2896      tree t = PHI_ARG_DEF (phi, i);
2897      if (TREE_CODE (t) == INTEGER_CST)
2898        continue;
2899      if (TREE_CODE (t) != SSA_NAME)
2900        return 0;
2901      gimple *g = SSA_NAME_DEF_STMT (t);
2902 if (gimple_code (g) == GIMPLE_PHI && limit > 0)
2903 if (int r = zero_one_minusone (as_a <gphi *> (g), limit - 1))
2904 {
2905 ret |= r;
2906 continue;
2907 }
2908 if (!is_gimple_assign (g))
2909 return 0;
2910 if (gimple_assign_cast_p (g))
2911 {
2912 tree rhs1 = gimple_assign_rhs1 (g);
2913          if (TREE_CODE (rhs1) != SSA_NAME
2914              || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2915              || TYPE_PRECISION (TREE_TYPE (rhs1)) != 1
2916              || !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2917 return 0;
2918 ret |= (2 | 4);
2919 continue;
2920 }
2921 if (TREE_CODE_CLASS (gimple_assign_rhs_code (g)) != tcc_comparison)
2922 return 0;
2923 ret |= (2 | 4);
2924 }
2925 return ret;
2926}
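/* Editorial worked example, not part of predict.cc: for
     x_3 = PHI <-1, 0, 1>
   the first loop above sets ret = 1 | 2 | 4 = 7; a PHI containing the
   constant 5 makes the function bail out with 0. An argument defined by a
   comparison, or by a cast from an unsigned 1-bit value, can only yield
   0 or 1, hence the ret |= (2 | 4) in those branches.  */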
2927
2928/* Find the basic block with the return expression and look for a possible
2929 return value, trying to apply RETURN_PREDICTION heuristics. */
2930static void
2931apply_return_prediction (void)
2932{
2933 greturn *return_stmt = NULL;
2934 tree return_val;
2935 edge e;
2936 gphi *phi;
2937 int phi_num_args, i;
2938 enum br_predictor pred;
2939 enum prediction direction;
2940 edge_iterator ei;
2941
2942 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2943 {
2944 gimple *last = last_stmt (e->src);
2945 if (last
2946 && gimple_code (last) == GIMPLE_RETURN)
2947 {
2948 return_stmt = as_a <greturn *> (last);
2949 break;
2950 }
2951 }
2952 if (!e)
2953 return;
2954 return_val = gimple_return_retval (return_stmt);
2955 if (!return_val)
2956 return;
2957 if (TREE_CODE (return_val) != SSA_NAME
2958 || !SSA_NAME_DEF_STMT (return_val)
2959 || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
2960 return;
2961 phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val));
2962 phi_num_args = gimple_phi_num_args (phi);
2963 pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
2964
2965 /* Avoid the case where the function returns -1, 0 and 1 values and
2966 nothing else. Those could be qsort etc. comparison functions
2967 where the negative return isn't less probable than positive.
2968 For this require that the function returns at least -1 or 1
2969 or -1 and a boolean value or comparison result, so that functions
2970 returning just -1 and 0 are treated as if -1 represents error value. */
2971 if (INTEGRAL_TYPE_P (TREE_TYPE (return_val))
2972 && !TYPE_UNSIGNED (TREE_TYPE (return_val))
2973 && TYPE_PRECISION (TREE_TYPE (return_val)) > 1)
2974 if (int r = zero_one_minusone (phi, 3))
2975 if ((r & (1 | 4)) == (1 | 4))
2976 return;
2977
2978 /* Avoid the degenerate case where all return values from the function
2979 belong to the same category (i.e. they are all positive constants)
2980 so we can hardly say anything about them. */
2981 for (i = 1; i < phi_num_args; i++)
2982 if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
2983 break;
2984 if (i != phi_num_args)
2985 for (i = 0; i < phi_num_args; i++)
2986 {
2987 pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
2988 if (pred != PRED_NO_PREDICTION)
2989 predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
2990 direction);
2991 }
2992}
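/* Editorial example, not part of predict.cc: a comparison callback such as

     int cmp (int a, int b) { return a < b ? -1 : a > b; }

   returns only -1, 0 and 1 (a PHI of -1 and a comparison result at GIMPLE
   level), so zero_one_minusone sets both the -1 bit (1) and the 1 bit (4)
   and the PRED_NEGATIVE_RETURN heuristic is deliberately not applied.  */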
2993
2994/* Look for basic blocks that contain unlikely-to-happen events
2995 (such as noreturn calls) and mark all paths leading to execution
2996 of these basic blocks as unlikely. */
2997
2998static void
2999tree_bb_level_predictions (void)
3000{
3001 basic_block bb;
3002 bool has_return_edges = false;
3003 edge e;
3004 edge_iterator ei;
3005
3006 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
3007 if (!unlikely_executed_edge_p (e) && !(e->flags & EDGE_ABNORMAL_CALL))
3008 {
3009 has_return_edges = true;
3010 break;
3011 }
3012
3013 apply_return_prediction ();
3014
3015 FOR_EACH_BB_FN (bb, cfun)
3016 {
3017 gimple_stmt_iterator gsi;
3018
3019 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3020 {
3021 gimple *stmt = gsi_stmt (gsi);
3022 tree decl;
3023
3024 if (is_gimple_call (stmt))
3025 {
3026 if (gimple_call_noreturn_p (stmt)
3027 && has_return_edges
3028 && !is_exit_with_zero_arg (stmt))
3029 predict_paths_leading_to (bb, PRED_NORETURN,
3030 NOT_TAKEN);
3031 decl = gimple_call_fndecl (stmt);
3032 if (decl
3033 && lookup_attribute ("cold",
3034 DECL_ATTRIBUTES (decl)))
3035 predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
3036 NOT_TAKEN);
3037 if (decl && recursive_call_p (current_function_decl, decl))
3038 predict_paths_leading_to (bb, PRED_RECURSIVE_CALL,
3039 NOT_TAKEN);
3040 }
3041 else if (gimple_code (stmt) == GIMPLE_PREDICT)
3042 {
3043 predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
3044 gimple_predict_outcome (stmt));
3045 /* Keep GIMPLE_PREDICT around so early inlining will propagate
3046 hints to callers. */
3047 }
3048 }
3049 }
3050}
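/* Editorial example, not part of predict.cc (declarations are illustrative
   only): source code that triggers the predictors above:

     __attribute__ ((cold)) void log_fatal (const char *msg);

     if (err)
       log_fatal ("giving up");   // paths here get PRED_COLD_FUNCTION

   A call to a noreturn function such as abort () similarly marks all paths
   leading to it with PRED_NORETURN, NOT_TAKEN.  */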
3051
3052/* Callback for hash_map::traverse, asserts that the pointer map is
3053 empty. */
3054
3055bool
3056assert_is_empty (const_basic_block const &, edge_prediction *const &value,
3057 void *)
3058{
3059 gcc_assert (!value);
3060 return true;
3061}
3062
3063/* Predict branch probabilities and estimate profile for basic block BB.
3064 When LOCAL_ONLY is set do not use any global properties of CFG. */
3065
3066static void
3067tree_estimate_probability_bb (basic_block bb, bool local_only)
3068{
3069 edge e;
3070 edge_iterator ei;
3071
3072 FOR_EACH_EDGE (e, ei, bb->succs)
3073 {
3074 /* Look for a block we are guarding (i.e. we dominate it,
3075 but it doesn't postdominate us). */
3076 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb
3077 && !local_only
3078 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
3079 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
3080 {
3081 gimple_stmt_iterator bi;
3082
3083 /* The call heuristic claims that a guarded function call
3084 is improbable. This is because such calls are often used
3085 to signal exceptional situations such as printing error
3086 messages. */
3087 for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
3088 gsi_next (&bi))
3089 {
3090 gimple *stmt = gsi_stmt (bi);
3091 if (is_gimple_call (stmt)
3092 && !gimple_inexpensive_call_p (as_a <gcall *> (stmt))
3093 /* Constant and pure calls are hardly used to signal
3094 something exceptional. */
3095 && gimple_has_side_effects (stmt))
3096 {
3097 if (gimple_call_fndecl (stmt))
3098 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
3099 else if (virtual_method_call_p (gimple_call_fn (stmt)))
3100 predict_edge_def (e, PRED_POLYMORPHIC_CALL, NOT_TAKEN);
3101 else
3102 predict_edge_def (e, PRED_INDIR_CALL, TAKEN);
3103 break;
3104 }
3105 }
3106 }
3107 }
3108 tree_predict_by_opcode (bb);
3109}
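/* Editorial example, not part of predict.cc: in

     if (status != 0)            // e->src dominates the call block
       report_error (status);    // side-effecting call, does not postdominate

   the branch into the call block is predicted NOT_TAKEN by PRED_CALL above,
   matching the intuition that guarded calls often handle rare errors.
   (report_error is an illustrative name.)  */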
3110
3111/* Predict branch probabilities and estimate profile of the tree CFG.
3112 This function can be called from the loop optimizers to recompute
3113 the profile information.
3114 If DRY_RUN is set, do not modify CFG and only produce dump files. */
3115
3116void
3117tree_estimate_probability (bool dry_run)
3118{
3119 basic_block bb;
3120
3121 connect_infinite_loops_to_exit ();
3122 /* We use loop_niter_by_eval, which requires that the loops have
3123 preheaders. */
3124 create_preheaders (CP_SIMPLE_PREHEADERS);
3125 calculate_dominance_info (CDI_POST_DOMINATORS);
3126 /* Decide which edges are known to be unlikely. This improves later
3127 branch prediction. */
3128 determine_unlikely_bbs ();
3129
3130 bb_predictions = new hash_map<const_basic_block, edge_prediction *>;
3131 tree_bb_level_predictions ();
3132 record_loop_exits ();
3133
3134 if (number_of_loops (cfun) > 1)
3135 predict_loops ();
3136
3137 FOR_EACH_BB_FN (bb, cfun)
3138 tree_estimate_probability_bb (bb, false);
3139
3140 FOR_EACH_BB_FN (bb, cfun)
3141 combine_predictions_for_bb (bb, dry_run);
3142
3143 if (flag_checking)
3144 bb_predictions->traverse<void *, assert_is_empty> (NULL);
3145
3146 delete bb_predictions;
3147 bb_predictions = NULL;
3148
3149 if (!dry_run)
3150 estimate_bb_frequencies (false);
3151 free_dominance_info (CDI_POST_DOMINATORS);
3152 remove_fake_exit_edges ();
3153}
3154
3155/* Set edge->probability for each successor edge of BB. */
3156void
3157tree_guess_outgoing_edge_probabilities (basic_block bb)
3158{
3159 bb_predictions = new hash_map<const_basic_block, edge_prediction *>;
3160 tree_estimate_probability_bb (bb, true);
3161 combine_predictions_for_bb (bb, false);
3162 if (flag_checking)
3163 bb_predictions->traverse<void *, assert_is_empty> (NULL);
3164 delete bb_predictions;
3165 bb_predictions = NULL;
3166}
3167
3168/* Filter function predicate that returns true for an edge prediction P
3169 unless P is a PRED_LOOP_GUARD prediction on the edge DATA. */
3170
3171static bool
3172not_loop_guard_equal_edge_p (edge_prediction *p, void *data)
3173{
3174 return p->ep_edge != (edge)data || p->ep_predictor != PRED_LOOP_GUARD;
3175}
3176
3177/* Predict edge E with PRED unless it is already predicted by some predictor
3178 considered equivalent. */
3179
3180static void
3181maybe_predict_edge (edge e, enum br_predictor pred, enum prediction taken)
3182{
3183 if (edge_predicted_by_p (e, pred, taken))
3184 return;
3185 if (pred == PRED_LOOP_GUARD
3186 && edge_predicted_by_p (e, PRED_LOOP_GUARD_WITH_RECURSION, taken))
3187 return;
3188 /* Consider PRED_LOOP_GUARD_WITH_RECURSION superior to LOOP_GUARD. */
3189 if (pred == PRED_LOOP_GUARD_WITH_RECURSION)
3190 {
3191 edge_prediction **preds = bb_predictions->get (e->src);
3192 if (preds)
3193 filter_predictions (preds, not_loop_guard_equal_edge_p, e);
3194 }
3195 predict_edge_def (e, pred, taken);
3196}
3197/* Predict edges to successors of CUR whose sources are not postdominated by
3198 BB by PRED and recurse to all postdominators. */
3199
3200static void
3201predict_paths_for_bb (basic_block cur, basic_block bb,
3202 enum br_predictor pred,
3203 enum prediction taken,
3204 bitmap visited, class loop *in_loop = NULL)
3205{
3206 edge e;
3207 edge_iterator ei;
3208 basic_block son;
3209
3210 /* If we exited the loop or CUR is unconditional in the loop, there is
3211 nothing to do. */
3212 if (in_loop
3213 && (!flow_bb_inside_loop_p (in_loop, cur)
3214 || dominated_by_p (CDI_DOMINATORS, in_loop->latch, cur)))
3215 return;
3216
3217 /* We are looking for all edges forming edge cut induced by
3218 set of all blocks postdominated by BB. */
3219 FOR_EACH_EDGE (e, ei, cur->preds)
3220 if (e->src->index >= NUM_FIXED_BLOCKS
3221 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
3222 {
3223 edge e2;
3224 edge_iterator ei2;
3225 bool found = false;
3226
3227 /* Ignore fake edges and EH edges; we predict them as not taken anyway. */
3228 if (unlikely_executed_edge_p (e))
3229 continue;
3230 gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));
3231
3232 /* See if there is an edge from e->src that is not abnormal
3233 and does not lead to BB and does not exit the loop. */
3234 FOR_EACH_EDGE (e2, ei2, e->src->succs)
3235 if (e2 != e
3236 && !unlikely_executed_edge_p (e2)
3237 && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb)
3238 && (!in_loop || !loop_exit_edge_p (in_loop, e2)))
3239 {
3240 found = true;
3241 break;
3242 }
3243
3244 /* If there is a non-abnormal path leaving e->src, predict the edge
3245 using the predictor. Otherwise we need to look for paths
3246 leading to e->src.
3247
3248 The second may lead to an infinite loop in the case we are predicting
3249 regions that are only reachable by abnormal edges. We simply
3250 prevent visiting a given BB twice. */
3251 if (found)
3252 maybe_predict_edge (e, pred, taken);
3253 else if (bitmap_set_bit (visited, e->src->index))
3254 predict_paths_for_bb (e->src, e->src, pred, taken, visited, in_loop);
3255 }
3256 for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
3257 son;
3258 son = next_dom_son (CDI_POST_DOMINATORS, son))
3259 predict_paths_for_bb (son, bb, pred, taken, visited, in_loop);
3260}
3261
3262/* Set branch probabilities on all paths leading to BB according to
3263 PRED and TAKEN. */
3264
3265static void
3266predict_paths_leading_to (basic_block bb, enum br_predictor pred,
3267 enum prediction taken, class loop *in_loop)
3268{
3269 predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
3270}
3271
3272/* Like predict_paths_leading_to but take edge instead of basic block. */
3273
3274static void
3275predict_paths_leading_to_edge (edge e, enum br_predictor pred,
3276 enum prediction taken, class loop *in_loop)
3277{
3278 bool has_nonloop_edge = false;
3279 edge_iterator ei;
3280 edge e2;
3281
3282 basic_block bb = e->src;
3283 FOR_EACH_EDGE (e2, ei, bb->succs)
3284 if (e2->dest != e->src && e2->dest != e->dest
3285 && !unlikely_executed_edge_p (e2)
3286 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
3287 {
3288 has_nonloop_edge = true;
3289 break;
3290 }
3291
3292 if (!has_nonloop_edge)
3293 predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
3294 else
3295 maybe_predict_edge (e, pred, taken);
3296}
3297
3298/* This is used to carry information about basic blocks. It is
3299 attached to the AUX field of the standard CFG block. */
3300
3301class block_info
3302{
3303public:
3304 /* Estimated frequency of execution of basic_block. */
3305 sreal frequency;
3306
3307 /* To keep queue of basic blocks to process. */
3308 basic_block next;
3309
3310 /* Number of predecessors we need to visit first. */
3311 int npredecessors;
3312};
3313
3314/* Similar information for edges. */
3315class edge_prob_info
3316{
3317public:
3318 /* In case the edge is a loopback edge, the probability the edge will be
3319 reached provided the header is. The estimated number of iterations of
3320 the loop can then be computed as 1 / (1 - back_edge_prob). */
3321 sreal back_edge_prob;
3322 /* True if the edge is a loopback edge in the natural loop. */
3323 unsigned int back_edge:1;
3324};
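/* Editorial worked example, not part of predict.cc: a loopback edge with
   back_edge_prob == 0.9 implies an estimated iteration count of
   1 / (1 - 0.9) == 10; back_edge_prob == 0.5 gives 2 iterations.  */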
3325
3326#define BLOCK_INFO(B) ((block_info *) (B)->aux)
3327#undef EDGE_INFO
3328#define EDGE_INFO(E) ((edge_prob_info *) (E)->aux)
3329
3330/* Helper function for estimate_bb_frequencies.
3331 Propagate the frequencies in blocks marked in
3332 TOVISIT, starting in HEAD. */
3333
3334static void
3335propagate_freq (basic_block head, bitmap tovisit,
3336 sreal max_cyclic_prob)
3337{
3338 basic_block bb;
3339 basic_block last;
3340 unsigned i;
3341 edge e;
3342 basic_block nextbb;
3343 bitmap_iterator bi;
3344
3345 /* For each basic block we need to visit, count the number of its
3346 predecessors we need to visit first. */
3347 EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
3348 {
3349 edge_iterator ei;
3350 int count = 0;
3351
3352 bb = BASIC_BLOCK_FOR_FN (cfun, i);
3353
3354 FOR_EACH_EDGE (e, ei, bb->preds)
3355 {
3356 bool visit = bitmap_bit_p (tovisit, e->src->index);
3357
3358 if (visit && !(e->flags & EDGE_DFS_BACK))
3359 count++;
3360 else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
3361 fprintf (dump_file,
3362 "Irreducible region hit, ignoring edge to %i->%i\n",
3363 e->src->index, bb->index);
3364 }
3365 BLOCK_INFO (bb)->npredecessors = count;
3366 /* When function never returns, we will never process exit block. */
3367 if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
3368 bb->count = profile_count::zero ();
3369 }
3370
3371 BLOCK_INFO (head)->frequency = 1;
3372 last = head;
3373 for (bb = head; bb; bb = nextbb)
3374 {
3375 edge_iterator ei;
3376 sreal cyclic_probability = 0;
3377 sreal frequency = 0;
3378
3379 nextbb = BLOCK_INFO (bb)->next;
3380 BLOCK_INFO (bb)->next = NULL;
3381
3382 /* Compute frequency of basic block. */
3383 if (bb != head)
3384 {
3385 if (flag_checking)
3386 FOR_EACH_EDGE (e, ei, bb->preds)
3387 gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
3388 || (e->flags & EDGE_DFS_BACK));
3389
3390 FOR_EACH_EDGE (e, ei, bb->preds)
3391 if (EDGE_INFO (e)->back_edge)
3392 cyclic_probability += EDGE_INFO (e)->back_edge_prob;
3393 else if (!(e->flags & EDGE_DFS_BACK))
3394 {
3395 /* FIXME: Graphite is producing edges with no profile. Once
3396 this is fixed, drop this. */
3397 sreal tmp = e->probability.initialized_p () ?
3398 e->probability.to_sreal () : 0;
3399 frequency += tmp * BLOCK_INFO (e->src)->frequency;
3400 }
3401
3402 if (cyclic_probability == 0)
3403 {
3404 BLOCK_INFO (bb)->frequency = frequency;
3405 }
3406 else
3407 {
3408 if (cyclic_probability > max_cyclic_prob)
3409 {
3410 if (dump_file)
3411 fprintf (dump_file,
3412 "cyclic probability of bb %i is %f (capped to %f)"
3413 "; turning freq %f",
3414 bb->index, cyclic_probability.to_double (),
3415 max_cyclic_prob.to_double (),
3416 frequency.to_double ());
3417
3418 cyclic_probability = max_cyclic_prob;
3419 }
3420 else if (dump_file)
3421 fprintf (dump_file,
3422 "cyclic probability of bb %i is %f; turning freq %f",
3423 bb->index, cyclic_probability.to_double (),
3424 frequency.to_double ());
3425
3426 BLOCK_INFO (bb)->frequency = frequency
3427 / (sreal (1) - cyclic_probability);
3428 if (dump_file)
3429 fprintf (dump_file, " to %f\n",
3430 BLOCK_INFO (bb)->frequency.to_double ());
3431 }
3432 }
3433
3434 bitmap_clear_bit (tovisit, bb->index);
3435
3436 e = find_edge (bb, head);
3437 if (e)
3438 {
3439 /* FIXME: Graphite is producing edges with no profile. Once
3440 this is fixed, drop this. */
3441 sreal tmp = e->probability.initialized_p () ?
3442 e->probability.to_sreal () : 0;
3443 EDGE_INFO (e)->back_edge_prob = tmp * BLOCK_INFO (bb)->frequency;
3444 }
3445
3446 /* Propagate to successor blocks. */
3447 FOR_EACH_EDGE (e, ei, bb->succs)
3448 if (!(e->flags & EDGE_DFS_BACK)
3449 && BLOCK_INFO (e->dest)->npredecessors)
3450 {
3451 BLOCK_INFO (e->dest)->npredecessors--;
3452 if (!BLOCK_INFO (e->dest)->npredecessors)
3453 {
3454 if (!nextbb)
3455 nextbb = e->dest;
3456 else
3457 BLOCK_INFO (last)->next = e->dest;
3458
3459 last = e->dest;
3460 }
3461 }
3462 }
3463}
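/* Editorial summary, not part of predict.cc: the propagation above computes,
   for each block reachable without back edges,
     frequency (bb) = sum over non-back preds e of
                      prob (e) * frequency (e->src),
   and for a block with incoming loopback edges divides that sum by
   (1 - cyclic_probability), with cyclic_probability capped at
   max_cyclic_prob.  */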
3464
3465/* Estimate frequencies in loops at same nest level. */
3466
3467static void
3468estimate_loops_at_level (class loop *first_loop, sreal max_cyclic_prob)
3469{
3470 class loop *loop;
3471
3472 for (loop = first_loop; loop; loop = loop->next)
3473 {
3474 edge e;
3475 basic_block *bbs;
3476 unsigned i;
3477 auto_bitmap tovisit;
3478
3479 estimate_loops_at_level (loop->inner, max_cyclic_prob);
3480
3481 /* Find current loop back edge and mark it. */
3482 e = loop_latch_edge (loop);
3483 EDGE_INFO (e)->back_edge = 1;
3484
3485 bbs = get_loop_body (loop);
3486 for (i = 0; i < loop->num_nodes; i++)
3487 bitmap_set_bit (tovisit, bbs[i]->index);
3488 free (bbs);
3489 propagate_freq (loop->header, tovisit, max_cyclic_prob);
3490 }
3491}
3492
3493/* Propagates frequencies through structure of loops. */
3494
3495static void
3496estimate_loops (void)
3497{
3498 auto_bitmap tovisit;
3499 basic_block bb;
3500 sreal max_cyclic_prob = (sreal)1
3501 - (sreal)1 / (param_max_predicted_iterations + 1);
3502
3503 /* Start by estimating the frequencies in the loops. */
3504 if (number_of_loops (cfun) > 1)
3505 estimate_loops_at_level (current_loops->tree_root->inner, max_cyclic_prob);
3506
3507 /* Now propagate the frequencies through all the blocks. */
3508 FOR_ALL_BB_FN (bb, cfun)
3509 {
3510 bitmap_set_bit (tovisit, bb->index);
3511 }
3512 propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit, max_cyclic_prob);
3513}
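/* Editorial worked example, not part of predict.cc: with
   param_max_predicted_iterations == 100 (an assumed value),
   max_cyclic_prob = 1 - 1/101, so frequency propagation never estimates
   more than about 101 iterations for any single loop.  */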
3514
3515/* Drop the profile for NODE to guessed, and update its frequency based on
3516 whether it is expected to be hot given the CALL_COUNT. */
3517
3518static void
3519drop_profile (struct cgraph_node *node, profile_count call_count)
3520{
3521 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
3522 /* In the case where this was called by another function with a
3523 dropped profile, call_count will be 0. Since there are no
3524 non-zero call counts to this function, we don't know for sure
3525 whether it is hot, and therefore it will be marked normal below. */
3526 bool hot = maybe_hot_count_p (NULL, call_count);
3527
3528 if (dump_file)
3529 fprintf (dump_file,
3530 "Dropping 0 profile for %s. %s based on calls.\n",
3531 node->dump_name (),
3532 hot ? "Function is hot" : "Function is normal");
3533 /* We only expect to miss profiles for functions that are reached
3534 via non-zero call edges in cases where the function may have
3535 been linked from another module or library (COMDATs and extern
3536 templates). See the comments below for handle_missing_profiles.
3537 Also, only warn in cases where the missing counts exceed the
3538 number of training runs. In certain cases with an execv followed
3539 by a no-return call the profile for the no-return call is not
3540 dumped and there can be a mismatch. */
3541 if (!DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl)
3542 && call_count > profile_info->runs)
3543 {
3544 if (flag_profile_correction)
3545 {
3546 if (dump_file)
3547 fprintf (dump_file,
3548 "Missing counts for called function %s\n",
3549 node->dump_name ());
3550 }
3551 else
3552 warning (0, "Missing counts for called function %s",
3553 node->dump_name ());
3554 }
3555
3556 basic_block bb;
3557 if (opt_for_fn (node->decl, flag_guess_branch_prob))
3558 {
3559 bool clear_zeros
3560 = !ENTRY_BLOCK_PTR_FOR_FN (fn)->count.nonzero_p ();
3561 FOR_ALL_BB_FN (bb, fn)
3562 if (clear_zeros || !(bb->count == profile_count::zero ()))
3563 bb->count = bb->count.guessed_local ();
3564 fn->cfg->count_max = fn->cfg->count_max.guessed_local ();
3565 }
3566 else
3567 {
3568 FOR_ALL_BB_FN (bb, fn)
3569 bb->count = profile_count::uninitialized ();
3570 fn->cfg->count_max = profile_count::uninitialized ();
3571 }
3572
3573 struct cgraph_edge *e;
3574 for (e = node->callees; e; e = e->next_callee)
3575 e->count = gimple_bb (e->call_stmt)->count;
3576 for (e = node->indirect_calls; e; e = e->next_callee)
3577 e->count = gimple_bb (e->call_stmt)->count;
3578 node->count = ENTRY_BLOCK_PTR_FOR_FN (fn)->count;
3579
3580 profile_status_for_fn (fn)
3581 = (flag_guess_branch_prob ? PROFILE_GUESSED : PROFILE_ABSENT);
3582 node->frequency
3583 = hot ? NODE_FREQUENCY_HOT : NODE_FREQUENCY_NORMAL;
3584}
3585
3586/* In the case of COMDAT routines, multiple object files will contain the same
3587 function and the linker will select one for the binary. In that case
3588 all the other copies from the profile-instrumented binary will be missing
3589 profile counts. Look for cases where this happened, due to non-zero
3590 call counts going to 0-count functions, and drop the profile to guessed
3591 so that we can use the estimated probabilities and avoid optimizing only
3592 for size.
3593
3594 The other case where the profile may be missing is when the routine
3595 is not going to be emitted to the object file, e.g. for "extern template"
3596 class methods. Those will be marked DECL_EXTERNAL. Emit a warning in
3597 all other cases of non-zero calls to 0-count functions. */
3598
3599void
3600handle_missing_profiles (void)
3601{
3602 const int unlikely_frac = param_unlikely_bb_count_fraction;
3603 struct cgraph_node *node;
3604 auto_vec<struct cgraph_node *, 64> worklist;
3605
3606 /* See if 0 count function has non-0 count callers. In this case we
3607 lost some profile. Drop its function profile to PROFILE_GUESSED. */
3608 FOR_EACH_DEFINED_FUNCTION (node)
3609 {
3610 struct cgraph_edge *e;
3611 profile_count call_count = profile_count::zero ();
3612 gcov_type max_tp_first_run = 0;
3613 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
3614
3615 if (node->count.ipa ().nonzero_p ())
3616 continue;
3617 for (e = node->callers; e; e = e->next_caller)
3618 if (e->count.ipa ().initialized_p () && e->count.ipa () > 0)
3619 {
3620 call_count = call_count + e->count.ipa ();
3621
3622 if (e->caller->tp_first_run > max_tp_first_run)
3623 max_tp_first_run = e->caller->tp_first_run;
3624 }
3625
3626 /* If the time profile is missing, let's assign the maximum that comes
3627 from caller functions. */
3628 if (!node->tp_first_run && max_tp_first_run)
3629 node->tp_first_run = max_tp_first_run + 1;
3630
3631 if (call_count > 0
3632 && fn && fn->cfg
3633 && call_count * unlikely_frac >= profile_info->runs)
3634 {
3635 drop_profile (node, call_count);
3636 worklist.safe_push (node);
3637 }
3638 }
3639
3640 /* Propagate the profile dropping to other 0-count COMDATs that are
3641 potentially called by COMDATs we already dropped the profile on. */
3642 while (worklist.length () > 0)
3643 {
3644 struct cgraph_edge *e;
3645
3646 node = worklist.pop ();
3647 for (e = node->callees; e; e = e->next_callee)
3648 {
3649 struct cgraph_node *callee = e->callee;
3650 struct function *fn = DECL_STRUCT_FUNCTION (callee->decl);
3651
3652 if (!(e->count.ipa () == profile_count::zero ())
3653 && callee->count.ipa ().nonzero_p ())
3654 continue;
3655 if ((DECL_COMDAT (callee->decl) || DECL_EXTERNAL (callee->decl))
3656 && fn && fn->cfg
3657 && profile_status_for_fn (fn) == PROFILE_READ)
3658 {
3659 drop_profile (node, profile_count::zero ());
3660 worklist.safe_push (callee);
3661 }
3662 }
3663 }
3664}
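/* Editorial worked example, not part of predict.cc: assuming
   param_unlikely_bb_count_fraction == 20 and profile_info->runs == 100,
   a 0-count function whose callers sum to call_count == 5 satisfies
   5 * 20 >= 100 and gets its profile dropped to guessed.  */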
3665
3666/* Update the function's maximal basic block count from profile data.
3667 Return nonzero iff there was any nonzero IPA execution count. */
3668
3669bool
3670update_max_bb_count (void)
3671{
3672 profile_count true_count_max = profile_count::uninitialized ();
3673 basic_block bb;
3674
3675 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3676 true_count_max = true_count_max.max (bb->count);
3677
3678 cfun->cfg->count_max = true_count_max;
3679
3680 return true_count_max.ipa ().nonzero_p ();
3681}
3682
3683/* Return true if the function is likely to be expensive, so there is no
3684 point in optimizing performance of the prologue, epilogue or doing inlining
3685 at the expense of code size growth. THRESHOLD is the limit on the number
3686 of instructions the function can execute on average to still be considered not expensive. */
3687
3688bool
3689expensive_function_p (int threshold)
3690{
3691 basic_block bb;
3692
3693 /* If the profile was scaled in a way the entry block has count 0, then
3694 the function is definitely taking a lot of time. */
3695 if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.nonzero_p ())
3696 return true;
3697
3698 profile_count limit = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count * threshold;
3699 profile_count sum = profile_count::zero ();
3700 FOR_EACH_BB_FN (bb, cfun)
3701 {
3702 rtx_insn *insn;
3703
3704 if (!bb->count.initialized_p ())
3705 {
3706 if (dump_file)
3707 fprintf (dump_file, "Function is considered expensive because"
3708 " count of bb %i is not initialized\n", bb->index);
3709 return true;
3710 }
3711
3712 FOR_BB_INSNS (bb, insn)
3713 if (active_insn_p (insn))
3714 {
3715 sum += bb->count;
3716 if (sum > limit)
3717 return true;
3718 }
3719 }
3720
3721 return false;
3722}
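/* Editorial worked example, not part of predict.cc: with an entry count of
   1000 and THRESHOLD == 4, limit is 4000; a block with count 500 containing
   nine active insns accumulates sum == 4500 > 4000, so the function is
   reported expensive.  */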
3723
3724/* All basic blocks that are reachable only from unlikely basic blocks are
3725 unlikely. */
3726
3727void
3728propagate_unlikely_bbs_forward (void)
3729{
3730 auto_vec<basic_block, 64> worklist;
3731 basic_block bb;
3732 edge_iterator ei;
3733 edge e;
3734
3735 if (!(ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ()))
3736 {
3737 ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(size_t) 1;
3738 worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3739
3740 while (worklist.length () > 0)
3741 {
3742 bb = worklist.pop ();
3743 FOR_EACH_EDGE (e, ei, bb->succs)
3744 if (!(e->count () == profile_count::zero ())
3745 && !(e->dest->count == profile_count::zero ())
3746 && !e->dest->aux)
3747 {
3748 e->dest->aux = (void *)(size_t) 1;
3749 worklist.safe_push (e->dest);
3750 }
3751 }
3752 }
3753
3754 FOR_ALL_BB_FN (bb, cfun)
3755 {
3756 if (!bb->aux)
3757 {
3758 if (!(bb->count == profile_count::zero ())
3759 && (dump_file && (dump_flags & TDF_DETAILS)))
3760 fprintf (dump_file,
3761 "Basic block %i is marked unlikely by forward prop\n",
3762 bb->index);
3763 bb->count = profile_count::zero ();
3764 }
3765 else
3766 bb->aux = NULL;
3767 }
3768}
3769
3770/* Determine basic blocks/edges that are known to be unlikely executed and set
3771 their counters to zero.
3772 This is done by first identifying obviously unlikely BBs/edges and then
3773 propagating in both directions. */
3774
3775static void
3776determine_unlikely_bbs ()
3777{
3778 basic_block bb;
3779 auto_vec<basic_block, 64> worklist;
3780 edge_iterator ei;
3781 edge e;
3782
3783 FOR_EACH_BB_FN (bb, cfun)
3784 {
3785 if (!(bb->count == profile_count::zero ())
3786 && unlikely_executed_bb_p (bb))
3787 {
3788 if (dump_file && (dump_flags & TDF_DETAILS))
3789 fprintf (dump_file, "Basic block %i is locally unlikely\n",
3790 bb->index);
3791 bb->count = profile_count::zero ();
3792 }
3793
3794 FOR_EACH_EDGE (e, ei, bb->succs)
3795 if (!(e->probability == profile_probability::never ())
3796 && unlikely_executed_edge_p (e))
3797 {
3798 if (dump_file && (dump_flags & TDF_DETAILS))
3799 fprintf (dump_file, "Edge %i->%i is locally unlikely\n",
3800 bb->index, e->dest->index);
3801 e->probability = profile_probability::never ();
3802 }
3803
3804 gcc_checking_assert (!bb->aux);
3805 }
3806 propagate_unlikely_bbs_forward ();
3807
3808 auto_vec<int, 64> nsuccs;
3809 nsuccs.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
3810 FOR_ALL_BB_FN (bb, cfun)
3811 if (!(bb->count == profile_count::zero ())
3812 && bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3813 {
3814 nsuccs[bb->index] = 0;
3815 FOR_EACH_EDGE (e, ei, bb->succs)
3816 if (!(e->probability == profile_probability::never ())
3817 && !(e->dest->count == profile_count::zero ()))
3818 nsuccs[bb->index]++;
3819 if (!nsuccs[bb->index])
3820 worklist.safe_push (bb);
3821 }
3822 while (worklist.length () > 0)
3823 {
3824 bb = worklist.pop ();
3825 if (bb->count == profile_count::zero ())
3826 continue;
3827 if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
3828 {
3829 bool found = false;
3830 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
3831 !gsi_end_p (gsi); gsi_next (&gsi))
3832 if (stmt_can_terminate_bb_p (gsi_stmt (gsi))
3833 /* stmt_can_terminate_bb_p special cases noreturns because it
3834 assumes that fake edges are created. We want to know that
3835 noreturn alone does not imply BB to be unlikely. */
3836 || (is_gimple_call (gsi_stmt (gsi))
3837 && (gimple_call_flags (gsi_stmt (gsi)) & ECF_NORETURN)))
3838 {
3839 found = true;
3840 break;
3841 }
3842 if (found)
3843 continue;
3844 }
3845 if (dump_file && (dump_flags & TDF_DETAILS))
3846 fprintf (dump_file,
3847 "Basic block %i is marked unlikely by backward prop\n",
3848 bb->index);
3849 bb->count = profile_count::zero ();
3850 FOR_EACH_EDGE (e, ei, bb->preds)
3851 if (!(e->probability == profile_probability::never ()))
3852 {
3853 if (!(e->src->count == profile_count::zero ()))
3854 {
3855 gcc_checking_assert (nsuccs[e->src->index] > 0);
3856 nsuccs[e->src->index]--;
3857 if (!nsuccs[e->src->index])
3858 worklist.safe_push (e->src);
3859 }
3860 }
3861 }
3862 /* Finally all edges from non-0 regions to 0 are unlikely. */
3863 FOR_ALL_BB_FN (bb, cfun)
3864 {
3865 if (!(bb->count == profile_count::zero ()))
3866 FOR_EACH_EDGE (e, ei, bb->succs)
3867 if (!(e->probability == profile_probability::never ())
3868 && e->dest->count == profile_count::zero ())
3869 {
3870 if (dump_file && (dump_flags & TDF_DETAILS))
3871 fprintf (dump_file, "Edge %i->%i is unlikely because "
3872 "it enters unlikely block\n",
3873 bb->index, e->dest->index);
3874 e->probability = profile_probability::never ();
3875 }
3876
3877 edge other = NULL;
3878
3879 FOR_EACH_EDGE (e, ei, bb->succs)
3880 if (e->probability == profile_probability::never ())
3881 ;
3882 else if (other)
3883 {
3884 other = NULL;
3885 break;
3886 }
3887 else
3888 other = e;
3889 if (other
3890 && !(other->probability == profile_probability::always ()))
3891 {
3892 if (dump_file && (dump_flags & TDF_DETAILS))
3893 fprintf (dump_file, "Edge %i->%i is locally likely\n",
3894 bb->index, other->dest->index);
3895 other->probability = profile_probability::always ();
3896 }
3897 }
3898 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ())
3899 cgraph_node::get (current_function_decl)->count = profile_count::zero ();
3900}
3901
3902/* Estimate and propagate basic block frequencies using the given branch
3903 probabilities. If FORCE is true, the frequencies are used to estimate
3904 the counts even when there are already non-zero profile counts. */
3905
3906void
3907estimate_bb_frequencies (bool force)
3908{
3909 basic_block bb;
3910 sreal freq_max;
3911
3912 determine_unlikely_bbs ();
3913
3914 if (force || profile_status_for_fn (cfun) != PROFILE_READ
3915 || !update_max_bb_count ())
3916 {
3917
3918 mark_dfs_back_edges ();
3919
3920 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability =
3921 profile_probability::always ();
3922
3923 /* Set up block info for each basic block. */
3924 alloc_aux_for_blocks (sizeof (block_info));
3925 alloc_aux_for_edges (sizeof (edge_prob_info));
3926 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3927 {
3928 edge e;
3929 edge_iterator ei;
3930
3931 FOR_EACH_EDGE (e, ei, bb->succs)
3932 {
3933 /* FIXME: Graphite is producing edges with no profile. Once
3934 this is fixed, drop this. */
3935 if (e->probability.initialized_p ())
3936 EDGE_INFO (e)->back_edge_prob
3937 = e->probability.to_sreal ();
3938 else
3939 /* back_edge_prob = 0.5 */
3940 EDGE_INFO (e)->back_edge_prob = sreal (1, -1);
3941 }
3942 }
3943
3944 /* First compute frequencies locally for each loop from innermost
3945 to outermost to examine frequencies for back edges. */
3946 estimate_loops ();
3947
3948 freq_max = 0;
3949 FOR_EACH_BB_FN (bb, cfun)
3950 if (freq_max < BLOCK_INFO (bb)->frequency)
3951 freq_max = BLOCK_INFO (bb)->frequency;
3952
3953 /* Scaling frequencies up to maximal profile count may result in
3954 frequent overflows especially when inlining loops.
3955 Small scaling results in unnecessary precision loss. Stay in
3956 the half of the (exponential) range. */
3957 freq_max = (sreal (1) << (profile_count::n_bits / 2)) / freq_max;
3958 if (freq_max < 16)
3959 freq_max = 16;
3960 profile_count ipa_count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ();
3961 cfun->cfg->count_max = profile_count::uninitialized ();
3962 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3963 {
3964 sreal tmp = BLOCK_INFO (bb)->frequency;
3965 if (tmp >= 1)
3966 {
3967 gimple_stmt_iterator gsi;
3968 tree decl;
3969
3970 /* Self-recursive calls cannot have frequency greater than 1
3971 or the program will never terminate. This will result in an
3972 inconsistent bb profile but it is better than greatly confusing
3973 IPA cost metrics. */
3974 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3975 if (is_gimple_call (gsi_stmt (gsi))
3976 && (decl = gimple_call_fndecl (gsi_stmt (gsi))) != NULL
3977 && recursive_call_p (current_function_decl, decl))
3978 {
3979 if (dump_file)
3980 fprintf (dump_file, "Dropping frequency of recursive call"
3981 " in bb %i from %f\n", bb->index,
3982 tmp.to_double ());
3983 tmp = (sreal)9 / (sreal)10;
3984 break;
3985 }
3986 }
3987 tmp = tmp * freq_max + sreal (1, -1);
3988 profile_count count = profile_count::from_gcov_type (tmp.to_int ());
3989
3990 /* If we have profile feedback in which this function was never
3991 executed, then preserve this info. */
3992 if (!(bb->count == profile_count::zero ()))
3993 bb->count = count.guessed_local ().combine_with_ipa_count (ipa_count);
3994 cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count);
3995 }
3996
3997 free_aux_for_blocks ();
3998 free_aux_for_edges ();
3999 }
4000 compute_function_frequency ();
4001}
4002
4003/* Decide whether function is hot, cold or unlikely executed. */
4004void
4005compute_function_frequency (void)
4006{
4007 basic_block bb;
4008 struct cgraph_node *node = cgraph_node::get (current_function_decl);
4009
4010 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4011 || MAIN_NAME_P (DECL_NAME (current_function_decl)))
4012 node->only_called_at_startup = true;
4013 if (DECL_STATIC_DESTRUCTOR (current_function_decl))
4014 node->only_called_at_exit = true;
4015
4016 if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa_p ())
4017 {
4018 int flags = flags_from_decl_or_type (current_function_decl);
4019 if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/predict.cc"
, 4019, __FUNCTION__))->decl_common.attributes)
)
4020 != NULLnullptr)
4021 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
4022 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/predict.cc"
, 4022, __FUNCTION__))->decl_common.attributes)
)
4023 != NULLnullptr)
4024 node->frequency = NODE_FREQUENCY_HOT;
4025 else if (flags & ECF_NORETURN)
4026 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
4027 else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
4028 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
4029 else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4030 || DECL_STATIC_DESTRUCTOR (current_function_decl))
4031 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
4032 return;
4033 }
4034
4035 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
4036 if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/predict.cc"
, 4036, __FUNCTION__))->decl_common.attributes)
)
4037 == NULLnullptr)
4038 warn_function_cold (current_function_decl);
4039 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa () == profile_count::zero ())
4040 return;
4041 FOR_EACH_BB_FN (bb, cfun)
4042 {
4043 if (maybe_hot_bb_p (cfun, bb))
4044 {
4045 node->frequency = NODE_FREQUENCY_HOT;
4046 return;
4047 }
4048 if (!probably_never_executed_bb_p (cfun, bb))
4049 node->frequency = NODE_FREQUENCY_NORMAL;
4050 }
4051}
4052
4053/* Build PREDICT_EXPR. */
4054tree
4055build_predict_expr (enum br_predictor predictor, enum prediction taken)
4056{
4057 tree t = build1 (PREDICT_EXPR, void_type_node,
4058 build_int_cst (integer_type_node, predictor));
4059 SET_PREDICT_EXPR_OUTCOME (t, taken);
4060 return t;
4061}
4062
4063const char *
4064predictor_name (enum br_predictor predictor)
4065{
4066 return predictor_info[predictor].name;
4067}
4068
4069/* Predict branch probabilities and estimate profile of the tree CFG. */
4070
4071namespace {
4072
4073const pass_data pass_data_profile =
4074{
4075 GIMPLE_PASS, /* type */
4076 "profile_estimate", /* name */
4077 OPTGROUP_NONE, /* optinfo_flags */
4078 TV_BRANCH_PROB, /* tv_id */
4079 PROP_cfg, /* properties_required */
4080 0, /* properties_provided */
4081 0, /* properties_destroyed */
4082 0, /* todo_flags_start */
4083 0, /* todo_flags_finish */
4084};
4085
4086class pass_profile : public gimple_opt_pass
4087{
4088public:
4089 pass_profile (gcc::context *ctxt)
4090 : gimple_opt_pass (pass_data_profile, ctxt)
4091 {}
4092
4093 /* opt_pass methods: */
4094 bool gate (function *) final override { return flag_guess_branch_prob; }
4095 unsigned int execute (function *) final override;
4096
4097}; // class pass_profile
4098
4099unsigned int
4100pass_profile::execute (function *fun)
4101{
4102 unsigned nb_loops;
4103
4104 if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
4105 return 0;
4106
4107 loop_optimizer_init (LOOPS_NORMAL);
4108 if (dump_file && (dump_flags & TDF_DETAILS))
4109 flow_loops_dump (dump_file, NULL, 0);
4110
4111 nb_loops = number_of_loops (fun);
4112 if (nb_loops > 1)
4113 scev_initialize ();
4114
4115 tree_estimate_probability (false);
4116
4117 if (nb_loops > 1)
4118 scev_finalize ();
4119
4120 loop_optimizer_finalize ();
4121 if (dump_file && (dump_flags & TDF_DETAILS))
4122 gimple_dump_cfg (dump_file, dump_flags);
4123 if (profile_status_for_fn (fun) == PROFILE_ABSENT)
4124 profile_status_for_fn (fun) = PROFILE_GUESSED;
4125 if (dump_file && (dump_flags & TDF_DETAILS))
4126 {
4127 for (auto loop : loops_list (cfun, LI_FROM_INNERMOST))
4128 if (loop->header->count.initialized_p ())
4129 fprintf (dump_file, "Loop got predicted %d to iterate %i times.\n",
4130 loop->num,
4131 (int)expected_loop_iterations_unbounded (loop));
4132 }
4133 return 0;
4134}
4135
4136} // anon namespace
4137
4138gimple_opt_pass *
4139make_pass_profile (gcc::context *ctxt)
4140{
4141 return new pass_profile (ctxt);
4142}
4143
4144/* Return true when PRED predictor should be removed after early
4145 tree passes. Most of the predictors are beneficial to survive
4146 as early inlining can also distribute them into callers' bodies. */
4147
4148static bool
4149strip_predictor_early (enum br_predictor pred)
4150{
4151 switch (pred)
4152 {
4153 case PRED_TREE_EARLY_RETURN:
4154 return true;
4155 default:
4156 return false;
4157 }
4158}
4159
4160/* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
4161 we no longer need. EARLY is set to true when called from early
4162 optimizations. */
4163
4164unsigned int
4165strip_predict_hints (function *fun, bool early)
4166{
4167 basic_block bb;
4168 gimple *ass_stmt;
4169 tree var;
4170 bool changed = false;
4171
4172 FOR_EACH_BB_FN (bb, fun)
4173 {
4174 gimple_stmt_iterator bi;
4175 for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
4176 {
4177 gimple *stmt = gsi_stmt (bi);
4178
4179 if (gimple_code (stmt) == GIMPLE_PREDICT)
4180 {
4181 if (!early
4182 || strip_predictor_early (gimple_predict_predictor (stmt)))
4183 {
4184 gsi_remove (&bi, true);
4185 changed = true;
4186 continue;
4187 }
4188 }
4189 else if (is_gimple_call (stmt))
4190 {
4191 tree fndecl = gimple_call_fndecl (stmt);
4192
4193 if (!early
4194 && ((fndecl != NULL_TREE
4195 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
4196 && gimple_call_num_args (stmt) == 2)
4197 || (fndecl != NULL_TREE
4198 && fndecl_built_in_p (fndecl,
4199 BUILT_IN_EXPECT_WITH_PROBABILITY)
4200 && gimple_call_num_args (stmt) == 3)
4201 || (gimple_call_internal_p (stmt)
4202 && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT)))
4203 {
4204 var = gimple_call_lhs (stmt);
4205 changed = true;
4206 if (var)
4207 {
4208 ass_stmt
4209 = gimple_build_assign (var, gimple_call_arg (stmt, 0));
4210 gsi_replace (&bi, ass_stmt, true);
4211 }
4212 else
4213 {
4214 gsi_remove (&bi, true);
4215 continue;
4216 }
4217 }
4218 }
4219 gsi_next (&bi);
4220 }
4221 }
4222 return changed ? TODO_cleanup_cfg : 0;
4223}
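/* Editorial example, not part of predict.cc: after this pass,

     tmp_5 = __builtin_expect (x_4, 1);

   becomes a plain copy tmp_5 = x_4;, and any GIMPLE_PREDICT statements that
   remain from earlier hint lowering are deleted.  */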
4224
4225namespace {
4226
4227const pass_data pass_data_strip_predict_hints =
4228{
4229 GIMPLE_PASS, /* type */
4230 "*strip_predict_hints", /* name */
4231 OPTGROUP_NONE, /* optinfo_flags */
4232 TV_BRANCH_PROB, /* tv_id */
4233 PROP_cfg, /* properties_required */
4234 0, /* properties_provided */
4235 0, /* properties_destroyed */
4236 0, /* todo_flags_start */
4237 0, /* todo_flags_finish */
4238};
4239
4240class pass_strip_predict_hints : public gimple_opt_pass
4241{
4242public:
4243 pass_strip_predict_hints (gcc::context *ctxt)
4244 : gimple_opt_pass (pass_data_strip_predict_hints, ctxt)
4245 {}
4246
4247 /* opt_pass methods: */
4248 opt_pass * clone () final override
4249 {
4250 return new pass_strip_predict_hints (m_ctxt);
4251 }
4252 void set_pass_param (unsigned int n, bool param) final override
4253 {
4254 gcc_assert (n == 0);
4255 early_p = param;
4256 }
4257
4258 unsigned int execute (function *) final override;
4259
4260private:
4261 bool early_p;
4262
4263}; // class pass_strip_predict_hints
4264
4265unsigned int
4266pass_strip_predict_hints::execute (function *fun)
4267{
4268 return strip_predict_hints (fun, early_p);
4269}
4270
4271} // anon namespace
4272
4273gimple_opt_pass *
4274make_pass_strip_predict_hints (gcc::context *ctxt)
4275{
4276 return new pass_strip_predict_hints (ctxt);
4277}
4278
4279/* Rebuild function frequencies. Passes are in general expected to
4280 maintain profile by hand, however in some cases this is not possible:
4281 for example when inlining several functions with loops, frequencies might run
4282 out of scale and thus need to be recomputed. */
4283
4284void
4285rebuild_frequencies (void)
4286{
4287 timevar_push (TV_REBUILD_FREQUENCIES);
4288
4289 /* When the max bb count in the function is small, there is a higher
4290 chance that there were truncation errors in the integer scaling
4291 of counts by inlining and other optimizations. This could lead
4292 to incorrect classification of code as being cold when it isn't.
4293 In that case, force the estimation of bb counts/frequencies from the
4294 branch probabilities, rather than computing frequencies from counts,
4295 which may also lead to frequencies incorrectly reduced to 0. There
4296 is less precision in the probabilities, so we only do this for small
4297 max counts. */
4298  cfun->cfg->count_max = profile_count::uninitialized ();
4299 basic_block bb;
4300  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
4301    cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count);
4302
4303  if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
4304 {
4305 loop_optimizer_init (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS);
4306 connect_infinite_loops_to_exit ();
4307 estimate_bb_frequencies (true);
4308 remove_fake_exit_edges ();
4309 loop_optimizer_finalize ();
4310 }
4311  else if (profile_status_for_fn (cfun) == PROFILE_READ)
4312 update_max_bb_count ();
4313  else if (profile_status_for_fn (cfun) == PROFILE_ABSENT
4314	   && !flag_guess_branch_prob)
4315 ;
4316 else
4317    gcc_unreachable ();
4318 timevar_pop (TV_REBUILD_FREQUENCIES);
4319}
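/* Worked example of the truncation problem mentioned in the comment at
   the top of rebuild_frequencies (illustrative numbers): if inlining
   scales a block whose count is already 1 by a factor below 1/2, integer
   scaling truncates the result to 0 and the block is wrongly classified
   as never executed.  Re-estimating counts from the coarser but unscaled
   branch probabilities avoids pinning such blocks at 0.  */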
4320
4321/* Perform a dry run of the branch prediction pass and report a comparison
4322   of the predicted and real profile into the dump file.  */
4323
4324void
4325report_predictor_hitrates (void)
4326{
4327 unsigned nb_loops;
4328
4329  loop_optimizer_init (LOOPS_NORMAL);
4330 if (dump_file && (dump_flags & TDF_DETAILS))
4331    flow_loops_dump (dump_file, NULL, 0);
4332
4333  nb_loops = number_of_loops (cfun);
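  /* number_of_loops includes the implicit outermost "loop" spanning the
     whole function, so nb_loops > 1 means real loops exist and setting up
     SCEV analysis is worthwhile.  */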
4334 if (nb_loops > 1)
4335 scev_initialize ();
4336
4337 tree_estimate_probability (true);
4338
4339 if (nb_loops > 1)
4340 scev_finalize ();
4341
4342 loop_optimizer_finalize ();
4343}
4344
4345/* Force edge E to be cold.
4346   If IMPOSSIBLE is true, force the edge to have count and probability 0;
4347   otherwise keep a low probability to represent a possible error in a
4348   guess.  This is used e.g. when we predict a loop to likely iterate a
4349   given number of times but are not 100% sure.
4350
4351   This function locally updates the profile without attempting to keep
4352   global consistency, which cannot be reached in full generality without
4353   a full profile rebuild from probabilities alone.  Doing so is not
4354   necessarily a good idea because frequencies and counts may be more
4355   realistic than probabilities.
4356
4357   In some cases (such as the elimination of early exits during full loop
4358   unrolling) the caller can ensure that the profile will become consistent afterwards.  */
4359
4360void
4361force_edge_cold (edge e, bool impossible)
4362{
4363 profile_count count_sum = profile_count::zero ();
4364 profile_probability prob_sum = profile_probability::never ();
4365 edge_iterator ei;
4366 edge e2;
4367 bool uninitialized_exit = false;
4368
4369  /* When branch probability guesses are not known, do nothing.  */
4370 if (!impossible && !e->count ().initialized_p ())
4371 return;
4372
4373 profile_probability goal = (impossible ? profile_probability::never ()
4374 : profile_probability::very_unlikely ());
4375
4376  /* If the edge is already improbable or cold, just return.  */
4377 if (e->probability <= goal
4378 && (!impossible || e->count () == profile_count::zero ()))
4379 return;
4380  FOR_EACH_EDGE (e2, ei, e->src->succs)
4381 if (e2 != e)
4382 {
4383 if (e->flags & EDGE_FAKE)
4384 continue;
4385 if (e2->count ().initialized_p ())
4386 count_sum += e2->count ();
4387 if (e2->probability.initialized_p ())
4388 prob_sum += e2->probability;
4389 else
4390 uninitialized_exit = true;
4391 }
4392
4393 /* If we are not guessing profiles but have some other edges out,
4394 just assume the control flow goes elsewhere. */
4395 if (uninitialized_exit)
4396 e->probability = goal;
4397  /* If there are other edges out of e->src, redistribute probability
4398     there.  */
4399 else if (prob_sum > profile_probability::never ())
4400 {
4401 if (!(e->probability < goal))
4402 e->probability = goal;
4403
4404 profile_probability prob_comp = prob_sum / e->probability.invert ();
4405
4406 if (dump_file && (dump_flags & TDF_DETAILS))
4407 fprintf (dump_file, "Making edge %i->%i %s by redistributing "
4408 "probability to other edges.\n",
4409 e->src->index, e->dest->index,
4410 impossible ? "impossible" : "cold");
4411      FOR_EACH_EDGE (e2, ei, e->src->succs)
4412 if (e2 != e)
4413 {
4414 e2->probability /= prob_comp;
4415 }
4416      if (current_ir_type () != IR_GIMPLE
4417	  && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
4418 update_br_prob_note (e->src);
4419 }
4420 /* If all edges out of e->src are unlikely, the basic block itself
4421 is unlikely. */
4422 else
4423 {
4424 if (prob_sum == profile_probability::never ())
4425 e->probability = profile_probability::always ();
4426 else
4427 {
4428 if (impossible)
4429 e->probability = profile_probability::never ();
4430 /* If BB has some edges out that are not impossible, we cannot
4431 assume that BB itself is. */
4432 impossible = false;
4433 }
4434      if (current_ir_type () != IR_GIMPLE
4435	  && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
4436 update_br_prob_note (e->src);
4437 if (e->src->count == profile_count::zero ())
4438 return;
4439 if (count_sum == profile_count::zero () && impossible)
4440 {
4441 bool found = false;
4442	  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
4443 ;
4444 else if (current_ir_type () == IR_GIMPLE)
4445 for (gimple_stmt_iterator gsi = gsi_start_bb (e->src);
4446 !gsi_end_p (gsi); gsi_next (&gsi))
4447 {
4448 if (stmt_can_terminate_bb_p (gsi_stmt (gsi)))
4449 {
4450 found = true;
4451 break;
4452 }
4453 }
4454 /* FIXME: Implement RTL path. */
4455 else
4456 found = true;
4457 if (!found)
4458 {
4459 if (dump_file && (dump_flags & TDF_DETAILS))
4460 fprintf (dump_file,
4461 "Making bb %i impossible and dropping count to 0.\n",
4462 e->src->index);
4463 e->src->count = profile_count::zero ();
4464	      FOR_EACH_EDGE (e2, ei, e->src->preds)
4465 force_edge_cold (e2, impossible);
4466 return;
4467 }
4468 }
4469
4470      /* If we did not adjust anything, the source basic block has no likely
4471	 edges leaving in the other direction.  In that case force that bb
4472	 cold, too.  This is in general a difficult task, but handle the
4473	 special case when BB has only one predecessor.  This is the common
4474	 case when we are updating the profile after loop transforms.  */
4475 if (!(prob_sum > profile_probability::never ())
4476 && count_sum == profile_count::zero ()
4477	  && single_pred_p (e->src) && e->src->count.to_frequency (cfun)
4478 > (impossible ? 0 : 1))
4479 {
4480	  int old_frequency = e->src->count.to_frequency (cfun);
4481 if (dump_file && (dump_flags & TDF_DETAILS))
4482 fprintf (dump_file, "Making bb %i %s.\n", e->src->index,
4483 impossible ? "impossible" : "cold");
4484	  int new_frequency = MIN (e->src->count.to_frequency (cfun),
4485				   impossible ? 0 : 1);
4486 if (impossible)
4487 e->src->count = profile_count::zero ();
4488 else
4489 e->src->count = e->count ().apply_scale (new_frequency,
4490 old_frequency);
4491 force_edge_cold (single_pred_edge (e->src), impossible);
4492 }
4493      else if (dump_file && (dump_flags & TDF_DETAILS)
4494	       && maybe_hot_bb_p (cfun, e->src))
4495 fprintf (dump_file, "Giving up on making bb %i %s.\n", e->src->index,
4496 impossible ? "impossible" : "cold");
4497 }
4498}
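/* Worked example for the redistribution branch above (illustrative
   numbers): let E carry probability 30% and its two siblings 40% and 30%,
   so prob_sum = 70%.  With IMPOSSIBLE false, E is clamped to the
   very_unlikely goal p0 (a tiny nonzero probability), prob_comp
   = prob_sum / (1 - p0) is roughly 0.70, and dividing each sibling by
   prob_comp rescales 40% to ~57.1% and 30% to ~42.9%, so the outgoing
   probabilities of E->src again sum to 1.  */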
4499
4500/* Change E's probability to NEW_E_PROB, redistributing the probabilities
4501 of other outgoing edges proportionally.
4502
4503 Note that this function does not change the profile counts of any
4504 basic blocks. The caller must do that instead, using whatever
4505 information it has about the region that needs updating. */
4506
4507void
4508change_edge_frequency (edge e, profile_probability new_e_prob)
4509{
4510 profile_probability old_e_prob = e->probability;
4511 profile_probability old_other_prob = old_e_prob.invert ();
4512 profile_probability new_other_prob = new_e_prob.invert ();
4513
4514 e->probability = new_e_prob;
4515 profile_probability cumulative_prob = new_e_prob;
4516
4517  unsigned int num_other = EDGE_COUNT (e->src->succs) - 1;
4518 edge other_e;
4519 edge_iterator ei;
4520  FOR_EACH_EDGE (other_e, ei, e->src->succs)
4521 if (other_e != e)
4522 {
4523 num_other -= 1;
4524 if (num_other == 0)
4525 /* Ensure that the probabilities add up to 1 without
4526 rounding error. */
4527 other_e->probability = cumulative_prob.invert ();
4528 else
4529 {
4530 other_e->probability /= old_other_prob;
4531 other_e->probability *= new_other_prob;
4532 cumulative_prob += other_e->probability;
4533 }
4534 }
4535}
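/* Minimal standalone sketch (editorial, not GCC API) of the proportional
   redistribution performed by change_edge_frequency above, using plain
   doubles in place of profile_probability.  All names are illustrative.  */

#include <cstddef>
#include <cstdio>
#include <vector>

/* Set probs[target] to new_p and scale every other entry by
   (1 - new_p) / (1 - old_p); the last other entry receives the exact
   remainder so the probabilities sum to 1 without rounding error.  */
static void
redistribute (std::vector<double> &probs, std::size_t target, double new_p)
{
  double old_other = 1.0 - probs[target];
  double new_other = 1.0 - new_p;
  probs[target] = new_p;
  double cumulative = new_p;
  std::size_t num_other = probs.size () - 1;
  for (std::size_t i = 0; i < probs.size (); i++)
    if (i != target)
      {
	num_other -= 1;
	if (num_other == 0)
	  probs[i] = 1.0 - cumulative;	/* exact remainder */
	else
	  {
	    probs[i] = probs[i] / old_other * new_other;
	    cumulative += probs[i];
	  }
      }
}

int
main ()
{
  std::vector<double> probs = { 0.5, 0.3, 0.2 };
  redistribute (probs, 0, 0.8);
  for (double p : probs)
    std::printf ("%g\n", p);	/* prints 0.8, 0.12, 0.08 */
  return 0;
}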
4536
4537#if CHECKING_P
4538
4539namespace selftest {
4540
4541/* Test that the probability values of the predictors defined in
4542   predict.def are within the range (50, 100].  */
4543
4544struct branch_predictor
4545{
4546 const char *name;
4547 int probability;
4548};
4549
4550#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) { NAME, HITRATE },
4551
4552static void
4553test_prediction_value_range ()
4554{
4555 branch_predictor predictors[] = {
4556#include "predict.def"
4557    { NULL, PROB_UNINITIALIZED }
4558 };
4559
4560  for (unsigned i = 0; predictors[i].name != NULL; i++)
4561 {
4562      if (predictors[i].probability == PROB_UNINITIALIZED)
4563 continue;
4564
4565      unsigned p = 100 * predictors[i].probability / REG_BR_PROB_BASE;
4566      ASSERT_TRUE (p >= 50 && p <= 100);
4567 }
4568}
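/* Worked example (assuming HITRATE from predict.h, which maps a percentage
   onto REG_BR_PROB_BASE): a predictor declared with HITRATE (90) stores a
   probability of about 9000, and the loop above recovers
   p = 100 * 9000 / 10000 = 90, which falls within (50, 100].  */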
4569
4570#undef DEF_PREDICTOR
4571
4572/* Run all of the selftests within this file.  */
4573
4574void
4575predict_cc_tests ()
4576{
4577 test_prediction_value_range ();
4578}
4579
4580} // namespace selftest
4581#endif /* CHECKING_P. */